/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
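
/* Illustrative example (not part of the original sources): a local
   `int x' normally gets DECL_RTL (decl) == (reg:SI N); if `&x' is seen
   later, put_var_into_stack (decl) rewrites DECL_RTL into a
   (mem:SI (plus (frame_pointer) (const_int OFFSET))) obtained through
   assign_stack_local and then patches the insns already emitted for X.  */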
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
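
/* Worked example (illustrative): with ALIGN == 8,
   CEIL_ROUND (13, 8)   == (13 + 7) & ~7 == 16, and
   FLOOR_ROUND (-13, 8) == -13 & ~7      == -16,
   so both behave correctly for negative frame offsets without division.  */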
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */
int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;
/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*init_machine_status) PARAMS ((struct function *));
void (*save_machine_status) PARAMS ((struct function *));
void (*restore_machine_status) PARAMS ((struct function *));
void (*mark_machine_status) PARAMS ((struct function *));
void (*free_machine_status) PARAMS ((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PARAMS ((struct function *));
void (*save_lang_status) PARAMS ((struct function *));
void (*restore_lang_status) PARAMS ((struct function *));
void (*mark_lang_status) PARAMS ((struct function *));
void (*free_lang_status) PARAMS ((struct function *));
/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static int *prologue;
static int *epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
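
/* Illustrative sketch (not from the original sources): in
   `y = ({ struct S tmp = f (); tmp; });' the slot holding the statement
   expression's value is allocated at the inner level but must survive the
   level pop, so preserve_temp_slots pretends it was allocated one level up;
   ordinary temporaries at that level are then freed by free_temp_slots.  */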
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
					 int, struct function *));
static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
					       HOST_WIDE_INT, int, tree));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
					enum machine_mode, enum machine_mode,
					int, int, int, struct hash_table *));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
				    struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
					  rtx, int, struct hash_table *));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
				      struct fixup_replacement **));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, int, int));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
					  struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
			       tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PARAMS ((tree, int));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static int *record_insns PARAMS ((rtx)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, int *));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block));
#endif
static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
					  struct hash_table *));
static int is_addressof PARAMS ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
							 struct hash_table *,
							 hash_table_key));
static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
static void mark_temp_slot PARAMS ((struct temp_slot *));
static void mark_function_status PARAMS ((struct function *));
static void mark_function_chain PARAMS ((void *));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */
void
push_function_context_to (context)
     tree context;
{
  struct function *p, *context_data;

  if (context)
    {
      context_data = (context == current_function_decl
		      ? cfun
		      : find_function_data (context));
      context_data->contains_functions = 1;
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->next = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p);
  if (save_lang_status)
    (*save_lang_status) (p);
  if (save_machine_status)
    (*save_machine_status) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;
  struct var_refs_queue *next;

  cfun = p;
  outer_function_chain = p->next;

  current_function_decl = p->decl;

  restore_tree_status (p);
  restore_emit_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);
  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = next)
    {
      next = queue->next;
      fixup_var_refs (queue->modified, queue->promoted_mode,
		      queue->unsignedp, 0);
      free (queue);
    }
  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->x_temp_slots = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */
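
/* Hedged usage sketch (not in the original file): a typical call is
   assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0), i.e. "give me a
   word-sized slot aligned as SImode requires"; passing -1 for ALIGN instead
   requests BIGGEST_ALIGNMENT and rounds SIZE up to a multiple of it.  */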
517 assign_stack_local_1 (mode
, size
, align
, function
)
518 enum machine_mode mode
;
521 struct function
*function
;
523 register rtx x
, addr
;
524 int bigend_correction
= 0;
527 /* Allocate in the memory associated with the function in whose frame
529 if (function
!= cfun
)
530 push_obstacks (function
->function_obstack
,
531 function
->function_maybepermanent_obstack
);
537 alignment
= GET_MODE_ALIGNMENT (mode
);
539 alignment
= BIGGEST_ALIGNMENT
;
541 /* Allow the target to (possibly) increase the alignment of this
543 type
= type_for_mode (mode
, 0);
545 alignment
= LOCAL_ALIGNMENT (type
, alignment
);
547 alignment
/= BITS_PER_UNIT
;
549 else if (align
== -1)
551 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
552 size
= CEIL_ROUND (size
, alignment
);
555 alignment
= align
/ BITS_PER_UNIT
;
557 #ifdef FRAME_GROWS_DOWNWARD
558 function
->x_frame_offset
-= size
;
561 /* Ignore alignment we can't do with expected alignment of the boundary. */
562 if (alignment
* BITS_PER_UNIT
> PREFERRED_STACK_BOUNDARY
)
563 alignment
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
565 if (function
->stack_alignment_needed
< alignment
* BITS_PER_UNIT
)
566 function
->stack_alignment_needed
= alignment
* BITS_PER_UNIT
;
568 /* Round frame offset to that alignment.
569 We must be careful here, since FRAME_OFFSET might be negative and
570 division with a negative dividend isn't as well defined as we might
571 like. So we instead assume that ALIGNMENT is a power of two and
572 use logical operations which are unambiguous. */
573 #ifdef FRAME_GROWS_DOWNWARD
574 function
->x_frame_offset
= FLOOR_ROUND (function
->x_frame_offset
, alignment
);
576 function
->x_frame_offset
= CEIL_ROUND (function
->x_frame_offset
, alignment
);
579 /* On a big-endian machine, if we are allocating more space than we will use,
580 use the least significant bytes of those that are allocated. */
581 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
)
582 bigend_correction
= size
- GET_MODE_SIZE (mode
);
584 /* If we have already instantiated virtual registers, return the actual
585 address relative to the frame pointer. */
586 if (function
== cfun
&& virtuals_instantiated
)
587 addr
= plus_constant (frame_pointer_rtx
,
588 (frame_offset
+ bigend_correction
589 + STARTING_FRAME_OFFSET
));
591 addr
= plus_constant (virtual_stack_vars_rtx
,
592 function
->x_frame_offset
+ bigend_correction
);
594 #ifndef FRAME_GROWS_DOWNWARD
595 function
->x_frame_offset
+= size
;
598 x
= gen_rtx_MEM (mode
, addr
);
600 function
->x_stack_slot_list
601 = gen_rtx_EXPR_LIST (VOIDmode
, x
, function
->x_stack_slot_list
);
603 if (function
!= cfun
)
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */
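
/* Hedged example (illustrative only): assign_stack_temp_for_type (BLKmode,
   int_size_in_bytes (type), 1, type) is the kind of call made for a
   block-scope automatic whose slot must outlive the current statement
   (KEEP == 1), while KEEP == 2 ties the lifetime to the enclosing
   CLEANUP_POINT_EXPRs instead.  */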
638 assign_stack_temp_for_type (mode
, size
, keep
, type
)
639 enum machine_mode mode
;
646 struct temp_slot
*p
, *best_p
= 0;
648 /* If SIZE is -1 it means that somebody tried to allocate a temporary
649 of a variable size. */
653 /* If we know the alias set for the memory that will be used, use
654 it. If there's no TYPE, then we don't know anything about the
655 alias set for the memory. */
657 alias_set
= get_alias_set (type
);
661 align
= GET_MODE_ALIGNMENT (mode
);
663 align
= BIGGEST_ALIGNMENT
;
666 type
= type_for_mode (mode
, 0);
668 align
= LOCAL_ALIGNMENT (type
, align
);
670 /* Try to find an available, already-allocated temporary of the proper
671 mode which meets the size and alignment requirements. Choose the
672 smallest one with the closest alignment. */
673 for (p
= temp_slots
; p
; p
= p
->next
)
674 if (p
->align
>= align
&& p
->size
>= size
&& GET_MODE (p
->slot
) == mode
676 && (!flag_strict_aliasing
677 || (alias_set
&& p
->alias_set
== alias_set
))
678 && (best_p
== 0 || best_p
->size
> p
->size
679 || (best_p
->size
== p
->size
&& best_p
->align
> p
->align
)))
681 if (p
->align
== align
&& p
->size
== size
)
689 /* Make our best, if any, the one to use. */
692 /* If there are enough aligned bytes left over, make them into a new
693 temp_slot so that the extra bytes don't get wasted. Do this only
694 for BLKmode slots, so that we can be sure of the alignment. */
695 if (GET_MODE (best_p
->slot
) == BLKmode
696 /* We can't split slots if -fstrict-aliasing because the
697 information about the alias set for the new slot will be
699 && !flag_strict_aliasing
)
701 int alignment
= best_p
->align
/ BITS_PER_UNIT
;
702 HOST_WIDE_INT rounded_size
= CEIL_ROUND (size
, alignment
);
704 if (best_p
->size
- rounded_size
>= alignment
)
706 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
707 p
->in_use
= p
->addr_taken
= 0;
708 p
->size
= best_p
->size
- rounded_size
;
709 p
->base_offset
= best_p
->base_offset
+ rounded_size
;
710 p
->full_size
= best_p
->full_size
- rounded_size
;
711 p
->slot
= gen_rtx_MEM (BLKmode
,
712 plus_constant (XEXP (best_p
->slot
, 0),
714 p
->align
= best_p
->align
;
717 p
->next
= temp_slots
;
720 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, p
->slot
,
723 best_p
->size
= rounded_size
;
724 best_p
->full_size
= rounded_size
;
731 /* If we still didn't find one, make a new temporary. */
734 HOST_WIDE_INT frame_offset_old
= frame_offset
;
736 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
738 /* We are passing an explicit alignment request to assign_stack_local.
739 One side effect of that is assign_stack_local will not round SIZE
740 to ensure the frame offset remains suitably aligned.
742 So for requests which depended on the rounding of SIZE, we go ahead
743 and round it now. We also make sure ALIGNMENT is at least
744 BIGGEST_ALIGNMENT. */
745 if (mode
== BLKmode
&& align
< BIGGEST_ALIGNMENT
)
747 p
->slot
= assign_stack_local (mode
,
749 ? CEIL_ROUND (size
, align
/ BITS_PER_UNIT
)
754 p
->alias_set
= alias_set
;
756 /* The following slot size computation is necessary because we don't
757 know the actual size of the temporary slot until assign_stack_local
758 has performed all the frame alignment and size rounding for the
759 requested temporary. Note that extra space added for alignment
760 can be either above or below this stack slot depending on which
761 way the frame grows. We include the extra space if and only if it
762 is above this slot. */
763 #ifdef FRAME_GROWS_DOWNWARD
764 p
->size
= frame_offset_old
- frame_offset
;
769 /* Now define the fields used by combine_temp_slots. */
770 #ifdef FRAME_GROWS_DOWNWARD
771 p
->base_offset
= frame_offset
;
772 p
->full_size
= frame_offset_old
- frame_offset
;
774 p
->base_offset
= frame_offset_old
;
775 p
->full_size
= frame_offset
- frame_offset_old
;
778 p
->next
= temp_slots
;
784 p
->rtl_expr
= seq_rtl_expr
;
788 p
->level
= target_temp_slot_level
;
793 p
->level
= var_temp_slot_level
;
798 p
->level
= temp_slot_level
;
802 /* We may be reusing an old slot, so clear any MEM flags that may have been
804 RTX_UNCHANGING_P (p
->slot
) = 0;
805 MEM_IN_STRUCT_P (p
->slot
) = 0;
806 MEM_SCALAR_P (p
->slot
) = 0;
807 MEM_ALIAS_SET (p
->slot
) = 0;
811 /* Allocate a temporary stack slot and record it for possible later
812 reuse. First three arguments are same as in preceding function. */
815 assign_stack_temp (mode
, size
, keep
)
816 enum machine_mode mode
;
820 return assign_stack_temp_for_type (mode
, size
, keep
, NULL_TREE
);
823 /* Assign a temporary of given TYPE.
824 KEEP is as for assign_stack_temp.
825 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
826 it is 0 if a register is OK.
827 DONT_PROMOTE is 1 if we should not promote values in register
831 assign_temp (type
, keep
, memory_required
, dont_promote
)
835 int dont_promote ATTRIBUTE_UNUSED
;
837 enum machine_mode mode
= TYPE_MODE (type
);
838 #ifndef PROMOTE_FOR_CALL_ONLY
839 int unsignedp
= TREE_UNSIGNED (type
);
842 if (mode
== BLKmode
|| memory_required
)
844 HOST_WIDE_INT size
= int_size_in_bytes (type
);
847 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
848 problems with allocating the stack space. */
852 /* Unfortunately, we don't yet know how to allocate variable-sized
853 temporaries. However, sometimes we have a fixed upper limit on
854 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
855 instead. This is the case for Chill variable-sized strings. */
856 if (size
== -1 && TREE_CODE (type
) == ARRAY_TYPE
857 && TYPE_ARRAY_MAX_SIZE (type
) != NULL_TREE
858 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type
)) == INTEGER_CST
)
859 size
= TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type
));
861 tmp
= assign_stack_temp_for_type (mode
, size
, keep
, type
);
862 MEM_SET_IN_STRUCT_P (tmp
, AGGREGATE_TYPE_P (type
));
866 #ifndef PROMOTE_FOR_CALL_ONLY
868 mode
= promote_mode (type
, mode
, &unsignedp
, 0);
871 return gen_reg_rtx (mode
);
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
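
/* Worked example (illustrative): two free BLKmode slots with
   base_offset 16 / full_size 16 and base_offset 32 / full_size 8 satisfy
   p->base_offset + p->full_size == q->base_offset, so they are merged into
   a single 24-byte slot; alignment is not an issue since both are BLKmode.  */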
881 combine_temp_slots ()
883 struct temp_slot
*p
, *q
;
884 struct temp_slot
*prev_p
, *prev_q
;
887 /* We can't combine slots, because the information about which slot
888 is in which alias set will be lost. */
889 if (flag_strict_aliasing
)
  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
894 if (! flag_expensive_optimizations
)
895 for (p
= temp_slots
, num_slots
= 0; p
; p
= p
->next
, num_slots
++)
896 if (num_slots
> 100 || (num_slots
> 10 && optimize
== 0))
899 for (p
= temp_slots
, prev_p
= 0; p
; p
= prev_p
? prev_p
->next
: temp_slots
)
903 if (! p
->in_use
&& GET_MODE (p
->slot
) == BLKmode
)
904 for (q
= p
->next
, prev_q
= p
; q
; q
= prev_q
->next
)
907 if (! q
->in_use
&& GET_MODE (q
->slot
) == BLKmode
)
909 if (p
->base_offset
+ p
->full_size
== q
->base_offset
)
911 /* Q comes after P; combine Q into P. */
913 p
->full_size
+= q
->full_size
;
916 else if (q
->base_offset
+ q
->full_size
== p
->base_offset
)
918 /* P comes after Q; combine P into Q. */
920 q
->full_size
+= p
->full_size
;
925 /* Either delete Q or advance past it. */
927 prev_q
->next
= q
->next
;
931 /* Either delete P or advance past it. */
935 prev_p
->next
= p
->next
;
937 temp_slots
= p
->next
;
944 /* Find the temp slot corresponding to the object at address X. */
946 static struct temp_slot
*
947 find_temp_slot_from_address (x
)
953 for (p
= temp_slots
; p
; p
= p
->next
)
958 else if (XEXP (p
->slot
, 0) == x
960 || (GET_CODE (x
) == PLUS
961 && XEXP (x
, 0) == virtual_stack_vars_rtx
962 && GET_CODE (XEXP (x
, 1)) == CONST_INT
963 && INTVAL (XEXP (x
, 1)) >= p
->base_offset
964 && INTVAL (XEXP (x
, 1)) < p
->base_offset
+ p
->full_size
))
967 else if (p
->address
!= 0 && GET_CODE (p
->address
) == EXPR_LIST
)
968 for (next
= p
->address
; next
; next
= XEXP (next
, 1))
969 if (XEXP (next
, 0) == x
)
973 /* If we have a sum involving a register, see if it points to a temp
975 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == REG
976 && (p
= find_temp_slot_from_address (XEXP (x
, 0))) != 0)
978 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
979 && (p
= find_temp_slot_from_address (XEXP (x
, 1))) != 0)
985 /* Indicate that NEW is an alternate way of referring to the temp slot
986 that previously was known by OLD. */
989 update_temp_slot_address (old
, new)
994 if (rtx_equal_p (old
, new))
997 p
= find_temp_slot_from_address (old
);
  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
1006 if (GET_CODE (old
) != PLUS
)
1009 if (GET_CODE (new) == REG
)
1011 update_temp_slot_address (XEXP (old
, 0), new);
1012 update_temp_slot_address (XEXP (old
, 1), new);
1015 else if (GET_CODE (new) != PLUS
)
1018 if (rtx_equal_p (XEXP (old
, 0), XEXP (new, 0)))
1019 update_temp_slot_address (XEXP (old
, 1), XEXP (new, 1));
1020 else if (rtx_equal_p (XEXP (old
, 1), XEXP (new, 0)))
1021 update_temp_slot_address (XEXP (old
, 0), XEXP (new, 1));
1022 else if (rtx_equal_p (XEXP (old
, 0), XEXP (new, 1)))
1023 update_temp_slot_address (XEXP (old
, 1), XEXP (new, 0));
1024 else if (rtx_equal_p (XEXP (old
, 1), XEXP (new, 1)))
1025 update_temp_slot_address (XEXP (old
, 0), XEXP (new, 0));
1030 /* Otherwise add an alias for the temp's address. */
1031 else if (p
->address
== 0)
1035 if (GET_CODE (p
->address
) != EXPR_LIST
)
1036 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, p
->address
, NULL_RTX
);
1038 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, new, p
->address
);
1042 /* If X could be a reference to a temporary slot, mark the fact that its
1043 address was taken. */
1046 mark_temp_addr_taken (x
)
1049 struct temp_slot
*p
;
1054 /* If X is not in memory or is at a constant address, it cannot be in
1055 a temporary slot. */
1056 if (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1059 p
= find_temp_slot_from_address (XEXP (x
, 0));
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
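
/* Illustrative note (not from the original sources): for
   `({ ...; big_struct_value; })' the value may sit in a stack temporary;
   preserve_temp_slots moves that slot (or, if the slot cannot be
   identified, every non-kept slot at this level) up one nesting level so
   that the subsequent free_temp_slots call does not reclaim it.  */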
1074 preserve_temp_slots (x
)
1077 struct temp_slot
*p
= 0;
1079 /* If there is no result, we still might have some objects whose address
1080 were taken, so we need to make sure they stay around. */
1083 for (p
= temp_slots
; p
; p
= p
->next
)
1084 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1090 /* If X is a register that is being used as a pointer, see if we have
1091 a temporary slot we know it points to. To be consistent with
1092 the code below, we really should preserve all non-kept slots
1093 if we can't find a match, but that seems to be much too costly. */
1094 if (GET_CODE (x
) == REG
&& REGNO_POINTER_FLAG (REGNO (x
)))
1095 p
= find_temp_slot_from_address (x
);
1097 /* If X is not in memory or is at a constant address, it cannot be in
1098 a temporary slot, but it can contain something whose address was
1100 if (p
== 0 && (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0))))
1102 for (p
= temp_slots
; p
; p
= p
->next
)
1103 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1109 /* First see if we can find a match. */
1111 p
= find_temp_slot_from_address (XEXP (x
, 0));
1115 /* Move everything at our level whose address was taken to our new
1116 level in case we used its address. */
1117 struct temp_slot
*q
;
1119 if (p
->level
== temp_slot_level
)
1121 for (q
= temp_slots
; q
; q
= q
->next
)
1122 if (q
!= p
&& q
->addr_taken
&& q
->level
== p
->level
)
1131 /* Otherwise, preserve all non-kept slots at this level. */
1132 for (p
= temp_slots
; p
; p
= p
->next
)
1133 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
)
1137 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1138 with that RTL_EXPR, promote it into a temporary slot at the present
1139 level so it will not be freed when we free slots made in the
1143 preserve_rtl_expr_result (x
)
1146 struct temp_slot
*p
;
1148 /* If X is not in memory or is at a constant address, it cannot be in
1149 a temporary slot. */
1150 if (x
== 0 || GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1153 /* If we can find a match, move it to our level unless it is already at
1155 p
= find_temp_slot_from_address (XEXP (x
, 0));
1158 p
->level
= MIN (p
->level
, temp_slot_level
);
1165 /* Free all temporaries used so far. This is normally called at the end
1166 of generating code for a statement. Don't free any temporaries
1167 currently in use for an RTL_EXPR that hasn't yet been emitted.
1168 We could eventually do better than this since it can be reused while
1169 generating the same RTL_EXPR, but this is complex and probably not
1175 struct temp_slot
*p
;
1177 for (p
= temp_slots
; p
; p
= p
->next
)
1178 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
1179 && p
->rtl_expr
== 0)
1182 combine_temp_slots ();
1185 /* Free all temporary slots used in T, an RTL_EXPR node. */
1188 free_temps_for_rtl_expr (t
)
1191 struct temp_slot
*p
;
1193 for (p
= temp_slots
; p
; p
= p
->next
)
1194 if (p
->rtl_expr
== t
)
1196 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1197 needs to be preserved. This can happen if a temporary in
1198 the RTL_EXPR was addressed; preserve_temp_slots will move
1199 the temporary into a higher level. */
1200 if (temp_slot_level
<= p
->level
)
1203 p
->rtl_expr
= NULL_TREE
;
1206 combine_temp_slots ();
1209 /* Mark all temporaries ever allocated in this function as not suitable
1210 for reuse until the current level is exited. */
1213 mark_all_temps_used ()
1215 struct temp_slot
*p
;
1217 for (p
= temp_slots
; p
; p
= p
->next
)
1219 p
->in_use
= p
->keep
= 1;
1220 p
->level
= MIN (p
->level
, temp_slot_level
);
1224 /* Push deeper into the nesting level for stack temporaries. */
1232 /* Likewise, but save the new level as the place to allocate variables
1237 push_temp_slots_for_block ()
1241 var_temp_slot_level
= temp_slot_level
;
1244 /* Likewise, but save the new level as the place to allocate temporaries
1245 for TARGET_EXPRs. */
1248 push_temp_slots_for_target ()
1252 target_temp_slot_level
= temp_slot_level
;
1255 /* Set and get the value of target_temp_slot_level. The only
1256 permitted use of these functions is to save and restore this value. */
1259 get_target_temp_slot_level ()
1261 return target_temp_slot_level
;
1265 set_target_temp_slot_level (level
)
1268 target_temp_slot_level
= level
;
1272 /* Pop a temporary nesting level. All slots in use in the current level
1278 struct temp_slot
*p
;
1280 for (p
= temp_slots
; p
; p
= p
->next
)
1281 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->rtl_expr
== 0)
1284 combine_temp_slots ();
1289 /* Initialize temporary slots. */
1294 /* We have not allocated any temporaries yet. */
1296 temp_slot_level
= 0;
1297 var_temp_slot_level
= 0;
1298 target_temp_slot_level
= 0;
1301 /* Retroactively move an auto variable from a register to a stack slot.
1302 This is done when an address-reference to the variable is seen. */
1305 put_var_into_stack (decl
)
1309 enum machine_mode promoted_mode
, decl_mode
;
1310 struct function
*function
= 0;
1312 int can_use_addressof
;
1314 context
= decl_function_context (decl
);
1316 /* Get the current rtl used for this object and its original mode. */
1317 reg
= TREE_CODE (decl
) == SAVE_EXPR
? SAVE_EXPR_RTL (decl
) : DECL_RTL (decl
);
1319 /* No need to do anything if decl has no rtx yet
1320 since in that case caller is setting TREE_ADDRESSABLE
1321 and a stack slot will be assigned when the rtl is made. */
1325 /* Get the declared mode for this object. */
1326 decl_mode
= (TREE_CODE (decl
) == SAVE_EXPR
? TYPE_MODE (TREE_TYPE (decl
))
1327 : DECL_MODE (decl
));
1328 /* Get the mode it's actually stored in. */
1329 promoted_mode
= GET_MODE (reg
);
1331 /* If this variable comes from an outer function,
1332 find that function's saved context. */
1333 if (context
!= current_function_decl
&& context
!= inline_function_decl
)
1334 for (function
= outer_function_chain
; function
; function
= function
->next
)
1335 if (function
->decl
== context
)
1338 /* If this is a variable-size object with a pseudo to address it,
1339 put that pseudo into the stack, if the var is nonlocal. */
1340 if (DECL_NONLOCAL (decl
)
1341 && GET_CODE (reg
) == MEM
1342 && GET_CODE (XEXP (reg
, 0)) == REG
1343 && REGNO (XEXP (reg
, 0)) > LAST_VIRTUAL_REGISTER
)
1345 reg
= XEXP (reg
, 0);
1346 decl_mode
= promoted_mode
= GET_MODE (reg
);
1352 /* FIXME make it work for promoted modes too */
1353 && decl_mode
== promoted_mode
1354 #ifdef NON_SAVING_SETJMP
1355 && ! (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
1359 /* If we can't use ADDRESSOF, make sure we see through one we already
1361 if (! can_use_addressof
&& GET_CODE (reg
) == MEM
1362 && GET_CODE (XEXP (reg
, 0)) == ADDRESSOF
)
1363 reg
= XEXP (XEXP (reg
, 0), 0);
1365 /* Now we should have a value that resides in one or more pseudo regs. */
1367 if (GET_CODE (reg
) == REG
)
1369 /* If this variable lives in the current function and we don't need
1370 to put things in the stack for the sake of setjmp, try to keep it
1371 in a register until we know we actually need the address. */
1372 if (can_use_addressof
)
1373 gen_mem_addressof (reg
, decl
);
1375 put_reg_into_stack (function
, reg
, TREE_TYPE (decl
),
1376 promoted_mode
, decl_mode
,
1377 TREE_SIDE_EFFECTS (decl
), 0,
1378 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1381 else if (GET_CODE (reg
) == CONCAT
)
1383 /* A CONCAT contains two pseudos; put them both in the stack.
1384 We do it so they end up consecutive. */
1385 enum machine_mode part_mode
= GET_MODE (XEXP (reg
, 0));
1386 tree part_type
= type_for_mode (part_mode
, 0);
1387 #ifdef FRAME_GROWS_DOWNWARD
1388 /* Since part 0 should have a lower address, do it second. */
1389 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1390 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1391 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1393 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1394 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1395 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1398 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1399 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1400 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1402 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1403 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1404 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1408 /* Change the CONCAT into a combined MEM for both parts. */
1409 PUT_CODE (reg
, MEM
);
1410 MEM_VOLATILE_P (reg
) = MEM_VOLATILE_P (XEXP (reg
, 0));
1411 MEM_ALIAS_SET (reg
) = get_alias_set (decl
);
1412 MEM_SET_IN_STRUCT_P (reg
, AGGREGATE_TYPE_P (TREE_TYPE (decl
)));
      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
1416 XEXP (reg
, 0) = XEXP (XEXP (reg
, 0), 0);
1417 /* Prevent sharing of rtl that might lose. */
1418 if (GET_CODE (XEXP (reg
, 0)) == PLUS
)
1419 XEXP (reg
, 0) = copy_rtx (XEXP (reg
, 0));
1424 if (current_function_check_memory_usage
)
1425 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
1426 XEXP (reg
, 0), Pmode
,
1427 GEN_INT (GET_MODE_SIZE (GET_MODE (reg
))),
1428 TYPE_MODE (sizetype
),
1429 GEN_INT (MEMORY_USE_RW
),
1430 TYPE_MODE (integer_type_node
));
1433 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1434 into the stack frame of FUNCTION (0 means the current function).
1435 DECL_MODE is the machine mode of the user-level data type.
1436 PROMOTED_MODE is the machine mode of the register.
1437 VOLATILE_P is nonzero if this is for a "volatile" decl.
1438 USED_P is nonzero if this reg might have already been used in an insn. */
1441 put_reg_into_stack (function
, reg
, type
, promoted_mode
, decl_mode
, volatile_p
,
1442 original_regno
, used_p
, ht
)
1443 struct function
*function
;
1446 enum machine_mode promoted_mode
, decl_mode
;
1450 struct hash_table
*ht
;
1452 struct function
*func
= function
? function
: cfun
;
1454 int regno
= original_regno
;
1457 regno
= REGNO (reg
);
1459 if (regno
< func
->x_max_parm_reg
)
1460 new = func
->x_parm_reg_stack_loc
[regno
];
1462 new = assign_stack_local_1 (decl_mode
, GET_MODE_SIZE (decl_mode
), 0, func
);
1464 PUT_CODE (reg
, MEM
);
1465 PUT_MODE (reg
, decl_mode
);
1466 XEXP (reg
, 0) = XEXP (new, 0);
1467 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1468 MEM_VOLATILE_P (reg
) = volatile_p
;
1470 /* If this is a memory ref that contains aggregate components,
1471 mark it as such for cse and loop optimize. If we are reusing a
1472 previously generated stack slot, then we need to copy the bit in
1473 case it was set for other reasons. For instance, it is set for
1474 __builtin_va_alist. */
1475 MEM_SET_IN_STRUCT_P (reg
,
1476 AGGREGATE_TYPE_P (type
) || MEM_IN_STRUCT_P (new));
1477 MEM_ALIAS_SET (reg
) = get_alias_set (type
);
1479 /* Now make sure that all refs to the variable, previously made
1480 when it was a register, are fixed up to be valid again. */
1482 if (used_p
&& function
!= 0)
1484 struct var_refs_queue
*temp
;
1487 = (struct var_refs_queue
*) xmalloc (sizeof (struct var_refs_queue
));
1488 temp
->modified
= reg
;
1489 temp
->promoted_mode
= promoted_mode
;
1490 temp
->unsignedp
= TREE_UNSIGNED (type
);
1491 temp
->next
= function
->fixup_var_refs_queue
;
1492 function
->fixup_var_refs_queue
= temp
;
1495 /* Variable is local; fix it up now. */
1496 fixup_var_refs (reg
, promoted_mode
, TREE_UNSIGNED (type
), ht
);
1500 fixup_var_refs (var
, promoted_mode
, unsignedp
, ht
)
1502 enum machine_mode promoted_mode
;
1504 struct hash_table
*ht
;
1507 rtx first_insn
= get_insns ();
1508 struct sequence_stack
*stack
= seq_stack
;
1509 tree rtl_exps
= rtl_expr_chain
;
1511 /* Must scan all insns for stack-refs that exceed the limit. */
1512 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, first_insn
,
1514 /* If there's a hash table, it must record all uses of VAR. */
1518 /* Scan all pending sequences too. */
1519 for (; stack
; stack
= stack
->next
)
1521 push_to_sequence (stack
->first
);
1522 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
,
1523 stack
->first
, stack
->next
!= 0, 0);
1524 /* Update remembered end of sequence
1525 in case we added an insn at the end. */
1526 stack
->last
= get_last_insn ();
1530 /* Scan all waiting RTL_EXPRs too. */
1531 for (pending
= rtl_exps
; pending
; pending
= TREE_CHAIN (pending
))
1533 rtx seq
= RTL_EXPR_SEQUENCE (TREE_VALUE (pending
));
1534 if (seq
!= const0_rtx
&& seq
!= 0)
1536 push_to_sequence (seq
);
1537 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, seq
, 0,
1543 /* Scan the catch clauses for exception handling too. */
1544 push_to_sequence (catch_clauses
);
1545 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, catch_clauses
,
1550 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1551 some part of an insn. Return a struct fixup_replacement whose OLD
1552 value is equal to X. Allocate a new structure if no such entry exists. */
1554 static struct fixup_replacement
*
1555 find_fixup_replacement (replacements
, x
)
1556 struct fixup_replacement
**replacements
;
1559 struct fixup_replacement
*p
;
1561 /* See if we have already replaced this. */
1562 for (p
= *replacements
; p
!= 0 && ! rtx_equal_p (p
->old
, x
); p
= p
->next
)
1567 p
= (struct fixup_replacement
*) oballoc (sizeof (struct fixup_replacement
));
1570 p
->next
= *replacements
;
1577 /* Scan the insn-chain starting with INSN for refs to VAR
1578 and fix them up. TOPLEVEL is nonzero if this chain is the
1579 main chain of insns for the current function. */
1582 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, insn
, toplevel
, ht
)
1584 enum machine_mode promoted_mode
;
1588 struct hash_table
*ht
;
1591 rtx insn_list
= NULL_RTX
;
1593 /* If we already know which INSNs reference VAR there's no need
1594 to walk the entire instruction chain. */
1597 insn_list
= ((struct insns_for_mem_entry
*)
1598 hash_lookup (ht
, var
, /*create=*/0, /*copy=*/0))->insns
;
1599 insn
= insn_list
? XEXP (insn_list
, 0) : NULL_RTX
;
1600 insn_list
= XEXP (insn_list
, 1);
1605 rtx next
= NEXT_INSN (insn
);
1606 rtx set
, prev
, prev_set
;
1609 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
1611 /* Remember the notes in case we delete the insn. */
1612 note
= REG_NOTES (insn
);
1614 /* If this is a CLOBBER of VAR, delete it.
1616 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1617 and REG_RETVAL notes too. */
1618 if (GET_CODE (PATTERN (insn
)) == CLOBBER
1619 && (XEXP (PATTERN (insn
), 0) == var
1620 || (GET_CODE (XEXP (PATTERN (insn
), 0)) == CONCAT
1621 && (XEXP (XEXP (PATTERN (insn
), 0), 0) == var
1622 || XEXP (XEXP (PATTERN (insn
), 0), 1) == var
))))
1624 if ((note
= find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
)) != 0)
1625 /* The REG_LIBCALL note will go away since we are going to
1626 turn INSN into a NOTE, so just delete the
1627 corresponding REG_RETVAL note. */
1628 remove_note (XEXP (note
, 0),
1629 find_reg_note (XEXP (note
, 0), REG_RETVAL
,
1632 /* In unoptimized compilation, we shouldn't call delete_insn
1633 except in jump.c doing warnings. */
1634 PUT_CODE (insn
, NOTE
);
1635 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1636 NOTE_SOURCE_FILE (insn
) = 0;
1639 /* The insn to load VAR from a home in the arglist
1640 is now a no-op. When we see it, just delete it.
1641 Similarly if this is storing VAR from a register from which
1642 it was loaded in the previous insn. This will occur
1643 when an ADDRESSOF was made for an arglist slot. */
1645 && (set
= single_set (insn
)) != 0
1646 && SET_DEST (set
) == var
1647 /* If this represents the result of an insn group,
1648 don't delete the insn. */
1649 && find_reg_note (insn
, REG_RETVAL
, NULL_RTX
) == 0
1650 && (rtx_equal_p (SET_SRC (set
), var
)
1651 || (GET_CODE (SET_SRC (set
)) == REG
1652 && (prev
= prev_nonnote_insn (insn
)) != 0
1653 && (prev_set
= single_set (prev
)) != 0
1654 && SET_DEST (prev_set
) == SET_SRC (set
)
1655 && rtx_equal_p (SET_SRC (prev_set
), var
))))
1657 /* In unoptimized compilation, we shouldn't call delete_insn
1658 except in jump.c doing warnings. */
1659 PUT_CODE (insn
, NOTE
);
1660 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1661 NOTE_SOURCE_FILE (insn
) = 0;
1662 if (insn
== last_parm_insn
)
1663 last_parm_insn
= PREV_INSN (next
);
1667 struct fixup_replacement
*replacements
= 0;
1668 rtx next_insn
= NEXT_INSN (insn
);
1670 if (SMALL_REGISTER_CLASSES
)
1672 /* If the insn that copies the results of a CALL_INSN
1673 into a pseudo now references VAR, we have to use an
1674 intermediate pseudo since we want the life of the
1675 return value register to be only a single insn.
1677 If we don't use an intermediate pseudo, such things as
1678 address computations to make the address of VAR valid
1679 if it is not can be placed between the CALL_INSN and INSN.
1681 To make sure this doesn't happen, we record the destination
1682 of the CALL_INSN and see if the next insn uses both that
1685 if (call_dest
!= 0 && GET_CODE (insn
) == INSN
1686 && reg_mentioned_p (var
, PATTERN (insn
))
1687 && reg_mentioned_p (call_dest
, PATTERN (insn
)))
1689 rtx temp
= gen_reg_rtx (GET_MODE (call_dest
));
1691 emit_insn_before (gen_move_insn (temp
, call_dest
), insn
);
1693 PATTERN (insn
) = replace_rtx (PATTERN (insn
),
1697 if (GET_CODE (insn
) == CALL_INSN
1698 && GET_CODE (PATTERN (insn
)) == SET
)
1699 call_dest
= SET_DEST (PATTERN (insn
));
1700 else if (GET_CODE (insn
) == CALL_INSN
1701 && GET_CODE (PATTERN (insn
)) == PARALLEL
1702 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1703 call_dest
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 0));
1708 /* See if we have to do anything to INSN now that VAR is in
1709 memory. If it needs to be loaded into a pseudo, use a single
1710 pseudo for the entire insn in case there is a MATCH_DUP
1711 between two operands. We pass a pointer to the head of
1712 a list of struct fixup_replacements. If fixup_var_refs_1
1713 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1714 it will record them in this list.
1716 If it allocated a pseudo for any replacement, we copy into
1719 fixup_var_refs_1 (var
, promoted_mode
, &PATTERN (insn
), insn
,
1722 /* If this is last_parm_insn, and any instructions were output
1723 after it to fix it up, then we must set last_parm_insn to
1724 the last such instruction emitted. */
1725 if (insn
== last_parm_insn
)
1726 last_parm_insn
= PREV_INSN (next_insn
);
1728 while (replacements
)
1730 if (GET_CODE (replacements
->new) == REG
)
1735 /* OLD might be a (subreg (mem)). */
1736 if (GET_CODE (replacements
->old
) == SUBREG
)
1738 = fixup_memory_subreg (replacements
->old
, insn
, 0);
1741 = fixup_stack_1 (replacements
->old
, insn
);
1743 insert_before
= insn
;
1745 /* If we are changing the mode, do a conversion.
1746 This might be wasteful, but combine.c will
1747 eliminate much of the waste. */
1749 if (GET_MODE (replacements
->new)
1750 != GET_MODE (replacements
->old
))
1753 convert_move (replacements
->new,
1754 replacements
->old
, unsignedp
);
1755 seq
= gen_sequence ();
1759 seq
= gen_move_insn (replacements
->new,
1762 emit_insn_before (seq
, insert_before
);
1765 replacements
= replacements
->next
;
1769 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1770 But don't touch other insns referred to by reg-notes;
1771 we will get them elsewhere. */
1774 if (GET_CODE (note
) != INSN_LIST
)
1776 = walk_fixup_memory_subreg (XEXP (note
, 0), insn
, 1);
1777 note
= XEXP (note
, 1);
1785 insn
= XEXP (insn_list
, 0);
1786 insn_list
= XEXP (insn_list
, 1);
1793 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1794 See if the rtx expression at *LOC in INSN needs to be changed.
1796 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1797 contain a list of original rtx's and replacements. If we find that we need
1798 to modify this insn by replacing a memory reference with a pseudo or by
1799 making a new MEM to implement a SUBREG, we consult that list to see if
1800 we have already chosen a replacement. If none has already been allocated,
1801 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1802 or the SUBREG, as appropriate, to the pseudo. */
1805 fixup_var_refs_1 (var
, promoted_mode
, loc
, insn
, replacements
)
1807 enum machine_mode promoted_mode
;
1810 struct fixup_replacement
**replacements
;
1813 register rtx x
= *loc
;
1814 RTX_CODE code
= GET_CODE (x
);
1815 register const char *fmt
;
1816 register rtx tem
, tem1
;
1817 struct fixup_replacement
*replacement
;
1822 if (XEXP (x
, 0) == var
)
1824 /* Prevent sharing of rtl that might lose. */
1825 rtx sub
= copy_rtx (XEXP (var
, 0));
1827 if (! validate_change (insn
, loc
, sub
, 0))
1829 rtx y
= gen_reg_rtx (GET_MODE (sub
));
1832 /* We should be able to replace with a register or all is lost.
1833 Note that we can't use validate_change to verify this, since
1834 we're not caring for replacing all dups simultaneously. */
1835 if (! validate_replace_rtx (*loc
, y
, insn
))
1838 /* Careful! First try to recognize a direct move of the
1839 value, mimicking how things are done in gen_reload wrt
1840 PLUS. Consider what happens when insn is a conditional
1841 move instruction and addsi3 clobbers flags. */
1844 new_insn
= emit_insn (gen_rtx_SET (VOIDmode
, y
, sub
));
1845 seq
= gen_sequence ();
1848 if (recog_memoized (new_insn
) < 0)
1850 /* That failed. Fall back on force_operand and hope. */
1853 force_operand (sub
, y
);
1854 seq
= gen_sequence ();
1859 /* Don't separate setter from user. */
1860 if (PREV_INSN (insn
) && sets_cc0_p (PREV_INSN (insn
)))
1861 insn
= PREV_INSN (insn
);
1864 emit_insn_before (seq
, insn
);
1872 /* If we already have a replacement, use it. Otherwise,
1873 try to fix up this address in case it is invalid. */
1875 replacement
= find_fixup_replacement (replacements
, var
);
1876 if (replacement
->new)
1878 *loc
= replacement
->new;
1882 *loc
= replacement
->new = x
= fixup_stack_1 (x
, insn
);
1884 /* Unless we are forcing memory to register or we changed the mode,
1885 we can leave things the way they are if the insn is valid. */
1887 INSN_CODE (insn
) = -1;
1888 if (! flag_force_mem
&& GET_MODE (x
) == promoted_mode
1889 && recog_memoized (insn
) >= 0)
1892 *loc
= replacement
->new = gen_reg_rtx (promoted_mode
);
1896 /* If X contains VAR, we need to unshare it here so that we update
1897 each occurrence separately. But all identical MEMs in one insn
1898 must be replaced with the same rtx because of the possibility of
1901 if (reg_mentioned_p (var
, x
))
1903 replacement
= find_fixup_replacement (replacements
, x
);
1904 if (replacement
->new == 0)
1905 replacement
->new = copy_most_rtx (x
, var
);
1907 *loc
= x
= replacement
->new;
1923 /* Note that in some cases those types of expressions are altered
1924 by optimize_bit_field, and do not survive to get here. */
1925 if (XEXP (x
, 0) == var
1926 || (GET_CODE (XEXP (x
, 0)) == SUBREG
1927 && SUBREG_REG (XEXP (x
, 0)) == var
))
1929 /* Get TEM as a valid MEM in the mode presently in the insn.
1931 We don't worry about the possibility of MATCH_DUP here; it
1932 is highly unlikely and would be tricky to handle. */
1935 if (GET_CODE (tem
) == SUBREG
)
1937 if (GET_MODE_BITSIZE (GET_MODE (tem
))
1938 > GET_MODE_BITSIZE (GET_MODE (var
)))
1940 replacement
= find_fixup_replacement (replacements
, var
);
1941 if (replacement
->new == 0)
1942 replacement
->new = gen_reg_rtx (GET_MODE (var
));
1943 SUBREG_REG (tem
) = replacement
->new;
1946 tem
= fixup_memory_subreg (tem
, insn
, 0);
1949 tem
= fixup_stack_1 (tem
, insn
);
1951 /* Unless we want to load from memory, get TEM into the proper mode
1952 for an extract from memory. This can only be done if the
1953 extract is at a constant position and length. */
1955 if (! flag_force_mem
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
1956 && GET_CODE (XEXP (x
, 2)) == CONST_INT
1957 && ! mode_dependent_address_p (XEXP (tem
, 0))
1958 && ! MEM_VOLATILE_P (tem
))
1960 enum machine_mode wanted_mode
= VOIDmode
;
1961 enum machine_mode is_mode
= GET_MODE (tem
);
1962 HOST_WIDE_INT pos
= INTVAL (XEXP (x
, 2));
1965 if (GET_CODE (x
) == ZERO_EXTRACT
)
1968 = insn_data
[(int) CODE_FOR_extzv
].operand
[1].mode
;
1969 if (wanted_mode
== VOIDmode
)
1970 wanted_mode
= word_mode
;
1974 if (GET_CODE (x
) == SIGN_EXTRACT
)
1976 wanted_mode
= insn_data
[(int) CODE_FOR_extv
].operand
[1].mode
;
1977 if (wanted_mode
== VOIDmode
)
1978 wanted_mode
= word_mode
;
1981 /* If we have a narrower mode, we can do something. */
1982 if (wanted_mode
!= VOIDmode
1983 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
1985 HOST_WIDE_INT offset
= pos
/ BITS_PER_UNIT
;
1986 rtx old_pos
= XEXP (x
, 2);
1989 /* If the bytes and bits are counted differently, we
1990 must adjust the offset. */
1991 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
1992 offset
= (GET_MODE_SIZE (is_mode
)
1993 - GET_MODE_SIZE (wanted_mode
) - offset
);
1995 pos
%= GET_MODE_BITSIZE (wanted_mode
);
1997 newmem
= gen_rtx_MEM (wanted_mode
,
1998 plus_constant (XEXP (tem
, 0), offset
));
1999 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
2000 MEM_COPY_ATTRIBUTES (newmem
, tem
);
2002 /* Make the change and see if the insn remains valid. */
2003 INSN_CODE (insn
) = -1;
2004 XEXP (x
, 0) = newmem
;
2005 XEXP (x
, 2) = GEN_INT (pos
);
2007 if (recog_memoized (insn
) >= 0)
2010 /* Otherwise, restore old position. XEXP (x, 0) will be
2012 XEXP (x
, 2) = old_pos
;
2016 /* If we get here, the bitfield extract insn can't accept a memory
2017 reference. Copy the input into a register. */
2019 tem1
= gen_reg_rtx (GET_MODE (tem
));
2020 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
2027 if (SUBREG_REG (x
) == var
)
2029 /* If this is a special SUBREG made because VAR was promoted
2030 from a wider mode, replace it with VAR and call ourself
2031 recursively, this time saying that the object previously
2032 had its current mode (by virtue of the SUBREG). */
2034 if (SUBREG_PROMOTED_VAR_P (x
))
2037 fixup_var_refs_1 (var
, GET_MODE (var
), loc
, insn
, replacements
);
2041 /* If this SUBREG makes VAR wider, it has become a paradoxical
2042 SUBREG with VAR in memory, but these aren't allowed at this
2043 stage of the compilation. So load VAR into a pseudo and take
2044 a SUBREG of that pseudo. */
2045 if (GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (var
)))
2047 replacement
= find_fixup_replacement (replacements
, var
);
2048 if (replacement
->new == 0)
2049 replacement
->new = gen_reg_rtx (GET_MODE (var
));
2050 SUBREG_REG (x
) = replacement
->new;
2054 /* See if we have already found a replacement for this SUBREG.
2055 If so, use it. Otherwise, make a MEM and see if the insn
2056 is recognized. If not, or if we should force MEM into a register,
2057 make a pseudo for this SUBREG. */
2058 replacement
= find_fixup_replacement (replacements
, x
);
2059 if (replacement
->new)
2061 *loc
= replacement
->new;
2065 replacement
->new = *loc
= fixup_memory_subreg (x
, insn
, 0);
2067 INSN_CODE (insn
) = -1;
2068 if (! flag_force_mem
&& recog_memoized (insn
) >= 0)
2071 *loc
= replacement
->new = gen_reg_rtx (GET_MODE (x
));
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, NULL_PTR);

      /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
	 into a register and then store it back out.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
	  && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
	  && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new == 0)
	    replacement->new = gen_reg_rtx (GET_MODE (var));

	  SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
	  emit_insn_after (gen_move_insn (var, replacement->new), insn);
	}

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2116 rtx dest
= SET_DEST (x
);
2117 rtx src
= SET_SRC (x
);
2119 rtx outerdest
= dest
;
2122 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
2123 || GET_CODE (dest
) == SIGN_EXTRACT
2124 || GET_CODE (dest
) == ZERO_EXTRACT
)
2125 dest
= XEXP (dest
, 0);
2127 if (GET_CODE (src
) == SUBREG
)
2128 src
= XEXP (src
, 0);
2130 /* If VAR does not appear at the top level of the SET
2131 just scan the lower levels of the tree. */
2133 if (src
!= var
&& dest
!= var
)
2136 /* We will need to rerecognize this insn. */
2137 INSN_CODE (insn
) = -1;
2140 if (GET_CODE (outerdest
) == ZERO_EXTRACT
&& dest
== var
)
2142 /* Since this case will return, ensure we fixup all the
2144 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 1),
2145 insn
, replacements
);
2146 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 2),
2147 insn
, replacements
);
2148 fixup_var_refs_1 (var
, promoted_mode
, &SET_SRC (x
),
2149 insn
, replacements
);
2151 tem
= XEXP (outerdest
, 0);
2153 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2154 that may appear inside a ZERO_EXTRACT.
2155 This was legitimate when the MEM was a REG. */
2156 if (GET_CODE (tem
) == SUBREG
2157 && SUBREG_REG (tem
) == var
)
2158 tem
= fixup_memory_subreg (tem
, insn
, 0);
2160 tem
= fixup_stack_1 (tem
, insn
);
2162 if (GET_CODE (XEXP (outerdest
, 1)) == CONST_INT
2163 && GET_CODE (XEXP (outerdest
, 2)) == CONST_INT
2164 && ! mode_dependent_address_p (XEXP (tem
, 0))
2165 && ! MEM_VOLATILE_P (tem
))
2167 enum machine_mode wanted_mode
;
2168 enum machine_mode is_mode
= GET_MODE (tem
);
2169 HOST_WIDE_INT pos
= INTVAL (XEXP (outerdest
, 2));
2171 wanted_mode
= insn_data
[(int) CODE_FOR_insv
].operand
[0].mode
;
2172 if (wanted_mode
== VOIDmode
)
2173 wanted_mode
= word_mode
;
2175 /* If we have a narrower mode, we can do something. */
2176 if (GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
2178 HOST_WIDE_INT offset
= pos
/ BITS_PER_UNIT
;
2179 rtx old_pos
= XEXP (outerdest
, 2);
2182 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
2183 offset
= (GET_MODE_SIZE (is_mode
)
2184 - GET_MODE_SIZE (wanted_mode
) - offset
);
2186 pos
%= GET_MODE_BITSIZE (wanted_mode
);
2188 newmem
= gen_rtx_MEM (wanted_mode
,
2189 plus_constant (XEXP (tem
, 0),
2191 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
2192 MEM_COPY_ATTRIBUTES (newmem
, tem
);
2194 /* Make the change and see if the insn remains valid. */
2195 INSN_CODE (insn
) = -1;
2196 XEXP (outerdest
, 0) = newmem
;
2197 XEXP (outerdest
, 2) = GEN_INT (pos
);
2199 if (recog_memoized (insn
) >= 0)
2202 /* Otherwise, restore old position. XEXP (x, 0) will be
2204 XEXP (outerdest
, 2) = old_pos
;
2208 /* If we get here, the bit-field store doesn't allow memory
2209 or isn't located at a constant position. Load the value into
2210 a register, do the store, and put it back into memory. */
2212 tem1
= gen_reg_rtx (GET_MODE (tem
));
2213 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
2214 emit_insn_after (gen_move_insn (tem
, tem1
), insn
);
2215 XEXP (outerdest
, 0) = tem1
;
2220 /* STRICT_LOW_PART is a no-op on memory references
2221 and it can cause combinations to be unrecognizable,
2224 if (dest
== var
&& GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2225 SET_DEST (x
) = XEXP (SET_DEST (x
), 0);
2227 /* A valid insn to copy VAR into or out of a register
2228 must be left alone, to avoid an infinite loop here.
2229 If the reference to VAR is by a subreg, fix that up,
2230 since SUBREG is not valid for a memref.
2231 Also fix up the address of the stack slot.
2233 Note that we must not try to recognize the insn until
2234 after we know that we have valid addresses and no
2235 (subreg (mem ...) ...) constructs, since these interfere
2236 with determining the validity of the insn. */
2238 if ((SET_SRC (x
) == var
2239 || (GET_CODE (SET_SRC (x
)) == SUBREG
2240 && SUBREG_REG (SET_SRC (x
)) == var
))
2241 && (GET_CODE (SET_DEST (x
)) == REG
2242 || (GET_CODE (SET_DEST (x
)) == SUBREG
2243 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
))
2244 && GET_MODE (var
) == promoted_mode
2245 && x
== single_set (insn
))
2249 replacement
= find_fixup_replacement (replacements
, SET_SRC (x
));
2250 if (replacement
->new)
2251 SET_SRC (x
) = replacement
->new;
2252 else if (GET_CODE (SET_SRC (x
)) == SUBREG
)
2253 SET_SRC (x
) = replacement
->new
2254 = fixup_memory_subreg (SET_SRC (x
), insn
, 0);
2256 SET_SRC (x
) = replacement
->new
2257 = fixup_stack_1 (SET_SRC (x
), insn
);
2259 if (recog_memoized (insn
) >= 0)
	  /* INSN is not valid, but we know that we want to
	     copy SET_SRC (x) to SET_DEST (x) in some way.  So
	     we generate the move and see whether it requires more
	     than one insn.  If it does, we emit those insns and
	     delete INSN.  Otherwise, we can just replace the pattern
	     of INSN; we have already verified above that INSN has
	     no other function than to do X.  */
2270 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2271 if (GET_CODE (pat
) == SEQUENCE
)
2273 emit_insn_after (pat
, insn
);
2274 PUT_CODE (insn
, NOTE
);
2275 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2276 NOTE_SOURCE_FILE (insn
) = 0;
2279 PATTERN (insn
) = pat
;
2284 if ((SET_DEST (x
) == var
2285 || (GET_CODE (SET_DEST (x
)) == SUBREG
2286 && SUBREG_REG (SET_DEST (x
)) == var
))
2287 && (GET_CODE (SET_SRC (x
)) == REG
2288 || (GET_CODE (SET_SRC (x
)) == SUBREG
2289 && GET_CODE (SUBREG_REG (SET_SRC (x
))) == REG
))
2290 && GET_MODE (var
) == promoted_mode
2291 && x
== single_set (insn
))
2295 if (GET_CODE (SET_DEST (x
)) == SUBREG
)
2296 SET_DEST (x
) = fixup_memory_subreg (SET_DEST (x
), insn
, 0);
2298 SET_DEST (x
) = fixup_stack_1 (SET_DEST (x
), insn
);
2300 if (recog_memoized (insn
) >= 0)
2303 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2304 if (GET_CODE (pat
) == SEQUENCE
)
2306 emit_insn_after (pat
, insn
);
2307 PUT_CODE (insn
, NOTE
);
2308 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2309 NOTE_SOURCE_FILE (insn
) = 0;
2312 PATTERN (insn
) = pat
;
	  /* Otherwise, storing into VAR must be handled specially
	     by storing into a temporary and copying that into VAR
	     with a new insn after this one.  Note that this case
	     will be used when storing into a promoted scalar since
	     the insn will now have different modes on the input
	     and output and hence will be invalid (except for the case
	     of setting it to a constant, which does not need any
	     change if it is valid).  We generate extra code in that case,
	     but combine.c will eliminate it.  */

	    rtx fixeddest = SET_DEST (x);

	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
	      fixeddest = XEXP (fixeddest, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (fixeddest) == SUBREG)
	      {
		fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
		promoted_mode = GET_MODE (fixeddest);
	      }
	    else
	      fixeddest = fixup_stack_1 (fixeddest, insn);

	    temp = gen_reg_rtx (promoted_mode);

	    emit_insn_after (gen_move_insn (fixeddest,
					    gen_lowpart (GET_MODE (fixeddest),

	    SET_DEST (x) = temp;

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
			      insn, replacements);
	}
    }
/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
   return an rtx (MEM:m1 newaddr) which is equivalent.
   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside REG_NOTES.  */
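/* For instance, on a hypothetical 32-bit little-endian target,
   (SUBREG:SI (MEM:DI addr) 1) selects word 1 of the DImode memory
   reference, so it is rewritten here as (MEM:SI (plus addr 4)).
   This example is only illustrative and not taken from any particular
   machine description.  */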
static rtx
fixup_memory_subreg (x, insn, uncritical)
     rtx x;
     rtx insn;
     int uncritical;
{
  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
  rtx addr = XEXP (SUBREG_REG (x), 0);
  enum machine_mode mode = GET_MODE (x);
  rtx result;

  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
      && ! uncritical)
    abort ();

  if (BYTES_BIG_ENDIAN)
    offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
  addr = plus_constant (addr, offset);
  if (!flag_force_addr && memory_address_p (mode, addr))
    /* Shortcut if no insns need be emitted.  */
    return change_address (SUBREG_REG (x), mode, addr);

  start_sequence ();
  result = change_address (SUBREG_REG (x), mode, addr);
  emit_insn_before (gen_sequence (), insn);
  end_sequence ();
  return result;
}
/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL is as in fixup_memory_subreg.  */
static rtx
walk_fixup_memory_subreg (x, insn, uncritical)
     register rtx x;
     rtx insn;
     int uncritical;
{
  register enum rtx_code code;
  register const char *fmt;
  register int i;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    return fixup_memory_subreg (x, insn, uncritical);

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j)
	      = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
	}
    }
  return x;
}
/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */
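/* As an illustration (the numbers are made up, not from any particular
   target): a reference such as (mem:SI (plus (reg fp) (const_int 40000)))
   whose displacement is out of range is rewritten so that the sum
   fp+40000 is first computed into a new pseudo register, and the MEM
   then addresses that register instead.  */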
static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register const char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
	 (displacement is too large), compute the sum in a register.  */
      if (GET_CODE (ad) == PLUS
	  && GET_CODE (XEXP (ad, 0)) == REG
	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
	      || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
#endif
	      || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
	{
	  rtx temp, seq;
	  if (memory_address_p (GET_MODE (x), ad))
	    return x;

	  start_sequence ();
	  temp = copy_to_reg (ad);
	  seq = gen_sequence ();
	  end_sequence ();
	  emit_insn_before (seq, insn);
	  return change_address (x, VOIDmode, temp);
	}
      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
	}
    }
  return x;
}
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */
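/* For example, an 8-bit ZERO_EXTRACT at a byte-aligned position within a
   MEM can be rewritten as a plain QImode move of the containing byte;
   the concrete field size and mode here are only illustrative.  */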
2532 optimize_bit_field (body
, insn
, equiv_mem
)
2537 register rtx bitfield
;
2540 enum machine_mode mode
;
2542 if (GET_CODE (SET_DEST (body
)) == SIGN_EXTRACT
2543 || GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
)
2544 bitfield
= SET_DEST (body
), destflag
= 1;
2546 bitfield
= SET_SRC (body
), destflag
= 0;
2548 /* First check that the field being stored has constant size and position
2549 and is in fact a byte or halfword suitably aligned. */
2551 if (GET_CODE (XEXP (bitfield
, 1)) == CONST_INT
2552 && GET_CODE (XEXP (bitfield
, 2)) == CONST_INT
2553 && ((mode
= mode_for_size (INTVAL (XEXP (bitfield
, 1)), MODE_INT
, 1))
2555 && INTVAL (XEXP (bitfield
, 2)) % INTVAL (XEXP (bitfield
, 1)) == 0)
2557 register rtx memref
= 0;
2559 /* Now check that the containing word is memory, not a register,
2560 and that it is safe to change the machine mode. */
2562 if (GET_CODE (XEXP (bitfield
, 0)) == MEM
)
2563 memref
= XEXP (bitfield
, 0);
2564 else if (GET_CODE (XEXP (bitfield
, 0)) == REG
2566 memref
= equiv_mem
[REGNO (XEXP (bitfield
, 0))];
2567 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2568 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == MEM
)
2569 memref
= SUBREG_REG (XEXP (bitfield
, 0));
2570 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2572 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == REG
)
2573 memref
= equiv_mem
[REGNO (SUBREG_REG (XEXP (bitfield
, 0)))];
2576 && ! mode_dependent_address_p (XEXP (memref
, 0))
2577 && ! MEM_VOLATILE_P (memref
))
2579 /* Now adjust the address, first for any subreg'ing
2580 that we are now getting rid of,
2581 and then for which byte of the word is wanted. */
2583 HOST_WIDE_INT offset
= INTVAL (XEXP (bitfield
, 2));
2586 /* Adjust OFFSET to count bits from low-address byte. */
2587 if (BITS_BIG_ENDIAN
!= BYTES_BIG_ENDIAN
)
2588 offset
= (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield
, 0)))
2589 - offset
- INTVAL (XEXP (bitfield
, 1)));
2591 /* Adjust OFFSET to count bytes from low-address byte. */
2592 offset
/= BITS_PER_UNIT
;
2593 if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
)
2595 offset
+= SUBREG_WORD (XEXP (bitfield
, 0)) * UNITS_PER_WORD
;
2596 if (BYTES_BIG_ENDIAN
)
2597 offset
-= (MIN (UNITS_PER_WORD
,
2598 GET_MODE_SIZE (GET_MODE (XEXP (bitfield
, 0))))
2599 - MIN (UNITS_PER_WORD
,
2600 GET_MODE_SIZE (GET_MODE (memref
))));
2604 memref
= change_address (memref
, mode
,
2605 plus_constant (XEXP (memref
, 0), offset
));
2606 insns
= get_insns ();
2608 emit_insns_before (insns
, insn
);
2610 /* Store this memory reference where
2611 we found the bit field reference. */
2615 validate_change (insn
, &SET_DEST (body
), memref
, 1);
2616 if (! CONSTANT_ADDRESS_P (SET_SRC (body
)))
2618 rtx src
= SET_SRC (body
);
2619 while (GET_CODE (src
) == SUBREG
2620 && SUBREG_WORD (src
) == 0)
2621 src
= SUBREG_REG (src
);
2622 if (GET_MODE (src
) != GET_MODE (memref
))
2623 src
= gen_lowpart (GET_MODE (memref
), SET_SRC (body
));
2624 validate_change (insn
, &SET_SRC (body
), src
, 1);
2626 else if (GET_MODE (SET_SRC (body
)) != VOIDmode
2627 && GET_MODE (SET_SRC (body
)) != GET_MODE (memref
))
2628 /* This shouldn't happen because anything that didn't have
2629 one of these modes should have got converted explicitly
2630 and then referenced through a subreg.
2631 This is so because the original bit-field was
2632 handled by agg_mode and so its tree structure had
2633 the same mode that memref now has. */
2638 rtx dest
= SET_DEST (body
);
2640 while (GET_CODE (dest
) == SUBREG
2641 && SUBREG_WORD (dest
) == 0
2642 && (GET_MODE_CLASS (GET_MODE (dest
))
2643 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest
))))
2644 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
2646 dest
= SUBREG_REG (dest
);
2648 validate_change (insn
, &SET_DEST (body
), dest
, 1);
2650 if (GET_MODE (dest
) == GET_MODE (memref
))
2651 validate_change (insn
, &SET_SRC (body
), memref
, 1);
2654 /* Convert the mem ref to the destination mode. */
2655 rtx newreg
= gen_reg_rtx (GET_MODE (dest
));
2658 convert_move (newreg
, memref
,
2659 GET_CODE (SET_SRC (body
)) == ZERO_EXTRACT
);
2663 validate_change (insn
, &SET_SRC (body
), newreg
, 1);
2667 /* See if we can convert this extraction or insertion into
2668 a simple move insn. We might not be able to do so if this
2669 was, for example, part of a PARALLEL.
2671 If we succeed, write out any needed conversions. If we fail,
2672 it is hard to guess why we failed, so don't do anything
2673 special; just let the optimization be suppressed. */
2675 if (apply_change_group () && seq
)
2676 emit_insns_before (seq
, insn
);
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */
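/* For example, a reference to (plus virtual_stack_vars_rtx (const_int 8))
   is later rewritten by instantiate_virtual_regs_1 as
   (plus frame_pointer_rtx (const_int (var_offset + 8))); the constant 8
   is just an illustration.  */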
static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
  (current_function_outgoing_args_size	\
   + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
  (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif

/* On a few machines, the CFA coincides with the arg pointer.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET 0
#endif
/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
   its address taken.  DECL is the decl for the object stored in the
   register, for later use if we do need to force REG into the stack.
   REG is overwritten by the MEM like in put_reg_into_stack.  */
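/* Concretely: if REG was a pseudo such as (reg:SI 101), REG itself is
   rewritten in place into (mem (addressof ...)), where the inner ADDRESSOF
   records a fresh pseudo together with the original register number and
   DECL.  The exact operand layout sketched in this note is only a rough
   picture of the general idea.  */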
rtx
gen_mem_addressof (reg, decl)
     rtx reg;
     tree decl;
{
  tree type = TREE_TYPE (decl);
  rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
			     REGNO (reg), decl);

  /* If the original REG was a user-variable, then so is the REG whose
     address is being taken.  Likewise for unchanging.  */
  REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
  RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, DECL_MODE (decl));
  XEXP (reg, 0) = r;
  MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
  MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
  MEM_ALIAS_SET (reg) = get_alias_set (decl);

  if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
    fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);

  return reg;
}
/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack.  */

void
flush_addressof (decl)
     tree decl;
{
  if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
      && DECL_RTL (decl) != 0
      && GET_CODE (DECL_RTL (decl)) == MEM
      && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
      && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
    put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
}
/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack.  */

static void
put_addressof_into_stack (r, ht)
     rtx r;
     struct hash_table *ht;
{
  tree decl = ADDRESSOF_DECL (r);
  rtx reg = XEXP (r, 0);

  if (GET_CODE (reg) != REG)
    abort ();

  put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
		      DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
		      ADDRESSOF_REGNO (r),
		      TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
}

/* List of replacements made below in purge_addressof_1 when creating
   bitfield insertions.  */
static rtx purge_bitfield_addressof_replacements;

/* List of replacements made below in purge_addressof_1 for patterns
   (MEM (ADDRESSOF (REG ...))).  The key of a list entry is the
   corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
   the whole pattern.  The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
   enough in complex cases, e.g. when some field values can be
   extracted by using a MEM with a narrower mode.  */
static rtx purge_addressof_replacements;
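/* Illustrative layout, as the list is built in purge_addressof_1 below:
   each element is an EXPR_LIST whose XEXP (elt, 0) is the
   (ADDRESSOF (REG ...)) key, whose XEXP (XEXP (elt, 1), 0) is the
   replacement rtx, and whose XEXP (XEXP (elt, 1), 1) chains to the
   next element.  */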
/* Helper function for purge_addressof.  See if the rtx expression at *LOC
   in INSN needs to be changed.  If FORCE, always put any ADDRESSOFs into
   the stack.  If the function returns FALSE then the replacement could not
   be completed.  */

static boolean
purge_addressof_1 (loc, insn, force, store, ht)
     rtx *loc;
     rtx insn;
     int force, store;
     struct hash_table *ht;
{
  rtx x;
  RTX_CODE code;
  int i, j;
  const char *fmt;
  boolean result = true;
2835 /* Re-start here to avoid recursion in common cases. */
2842 code
= GET_CODE (x
);
2844 /* If we don't return in any of the cases below, we will recurse inside
2845 the RTX, which will normally result in any ADDRESSOF being forced into
2849 result
= purge_addressof_1 (&SET_DEST (x
), insn
, force
, 1, ht
);
2850 result
&= purge_addressof_1 (&SET_SRC (x
), insn
, force
, 0, ht
);
2854 else if (code
== ADDRESSOF
&& GET_CODE (XEXP (x
, 0)) == MEM
)
2856 /* We must create a copy of the rtx because it was created by
2857 overwriting a REG rtx which is always shared. */
2858 rtx sub
= copy_rtx (XEXP (XEXP (x
, 0), 0));
2861 if (validate_change (insn
, loc
, sub
, 0)
2862 || validate_replace_rtx (x
, sub
, insn
))
2866 sub
= force_operand (sub
, NULL_RTX
);
2867 if (! validate_change (insn
, loc
, sub
, 0)
2868 && ! validate_replace_rtx (x
, sub
, insn
))
2871 insns
= gen_sequence ();
2873 emit_insn_before (insns
, insn
);
2877 else if (code
== MEM
&& GET_CODE (XEXP (x
, 0)) == ADDRESSOF
&& ! force
)
2879 rtx sub
= XEXP (XEXP (x
, 0), 0);
2882 if (GET_CODE (sub
) == MEM
)
2884 sub2
= gen_rtx_MEM (GET_MODE (x
), copy_rtx (XEXP (sub
, 0)));
2885 MEM_COPY_ATTRIBUTES (sub2
, sub
);
2886 RTX_UNCHANGING_P (sub2
) = RTX_UNCHANGING_P (sub
);
2889 else if (GET_CODE (sub
) == REG
2890 && (MEM_VOLATILE_P (x
) || GET_MODE (x
) == BLKmode
))
2892 else if (GET_CODE (sub
) == REG
&& GET_MODE (x
) != GET_MODE (sub
))
2894 int size_x
, size_sub
;
2898 /* When processing REG_NOTES look at the list of
2899 replacements done on the insn to find the register that X
2903 for (tem
= purge_bitfield_addressof_replacements
;
2905 tem
= XEXP (XEXP (tem
, 1), 1))
2906 if (rtx_equal_p (x
, XEXP (tem
, 0)))
2908 *loc
= XEXP (XEXP (tem
, 1), 0);
2912 /* See comment for purge_addressof_replacements. */
2913 for (tem
= purge_addressof_replacements
;
2915 tem
= XEXP (XEXP (tem
, 1), 1))
2916 if (rtx_equal_p (XEXP (x
, 0), XEXP (tem
, 0)))
2918 rtx z
= XEXP (XEXP (tem
, 1), 0);
2920 if (GET_MODE (x
) == GET_MODE (z
)
2921 || (GET_CODE (XEXP (XEXP (tem
, 1), 0)) != REG
2922 && GET_CODE (XEXP (XEXP (tem
, 1), 0)) != SUBREG
))
2925 /* It can happen that the note may speak of things
2926 in a wider (or just different) mode than the
2927 code did. This is especially true of
2930 if (GET_CODE (z
) == SUBREG
&& SUBREG_WORD (z
) == 0)
		  if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
		      && (GET_MODE_SIZE (GET_MODE (x))
			  > GET_MODE_SIZE (GET_MODE (z))))
		    {
		      /* This can occur as a result of invalid
			 pointer casts, e.g. float f; ...
			 *(long long int *)&f.
			 ??? We could emit a warning here, but
			 without a line number that wouldn't be
			 very useful.  */
		      z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
		    }
		  else
		    z = gen_lowpart (GET_MODE (x), z);
	  /* Sometimes we may not be able to find the replacement.  For
	     example when the original insn was a MEM in a wider mode,
	     and the note is part of a sign extension of a narrowed
	     version of that MEM.  Gcc testcase compile/990829-1.c can
	     generate an example of this situation.  Rather than complain
	     we return false, which will prompt our caller to remove the
= GET_MODE_BITSIZE (GET_MODE (x
));
2963 size_sub
= GET_MODE_BITSIZE (GET_MODE (sub
));
2965 /* Don't even consider working with paradoxical subregs,
2966 or the moral equivalent seen here. */
2967 if (size_x
<= size_sub
2968 && int_mode_for_mode (GET_MODE (sub
)) != BLKmode
)
2970 /* Do a bitfield insertion to mirror what would happen
2977 rtx p
= PREV_INSN (insn
);
2980 val
= gen_reg_rtx (GET_MODE (x
));
2981 if (! validate_change (insn
, loc
, val
, 0))
2983 /* Discard the current sequence and put the
2984 ADDRESSOF on stack. */
2988 seq
= gen_sequence ();
2990 emit_insn_before (seq
, insn
);
2991 compute_insns_for_mem (p
? NEXT_INSN (p
) : get_insns (),
2995 store_bit_field (sub
, size_x
, 0, GET_MODE (x
),
2996 val
, GET_MODE_SIZE (GET_MODE (sub
)),
2997 GET_MODE_SIZE (GET_MODE (sub
)));
2999 /* Make sure to unshare any shared rtl that store_bit_field
3000 might have created. */
3001 unshare_all_rtl_again (get_insns ());
3003 seq
= gen_sequence ();
3005 p
= emit_insn_after (seq
, insn
);
3006 if (NEXT_INSN (insn
))
3007 compute_insns_for_mem (NEXT_INSN (insn
),
3008 p
? NEXT_INSN (p
) : NULL_RTX
,
3013 rtx p
= PREV_INSN (insn
);
3016 val
= extract_bit_field (sub
, size_x
, 0, 1, NULL_RTX
,
3017 GET_MODE (x
), GET_MODE (x
),
3018 GET_MODE_SIZE (GET_MODE (sub
)),
3019 GET_MODE_SIZE (GET_MODE (sub
)));
3021 if (! validate_change (insn
, loc
, val
, 0))
3023 /* Discard the current sequence and put the
3024 ADDRESSOF on stack. */
3029 seq
= gen_sequence ();
3031 emit_insn_before (seq
, insn
);
3032 compute_insns_for_mem (p
? NEXT_INSN (p
) : get_insns (),
3036 /* Remember the replacement so that the same one can be done
3037 on the REG_NOTES. */
3038 purge_bitfield_addressof_replacements
3039 = gen_rtx_EXPR_LIST (VOIDmode
, x
,
3042 purge_bitfield_addressof_replacements
));
3044 /* We replaced with a reg -- all done. */
3049 else if (validate_change (insn
, loc
, sub
, 0))
3051 /* Remember the replacement so that the same one can be done
3052 on the REG_NOTES. */
3053 if (GET_CODE (sub
) == REG
|| GET_CODE (sub
) == SUBREG
)
3057 for (tem
= purge_addressof_replacements
;
3059 tem
= XEXP (XEXP (tem
, 1), 1))
3060 if (rtx_equal_p (XEXP (x
, 0), XEXP (tem
, 0)))
3062 XEXP (XEXP (tem
, 1), 0) = sub
;
3065 purge_addressof_replacements
3066 = gen_rtx (EXPR_LIST
, VOIDmode
, XEXP (x
, 0),
3067 gen_rtx_EXPR_LIST (VOIDmode
, sub
,
3068 purge_addressof_replacements
));
3074 /* else give up and put it into the stack */
3077 else if (code
== ADDRESSOF
)
3079 put_addressof_into_stack (x
, ht
);
3082 else if (code
== SET
)
3084 result
= purge_addressof_1 (&SET_DEST (x
), insn
, force
, 1, ht
);
3085 result
&= purge_addressof_1 (&SET_SRC (x
), insn
, force
, 0, ht
);
3089 /* Scan all subexpressions. */
3090 fmt
= GET_RTX_FORMAT (code
);
3091 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3094 result
&= purge_addressof_1 (&XEXP (x
, i
), insn
, force
, 0, ht
);
3095 else if (*fmt
== 'E')
3096 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3097 result
&= purge_addressof_1 (&XVECEXP (x
, i
, j
), insn
, force
, 0, ht
);
3103 /* Return a new hash table entry in HT. */
3105 static struct hash_entry
*
3106 insns_for_mem_newfunc (he
, ht
, k
)
3107 struct hash_entry
*he
;
3108 struct hash_table
*ht
;
3109 hash_table_key k ATTRIBUTE_UNUSED
;
3111 struct insns_for_mem_entry
*ifmhe
;
3115 ifmhe
= ((struct insns_for_mem_entry
*)
3116 hash_allocate (ht
, sizeof (struct insns_for_mem_entry
)));
3117 ifmhe
->insns
= NULL_RTX
;
3122 /* Return a hash value for K, a REG. */
3124 static unsigned long
3125 insns_for_mem_hash (k
)
3128 /* K is really a RTX. Just use the address as the hash value. */
3129 return (unsigned long) k
;
/* Return non-zero if K1 and K2 (two REGs) are the same.  */

static boolean
insns_for_mem_comp (k1, k2)
     hash_table_key k1;
     hash_table_key k2;
{
  return k1 == k2;
}

struct insns_for_mem_walk_info
{
  /* The hash table that we are using to record which INSNs use which
     MEMs.  */
  struct hash_table *ht;

  /* The INSN we are currently processing.  */
  rtx insn;

  /* Zero if we are walking to find ADDRESSOFs, one if we are walking
     to find the insns that use the REGs in the ADDRESSOFs.  */
  int pass;
};
3155 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3156 that might be used in an ADDRESSOF expression, record this INSN in
3157 the hash table given by DATA (which is really a pointer to an
3158 insns_for_mem_walk_info structure). */
3161 insns_for_mem_walk (r
, data
)
3165 struct insns_for_mem_walk_info
*ifmwi
3166 = (struct insns_for_mem_walk_info
*) data
;
3168 if (ifmwi
->pass
== 0 && *r
&& GET_CODE (*r
) == ADDRESSOF
3169 && GET_CODE (XEXP (*r
, 0)) == REG
)
3170 hash_lookup (ifmwi
->ht
, XEXP (*r
, 0), /*create=*/1, /*copy=*/0);
3171 else if (ifmwi
->pass
== 1 && *r
&& GET_CODE (*r
) == REG
)
3173 /* Lookup this MEM in the hashtable, creating it if necessary. */
3174 struct insns_for_mem_entry
*ifme
3175 = (struct insns_for_mem_entry
*) hash_lookup (ifmwi
->ht
,
3180 /* If we have not already recorded this INSN, do so now. Since
3181 we process the INSNs in order, we know that if we have
3182 recorded it it must be at the front of the list. */
3183 if (ifme
&& (!ifme
->insns
|| XEXP (ifme
->insns
, 0) != ifmwi
->insn
))
3185 /* We do the allocation on the same obstack as is used for
3186 the hash table since this memory will not be used once
3187 the hash table is deallocated. */
3188 push_obstacks (&ifmwi
->ht
->memory
, &ifmwi
->ht
->memory
);
3189 ifme
->insns
= gen_rtx_EXPR_LIST (VOIDmode
, ifmwi
->insn
,
3198 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3199 which REGs in HT. */
3202 compute_insns_for_mem (insns
, last_insn
, ht
)
3205 struct hash_table
*ht
;
3208 struct insns_for_mem_walk_info ifmwi
;
3211 for (ifmwi
.pass
= 0; ifmwi
.pass
< 2; ++ifmwi
.pass
)
3212 for (insn
= insns
; insn
!= last_insn
; insn
= NEXT_INSN (insn
))
3213 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
3216 for_each_rtx (&insn
, insns_for_mem_walk
, &ifmwi
);
/* Helper function for purge_addressof called through for_each_rtx.
   Returns true iff the rtl is an ADDRESSOF.  */

static int
is_addressof (rtl, data)
     rtx *rtl;
     void *data ATTRIBUTE_UNUSED;
{
  return GET_CODE (*rtl) == ADDRESSOF;
}
/* Eliminate all occurrences of ADDRESSOF from INSNS.  Elide any remaining
   (MEM (ADDRESSOF)) patterns, and force any needed registers into the
   stack.  */

void
purge_addressof (insns)
     rtx insns;
{
  rtx insn;
  struct hash_table ht;

  /* When we actually purge ADDRESSOFs, we turn REGs into MEMs.  That
     requires a fixup pass over the instruction stream to correct
     INSNs that depended on the REG being a REG, and not a MEM.  But,
     these fixup passes are slow.  Furthermore, most MEMs are not
     mentioned in very many instructions.  So, we speed up the process
     by pre-calculating which REGs occur in which INSNs; that allows
     us to perform the fixup passes much more quickly.  */
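  /* The table built here is keyed by the REG inside each ADDRESSOF and
     maps it to an EXPR_LIST of the INSNs that mention that REG (see
     insns_for_mem_walk above).  */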
  hash_table_init (&ht,
		   insns_for_mem_newfunc,
		   insns_for_mem_hash,
		   insns_for_mem_comp);
  compute_insns_for_mem (insns, NULL_RTX, &ht);

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	if (! purge_addressof_1 (&PATTERN (insn), insn,
				 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
	  /* If we could not replace the ADDRESSOFs in the insn,
	     something is wrong.  */
	  abort ();

	if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
	  {
	    /* If we could not replace the ADDRESSOFs in the insn's notes,
	       we can just remove the offending notes instead.  */
	    rtx note;

	    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	      {
		/* If we find a REG_RETVAL note then the insn is a libcall.
		   Such insns must have REG_EQUAL notes as well, in order
		   for later passes of the compiler to work.  So it is not
		   safe to delete the notes here, and instead we abort.  */
		if (REG_NOTE_KIND (note) == REG_RETVAL)
		  abort ();
		if (for_each_rtx (&note, is_addressof, NULL))
		  remove_note (insn, note);
	      }
	  }
      }

  hash_table_free (&ht);
  purge_bitfield_addressof_replacements = 0;
  purge_addressof_replacements = 0;
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

void
instantiate_virtual_regs (fndecl, insns)
     tree fndecl;
     rtx insns;
{
  rtx insn;
  int i;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (fndecl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
  out_arg_offset = STACK_POINTER_OFFSET;
  cfa_offset = ARG_POINTER_CFA_OFFSET;

  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (fndecl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
	instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
      }

  /* Instantiate the stack slots for the parm registers, for later use in
     addressof elimination.  */
  for (i = 0; i < max_parm_reg; ++i)
    if (parm_reg_stack_loc[i])
      instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (fndecl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */

static void
instantiate_decls (fndecl, valid_only)
     tree fndecl;
     int valid_only;
{
  tree decl;

  if (DECL_SAVED_INSNS (fndecl))
    /* When compiling an inline function, the obstack used for
       rtl allocation is the maybepermanent_obstack.  Calling
       `resume_temporary_allocation' switches us back to that
       obstack while we process this function's parameters.  */
    resume_temporary_allocation ();

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      instantiate_decl (DECL_RTL (decl), size, valid_only);

      /* If the parameter was promoted, then the incoming RTL mode may be
	 larger than the declared type size.  We must use the larger of
	 the two sizes.  */
      size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);

  if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
    {
      /* Save all rtl allocated for this function by raising the
	 high-water mark on the maybepermanent_obstack.  */
      preserve_data ();
      /* All further rtl allocation is now done in the current_obstack.  */
      rtl_in_current_obstack ();
    }
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (let, valid_only)
     tree let;
     int valid_only;
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
		      valid_only);

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    instantiate_decls_1 (t, valid_only);
}
/* Subroutine of the preceding procedures: Given RTL representing a
   decl and the size of the object, do any instantiation required.

   If VALID_ONLY is non-zero, it means that the RTL should only be
   changed if the new address is valid.  */

static void
instantiate_decl (x, size, valid_only)
     rtx x;
     HOST_WIDE_INT size;
     int valid_only;
{
  enum machine_mode mode;
  rtx addr;

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */

  if (x == 0 || GET_CODE (x) != MEM)
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
      || (GET_CODE (addr) == REG
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */

  if (valid_only)
    addr = copy_rtx (addr);

  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);

  if (valid_only)
    {
      /* Now verify that the resulting address is valid for every integer or
	 floating-point mode up to and including SIZE bytes long.  We do this
	 since the object might be accessed in any mode and frame addresses
	 are shared.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
	   mode = GET_MODE_WIDER_MODE (mode))
	if (! memory_address_p (mode, addr))
	  return;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
	   mode = GET_MODE_WIDER_MODE (mode))
	if (! memory_address_p (mode, addr))
	  return;
    }

  /* Put back the address now that we have updated it and we either know
     it is valid or we don't care whether it is valid.  */

  XEXP (x, 0) = addr;
}
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if
   replacement is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */
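/* A worked example (with made-up numbers): if var_offset is -16, then an
   address (plus virtual_stack_vars_rtx (const_int 4)) becomes
   (plus frame_pointer_rtx (const_int -12)), and no new insns are needed
   when that address is directly valid.  */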
3488 instantiate_virtual_regs_1 (loc
, object
, extra_insns
)
3496 HOST_WIDE_INT offset
= 0;
3502 /* Re-start here to avoid recursion in common cases. */
3509 code
= GET_CODE (x
);
3511 /* Check for some special cases. */
3528 /* We are allowed to set the virtual registers. This means that
3529 the actual register should receive the source minus the
3530 appropriate offset. This is used, for example, in the handling
3531 of non-local gotos. */
3532 if (SET_DEST (x
) == virtual_incoming_args_rtx
)
3533 new = arg_pointer_rtx
, offset
= - in_arg_offset
;
3534 else if (SET_DEST (x
) == virtual_stack_vars_rtx
)
3535 new = frame_pointer_rtx
, offset
= - var_offset
;
3536 else if (SET_DEST (x
) == virtual_stack_dynamic_rtx
)
3537 new = stack_pointer_rtx
, offset
= - dynamic_offset
;
3538 else if (SET_DEST (x
) == virtual_outgoing_args_rtx
)
3539 new = stack_pointer_rtx
, offset
= - out_arg_offset
;
3540 else if (SET_DEST (x
) == virtual_cfa_rtx
)
3541 new = arg_pointer_rtx
, offset
= - cfa_offset
;
3545 rtx src
= SET_SRC (x
);
3547 instantiate_virtual_regs_1 (&src
, NULL_RTX
, 0);
3549 /* The only valid sources here are PLUS or REG. Just do
3550 the simplest possible thing to handle them. */
3551 if (GET_CODE (src
) != REG
&& GET_CODE (src
) != PLUS
)
3555 if (GET_CODE (src
) != REG
)
3556 temp
= force_operand (src
, NULL_RTX
);
3559 temp
= force_operand (plus_constant (temp
, offset
), NULL_RTX
);
3563 emit_insns_before (seq
, object
);
3566 if (! validate_change (object
, &SET_SRC (x
), temp
, 0)
3573 instantiate_virtual_regs_1 (&SET_DEST (x
), object
, extra_insns
);
3578 /* Handle special case of virtual register plus constant. */
3579 if (CONSTANT_P (XEXP (x
, 1)))
3581 rtx old
, new_offset
;
3583 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3584 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
3586 rtx inner
= XEXP (XEXP (x
, 0), 0);
3588 if (inner
== virtual_incoming_args_rtx
)
3589 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3590 else if (inner
== virtual_stack_vars_rtx
)
3591 new = frame_pointer_rtx
, offset
= var_offset
;
3592 else if (inner
== virtual_stack_dynamic_rtx
)
3593 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3594 else if (inner
== virtual_outgoing_args_rtx
)
3595 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3596 else if (inner
== virtual_cfa_rtx
)
3597 new = arg_pointer_rtx
, offset
= cfa_offset
;
3604 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 1), object
,
3606 new = gen_rtx_PLUS (Pmode
, new, XEXP (XEXP (x
, 0), 1));
3609 else if (XEXP (x
, 0) == virtual_incoming_args_rtx
)
3610 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3611 else if (XEXP (x
, 0) == virtual_stack_vars_rtx
)
3612 new = frame_pointer_rtx
, offset
= var_offset
;
3613 else if (XEXP (x
, 0) == virtual_stack_dynamic_rtx
)
3614 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3615 else if (XEXP (x
, 0) == virtual_outgoing_args_rtx
)
3616 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3617 else if (XEXP (x
, 0) == virtual_cfa_rtx
)
3618 new = arg_pointer_rtx
, offset
= cfa_offset
;
3621 /* We know the second operand is a constant. Unless the
3622 first operand is a REG (which has been already checked),
3623 it needs to be checked. */
3624 if (GET_CODE (XEXP (x
, 0)) != REG
)
3632 new_offset
= plus_constant (XEXP (x
, 1), offset
);
3634 /* If the new constant is zero, try to replace the sum with just
3636 if (new_offset
== const0_rtx
3637 && validate_change (object
, loc
, new, 0))
3640 /* Next try to replace the register and new offset.
3641 There are two changes to validate here and we can't assume that
3642 in the case of old offset equals new just changing the register
3643 will yield a valid insn. In the interests of a little efficiency,
3644 however, we only call validate change once (we don't queue up the
3645 changes and then call apply_change_group). */
3649 ? ! validate_change (object
, &XEXP (x
, 0), new, 0)
3650 : (XEXP (x
, 0) = new,
3651 ! validate_change (object
, &XEXP (x
, 1), new_offset
, 0)))
3659 /* Otherwise copy the new constant into a register and replace
3660 constant with that register. */
3661 temp
= gen_reg_rtx (Pmode
);
3663 if (validate_change (object
, &XEXP (x
, 1), temp
, 0))
3664 emit_insn_before (gen_move_insn (temp
, new_offset
), object
);
3667 /* If that didn't work, replace this expression with a
3668 register containing the sum. */
3671 new = gen_rtx_PLUS (Pmode
, new, new_offset
);
3674 temp
= force_operand (new, NULL_RTX
);
3678 emit_insns_before (seq
, object
);
3679 if (! validate_change (object
, loc
, temp
, 0)
3680 && ! validate_replace_rtx (x
, temp
, object
))
3688 /* Fall through to generic two-operand expression case. */
3694 case DIV
: case UDIV
:
3695 case MOD
: case UMOD
:
3696 case AND
: case IOR
: case XOR
:
3697 case ROTATERT
: case ROTATE
:
3698 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
3700 case GE
: case GT
: case GEU
: case GTU
:
3701 case LE
: case LT
: case LEU
: case LTU
:
3702 if (XEXP (x
, 1) && ! CONSTANT_P (XEXP (x
, 1)))
3703 instantiate_virtual_regs_1 (&XEXP (x
, 1), object
, extra_insns
);
3708 /* Most cases of MEM that convert to valid addresses have already been
3709 handled by our scan of decls. The only special handling we
3710 need here is to make a copy of the rtx to ensure it isn't being
3711 shared if we have to change it to a pseudo.
3713 If the rtx is a simple reference to an address via a virtual register,
3714 it can potentially be shared. In such cases, first try to make it
3715 a valid address, which can also be shared. Otherwise, copy it and
3718 First check for common cases that need no processing. These are
3719 usually due to instantiation already being done on a previous instance
3723 if (CONSTANT_ADDRESS_P (temp
)
3724 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3725 || temp
== arg_pointer_rtx
3727 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3728 || temp
== hard_frame_pointer_rtx
3730 || temp
== frame_pointer_rtx
)
3733 if (GET_CODE (temp
) == PLUS
3734 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
3735 && (XEXP (temp
, 0) == frame_pointer_rtx
3736 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3737 || XEXP (temp
, 0) == hard_frame_pointer_rtx
3739 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3740 || XEXP (temp
, 0) == arg_pointer_rtx
3745 if (temp
== virtual_stack_vars_rtx
3746 || temp
== virtual_incoming_args_rtx
3747 || (GET_CODE (temp
) == PLUS
3748 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
3749 && (XEXP (temp
, 0) == virtual_stack_vars_rtx
3750 || XEXP (temp
, 0) == virtual_incoming_args_rtx
)))
3752 /* This MEM may be shared. If the substitution can be done without
3753 the need to generate new pseudos, we want to do it in place
3754 so all copies of the shared rtx benefit. The call below will
3755 only make substitutions if the resulting address is still
3758 Note that we cannot pass X as the object in the recursive call
3759 since the insn being processed may not allow all valid
3760 addresses. However, if we were not passed on object, we can
3761 only modify X without copying it if X will have a valid
3764 ??? Also note that this can still lose if OBJECT is an insn that
3765 has less restrictions on an address that some other insn.
3766 In that case, we will modify the shared address. This case
3767 doesn't seem very likely, though. One case where this could
3768 happen is in the case of a USE or CLOBBER reference, but we
3769 take care of that below. */
3771 if (instantiate_virtual_regs_1 (&XEXP (x
, 0),
3772 object
? object
: x
, 0))
3775 /* Otherwise make a copy and process that copy. We copy the entire
3776 RTL expression since it might be a PLUS which could also be
3778 *loc
= x
= copy_rtx (x
);
3781 /* Fall through to generic unary operation case. */
3783 case STRICT_LOW_PART
:
3785 case PRE_DEC
: case PRE_INC
: case POST_DEC
: case POST_INC
:
3786 case SIGN_EXTEND
: case ZERO_EXTEND
:
3787 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
3788 case FLOAT
: case FIX
:
3789 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
3793 /* These case either have just one operand or we know that we need not
3794 check the rest of the operands. */
3800 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3801 go ahead and make the invalid one, but do it to a copy. For a REG,
3802 just make the recursive call, since there's no chance of a problem. */
3804 if ((GET_CODE (XEXP (x
, 0)) == MEM
3805 && instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), XEXP (x
, 0),
3807 || (GET_CODE (XEXP (x
, 0)) == REG
3808 && instantiate_virtual_regs_1 (&XEXP (x
, 0), object
, 0)))
3811 XEXP (x
, 0) = copy_rtx (XEXP (x
, 0));
3816 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3817 in front of this insn and substitute the temporary. */
3818 if (x
== virtual_incoming_args_rtx
)
3819 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3820 else if (x
== virtual_stack_vars_rtx
)
3821 new = frame_pointer_rtx
, offset
= var_offset
;
3822 else if (x
== virtual_stack_dynamic_rtx
)
3823 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3824 else if (x
== virtual_outgoing_args_rtx
)
3825 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3826 else if (x
== virtual_cfa_rtx
)
3827 new = arg_pointer_rtx
, offset
= cfa_offset
;
3831 temp
= plus_constant (new, offset
);
3832 if (!validate_change (object
, loc
, temp
, 0))
3838 temp
= force_operand (temp
, NULL_RTX
);
3842 emit_insns_before (seq
, object
);
3843 if (! validate_change (object
, loc
, temp
, 0)
3844 && ! validate_replace_rtx (x
, temp
, object
))
3852 if (GET_CODE (XEXP (x
, 0)) == REG
)
3855 else if (GET_CODE (XEXP (x
, 0)) == MEM
)
3857 /* If we have a (addressof (mem ..)), do any instantiation inside
3858 since we know we'll be making the inside valid when we finally
3859 remove the ADDRESSOF. */
3860 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), NULL_RTX
, 0);
3869 /* Scan all subexpressions. */
3870 fmt
= GET_RTX_FORMAT (code
);
3871 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3874 if (!instantiate_virtual_regs_1 (&XEXP (x
, i
), object
, extra_insns
))
3877 else if (*fmt
== 'E')
3878 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3879 if (! instantiate_virtual_regs_1 (&XVECEXP (x
, i
, j
), object
,
3886 /* Optimization: assuming this function does not receive nonlocal gotos,
3887 delete the handlers for such, as well as the insns to establish
3888 and disestablish them. */
3894 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3896 /* Delete the handler by turning off the flag that would
3897 prevent jump_optimize from deleting it.
3898 Also permit deletion of the nonlocal labels themselves
3899 if nothing local refers to them. */
3900 if (GET_CODE (insn
) == CODE_LABEL
)
3904 LABEL_PRESERVE_P (insn
) = 0;
3906 /* Remove it from the nonlocal_label list, to avoid confusing
3908 for (t
= nonlocal_labels
, last_t
= 0; t
;
3909 last_t
= t
, t
= TREE_CHAIN (t
))
3910 if (DECL_RTL (TREE_VALUE (t
)) == insn
)
3915 nonlocal_labels
= TREE_CHAIN (nonlocal_labels
);
3917 TREE_CHAIN (last_t
) = TREE_CHAIN (t
);
3920 if (GET_CODE (insn
) == INSN
)
3924 for (t
= nonlocal_goto_handler_slots
; t
!= 0; t
= XEXP (t
, 1))
3925 if (reg_mentioned_p (t
, PATTERN (insn
)))
3931 || (nonlocal_goto_stack_level
!= 0
3932 && reg_mentioned_p (nonlocal_goto_stack_level
,
3942 return max_parm_reg
;
/* Return the first insn following those generated by `assign_parms'.  */

rtx
get_first_nonparm_insn ()
{
  if (last_parm_insn)
    return NEXT_INSN (last_parm_insn);
  return get_insns ();
}

/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
   Crash if there is none.  */

rtx
get_first_block_beg ()
{
  register rtx searcher;
  register rtx insn = get_first_nonparm_insn ();

  for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
    if (GET_CODE (searcher) == NOTE
	&& NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
      return searcher;

  abort ();	/* Invalid call to this function.  (See comments above.)  */
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */
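/* For example, on targets whose RETURN_IN_MEMORY rejects large structures,
   a call returning such a struct is given a hidden address argument and
   the callee stores the result through it, while small scalars keep being
   returned in registers.  The precise size cutoff is entirely
   target-dependent.  */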
int
aggregate_value_p (exp)
     tree exp;
{
  int i, regno, nregs;
  rtx reg;
  tree type;

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
    type = exp;
  else
    type = TREE_TYPE (exp);

  if (RETURN_IN_MEMORY (type))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (GET_CODE (reg) != REG)
    return 0;

  regno = REGNO (reg);
  nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}
4015 /* Assign RTL expressions to the function's parameters.
4016 This may involve copying them into registers and using
4017 those registers as the RTL for them. */
4020 assign_parms (fndecl
)
4024 register rtx entry_parm
= 0;
4025 register rtx stack_parm
= 0;
4026 CUMULATIVE_ARGS args_so_far
;
4027 enum machine_mode promoted_mode
, passed_mode
;
4028 enum machine_mode nominal_mode
, promoted_nominal_mode
;
4030 /* Total space needed so far for args on the stack,
4031 given as a constant and a tree-expression. */
4032 struct args_size stack_args_size
;
4033 tree fntype
= TREE_TYPE (fndecl
);
4034 tree fnargs
= DECL_ARGUMENTS (fndecl
);
4035 /* This is used for the arg pointer when referring to stack args. */
4036 rtx internal_arg_pointer
;
4037 /* This is a dummy PARM_DECL that we used for the function result if
4038 the function returns a structure. */
4039 tree function_result_decl
= 0;
4040 #ifdef SETUP_INCOMING_VARARGS
4041 int varargs_setup
= 0;
4043 rtx conversion_insns
= 0;
4044 struct args_size alignment_pad
;
  /* Nonzero if the last arg is named `__builtin_va_alist',
     which is used on some machines for old-fashioned non-ANSI varargs.h;
     this should be stuck onto the stack as if it had arrived there.  */
  int hide_last_arg
    = (current_function_varargs
       && fnargs
       && (parm = tree_last (fnargs)) != 0
       && DECL_NAME (parm)
       && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
		     "__builtin_va_alist")));

  /* Nonzero if function takes extra anonymous args.
     This means the last named arg must be on the stack
     right before the anonymous ones.  */
  int stdarg
    = (TYPE_ARG_TYPES (fntype) != 0
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
	   != void_type_node));

  current_function_stdarg = stdarg;
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.

     The second time through, simply use ap to avoid generating rtx.  */

  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
  else
    internal_arg_pointer = virtual_incoming_args_rtx;
  current_function_internal_arg_pointer = internal_arg_pointer;

  stack_args_size.constant = 0;
  stack_args_size.var = 0;
  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl))
      && ! current_function_returns_pcc_struct
      && struct_value_incoming_rtx == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));

      function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);

      DECL_ARG_TYPE (function_result_decl) = type;
      TREE_CHAIN (function_result_decl) = fnargs;
      fnargs = function_result_decl;
    }

  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
  parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
#endif
4108 /* We haven't yet found an argument that we must push and pretend the
4110 current_function_pretend_args_size
= 0;
4112 for (parm
= fnargs
; parm
; parm
= TREE_CHAIN (parm
))
4114 int aggregate
= AGGREGATE_TYPE_P (TREE_TYPE (parm
));
4115 struct args_size stack_offset
;
4116 struct args_size arg_size
;
4117 int passed_pointer
= 0;
4118 int did_conversion
= 0;
4119 tree passed_type
= DECL_ARG_TYPE (parm
);
4120 tree nominal_type
= TREE_TYPE (parm
);
4123 /* Set LAST_NAMED if this is last named arg before some
4125 int last_named
= ((TREE_CHAIN (parm
) == 0
4126 || DECL_NAME (TREE_CHAIN (parm
)) == 0)
4127 && (stdarg
|| current_function_varargs
));
4128 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4129 most machines, if this is a varargs/stdarg function, then we treat
4130 the last named arg as if it were anonymous too. */
4131 int named_arg
= STRICT_ARGUMENT_NAMING
? 1 : ! last_named
;
4133 if (TREE_TYPE (parm
) == error_mark_node
4134 /* This can happen after weird syntax errors
4135 or if an enum type is defined among the parms. */
4136 || TREE_CODE (parm
) != PARM_DECL
4137 || passed_type
== NULL
)
4139 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
)
4140 = gen_rtx_MEM (BLKmode
, const0_rtx
);
4141 TREE_USED (parm
) = 1;
4145 /* For varargs.h function, save info about regs and stack space
4146 used by the individual args, not including the va_alist arg. */
4147 if (hide_last_arg
&& last_named
)
4148 current_function_args_info
= args_so_far
;
4150 /* Find mode of arg as it is passed, and mode of arg
4151 as it should be during execution of this function. */
4152 passed_mode
= TYPE_MODE (passed_type
);
4153 nominal_mode
= TYPE_MODE (nominal_type
);
4155 /* If the parm's mode is VOID, its value doesn't matter,
4156 and avoid the usual things like emit_move_insn that could crash. */
4157 if (nominal_mode
== VOIDmode
)
4159 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
) = const0_rtx
;
4163 /* If the parm is to be passed as a transparent union, use the
4164 type of the first field for the tests below. We have already
4165 verified that the modes are the same. */
4166 if (DECL_TRANSPARENT_UNION (parm
)
4167 || TYPE_TRANSPARENT_UNION (passed_type
))
4168 passed_type
= TREE_TYPE (TYPE_FIELDS (passed_type
));
4170 /* See if this arg was passed by invisible reference. It is if
4171 it is an object whose size depends on the contents of the
4172 object itself or if the machine requires these objects be passed
4175 if ((TREE_CODE (TYPE_SIZE (passed_type
)) != INTEGER_CST
4176 && contains_placeholder_p (TYPE_SIZE (passed_type
)))
4177 || TREE_ADDRESSABLE (passed_type
)
4178 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4179 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, passed_mode
,
4180 passed_type
, named_arg
)
4184 passed_type
= nominal_type
= build_pointer_type (passed_type
);
4186 passed_mode
= nominal_mode
= Pmode
;
4189 promoted_mode
= passed_mode
;
4191 #ifdef PROMOTE_FUNCTION_ARGS
4192 /* Compute the mode in which the arg is actually extended to. */
4193 unsignedp
= TREE_UNSIGNED (passed_type
);
4194 promoted_mode
= promote_mode (passed_type
, promoted_mode
, &unsignedp
, 1);
4197 /* Let machine desc say which reg (if any) the parm arrives in.
4198 0 means it arrives on the stack. */
4199 #ifdef FUNCTION_INCOMING_ARG
4200 entry_parm
= FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
4201 passed_type
, named_arg
);
4203 entry_parm
= FUNCTION_ARG (args_so_far
, promoted_mode
,
4204 passed_type
, named_arg
);
4207 if (entry_parm
== 0)
4208 promoted_mode
= passed_mode
;
4210 #ifdef SETUP_INCOMING_VARARGS
4211 /* If this is the last named parameter, do any required setup for
4212 varargs or stdargs. We need to know about the case of this being an
4213 addressable type, in which case we skip the registers it
4214 would have arrived in.
4216 For stdargs, LAST_NAMED will be set for two parameters, the one that
4217 is actually the last named, and the dummy parameter. We only
4218 want to do this action once.
4220 Also, indicate when RTL generation is to be suppressed. */
4221 if (last_named
&& !varargs_setup
)
4223 SETUP_INCOMING_VARARGS (args_so_far
, promoted_mode
, passed_type
,
4224 current_function_pretend_args_size
, 0);
4229 /* Determine parm's home in the stack,
4230 in case it arrives in the stack or we should pretend it did.
4232 Compute the stack position and rtx where the argument arrives
4235 There is one complexity here: If this was a parameter that would
4236 have been passed in registers, but wasn't only because it is
4237 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4238 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4239 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4240 0 as it was the previous time. */
4242 pretend_named
= named_arg
|| PRETEND_OUTGOING_VARARGS_NAMED
;
4243 locate_and_pad_parm (promoted_mode
, passed_type
,
4244 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4247 #ifdef FUNCTION_INCOMING_ARG
4248 FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
4250 pretend_named
) != 0,
4252 FUNCTION_ARG (args_so_far
, promoted_mode
,
4254 pretend_named
) != 0,
4257 fndecl
, &stack_args_size
, &stack_offset
, &arg_size
,
4261 rtx offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
4263 if (offset_rtx
== const0_rtx
)
4264 stack_parm
= gen_rtx_MEM (promoted_mode
, internal_arg_pointer
);
4266 stack_parm
= gen_rtx_MEM (promoted_mode
,
4267 gen_rtx_PLUS (Pmode
,
4268 internal_arg_pointer
,
4271 /* If this is a memory ref that contains aggregate components,
4272 mark it as such for cse and loop optimize. Likewise if it
4274 MEM_SET_IN_STRUCT_P (stack_parm
, aggregate
);
4275 RTX_UNCHANGING_P (stack_parm
) = TREE_READONLY (parm
);
4276 MEM_ALIAS_SET (stack_parm
) = get_alias_set (parm
);
      /* If this parameter was passed both in registers and in the stack,
	 use the copy on the stack.  */
      if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
	entry_parm = 0;

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      /* If this parm was passed part in regs and part in memory,
	 pretend it arrived entirely in memory
	 by pushing the register-part onto the stack.

	 In the special case of a DImode or DFmode that is split,
	 we could put it together in a pseudoreg directly,
	 but for now that's not worth bothering with.  */

      if (entry_parm)
	{
	  int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
						  passed_type, named_arg);

	  if (nregs > 0)
	    {
	      current_function_pretend_args_size
		= (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
		   / (PARM_BOUNDARY / BITS_PER_UNIT)
		   * (PARM_BOUNDARY / BITS_PER_UNIT));
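	      /* Worked example with hypothetical values: if nregs == 3,
		 UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 (8 bytes),
		 the expression above rounds the 12 bytes of register-passed
		 data up to the next parameter boundary:
		 ((12 + 8 - 1) / 8) * 8 == 16 bytes of pretended args.  */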
	      /* Handle calls that pass values in multiple non-contiguous
		 locations.  The Irix 6 ABI has examples of this.  */
	      if (GET_CODE (entry_parm) == PARALLEL)
		emit_group_store (validize_mem (stack_parm), entry_parm,
				  int_size_in_bytes (TREE_TYPE (parm)),
				  (TYPE_ALIGN (TREE_TYPE (parm))
				   / BITS_PER_UNIT));
	      else
		move_block_from_reg (REGNO (entry_parm),
				     validize_mem (stack_parm), nregs,
				     int_size_in_bytes (TREE_TYPE (parm)));

	      entry_parm = stack_parm;
	    }
	}
#endif
4322 /* If we didn't decide this parm came in a register,
4323 by default it came on the stack. */
4324 if (entry_parm
== 0)
4325 entry_parm
= stack_parm
;
4327 /* Record permanently how this parm was passed. */
4328 DECL_INCOMING_RTL (parm
) = entry_parm
;
4330 /* If there is actually space on the stack for this parm,
4331 count it in stack_args_size; otherwise set stack_parm to 0
4332 to indicate there is no preallocated stack slot for the parm. */
4334 if (entry_parm
== stack_parm
4335 || (GET_CODE (entry_parm
) == PARALLEL
4336 && XEXP (XVECEXP (entry_parm
, 0, 0), 0) == NULL_RTX
)
4337 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4338 /* On some machines, even if a parm value arrives in a register
4339 there is still an (uninitialized) stack slot allocated for it.
4341 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4342 whether this parameter already has a stack slot allocated,
4343 because an arg block exists only if current_function_args_size
4344 is larger than some threshold, and we haven't calculated that
4345 yet. So, for now, we just assume that stack slots never exist
4347 || REG_PARM_STACK_SPACE (fndecl
) > 0
4351 stack_args_size
.constant
+= arg_size
.constant
;
4353 ADD_PARM_SIZE (stack_args_size
, arg_size
.var
);
4356 /* No stack slot was pushed for this parm. */
4359 /* Update info on where next arg arrives in registers. */
4361 FUNCTION_ARG_ADVANCE (args_so_far
, promoted_mode
,
4362 passed_type
, named_arg
);
4364 /* If we can't trust the parm stack slot to be aligned enough
4365 for its ultimate type, don't use that slot after entry.
4366 We'll make another stack slot, if we need one. */
4368 unsigned int thisparm_boundary
4369 = FUNCTION_ARG_BOUNDARY (promoted_mode
, passed_type
);
4371 if (GET_MODE_ALIGNMENT (nominal_mode
) > thisparm_boundary
)
4375 /* If parm was passed in memory, and we need to convert it on entry,
4376 don't store it back in that same slot. */
4378 && nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
)
4382 /* Now adjust STACK_PARM to the mode and precise location
4383 where this parameter should live during execution,
4384 if we discover that it must live in the stack during execution.
4385 To make debuggers happier on big-endian machines, we store
4386 the value in the last bytes of the space available. */
4388 if (nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
4393 if (BYTES_BIG_ENDIAN
4394 && GET_MODE_SIZE (nominal_mode
) < UNITS_PER_WORD
)
4395 stack_offset
.constant
+= (GET_MODE_SIZE (passed_mode
)
4396 - GET_MODE_SIZE (nominal_mode
));
4398 offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
4399 if (offset_rtx
== const0_rtx
)
4400 stack_parm
= gen_rtx_MEM (nominal_mode
, internal_arg_pointer
);
4402 stack_parm
= gen_rtx_MEM (nominal_mode
,
4403 gen_rtx_PLUS (Pmode
,
4404 internal_arg_pointer
,
4407 /* If this is a memory ref that contains aggregate components,
4408 mark it as such for cse and loop optimize. */
4409 MEM_SET_IN_STRUCT_P (stack_parm
, aggregate
);
4413 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4414 in the mode in which it arrives.
4415 STACK_PARM is an RTX for a stack slot where the parameter can live
4416 during the function (in case we want to put it there).
4417 STACK_PARM is 0 if no stack slot was pushed for it.
4419 Now output code if necessary to convert ENTRY_PARM to
4420 the type in which this function declares it,
4421 and store that result in an appropriate place,
4422 which may be a pseudo reg, may be STACK_PARM,
4423 or may be a local stack slot if STACK_PARM is 0.
4425 Set DECL_RTL to that place. */
4427 if (nominal_mode
== BLKmode
|| GET_CODE (entry_parm
) == PARALLEL
)
4429 /* If a BLKmode arrives in registers, copy it to a stack slot.
4430 Handle calls that pass values in multiple non-contiguous
4431 locations. The Irix 6 ABI has examples of this. */
4432 if (GET_CODE (entry_parm
) == REG
4433 || GET_CODE (entry_parm
) == PARALLEL
)
4436 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm
)),
4439 /* Note that we will be storing an integral number of words.
4440 So we have to be careful to ensure that we allocate an
4441 integral number of words. We do this below in the
4442 assign_stack_local if space was not allocated in the argument
4443 list. If it was, this will not work if PARM_BOUNDARY is not
4444 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4445 if it becomes a problem. */
4447 if (stack_parm
== 0)
4450 = assign_stack_local (GET_MODE (entry_parm
),
4453 /* If this is a memory ref that contains aggregate
4454 components, mark it as such for cse and loop optimize. */
4455 MEM_SET_IN_STRUCT_P (stack_parm
, aggregate
);
4458 else if (PARM_BOUNDARY
% BITS_PER_WORD
!= 0)
4461 if (TREE_READONLY (parm
))
4462 RTX_UNCHANGING_P (stack_parm
) = 1;
4464 /* Handle calls that pass values in multiple non-contiguous
4465 locations. The Irix 6 ABI has examples of this. */
4466 if (GET_CODE (entry_parm
) == PARALLEL
)
4467 emit_group_store (validize_mem (stack_parm
), entry_parm
,
4468 int_size_in_bytes (TREE_TYPE (parm
)),
4469 (TYPE_ALIGN (TREE_TYPE (parm
))
4472 move_block_from_reg (REGNO (entry_parm
),
4473 validize_mem (stack_parm
),
4474 size_stored
/ UNITS_PER_WORD
,
4475 int_size_in_bytes (TREE_TYPE (parm
)));
4477 DECL_RTL (parm
) = stack_parm
;
4479 else if (! ((! optimize
4480 && ! DECL_REGISTER (parm
)
4481 && ! DECL_INLINE (fndecl
))
4482 /* layout_decl may set this. */
4483 || TREE_ADDRESSABLE (parm
)
4484 || TREE_SIDE_EFFECTS (parm
)
4485 /* If -ffloat-store specified, don't put explicit
4486 float variables into registers. */
4487 || (flag_float_store
4488 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
))
4489 /* Always assign pseudo to structure return or item passed
4490 by invisible reference. */
4491 || passed_pointer
|| parm
== function_result_decl
)
4493 /* Store the parm in a pseudoregister during the function, but we
4494 may need to do it in a wider mode. */
4496 register rtx parmreg
;
4497 int regno
, regnoi
= 0, regnor
= 0;
4499 unsignedp
= TREE_UNSIGNED (TREE_TYPE (parm
));
4501 promoted_nominal_mode
4502 = promote_mode (TREE_TYPE (parm
), nominal_mode
, &unsignedp
, 0);
4504 parmreg
= gen_reg_rtx (promoted_nominal_mode
);
4505 mark_user_reg (parmreg
);
4507 /* If this was an item that we received a pointer to, set DECL_RTL
4512 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type
)), parmreg
);
4513 MEM_SET_IN_STRUCT_P (DECL_RTL (parm
), aggregate
);
4516 DECL_RTL (parm
) = parmreg
;
4518 /* Copy the value into the register. */
4519 if (nominal_mode
!= passed_mode
4520 || promoted_nominal_mode
!= promoted_mode
)
4523 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4524 mode, by the caller. We now have to convert it to
4525 NOMINAL_MODE, if different. However, PARMREG may be in
4526 a different mode than NOMINAL_MODE if it is being stored
4529 If ENTRY_PARM is a hard register, it might be in a register
4530 not valid for operating in its mode (e.g., an odd-numbered
4531 register for a DFmode). In that case, moves are the only
4532 thing valid, so we can't do a convert from there. This
4533 occurs when the calling sequence allow such misaligned
4536 In addition, the conversion may involve a call, which could
4537 clobber parameters which haven't been copied to pseudo
4538 registers yet. Therefore, we must first copy the parm to
4539 a pseudo reg here, and save the conversion until after all
4540 parameters have been moved. */
4542 rtx tempreg
= gen_reg_rtx (GET_MODE (entry_parm
));
4544 emit_move_insn (tempreg
, validize_mem (entry_parm
));
4546 push_to_sequence (conversion_insns
);
4547 tempreg
= convert_to_mode (nominal_mode
, tempreg
, unsignedp
);
4549 /* TREE_USED gets set erroneously during expand_assignment. */
4550 save_tree_used
= TREE_USED (parm
);
4551 expand_assignment (parm
,
4552 make_tree (nominal_type
, tempreg
), 0, 0);
4553 TREE_USED (parm
) = save_tree_used
;
4554 conversion_insns
= get_insns ();
4559 emit_move_insn (parmreg
, validize_mem (entry_parm
));
4561 /* If we were passed a pointer but the actual value
4562 can safely live in a register, put it in one. */
4563 if (passed_pointer
&& TYPE_MODE (TREE_TYPE (parm
)) != BLKmode
4565 && ! DECL_REGISTER (parm
)
4566 && ! DECL_INLINE (fndecl
))
4567 /* layout_decl may set this. */
4568 || TREE_ADDRESSABLE (parm
)
4569 || TREE_SIDE_EFFECTS (parm
)
4570 /* If -ffloat-store specified, don't put explicit
4571 float variables into registers. */
4572 || (flag_float_store
4573 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
)))
4575 /* We can't use nominal_mode, because it will have been set to
4576 Pmode above. We must use the actual mode of the parm. */
4577 parmreg
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm
)));
4578 mark_user_reg (parmreg
);
4579 emit_move_insn (parmreg
, DECL_RTL (parm
));
4580 DECL_RTL (parm
) = parmreg
;
4581 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4585 #ifdef FUNCTION_ARG_CALLEE_COPIES
4586 /* If we are passed an arg by reference and it is our responsibility
4587 to make a copy, do it now.
4588 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4589 original argument, so we must recreate them in the call to
4590 FUNCTION_ARG_CALLEE_COPIES. */
4591 /* ??? Later add code to handle the case that if the argument isn't
4592 modified, don't do the copy. */
4594 else if (passed_pointer
4595 && FUNCTION_ARG_CALLEE_COPIES (args_so_far
,
4596 TYPE_MODE (DECL_ARG_TYPE (parm
)),
4597 DECL_ARG_TYPE (parm
),
4599 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm
)))
4602 tree type
= DECL_ARG_TYPE (parm
);
4604 /* This sequence may involve a library call perhaps clobbering
4605 registers that haven't been copied to pseudos yet. */
4607 push_to_sequence (conversion_insns
);
4609 if (TYPE_SIZE (type
) == 0
4610 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
4611 /* This is a variable sized object. */
4612 copy
= gen_rtx_MEM (BLKmode
,
4613 allocate_dynamic_stack_space
4614 (expr_size (parm
), NULL_RTX
,
4615 TYPE_ALIGN (type
)));
4617 copy
= assign_stack_temp (TYPE_MODE (type
),
4618 int_size_in_bytes (type
), 1);
4619 MEM_SET_IN_STRUCT_P (copy
, AGGREGATE_TYPE_P (type
));
4620 RTX_UNCHANGING_P (copy
) = TREE_READONLY (parm
);
4622 store_expr (parm
, copy
, 0);
4623 emit_move_insn (parmreg
, XEXP (copy
, 0));
4624 if (current_function_check_memory_usage
)
4625 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
4626 XEXP (copy
, 0), Pmode
,
4627 GEN_INT (int_size_in_bytes (type
)),
4628 TYPE_MODE (sizetype
),
4629 GEN_INT (MEMORY_USE_RW
),
4630 TYPE_MODE (integer_type_node
));
4631 conversion_insns
= get_insns ();
4635 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4637 /* In any case, record the parm's desired stack location
4638 in case we later discover it must live in the stack.
4640 If it is a COMPLEX value, store the stack location for both
4643 if (GET_CODE (parmreg
) == CONCAT
)
4644 regno
= MAX (REGNO (XEXP (parmreg
, 0)), REGNO (XEXP (parmreg
, 1)));
4646 regno
= REGNO (parmreg
);
4648 if (regno
>= max_parm_reg
)
4651 int old_max_parm_reg
= max_parm_reg
;
4653 /* It's slow to expand this one register at a time,
4654 but it's also rare and we need max_parm_reg to be
4655 precisely correct. */
4656 max_parm_reg
= regno
+ 1;
4657 new = (rtx
*) xrealloc (parm_reg_stack_loc
,
4658 max_parm_reg
* sizeof (rtx
));
4659 bzero ((char *) (new + old_max_parm_reg
),
4660 (max_parm_reg
- old_max_parm_reg
) * sizeof (rtx
));
4661 parm_reg_stack_loc
= new;
4664 if (GET_CODE (parmreg
) == CONCAT
)
4666 enum machine_mode submode
= GET_MODE (XEXP (parmreg
, 0));
4668 regnor
= REGNO (gen_realpart (submode
, parmreg
));
4669 regnoi
= REGNO (gen_imagpart (submode
, parmreg
));
4671 if (stack_parm
!= 0)
4673 parm_reg_stack_loc
[regnor
]
4674 = gen_realpart (submode
, stack_parm
);
4675 parm_reg_stack_loc
[regnoi
]
4676 = gen_imagpart (submode
, stack_parm
);
4680 parm_reg_stack_loc
[regnor
] = 0;
4681 parm_reg_stack_loc
[regnoi
] = 0;
4685 parm_reg_stack_loc
[REGNO (parmreg
)] = stack_parm
;
4687 /* Mark the register as eliminable if we did no conversion
4688 and it was copied from memory at a fixed offset,
4689 and the arg pointer was not copied to a pseudo-reg.
4690 If the arg pointer is a pseudo reg or the offset formed
4691 an invalid address, such memory-equivalences
4692 as we make here would screw up life analysis for it. */
4693 if (nominal_mode
== passed_mode
4696 && GET_CODE (stack_parm
) == MEM
4697 && stack_offset
.var
== 0
4698 && reg_mentioned_p (virtual_incoming_args_rtx
,
4699 XEXP (stack_parm
, 0)))
4701 rtx linsn
= get_last_insn ();
4704 /* Mark complex types separately. */
4705 if (GET_CODE (parmreg
) == CONCAT
)
4706 /* Scan backwards for the set of the real and
4708 for (sinsn
= linsn
; sinsn
!= 0;
4709 sinsn
= prev_nonnote_insn (sinsn
))
4711 set
= single_set (sinsn
);
4713 && SET_DEST (set
) == regno_reg_rtx
[regnoi
])
4715 = gen_rtx_EXPR_LIST (REG_EQUIV
,
4716 parm_reg_stack_loc
[regnoi
],
4719 && SET_DEST (set
) == regno_reg_rtx
[regnor
])
4721 = gen_rtx_EXPR_LIST (REG_EQUIV
,
4722 parm_reg_stack_loc
[regnor
],
4725 else if ((set
= single_set (linsn
)) != 0
4726 && SET_DEST (set
) == parmreg
)
4728 = gen_rtx_EXPR_LIST (REG_EQUIV
,
4729 stack_parm
, REG_NOTES (linsn
));
4732 /* For pointer data type, suggest pointer register. */
4733 if (POINTER_TYPE_P (TREE_TYPE (parm
)))
4734 mark_reg_pointer (parmreg
,
4735 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm
)))
4740 /* Value must be stored in the stack slot STACK_PARM
4741 during function execution. */
4743 if (promoted_mode
!= nominal_mode
)
4745 /* Conversion is required. */
4746 rtx tempreg
= gen_reg_rtx (GET_MODE (entry_parm
));
4748 emit_move_insn (tempreg
, validize_mem (entry_parm
));
4750 push_to_sequence (conversion_insns
);
4751 entry_parm
= convert_to_mode (nominal_mode
, tempreg
,
4752 TREE_UNSIGNED (TREE_TYPE (parm
)));
4755 /* ??? This may need a big-endian conversion on sparc64. */
4756 stack_parm
= change_address (stack_parm
, nominal_mode
,
4759 conversion_insns
= get_insns ();
4764 if (entry_parm
!= stack_parm
)
4766 if (stack_parm
== 0)
4769 = assign_stack_local (GET_MODE (entry_parm
),
4770 GET_MODE_SIZE (GET_MODE (entry_parm
)), 0);
4771 /* If this is a memory ref that contains aggregate components,
4772 mark it as such for cse and loop optimize. */
4773 MEM_SET_IN_STRUCT_P (stack_parm
, aggregate
);
4776 if (promoted_mode
!= nominal_mode
)
4778 push_to_sequence (conversion_insns
);
4779 emit_move_insn (validize_mem (stack_parm
),
4780 validize_mem (entry_parm
));
4781 conversion_insns
= get_insns ();
4785 emit_move_insn (validize_mem (stack_parm
),
4786 validize_mem (entry_parm
));
4788 if (current_function_check_memory_usage
)
4790 push_to_sequence (conversion_insns
);
4791 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
4792 XEXP (stack_parm
, 0), Pmode
,
4793 GEN_INT (GET_MODE_SIZE (GET_MODE
4795 TYPE_MODE (sizetype
),
4796 GEN_INT (MEMORY_USE_RW
),
4797 TYPE_MODE (integer_type_node
));
4799 conversion_insns
= get_insns ();
4802 DECL_RTL (parm
) = stack_parm
;
4805 /* If this "parameter" was the place where we are receiving the
4806 function's incoming structure pointer, set up the result. */
4807 if (parm
== function_result_decl
)
4809 tree result
= DECL_RESULT (fndecl
);
4810 tree restype
= TREE_TYPE (result
);
4813 = gen_rtx_MEM (DECL_MODE (result
), DECL_RTL (parm
));
4815 MEM_SET_IN_STRUCT_P (DECL_RTL (result
),
4816 AGGREGATE_TYPE_P (restype
));
4819 if (TREE_THIS_VOLATILE (parm
))
4820 MEM_VOLATILE_P (DECL_RTL (parm
)) = 1;
4821 if (TREE_READONLY (parm
))
4822 RTX_UNCHANGING_P (DECL_RTL (parm
)) = 1;
4825 /* Output all parameter conversion instructions (possibly including calls)
4826 now that all parameters have been copied out of hard registers. */
4827 emit_insns (conversion_insns
);
4829 last_parm_insn
= get_last_insn ();
  current_function_args_size = stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum size.  */

#ifdef REG_PARM_STACK_SPACE
#ifndef MAYBE_REG_PARM_STACK_SPACE
  current_function_args_size = MAX (current_function_args_size,
				    REG_PARM_STACK_SPACE (fndecl));
#endif
#endif

#ifdef STACK_BOUNDARY
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

  current_function_args_size
    = ((current_function_args_size + STACK_BYTES - 1)
       / STACK_BYTES) * STACK_BYTES;
#endif
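#if 0
/* Illustrative sketch only (not compiled): the rounding above written as a
   plain C helper, assuming a hypothetical STACK_BOUNDARY of 64 bits, i.e.
   STACK_BYTES == 8.  round_args_size (20) == 24.  */
static int
round_args_size (int size)
{
  int stack_bytes = 8;	/* hypothetical STACK_BOUNDARY / BITS_PER_UNIT */
  return ((size + stack_bytes - 1) / stack_bytes) * stack_bytes;
}
#endif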
#ifdef ARGS_GROW_DOWNWARD
  current_function_arg_offset_rtx
    = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
       : expand_expr (size_diffop (stack_args_size.var,
				   size_int (-stack_args_size.constant)),
		      NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
#else
  current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
						 current_function_args_size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  current_function_args_info = args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
}
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

#ifdef PROMOTE_FUNCTION_ARGS

rtx
promoted_input_arg (regno, pmode, punsignedp)
     int regno;
     enum machine_mode *pmode;
     int *punsignedp;
{
  tree arg;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
	&& REGNO (DECL_INCOMING_RTL (arg)) == regno
	&& TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
	int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));

	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
	    && mode != DECL_MODE (arg))
	  {
	    *pmode = DECL_MODE (arg);
	    *punsignedp = unsignedp;
	    return DECL_INCOMING_RTL (arg);
	  }
      }

  return 0;
}

#endif
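/* Illustration (hypothetical target): if a `short' argument is widened to
   word mode by PROMOTE_FUNCTION_ARGS, promoted_input_arg would return the
   incoming SImode hard register holding that argument and set *pmode to
   HImode, the mode the value was promoted from.  */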
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in *OFFSET_PTR
   and *ARG_SIZE_PTR, respectively.

   IN_REGS is non-zero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
   list to be aligned to the specific boundary (in bits).  This rounding
   affects the initial and starting offsets, but not the argument size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
   initial_offset_ptr is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   initial_offset_ptr.  arg_size_ptr is always positive.  */
void
locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
		     initial_offset_ptr, offset_ptr, arg_size_ptr,
		     alignment_pad)
     enum machine_mode passed_mode;
     tree type;
     int in_regs ATTRIBUTE_UNUSED;
     tree fndecl ATTRIBUTE_UNUSED;
     struct args_size *initial_offset_ptr;
     struct args_size *offset_ptr;
     struct args_size *arg_size_ptr;
     struct args_size *alignment_pad;
{
  tree sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);

#ifdef REG_PARM_STACK_SPACE
  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      int reg_parm_stack_space = 0;

#ifdef MAYBE_REG_PARM_STACK_SPACE
      reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
      reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }
#endif /* REG_PARM_STACK_SPACE */

  arg_size_ptr->var = 0;
  arg_size_ptr->constant = 0;

#ifdef ARGS_GROW_DOWNWARD
  if (initial_offset_ptr->var)
    {
      offset_ptr->constant = 0;
      offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
				    initial_offset_ptr->var);
    }
  else
    {
      offset_ptr->constant = - initial_offset_ptr->constant;
      offset_ptr->var = 0;
    }
  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
  SUB_PARM_SIZE (*offset_ptr, sizetree);
  if (where_pad != downward)
    pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
  if (initial_offset_ptr->var)
    arg_size_ptr->var = size_binop (MINUS_EXPR,
				    size_binop (MINUS_EXPR,
						ssize_int (0),
						initial_offset_ptr->var),
				    offset_ptr->var);
  else
    arg_size_ptr->constant = (- initial_offset_ptr->constant
			      - offset_ptr->constant);

#else /* !ARGS_GROW_DOWNWARD */
  pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
  *offset_ptr = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  if (where_pad == downward
      /* However, BLKmode args passed in regs have their padding done elsewhere.
	 The stack slot must be able to hold the entire register.  */
      && !(in_regs && passed_mode == BLKmode))
    pad_below (offset_ptr, passed_mode, sizetree);

  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (*arg_size_ptr, sizetree);
#endif /* ARGS_GROW_DOWNWARD */
}
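/* Worked example with hypothetical values (args grow upward):
   PARM_BOUNDARY == 32 bits, FUNCTION_ARG_BOUNDARY == 64 bits for this parm,
   and the current offset is 20 bytes.  The first rounding aligns the
   starting offset up to 24 (a 64-bit boundary); the second rounds a 6-byte
   argument size up to 8 bytes, so the following parm starts at offset 32.  */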
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
     struct args_size *offset_ptr;
     int boundary;
     struct args_size *alignment_pad;
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;

  int boundary_in_bytes = boundary / BITS_PER_UNIT;

  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  offset_ptr->var =
#ifdef ARGS_GROW_DOWNWARD
	    round_down
#else
	    round_up
#endif
	      (ARGS_SIZE_TREE (*offset_ptr),
	       boundary / BITS_PER_UNIT);
	  offset_ptr->constant = 0; /*?*/
	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
	    FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
	    CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
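/* Illustration: with a 16-bit (2-byte) boundary, CEIL_ROUND (9, 2) == 10
   while FLOOR_ROUND (-9, 2) == -10.  The FLOOR form is used when
   ARGS_GROW_DOWNWARD because offsets there are negative and must move
   further from zero to stay aligned.  */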
#ifndef ARGS_GROW_DOWNWARD
static void
pad_below (offset_ptr, passed_mode, sizetree)
     struct args_size *offset_ptr;
     enum machine_mode passed_mode;
     tree sizetree;
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
#endif
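#if 0
/* Illustrative sketch only (not compiled): pad_below's adjustment as plain
   arithmetic, assuming a hypothetical PARM_BOUNDARY of 32 bits.  For a
   16-bit (2-byte) argument it yields 2 bytes of padding below the value,
   so the value still fills a complete 4-byte parameter slot.  */
static int
pad_below_bytes (int mode_bitsize, int mode_size)
{
  int parm_boundary = 32;	/* hypothetical PARM_BOUNDARY */
  if (mode_bitsize % parm_boundary == 0)
    return 0;
  return ((mode_bitsize + parm_boundary - 1)
	  / parm_boundary * parm_boundary / 8) - mode_size;
}
#endif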
/* Walk the tree of blocks describing the binding levels within a function
   and warn about uninitialized variables.
   This is done after calling flow_analysis and before global_alloc
   clobbers the pseudo-regs to hard regs.  */

void
uninitialized_vars_warning (block)
     tree block;
{
  register tree decl, sub;
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (warn_uninitialized
	  && TREE_CODE (decl) == VAR_DECL
	  /* These warnings are unreliable for aggregates
	     because assigning the fields one by one can fail to convince
	     flow.c that the entire aggregate was initialized.
	     Unions are troublesome because members may be shorter.  */
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  /* Global optimizations can make it difficult to determine if a
	     particular variable has been initialized.  However, a VAR_DECL
	     with a nonzero DECL_INITIAL had an initializer, so do not
	     claim it is potentially uninitialized.

	     We do not care about the actual value in DECL_INITIAL, so we do
	     not worry that it may be a dangling pointer.  */
	  && DECL_INITIAL (decl) == NULL_TREE
	  && regno_uninitialized (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "`%s' might be used uninitialized in this function");
      if (extra_warnings
	  && TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "variable `%s' might be clobbered by `longjmp' or `vfork'");
    }
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    uninitialized_vars_warning (sub);
}
/* Do the appropriate part of uninitialized_vars_warning
   but for arguments instead of local variables.  */

void
setjmp_args_warning ()
{
  register tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& GET_CODE (DECL_RTL (decl)) == REG
	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
      warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
}
/* If this function calls setjmp, put all vars into the stack
   unless they were declared `register'.  */

void
setjmp_protect (block)
     tree block;
{
  register tree decl, sub;
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& (GET_CODE (DECL_RTL (decl)) == REG
	    || (GET_CODE (DECL_RTL (decl)) == MEM
		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
	/* If this variable came from an inline function, it must be
	   that its life doesn't overlap the setjmp.  If there was a
	   setjmp in the function, it would already be in memory.  We
	   must exclude such variable because their DECL_RTL might be
	   set to strange things such as virtual_stack_vars_rtx.  */
	&& ! DECL_FROM_INLINE (decl)
	&& (
#ifdef NON_SAVING_SETJMP
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}
/* Like the previous function, but for args instead of local variables.  */

void
setjmp_protect_args ()
{
  register tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& (GET_CODE (DECL_RTL (decl)) == REG
	    || (GET_CODE (DECL_RTL (decl)) == MEM
		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
	&& (
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
#ifdef NON_SAVING_SETJMP
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
}
/* Return the context-pointer register corresponding to DECL,
   or 0 if it does not need one.  */

rtx
lookup_static_chain (decl)
     tree decl;
{
  tree context = decl_function_context (decl);
  tree link;

  if (context == 0
      || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
    return 0;

  /* We treat inline_function_decl as an alias for the current function
     because that is the inline function whose vars, types, etc.
     are being merged into the current function.
     See expand_inline_function.  */
  if (context == current_function_decl || context == inline_function_decl)
    return virtual_stack_vars_rtx;

  for (link = context_display; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == context)
      return RTL_EXPR_RTL (TREE_VALUE (link));

  abort ();
}
5285 /* Convert a stack slot address ADDR for variable VAR
5286 (from a containing function)
5287 into an address valid in this function (using a static chain). */
5290 fix_lexical_addr (addr
, var
)
5295 HOST_WIDE_INT displacement
;
5296 tree context
= decl_function_context (var
);
5297 struct function
*fp
;
5300 /* If this is the present function, we need not do anything. */
5301 if (context
== current_function_decl
|| context
== inline_function_decl
)
5304 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
5305 if (fp
->decl
== context
)
5311 if (GET_CODE (addr
) == ADDRESSOF
&& GET_CODE (XEXP (addr
, 0)) == MEM
)
5312 addr
= XEXP (XEXP (addr
, 0), 0);
5314 /* Decode given address as base reg plus displacement. */
5315 if (GET_CODE (addr
) == REG
)
5316 basereg
= addr
, displacement
= 0;
5317 else if (GET_CODE (addr
) == PLUS
&& GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
5318 basereg
= XEXP (addr
, 0), displacement
= INTVAL (XEXP (addr
, 1));
5322 /* We accept vars reached via the containing function's
5323 incoming arg pointer and via its stack variables pointer. */
5324 if (basereg
== fp
->internal_arg_pointer
)
5326 /* If reached via arg pointer, get the arg pointer value
5327 out of that function's stack frame.
5329 There are two cases: If a separate ap is needed, allocate a
5330 slot in the outer function for it and dereference it that way.
5331 This is correct even if the real ap is actually a pseudo.
5332 Otherwise, just adjust the offset from the frame pointer to
5335 #ifdef NEED_SEPARATE_AP
5338 if (fp
->x_arg_pointer_save_area
== 0)
5339 fp
->x_arg_pointer_save_area
5340 = assign_stack_local_1 (Pmode
, GET_MODE_SIZE (Pmode
), 0, fp
);
5342 addr
= fix_lexical_addr (XEXP (fp
->x_arg_pointer_save_area
, 0), var
);
5343 addr
= memory_address (Pmode
, addr
);
5345 base
= copy_to_reg (gen_rtx_MEM (Pmode
, addr
));
5347 displacement
+= (FIRST_PARM_OFFSET (context
) - STARTING_FRAME_OFFSET
);
5348 base
= lookup_static_chain (var
);
5352 else if (basereg
== virtual_stack_vars_rtx
)
5354 /* This is the same code as lookup_static_chain, duplicated here to
5355 avoid an extra call to decl_function_context. */
5358 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
5359 if (TREE_PURPOSE (link
) == context
)
5361 base
= RTL_EXPR_RTL (TREE_VALUE (link
));
5369 /* Use same offset, relative to appropriate static chain or argument
5371 return plus_constant (base
, displacement
);
5374 /* Return the address of the trampoline for entering nested fn FUNCTION.
5375 If necessary, allocate a trampoline (in the stack frame)
5376 and emit rtl to initialize its contents (at entry to this function). */
5379 trampoline_address (function
)
5385 struct function
*fp
;
5388 /* Find an existing trampoline and return it. */
5389 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
5390 if (TREE_PURPOSE (link
) == function
)
5392 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0));
5394 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
5395 for (link
= fp
->x_trampoline_list
; link
; link
= TREE_CHAIN (link
))
5396 if (TREE_PURPOSE (link
) == function
)
5398 tramp
= fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0),
5400 return round_trampoline_addr (tramp
);
5403 /* None exists; we must make one. */
5405 /* Find the `struct function' for the function containing FUNCTION. */
5407 fn_context
= decl_function_context (function
);
5408 if (fn_context
!= current_function_decl
5409 && fn_context
!= inline_function_decl
)
5410 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
5411 if (fp
->decl
== fn_context
)
5414 /* Allocate run-time space for this trampoline
5415 (usually in the defining function's stack frame). */
5416 #ifdef ALLOCATE_TRAMPOLINE
5417 tramp
= ALLOCATE_TRAMPOLINE (fp
);
5419 /* If rounding needed, allocate extra space
5420 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5421 #ifdef TRAMPOLINE_ALIGNMENT
5422 #define TRAMPOLINE_REAL_SIZE \
5423 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5425 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5427 tramp
= assign_stack_local_1 (BLKmode
, TRAMPOLINE_REAL_SIZE
, 0,
5431 /* Record the trampoline for reuse and note it for later initialization
5432 by expand_function_end. */
5435 push_obstacks (fp
->function_maybepermanent_obstack
,
5436 fp
->function_maybepermanent_obstack
);
5437 rtlexp
= make_node (RTL_EXPR
);
5438 RTL_EXPR_RTL (rtlexp
) = tramp
;
5439 fp
->x_trampoline_list
= tree_cons (function
, rtlexp
,
5440 fp
->x_trampoline_list
);
5445 /* Make the RTL_EXPR node temporary, not momentary, so that the
5446 trampoline_list doesn't become garbage. */
5447 int momentary
= suspend_momentary ();
5448 rtlexp
= make_node (RTL_EXPR
);
5449 resume_momentary (momentary
);
5451 RTL_EXPR_RTL (rtlexp
) = tramp
;
5452 trampoline_list
= tree_cons (function
, rtlexp
, trampoline_list
);
5455 tramp
= fix_lexical_addr (XEXP (tramp
, 0), function
);
5456 return round_trampoline_addr (tramp
);
/* Given a trampoline address,
   round it to multiple of TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
		       GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
		       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
			GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
			temp, 0, OPTAB_LIB_WIDEN);
#endif
  return tramp;
}
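/* Illustration (hypothetical values): with TRAMPOLINE_ALIGNMENT == 128
   bits (16 bytes), the two expand_binop calls compute
   (tramp + 15) & -16, so an address of 0x1003 rounds up to 0x1010.  */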
5479 /* The functions identify_blocks and reorder_blocks provide a way to
5480 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5481 duplicate portions of the RTL code. Call identify_blocks before
5482 changing the RTL, and call reorder_blocks after. */
5484 /* Put all this function's BLOCK nodes including those that are chained
5485 onto the first block into a vector, and return it.
5486 Also store in each NOTE for the beginning or end of a block
5487 the index of that block in the vector.
5488 The arguments are BLOCK, the chain of top-level blocks of the function,
5489 and INSNS, the insn chain of the function. */
5492 identify_blocks (block
, insns
)
5500 int current_block_number
= 1;
5506 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5507 depth-first order. */
5508 block_vector
= get_block_vector (block
, &n_blocks
);
5509 block_stack
= (tree
*) xmalloc (n_blocks
* sizeof (tree
));
5511 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5512 if (GET_CODE (insn
) == NOTE
)
5514 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
5518 /* If there are more block notes than BLOCKs, something
5520 if (current_block_number
== n_blocks
)
5523 b
= block_vector
[current_block_number
++];
5524 NOTE_BLOCK (insn
) = b
;
5525 block_stack
[depth
++] = b
;
5527 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
5530 /* There are more NOTE_INSN_BLOCK_ENDs that
5531 NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
5534 NOTE_BLOCK (insn
) = block_stack
[--depth
];
5538 free (block_vector
);
5542 /* Given a revised instruction chain, rebuild the tree structure of
5543 BLOCK nodes to correspond to the new order of RTL. The new block
5544 tree is inserted below TOP_BLOCK. Returns the current top-level
5548 reorder_blocks (block
, insns
)
5552 tree current_block
= block
;
5554 varray_type block_stack
;
5556 if (block
== NULL_TREE
)
5559 VARRAY_TREE_INIT (block_stack
, 10, "block_stack");
5561 /* Prune the old trees away, so that it doesn't get in the way. */
5562 BLOCK_SUBBLOCKS (current_block
) = 0;
5563 BLOCK_CHAIN (current_block
) = 0;
5565 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5566 if (GET_CODE (insn
) == NOTE
)
5568 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
5570 tree block
= NOTE_BLOCK (insn
);
5571 /* If we have seen this block before, copy it. */
5572 if (TREE_ASM_WRITTEN (block
))
5574 block
= copy_node (block
);
5575 NOTE_BLOCK (insn
) = block
;
5577 BLOCK_SUBBLOCKS (block
) = 0;
5578 TREE_ASM_WRITTEN (block
) = 1;
5579 BLOCK_SUPERCONTEXT (block
) = current_block
;
5580 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (current_block
);
5581 BLOCK_SUBBLOCKS (current_block
) = block
;
5582 current_block
= block
;
5583 VARRAY_PUSH_TREE (block_stack
, block
);
5585 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
5587 NOTE_BLOCK (insn
) = VARRAY_TOP_TREE (block_stack
);
5588 VARRAY_POP (block_stack
);
5589 BLOCK_SUBBLOCKS (current_block
)
5590 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
5591 current_block
= BLOCK_SUPERCONTEXT (current_block
);
5595 BLOCK_SUBBLOCKS (current_block
)
5596 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
5598 VARRAY_FREE (block_stack
);
5600 return current_block
;
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

static tree
blocks_nreverse (t)
     tree t;
{
  register tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (block, vector)
     tree block;
     tree *vector;
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (block, n_blocks_p)
     tree block;
     int *n_blocks_p;
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
  all_blocks (block, block_vector);

  return block_vector;
}
static int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (fn)
     tree fn;
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);

  return;
}
5703 /* Allocate a function structure and reset its contents to the defaults. */
5705 prepare_function_start ()
5707 cfun
= (struct function
*) xcalloc (1, sizeof (struct function
));
5709 init_stmt_for_function ();
5710 init_eh_for_function ();
5712 cse_not_expected
= ! optimize
;
5714 /* Caller save not needed yet. */
5715 caller_save_needed
= 0;
5717 /* No stack slots have been made yet. */
5718 stack_slot_list
= 0;
5720 current_function_has_nonlocal_label
= 0;
5721 current_function_has_nonlocal_goto
= 0;
5723 /* There is no stack slot for handling nonlocal gotos. */
5724 nonlocal_goto_handler_slots
= 0;
5725 nonlocal_goto_stack_level
= 0;
5727 /* No labels have been declared for nonlocal use. */
5728 nonlocal_labels
= 0;
5729 nonlocal_goto_handler_labels
= 0;
5731 /* No function calls so far in this function. */
5732 function_call_count
= 0;
5734 /* No parm regs have been allocated.
5735 (This is important for output_inline_function.) */
5736 max_parm_reg
= LAST_VIRTUAL_REGISTER
+ 1;
5738 /* Initialize the RTL mechanism. */
5741 /* Initialize the queue of pending postincrement and postdecrements,
5742 and some other info in expr.c. */
5745 /* We haven't done register allocation yet. */
5748 init_varasm_status (cfun
);
5750 /* Clear out data used for inlining. */
5751 cfun
->inlinable
= 0;
5752 cfun
->original_decl_initial
= 0;
5753 cfun
->original_arg_vector
= 0;
5755 #ifdef STACK_BOUNDARY
5756 cfun
->stack_alignment_needed
= STACK_BOUNDARY
;
5757 cfun
->preferred_stack_boundary
= STACK_BOUNDARY
;
5759 cfun
->stack_alignment_needed
= 0;
5762 /* Set if a call to setjmp is seen. */
5763 current_function_calls_setjmp
= 0;
5765 /* Set if a call to longjmp is seen. */
5766 current_function_calls_longjmp
= 0;
5768 current_function_calls_alloca
= 0;
5769 current_function_contains_functions
= 0;
5770 current_function_is_leaf
= 0;
5771 current_function_nothrow
= 0;
5772 current_function_sp_is_unchanging
= 0;
5773 current_function_uses_only_leaf_regs
= 0;
5774 current_function_has_computed_jump
= 0;
5775 current_function_is_thunk
= 0;
5777 current_function_returns_pcc_struct
= 0;
5778 current_function_returns_struct
= 0;
5779 current_function_epilogue_delay_list
= 0;
5780 current_function_uses_const_pool
= 0;
5781 current_function_uses_pic_offset_table
= 0;
5782 current_function_cannot_inline
= 0;
5784 /* We have not yet needed to make a label to jump to for tail-recursion. */
5785 tail_recursion_label
= 0;
5787 /* We haven't had a need to make a save area for ap yet. */
5788 arg_pointer_save_area
= 0;
5790 /* No stack slots allocated yet. */
5793 /* No SAVE_EXPRs in this function yet. */
5796 /* No RTL_EXPRs in this function yet. */
5799 /* Set up to allocate temporaries. */
5802 /* Indicate that we need to distinguish between the return value of the
5803 present function and the return value of a function being called. */
5804 rtx_equal_function_value_matters
= 1;
5806 /* Indicate that we have not instantiated virtual registers yet. */
5807 virtuals_instantiated
= 0;
5809 /* Indicate we have no need of a frame pointer yet. */
5810 frame_pointer_needed
= 0;
5812 /* By default assume not varargs or stdarg. */
5813 current_function_varargs
= 0;
5814 current_function_stdarg
= 0;
5816 /* We haven't made any trampolines for this function yet. */
5817 trampoline_list
= 0;
5819 init_pending_stack_adjust ();
5820 inhibit_defer_pop
= 0;
5822 current_function_outgoing_args_size
= 0;
5824 if (init_lang_status
)
5825 (*init_lang_status
) (cfun
);
5826 if (init_machine_status
)
5827 (*init_machine_status
) (cfun
);
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  */

void
init_dummy_function_start ()
{
  prepare_function_start ();
}
5839 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5840 and initialize static variables for generating RTL for the statements
5844 init_function_start (subr
, filename
, line
)
5849 prepare_function_start ();
5851 /* Remember this function for later. */
5852 cfun
->next_global
= all_functions
;
5853 all_functions
= cfun
;
5855 current_function_name
= (*decl_printable_name
) (subr
, 2);
5858 /* Nonzero if this is a nested function that uses a static chain. */
5860 current_function_needs_context
5861 = (decl_function_context (current_function_decl
) != 0
5862 && ! DECL_NO_STATIC_CHAIN (current_function_decl
));
5864 /* Within function body, compute a type's size as soon it is laid out. */
5865 immediate_size_expand
++;
5867 /* Prevent ever trying to delete the first instruction of a function.
5868 Also tell final how to output a linenum before the function prologue.
5869 Note linenums could be missing, e.g. when compiling a Java .class file. */
5871 emit_line_note (filename
, line
);
5873 /* Make sure first insn is a note even if we don't want linenums.
5874 This makes sure the first insn will never be deleted.
5875 Also, final expects a note to appear there. */
5876 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
5878 /* Set flags used by final.c. */
5879 if (aggregate_value_p (DECL_RESULT (subr
)))
5881 #ifdef PCC_STATIC_STRUCT_RETURN
5882 current_function_returns_pcc_struct
= 1;
5884 current_function_returns_struct
= 1;
5887 /* Warn if this value is an aggregate type,
5888 regardless of which calling convention we are using for it. */
5889 if (warn_aggregate_return
5890 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
))))
5891 warning ("function returns an aggregate");
5893 current_function_returns_pointer
5894 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr
)));
/* Make sure all values used by the optimization passes have sane
   defaults.  */

void
init_function_for_compilation ()
{
  reg_renumber = 0;

  /* No prologue/epilogue insns yet.  */
  prologue = epilogue = 0;
}

/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}
/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
#if !defined (HAS_INIT_SECTION)
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
		     VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
}
5932 extern struct obstack permanent_obstack
;
5934 /* Start the RTL for a new function, and set variables used for
5936 SUBR is the FUNCTION_DECL node.
5937 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5938 the function's parameters, which must be run at any return statement. */
5941 expand_function_start (subr
, parms_have_cleanups
)
5943 int parms_have_cleanups
;
5946 rtx last_ptr
= NULL_RTX
;
5948 /* Make sure volatile mem refs aren't considered
5949 valid operands of arithmetic insns. */
5950 init_recog_no_volatile ();
5952 /* Set this before generating any memory accesses. */
5953 current_function_check_memory_usage
5954 = (flag_check_memory_usage
5955 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl
));
5957 current_function_instrument_entry_exit
5958 = (flag_instrument_function_entry_exit
5959 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr
));
5961 current_function_limit_stack
5962 = (stack_limit_rtx
!= NULL_RTX
&& ! DECL_NO_LIMIT_STACK (subr
));
  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
         conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
          || GET_CODE (static_chain_incoming_rtx) == REG)
        emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_instrument_entry_exit
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif
  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (struct_value_incoming_rtx)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, struct_value_incoming_rtx);
            }
        }
      if (value_address)
        {
          DECL_RTL (DECL_RESULT (subr))
            = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
          MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
                               AGGREGATE_TYPE_P (TREE_TYPE
                                                 (DECL_RESULT
                                                  (subr))));
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups || current_function_instrument_entry_exit)
    {
      /* If function will end with cleanup code for parms,
         compute the return values into a pseudo reg,
         which we will copy into the true return register
         after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
        {
          REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
          /* Needed because we may need to move this to memory
             in case it's a named return value whose address is taken.  */
          DECL_REGISTER (DECL_RESULT (subr)) = 1;
        }
    }
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* Copy the static chain pointer into a pseudo.  If we have
         small register classes, copy the value from memory if
         static_chain_incoming_rtx is a REG.  */
      if (tem)
        {
          /* If the static chain originally came in a register, put it back
             there, then move it out in the next insn.  The reason for
             this peculiar code is to satisfy function integration.  */
          if (SMALL_REGISTER_CLASSES
              && GET_CODE (static_chain_incoming_rtx) == REG)
            emit_move_insn (static_chain_incoming_rtx, last_ptr);
          last_ptr = copy_to_reg (static_chain_incoming_rtx);
        }

      while (tem)
        {
          tree rtlexp = make_node (RTL_EXPR);

          RTL_EXPR_RTL (rtlexp) = last_ptr;
          context_display = tree_cons (tem, rtlexp, context_display);
          tem = decl_function_context (tem);
          if (tem == 0)
            break;
          /* Chain thru stack frames, assuming pointer to next lexical frame
             is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
          last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
          last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
                                               memory_address (Pmode,
                                                               last_ptr)));

          /* If we are not optimizing, ensure that we know that this
             piece of context is live over the entire function.  */
          if (! optimize)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
                                                save_expr_regs);
        }
    }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
                         fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
                   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
         side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
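
/* Illustrative sketch, not part of the original source: the expected
   calling sequence for a front end compiling one function body is roughly
   the following (the names fndecl, input_filename and lineno stand in for
   whatever the front end actually uses):

     init_function_start (fndecl, input_filename, lineno);
     expand_function_start (fndecl, 0);
     ... expand the statements of the function body ...
     expand_function_end (input_filename, lineno, 0);
     rest_of_compilation (fndecl);

   expand_function_start must run before any statement is expanded, so that
   DECL_RTL of the parameters and of DECL_RESULT exists.  */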
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end ()
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  free (cfun);
  cfun = 0;
}
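
/* Illustrative sketch, not part of the original source: the dummy
   start/end pair brackets RTL generation that is only needed briefly and
   then thrown away, e.g.

     init_dummy_function_start ();
     ... generate and inspect scratch RTL ...
     expand_dummy_function_end ();

   init_dummy_function_start is defined elsewhere in this file.  */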
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (doit, arg)
     void (*doit) PARAMS ((rtx, void *));
     void *arg;
{
  rtx outgoing = current_function_return_rtx;

  if (! outgoing)
    return;

  if (GET_CODE (outgoing) == REG
      && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
#else
      outgoing = FUNCTION_VALUE (type, current_function_decl);
#endif
      /* If this is a BLKmode structure being returned in registers, then use
         the mode computed in expand_return.  */
      if (GET_MODE (outgoing) == BLKmode)
        PUT_MODE (outgoing,
                  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
    }

  if (GET_CODE (outgoing) == REG)
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
        {
          rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

          if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
            (*doit) (x, arg);
        }
    }
}
static void
do_clobber_return_reg (reg, arg)
     rtx reg;
     void *arg ATTRIBUTE_UNUSED;
{
  emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
}

void
clobber_return_register ()
{
  diddle_return_value (do_clobber_return_reg, NULL);
}

static void
do_use_return_reg (reg, arg)
     rtx reg;
     void *arg ATTRIBUTE_UNUSED;
{
  emit_insn (gen_rtx_USE (VOIDmode, reg));
}

void
use_return_register ()
{
  diddle_return_value (do_use_return_reg, NULL);
}
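
/* Illustrative sketch, not part of the original source: any walker over
   the hard return registers can reuse diddle_return_value by supplying a
   callback with the signature used above.  For instance, a hypothetical
   helper that merely counts those registers:

     static void
     do_count_return_reg (reg, arg)
          rtx reg ATTRIBUTE_UNUSED;
          void *arg;
     {
       ++*(int *) arg;
     }

     int n_return_regs = 0;
     diddle_return_value (do_count_return_reg, &n_return_regs);  */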
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif
  finish_expr_for_function ();

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
        setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      rtx seq;

      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
                      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }
  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
         initializing trampolines.  */
      if (initial_trampoline == 0)
        {
          end_temporary_allocation ();
          initial_trampoline
            = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
          resume_temporary_allocation ();

          ggc_add_rtx_root (&initial_trampoline, 1);
        }
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
                       GEN_INT (TRAMPOLINE_SIZE),
                       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == CALL_INSN)
          {
            start_sequence ();
            probe_stack_range (STACK_CHECK_PROTECT,
                               GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
            seq = gen_sequence ();
            end_sequence ();
            emit_insns_before (seq, tail_recursion_reentry);
            break;
          }
    }
  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
            && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
          warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);
  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    {
      /* Before the return label, clobber the return registers so that
         they are not propagated live to the rest of the function.  This
         can only happen with functions that drop through; if there had
         been a return statement, there would have either been a return
         rtx, or a jump to the return label.  */
      clobber_return_register ();

      emit_label (return_label);
    }

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);
  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If there are any catch_clauses remaining, output them now.  */
    emit_insns (catch_clauses);
    catch_clauses = NULL_RTX;
    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
        label = gen_label_rtx ();
        last = emit_jump_insn_after (gen_jump (label), last);
        last = emit_barrier_after (last);
        emit_label (label);
      }
  }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
                         fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
          >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                                   current_function_decl);
#else
      real_decl_result
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                          current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
         the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
        PUT_MODE (real_decl_result,
                  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
                      DECL_RTL (DECL_RESULT (current_function_decl)));

      /* The delay slot scheduler assumes that current_function_return_rtx
         holds the hard register containing the return value, not a temporary
         pseudo.  */
      current_function_return_rtx = real_decl_result;
    }
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;
#ifdef FUNCTION_OUTGOING_VALUE
      outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      outgoing
        = FUNCTION_VALUE (build_pointer_type (type),
                          current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
    }
  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();

  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
        vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = 0; vec[j]; j++)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
            count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
        if (INSN_UID (insn) == vec[j])
          return 1;
    }
  return 0;
}

int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (prologue && contains (insn, prologue))
    return 1;
  if (epilogue && contains (insn, epilogue))
    return 1;
  return 0;
}
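
/* Illustrative sketch, not part of the original source: the two helpers
   above are used as a pair.  thread_prologue_and_epilogue_insns (below)
   records the emitted insns, and later passes query membership:

     prologue = record_insns (seq);
     ...
     if (prologue_epilogue_contains (insn))
       ... treat INSN as prologue/epilogue code ...

   The vectors are zero-terminated lists of INSN_UIDs, which is why
   contains () can scan with `for (j = 0; vec[j]; j++)'.  */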
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (bb)
     basic_block bb;
{
  rtx p, end;

  end = emit_jump_insn_after (gen_return (), bb->end);
  p = NEXT_INSN (bb->end);
  while (1)
    {
      set_block_for_insn (p, bb);
      if (p == end)
        break;
      p = NEXT_INSN (p);
    }
  bb->end = end;
}
#endif /* HAVE_return */
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;
  edge e;
  rtx seq;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx insn;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
        seq = get_insns ();
      prologue = record_insns (seq);
      emit_note (NULL, NOTE_INSN_PROLOGUE_END);

      /* GDB handles `break f' by setting a breakpoint on the first
         line note *after* the prologue.  That means that we should
         insert a line note here; otherwise, if the next line note
         comes part way into the next block, GDB will skip all the way
         to that point.  */
      insn = next_nonnote_insn (f);
      while (insn)
        {
          if (GET_CODE (insn) == NOTE
              && NOTE_LINE_NUMBER (insn) >= 0)
            {
              emit_line_note_force (NOTE_SOURCE_FILE (insn),
                                    NOTE_LINE_NUMBER (insn));
              break;
            }

          insn = PREV_INSN (insn);
        }

      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
         the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
        {
          /* Can't deal with multiple successors of the entry block.  */
          if (ENTRY_BLOCK_PTR->succ->succ_next)
            abort ();

          insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
          inserted = 1;
        }
      else
        emit_insn_after (seq, f);
    }
#endif
  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;
#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
         then by definition we don't need a full epilogue.  Examine
         the block that falls through to EXIT.  If it does not
         contain any code, examine its predecessors and try to
         emit (conditional) return instructions.  */

      basic_block last;
      edge e_next;
      rtx label;

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = last->end;
      while (label && GET_CODE (label) != CODE_LABEL)
        {
          if (active_insn_p (label))
            break;
          label = PREV_INSN (label);
        }

      if (last->head == label && GET_CODE (label) == CODE_LABEL)
        {
          for (e = last->pred; e; e = e_next)
            {
              basic_block bb = e->src;
              rtx jump;

              e_next = e->pred_next;
              if (bb == ENTRY_BLOCK_PTR)
                continue;

              jump = bb->end;
              if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
                continue;

              /* If we have an unconditional jump, we can replace that
                 with a simple return instruction.  */
              if (simplejump_p (jump))
                {
                  emit_return_into_block (bb);
                  flow_delete_insn (jump);
                }

              /* If we have a conditional jump, we can try to replace
                 that with a conditional return instruction.  */
              else if (condjump_p (jump))
                {
                  rtx ret, *loc;

                  ret = SET_SRC (PATTERN (jump));
                  if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
                    loc = &XEXP (ret, 1);
                  else
                    loc = &XEXP (ret, 2);
                  ret = gen_rtx_RETURN (VOIDmode);

                  if (! validate_change (jump, loc, ret, 0))
                    continue;
                  if (JUMP_LABEL (jump))
                    LABEL_NUSES (JUMP_LABEL (jump))--;

                  /* If this block has only one successor, it both jumps
                     and falls through to the fallthru block, so we can't
                     delete the edge.  */
                  if (bb->succ->succ_next == NULL)
                    continue;
                }
              else
                continue;

              /* Fix up the CFG for the successful change we just made.  */
              remove_edge (e);
              make_edge (NULL, bb, EXIT_BLOCK_PTR, 0);
            }

          /* Emit a return insn for the exit fallthru block.  Whether
             this is still reachable will be determined later.  */

          emit_barrier_after (last->end);
          emit_return_into_block (last);
        }
      else
        {
          /* The exit block wasn't empty.  We have to use insert_insn_on_edge,
             as it may be the exit block can go elsewhere as well.  */
          start_sequence ();
          emit_jump_insn (gen_return ());
          seq = gen_sequence ();
          end_sequence ();
          insert_insn_on_edge (seq, e);
          inserted = 1;
        }
      goto epilogue_done;
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      /* Find the edge that falls through to EXIT.  Other edges may exist
         due to RETURN instructions, but those don't need epilogues.
         There really shouldn't be a mixture -- either all should have
         been converted or none, however...  */

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;

      start_sequence ();
      emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();
      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
        seq = get_insns ();
      epilogue = record_insns (seq);

      seq = gen_sequence ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
#endif
epilogue_done:

  if (inserted)
    commit_edge_insertions ();
}
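
/* Illustrative sketch, not part of the original source: this pass runs
   once per function after the CFG has been built; a driver would do
   roughly

     thread_prologue_and_epilogue_insns (get_insns ());

   and, after scheduling and reorg have possibly moved insns around,

     reposition_prologue_and_epilogue_notes (get_insns ());

   (see the next function).  */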
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
        {
          register rtx insn, note = 0;

          /* Scan from the beginning until we reach the last prologue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; prologue[len]; len++)
            ;
          for (insn = f; len && insn; insn = NEXT_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                    note = insn;
                }
              else if ((len -= contains (insn, prologue)) == 0)
                {
                  rtx next;

                  /* Find the prologue-end note if we haven't already, and
                     move it to just after the last prologue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = NEXT_INSN (note));)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                          break;
                    }

                  next = NEXT_INSN (note);

                  /* Whether or not we can depend on BLOCK_HEAD,
                     attempt to keep it up-to-date.  */
                  if (BLOCK_HEAD (0) == note)
                    BLOCK_HEAD (0) = next;

                  remove_insn (note);
                  add_insn_after (note, insn);
                }
            }
        }

      if (epilogue)
        {
          register rtx insn, note = 0;

          /* Scan from the end until we reach the first epilogue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; epilogue[len]; len++)
            ;
          for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                    note = insn;
                }
              else if ((len -= contains (insn, epilogue)) == 0)
                {
                  /* Find the epilogue-begin note if we haven't already, and
                     move it to just before the first epilogue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = PREV_INSN (note));)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                          break;
                    }

                  /* Whether or not we can depend on BLOCK_HEAD,
                     attempt to keep it up-to-date.  */
                  if (n_basic_blocks
                      && BLOCK_HEAD (n_basic_blocks-1) == insn)
                    BLOCK_HEAD (n_basic_blocks-1) = note;

                  remove_insn (note);
                  add_insn_before (note, insn);
                }
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Mark T for GC.  */

static void
mark_temp_slot (t)
     struct temp_slot *t;
{
  while (t != 0)
    {
      ggc_mark_rtx (t->slot);
      ggc_mark_rtx (t->address);
      ggc_mark_tree (t->rtl_expr);

      t = t->next;
    }
}
/* Mark P for GC.  */

static void
mark_function_status (p)
     struct function *p;
{
  int i;
  rtx *r;

  if (p == 0)
    return;

  ggc_mark_rtx (p->arg_offset_rtx);

  if (p->x_parm_reg_stack_loc)
    for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
         i > 0; --i, ++r)
      ggc_mark_rtx (*r);

  ggc_mark_rtx (p->return_rtx);
  ggc_mark_rtx (p->x_cleanup_label);
  ggc_mark_rtx (p->x_return_label);
  ggc_mark_rtx (p->x_save_expr_regs);
  ggc_mark_rtx (p->x_stack_slot_list);
  ggc_mark_rtx (p->x_parm_birth_insn);
  ggc_mark_rtx (p->x_tail_recursion_label);
  ggc_mark_rtx (p->x_tail_recursion_reentry);
  ggc_mark_rtx (p->internal_arg_pointer);
  ggc_mark_rtx (p->x_arg_pointer_save_area);
  ggc_mark_tree (p->x_rtl_expr_chain);
  ggc_mark_rtx (p->x_last_parm_insn);
  ggc_mark_tree (p->x_context_display);
  ggc_mark_tree (p->x_trampoline_list);
  ggc_mark_rtx (p->epilogue_delay_list);

  mark_temp_slot (p->x_temp_slots);

  {
    struct var_refs_queue *q = p->fixup_var_refs_queue;
    while (q)
      {
        ggc_mark_rtx (q->modified);
        q = q->next;
      }
  }

  ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
  ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
  ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
  ggc_mark_tree (p->x_nonlocal_labels);
}
/* Mark the function chain ARG (which is really a struct function **)
   for GC.  */

static void
mark_function_chain (arg)
     void *arg;
{
  struct function *f = *(struct function **) arg;

  for (; f; f = f->next_global)
    {
      ggc_mark_tree (f->decl);

      mark_function_status (f);
      mark_eh_status (f->eh);
      mark_stmt_status (f->stmt);
      mark_expr_status (f->expr);
      mark_emit_status (f->emit);
      mark_varasm_status (f->varasm);

      if (mark_machine_status)
        (*mark_machine_status) (f);
      if (mark_lang_status)
        (*mark_lang_status) (f);

      if (f->original_arg_vector)
        ggc_mark_rtvec ((rtvec) f->original_arg_vector);
      if (f->original_decl_initial)
        ggc_mark_tree (f->original_decl_initial);
    }
}

/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  ggc_add_root (&all_functions, 1, sizeof all_functions,
                mark_function_chain);
}
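
/* Illustrative sketch, not part of the original source: a front end or
   back end that attaches its own data to struct function registers a
   marking hook so that mark_function_chain above reaches that data during
   garbage collection.  The hook name below is hypothetical:

     mark_lang_status = mark_my_language_function_data;

   mark_machine_status serves the same purpose for target-specific
   per-function data.  */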