1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-99, 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option) any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
48 #include "insn-flags.h"
50 #include "insn-codes.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
56 #include "basic-block.h"
63 #ifndef TRAMPOLINE_ALIGNMENT
64 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
67 #ifndef LOCAL_ALIGNMENT
68 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
71 #if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
72 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
75 /* Some systems use __main in a way incompatible with its use in gcc, in these
76 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
77 give the same symbol without quotes for an alternative entry point. You
78 must define both, or neither. */
80 #define NAME__MAIN "__main"
81 #define SYMBOL__MAIN __main
84 /* Round a value to the lowest integer less than it that is a multiple of
85 the required alignment. Avoid using division in case the value is
86 negative. Assume the alignment is a power of two. */
87 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
89 /* Similar, but round to the next highest integer that meets the alignment. */
91 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
93 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
94 during rtl generation. If they are different register numbers, this is
95 always true. It may also be true if
96 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
97 generation. See fix_lexical_addr for details. */
99 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
100 #define NEED_SEPARATE_AP
103 /* Nonzero if function being compiled doesn't contain any calls
104 (ignoring the prologue and epilogue). This is set prior to
105 local register allocation and is valid for the remaining compiler passes. */
107 int current_function_is_leaf
;
109 /* Nonzero if function being compiled doesn't modify the stack pointer
110 (ignoring the prologue and epilogue). This is only valid after
111 life_analysis has run. */
112 int current_function_sp_is_unchanging
;
114 /* Nonzero if the function being compiled is a leaf function which only
115 uses leaf registers. This is valid after reload (specifically after
116 sched2) and is useful only if the port defines LEAF_REGISTERS. */
117 int current_function_uses_only_leaf_regs
;
119 /* Nonzero once virtual register instantiation has been done.
120 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
121 static int virtuals_instantiated
;
123 /* These variables hold pointers to functions to
124 save and restore machine-specific data,
125 in push_function_context and pop_function_context. */
126 void (*init_machine_status
) PARAMS ((struct function
*));
127 void (*save_machine_status
) PARAMS ((struct function
*));
128 void (*restore_machine_status
) PARAMS ((struct function
*));
129 void (*mark_machine_status
) PARAMS ((struct function
*));
130 void (*free_machine_status
) PARAMS ((struct function
*));
132 /* Likewise, but for language-specific data. */
133 void (*init_lang_status
) PARAMS ((struct function
*));
134 void (*save_lang_status
) PARAMS ((struct function
*));
135 void (*restore_lang_status
) PARAMS ((struct function
*));
136 void (*mark_lang_status
) PARAMS ((struct function
*));
137 void (*free_lang_status
) PARAMS ((struct function
*));
139 /* The FUNCTION_DECL for an inline function currently being expanded. */
140 tree inline_function_decl
;
142 /* The currently compiled function. */
143 struct function
*cfun
= 0;
145 /* Global list of all compiled functions. */
146 struct function
*all_functions
= 0;
148 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
149 static int *prologue
;
150 static int *epilogue
;
152 /* In order to evaluate some expressions, such as function calls returning
153 structures in memory, we need to temporarily allocate stack locations.
154 We record each allocated temporary in the following structure.
156 Associated with each temporary slot is a nesting level. When we pop up
157 one level, all temporaries associated with the previous level are freed.
158 Normally, all temporaries are freed after the execution of the statement
159 in which they were created. However, if we are inside a ({...}) grouping,
160 the result may be in a temporary and hence must be preserved. If the
161 result could be in a temporary, we preserve it if we can determine which
162 one it is in. If we cannot determine which temporary may contain the
163 result, all temporaries are preserved. A temporary is preserved by
164 pretending it was allocated at the previous nesting level.
166 Automatic variables are also assigned temporary slots, at the nesting
167 level where they are defined. They are marked a "kept" so that
168 free_temp_slots will not free them. */
172 /* Points to next temporary slot. */
173 struct temp_slot
*next
;
174 /* The rtx to used to reference the slot. */
176 /* The rtx used to represent the address if not the address of the
177 slot above. May be an EXPR_LIST if multiple addresses exist. */
179 /* The alignment (in bits) of the slot. */
181 /* The size, in units, of the slot. */
183 /* The alias set for the slot. If the alias set is zero, we don't
184 know anything about the alias set of the slot. We must only
185 reuse a slot if it is assigned an object of the same alias set.
186 Otherwise, the rest of the compiler may assume that the new use
187 of the slot cannot alias the old use of the slot, which is
188 false. If the slot has alias set zero, then we can't reuse the
189 slot at all, since we have no idea what alias set may have been
190 imposed on the memory. For example, if the stack slot is the
191 call frame for an inlined function, we have no idea what alias
192 sets will be assigned to various pieces of the call frame. */
194 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
196 /* Non-zero if this temporary is currently in use. */
198 /* Non-zero if this temporary has its address taken. */
200 /* Nesting level at which this slot is being used. */
202 /* Non-zero if this should survive a call to free_temp_slots. */
204 /* The offset of the slot from the frame_pointer, including extra space
205 for alignment. This info is for combine_temp_slots. */
206 HOST_WIDE_INT base_offset
;
207 /* The size of the slot, including extra space for alignment. This
208 info is for combine_temp_slots. */
209 HOST_WIDE_INT full_size
;
212 /* This structure is used to record MEMs or pseudos used to replace VAR, any
213 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
214 maintain this list in case two operands of an insn were required to match;
215 in that case we must ensure we use the same replacement. */
217 struct fixup_replacement
221 struct fixup_replacement
*next
;
224 struct insns_for_mem_entry
{
225 /* The KEY in HE will be a MEM. */
226 struct hash_entry he
;
227 /* These are the INSNS which reference the MEM. */
231 /* Forward declarations. */
233 static rtx assign_stack_local_1
PARAMS ((enum machine_mode
, HOST_WIDE_INT
,
234 int, struct function
*));
235 static rtx assign_stack_temp_for_type
PARAMS ((enum machine_mode
,
236 HOST_WIDE_INT
, int, tree
));
237 static struct temp_slot
*find_temp_slot_from_address
PARAMS ((rtx
));
238 static void put_reg_into_stack
PARAMS ((struct function
*, rtx
, tree
,
239 enum machine_mode
, enum machine_mode
,
240 int, int, int, struct hash_table
*));
241 static void fixup_var_refs
PARAMS ((rtx
, enum machine_mode
, int,
242 struct hash_table
*));
243 static struct fixup_replacement
244 *find_fixup_replacement
PARAMS ((struct fixup_replacement
**, rtx
));
245 static void fixup_var_refs_insns
PARAMS ((rtx
, enum machine_mode
, int,
246 rtx
, int, struct hash_table
*));
247 static void fixup_var_refs_1
PARAMS ((rtx
, enum machine_mode
, rtx
*, rtx
,
248 struct fixup_replacement
**));
249 static rtx fixup_memory_subreg
PARAMS ((rtx
, rtx
, int));
250 static rtx walk_fixup_memory_subreg
PARAMS ((rtx
, rtx
, int));
251 static rtx fixup_stack_1
PARAMS ((rtx
, rtx
));
252 static void optimize_bit_field
PARAMS ((rtx
, rtx
, rtx
*));
253 static void instantiate_decls
PARAMS ((tree
, int));
254 static void instantiate_decls_1
PARAMS ((tree
, int));
255 static void instantiate_decl
PARAMS ((rtx
, int, int));
256 static int instantiate_virtual_regs_1
PARAMS ((rtx
*, rtx
, int));
257 static void delete_handlers
PARAMS ((void));
258 static void pad_to_arg_alignment
PARAMS ((struct args_size
*, int,
259 struct args_size
*));
260 #ifndef ARGS_GROW_DOWNWARD
261 static void pad_below
PARAMS ((struct args_size
*, enum machine_mode
,
264 #ifdef ARGS_GROW_DOWNWARD
265 static tree round_down
PARAMS ((tree
, int));
267 static rtx round_trampoline_addr
PARAMS ((rtx
));
268 static tree blocks_nreverse
PARAMS ((tree
));
269 static int all_blocks
PARAMS ((tree
, tree
*));
270 /* We always define `record_insns' even if its not used so that we
271 can always export `prologue_epilogue_contains'. */
272 static int *record_insns
PARAMS ((rtx
)) ATTRIBUTE_UNUSED
;
273 static int contains
PARAMS ((rtx
, int *));
275 static void emit_return_into_block
PARAMS ((basic_block
));
277 static void put_addressof_into_stack
PARAMS ((rtx
, struct hash_table
*));
278 static boolean purge_addressof_1
PARAMS ((rtx
*, rtx
, int, int,
279 struct hash_table
*));
280 static int is_addressof
PARAMS ((rtx
*, void *));
281 static struct hash_entry
*insns_for_mem_newfunc
PARAMS ((struct hash_entry
*,
284 static unsigned long insns_for_mem_hash
PARAMS ((hash_table_key
));
285 static boolean insns_for_mem_comp
PARAMS ((hash_table_key
, hash_table_key
));
286 static int insns_for_mem_walk
PARAMS ((rtx
*, void *));
287 static void compute_insns_for_mem
PARAMS ((rtx
, rtx
, struct hash_table
*));
288 static void mark_temp_slot
PARAMS ((struct temp_slot
*));
289 static void mark_function_status
PARAMS ((struct function
*));
290 static void mark_function_chain
PARAMS ((void *));
291 static void prepare_function_start
PARAMS ((void));
292 static void do_clobber_return_reg
PARAMS ((rtx
, void *));
293 static void do_use_return_reg
PARAMS ((rtx
, void *));
295 /* Pointer to chain of `struct function' for containing functions. */
296 struct function
*outer_function_chain
;
298 /* Given a function decl for a containing function,
299 return the `struct function' for it. */
302 find_function_data (decl
)
307 for (p
= outer_function_chain
; p
; p
= p
->next
)
314 /* Save the current context for compilation of a nested function.
315 This is called from language-specific code. The caller should use
316 the save_lang_status callback to save any language-specific state,
317 since this function knows only about language-independent
321 push_function_context_to (context
)
324 struct function
*p
, *context_data
;
328 context_data
= (context
== current_function_decl
330 : find_function_data (context
));
331 context_data
->contains_functions
= 1;
335 init_dummy_function_start ();
338 p
->next
= outer_function_chain
;
339 outer_function_chain
= p
;
340 p
->fixup_var_refs_queue
= 0;
342 save_tree_status (p
);
343 if (save_lang_status
)
344 (*save_lang_status
) (p
);
345 if (save_machine_status
)
346 (*save_machine_status
) (p
);
352 push_function_context ()
354 push_function_context_to (current_function_decl
);
357 /* Restore the last saved context, at the end of a nested function.
358 This function is called from language-specific code. */
361 pop_function_context_from (context
)
362 tree context ATTRIBUTE_UNUSED
;
364 struct function
*p
= outer_function_chain
;
365 struct var_refs_queue
*queue
;
366 struct var_refs_queue
*next
;
369 outer_function_chain
= p
->next
;
371 current_function_decl
= p
->decl
;
374 restore_tree_status (p
);
375 restore_emit_status (p
);
377 if (restore_machine_status
)
378 (*restore_machine_status
) (p
);
379 if (restore_lang_status
)
380 (*restore_lang_status
) (p
);
382 /* Finish doing put_var_into_stack for any of our variables
383 which became addressable during the nested function. */
384 for (queue
= p
->fixup_var_refs_queue
; queue
; queue
= next
)
387 fixup_var_refs (queue
->modified
, queue
->promoted_mode
,
388 queue
->unsignedp
, 0);
391 p
->fixup_var_refs_queue
= 0;
393 /* Reset variables that have known state during rtx generation. */
394 rtx_equal_function_value_matters
= 1;
395 virtuals_instantiated
= 0;
399 pop_function_context ()
401 pop_function_context_from (current_function_decl
);
404 /* Clear out all parts of the state in F that can safely be discarded
405 after the function has been parsed, but not compiled, to let
406 garbage collection reclaim the memory. */
409 free_after_parsing (f
)
412 /* f->expr->forced_labels is used by code generation. */
413 /* f->emit->regno_reg_rtx is used by code generation. */
414 /* f->varasm is used by code generation. */
415 /* f->eh->eh_return_stub_label is used by code generation. */
417 if (free_lang_status
)
418 (*free_lang_status
) (f
);
419 free_stmt_status (f
);
422 /* Clear out all parts of the state in F that can safely be discarded
423 after the function has been compiled, to let garbage collection
424 reclaim the memory. */
427 free_after_compilation (f
)
431 free_expr_status (f
);
432 free_emit_status (f
);
433 free_varasm_status (f
);
435 if (free_machine_status
)
436 (*free_machine_status
) (f
);
438 if (f
->x_parm_reg_stack_loc
)
439 free (f
->x_parm_reg_stack_loc
);
441 f
->arg_offset_rtx
= NULL
;
442 f
->return_rtx
= NULL
;
443 f
->internal_arg_pointer
= NULL
;
444 f
->x_nonlocal_labels
= NULL
;
445 f
->x_nonlocal_goto_handler_slots
= NULL
;
446 f
->x_nonlocal_goto_handler_labels
= NULL
;
447 f
->x_nonlocal_goto_stack_level
= NULL
;
448 f
->x_cleanup_label
= NULL
;
449 f
->x_return_label
= NULL
;
450 f
->x_save_expr_regs
= NULL
;
451 f
->x_stack_slot_list
= NULL
;
452 f
->x_rtl_expr_chain
= NULL
;
453 f
->x_tail_recursion_label
= NULL
;
454 f
->x_tail_recursion_reentry
= NULL
;
455 f
->x_arg_pointer_save_area
= NULL
;
456 f
->x_context_display
= NULL
;
457 f
->x_trampoline_list
= NULL
;
458 f
->x_parm_birth_insn
= NULL
;
459 f
->x_last_parm_insn
= NULL
;
460 f
->x_parm_reg_stack_loc
= NULL
;
461 f
->x_temp_slots
= NULL
;
462 f
->fixup_var_refs_queue
= NULL
;
463 f
->original_arg_vector
= NULL
;
464 f
->original_decl_initial
= NULL
;
465 f
->inl_last_parm_insn
= NULL
;
466 f
->epilogue_delay_list
= NULL
;
470 /* Allocate fixed slots in the stack frame of the current function. */
472 /* Return size needed for stack frame based on slots so far allocated in
474 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
475 the caller may have to do that. */
478 get_func_frame_size (f
)
481 #ifdef FRAME_GROWS_DOWNWARD
482 return -f
->x_frame_offset
;
484 return f
->x_frame_offset
;
488 /* Return size needed for stack frame based on slots so far allocated.
489 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
490 the caller may have to do that. */
494 return get_func_frame_size (cfun
);
497 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
498 with machine mode MODE.
500 ALIGN controls the amount of alignment for the address of the slot:
501 0 means according to MODE,
502 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
503 positive specifies alignment boundary in bits.
505 We do not round to stack_boundary here.
507 FUNCTION specifies the function to allocate in. */
510 assign_stack_local_1 (mode
, size
, align
, function
)
511 enum machine_mode mode
;
514 struct function
*function
;
516 register rtx x
, addr
;
517 int bigend_correction
= 0;
520 /* Allocate in the memory associated with the function in whose frame
522 if (function
!= cfun
)
523 push_obstacks (function
->function_obstack
,
524 function
->function_maybepermanent_obstack
);
530 alignment
= GET_MODE_ALIGNMENT (mode
);
532 alignment
= BIGGEST_ALIGNMENT
;
534 /* Allow the target to (possibly) increase the alignment of this
536 type
= type_for_mode (mode
, 0);
538 alignment
= LOCAL_ALIGNMENT (type
, alignment
);
540 alignment
/= BITS_PER_UNIT
;
542 else if (align
== -1)
544 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
545 size
= CEIL_ROUND (size
, alignment
);
548 alignment
= align
/ BITS_PER_UNIT
;
550 #ifdef FRAME_GROWS_DOWNWARD
551 function
->x_frame_offset
-= size
;
554 /* Ignore alignment we can't do with expected alignment of the boundary. */
555 if (alignment
* BITS_PER_UNIT
> PREFERRED_STACK_BOUNDARY
)
556 alignment
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
558 if (function
->stack_alignment_needed
< alignment
* BITS_PER_UNIT
)
559 function
->stack_alignment_needed
= alignment
* BITS_PER_UNIT
;
561 /* Round frame offset to that alignment.
562 We must be careful here, since FRAME_OFFSET might be negative and
563 division with a negative dividend isn't as well defined as we might
564 like. So we instead assume that ALIGNMENT is a power of two and
565 use logical operations which are unambiguous. */
566 #ifdef FRAME_GROWS_DOWNWARD
567 function
->x_frame_offset
= FLOOR_ROUND (function
->x_frame_offset
, alignment
);
569 function
->x_frame_offset
= CEIL_ROUND (function
->x_frame_offset
, alignment
);
572 /* On a big-endian machine, if we are allocating more space than we will use,
573 use the least significant bytes of those that are allocated. */
574 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
)
575 bigend_correction
= size
- GET_MODE_SIZE (mode
);
577 /* If we have already instantiated virtual registers, return the actual
578 address relative to the frame pointer. */
579 if (function
== cfun
&& virtuals_instantiated
)
580 addr
= plus_constant (frame_pointer_rtx
,
581 (frame_offset
+ bigend_correction
582 + STARTING_FRAME_OFFSET
));
584 addr
= plus_constant (virtual_stack_vars_rtx
,
585 function
->x_frame_offset
+ bigend_correction
);
587 #ifndef FRAME_GROWS_DOWNWARD
588 function
->x_frame_offset
+= size
;
591 x
= gen_rtx_MEM (mode
, addr
);
593 function
->x_stack_slot_list
594 = gen_rtx_EXPR_LIST (VOIDmode
, x
, function
->x_stack_slot_list
);
596 if (function
!= cfun
)
602 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
605 assign_stack_local (mode
, size
, align
)
606 enum machine_mode mode
;
610 return assign_stack_local_1 (mode
, size
, align
, cfun
);
613 /* Allocate a temporary stack slot and record it for possible later
616 MODE is the machine mode to be given to the returned rtx.
618 SIZE is the size in units of the space required. We do no rounding here
619 since assign_stack_local will do any required rounding.
621 KEEP is 1 if this slot is to be retained after a call to
622 free_temp_slots. Automatic variables for a block are allocated
623 with this flag. KEEP is 2 if we allocate a longer term temporary,
624 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
625 if we are to allocate something at an inner level to be treated as
626 a variable in the block (e.g., a SAVE_EXPR).
628 TYPE is the type that will be used for the stack slot. */
631 assign_stack_temp_for_type (mode
, size
, keep
, type
)
632 enum machine_mode mode
;
639 struct temp_slot
*p
, *best_p
= 0;
641 /* If SIZE is -1 it means that somebody tried to allocate a temporary
642 of a variable size. */
646 /* If we know the alias set for the memory that will be used, use
647 it. If there's no TYPE, then we don't know anything about the
648 alias set for the memory. */
650 alias_set
= get_alias_set (type
);
654 align
= GET_MODE_ALIGNMENT (mode
);
656 align
= BIGGEST_ALIGNMENT
;
659 type
= type_for_mode (mode
, 0);
661 align
= LOCAL_ALIGNMENT (type
, align
);
663 /* Try to find an available, already-allocated temporary of the proper
664 mode which meets the size and alignment requirements. Choose the
665 smallest one with the closest alignment. */
666 for (p
= temp_slots
; p
; p
= p
->next
)
667 if (p
->align
>= align
&& p
->size
>= size
&& GET_MODE (p
->slot
) == mode
669 && (!flag_strict_aliasing
670 || (alias_set
&& p
->alias_set
== alias_set
))
671 && (best_p
== 0 || best_p
->size
> p
->size
672 || (best_p
->size
== p
->size
&& best_p
->align
> p
->align
)))
674 if (p
->align
== align
&& p
->size
== size
)
682 /* Make our best, if any, the one to use. */
685 /* If there are enough aligned bytes left over, make them into a new
686 temp_slot so that the extra bytes don't get wasted. Do this only
687 for BLKmode slots, so that we can be sure of the alignment. */
688 if (GET_MODE (best_p
->slot
) == BLKmode
689 /* We can't split slots if -fstrict-aliasing because the
690 information about the alias set for the new slot will be
692 && !flag_strict_aliasing
)
694 int alignment
= best_p
->align
/ BITS_PER_UNIT
;
695 HOST_WIDE_INT rounded_size
= CEIL_ROUND (size
, alignment
);
697 if (best_p
->size
- rounded_size
>= alignment
)
699 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
700 p
->in_use
= p
->addr_taken
= 0;
701 p
->size
= best_p
->size
- rounded_size
;
702 p
->base_offset
= best_p
->base_offset
+ rounded_size
;
703 p
->full_size
= best_p
->full_size
- rounded_size
;
704 p
->slot
= gen_rtx_MEM (BLKmode
,
705 plus_constant (XEXP (best_p
->slot
, 0),
707 p
->align
= best_p
->align
;
710 p
->next
= temp_slots
;
713 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, p
->slot
,
716 best_p
->size
= rounded_size
;
717 best_p
->full_size
= rounded_size
;
724 /* If we still didn't find one, make a new temporary. */
727 HOST_WIDE_INT frame_offset_old
= frame_offset
;
729 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
731 /* We are passing an explicit alignment request to assign_stack_local.
732 One side effect of that is assign_stack_local will not round SIZE
733 to ensure the frame offset remains suitably aligned.
735 So for requests which depended on the rounding of SIZE, we go ahead
736 and round it now. We also make sure ALIGNMENT is at least
737 BIGGEST_ALIGNMENT. */
738 if (mode
== BLKmode
&& align
< BIGGEST_ALIGNMENT
)
740 p
->slot
= assign_stack_local (mode
,
742 ? CEIL_ROUND (size
, align
/ BITS_PER_UNIT
)
747 p
->alias_set
= alias_set
;
749 /* The following slot size computation is necessary because we don't
750 know the actual size of the temporary slot until assign_stack_local
751 has performed all the frame alignment and size rounding for the
752 requested temporary. Note that extra space added for alignment
753 can be either above or below this stack slot depending on which
754 way the frame grows. We include the extra space if and only if it
755 is above this slot. */
756 #ifdef FRAME_GROWS_DOWNWARD
757 p
->size
= frame_offset_old
- frame_offset
;
762 /* Now define the fields used by combine_temp_slots. */
763 #ifdef FRAME_GROWS_DOWNWARD
764 p
->base_offset
= frame_offset
;
765 p
->full_size
= frame_offset_old
- frame_offset
;
767 p
->base_offset
= frame_offset_old
;
768 p
->full_size
= frame_offset
- frame_offset_old
;
771 p
->next
= temp_slots
;
777 p
->rtl_expr
= seq_rtl_expr
;
781 p
->level
= target_temp_slot_level
;
786 p
->level
= var_temp_slot_level
;
791 p
->level
= temp_slot_level
;
795 /* We may be reusing an old slot, so clear any MEM flags that may have been
797 RTX_UNCHANGING_P (p
->slot
) = 0;
798 MEM_IN_STRUCT_P (p
->slot
) = 0;
799 MEM_SCALAR_P (p
->slot
) = 0;
800 MEM_ALIAS_SET (p
->slot
) = 0;
804 /* Allocate a temporary stack slot and record it for possible later
805 reuse. First three arguments are same as in preceding function. */
808 assign_stack_temp (mode
, size
, keep
)
809 enum machine_mode mode
;
813 return assign_stack_temp_for_type (mode
, size
, keep
, NULL_TREE
);
816 /* Assign a temporary of given TYPE.
817 KEEP is as for assign_stack_temp.
818 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
819 it is 0 if a register is OK.
820 DONT_PROMOTE is 1 if we should not promote values in register
824 assign_temp (type
, keep
, memory_required
, dont_promote
)
828 int dont_promote ATTRIBUTE_UNUSED
;
830 enum machine_mode mode
= TYPE_MODE (type
);
831 #ifndef PROMOTE_FOR_CALL_ONLY
832 int unsignedp
= TREE_UNSIGNED (type
);
835 if (mode
== BLKmode
|| memory_required
)
837 HOST_WIDE_INT size
= int_size_in_bytes (type
);
840 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
841 problems with allocating the stack space. */
845 /* Unfortunately, we don't yet know how to allocate variable-sized
846 temporaries. However, sometimes we have a fixed upper limit on
847 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
848 instead. This is the case for Chill variable-sized strings. */
849 if (size
== -1 && TREE_CODE (type
) == ARRAY_TYPE
850 && TYPE_ARRAY_MAX_SIZE (type
) != NULL_TREE
851 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type
)) == INTEGER_CST
)
852 size
= TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type
));
854 tmp
= assign_stack_temp_for_type (mode
, size
, keep
, type
);
855 MEM_SET_IN_STRUCT_P (tmp
, AGGREGATE_TYPE_P (type
));
859 #ifndef PROMOTE_FOR_CALL_ONLY
861 mode
= promote_mode (type
, mode
, &unsignedp
, 0);
864 return gen_reg_rtx (mode
);
867 /* Combine temporary stack slots which are adjacent on the stack.
869 This allows for better use of already allocated stack space. This is only
870 done for BLKmode slots because we can be sure that we won't have alignment
871 problems in this case. */
874 combine_temp_slots ()
876 struct temp_slot
*p
, *q
;
877 struct temp_slot
*prev_p
, *prev_q
;
880 /* We can't combine slots, because the information about which slot
881 is in which alias set will be lost. */
882 if (flag_strict_aliasing
)
885 /* If there are a lot of temp slots, don't do anything unless
886 high levels of optimization.  */
887 if (! flag_expensive_optimizations
)
888 for (p
= temp_slots
, num_slots
= 0; p
; p
= p
->next
, num_slots
++)
889 if (num_slots
> 100 || (num_slots
> 10 && optimize
== 0))
892 for (p
= temp_slots
, prev_p
= 0; p
; p
= prev_p
? prev_p
->next
: temp_slots
)
896 if (! p
->in_use
&& GET_MODE (p
->slot
) == BLKmode
)
897 for (q
= p
->next
, prev_q
= p
; q
; q
= prev_q
->next
)
900 if (! q
->in_use
&& GET_MODE (q
->slot
) == BLKmode
)
902 if (p
->base_offset
+ p
->full_size
== q
->base_offset
)
904 /* Q comes after P; combine Q into P. */
906 p
->full_size
+= q
->full_size
;
909 else if (q
->base_offset
+ q
->full_size
== p
->base_offset
)
911 /* P comes after Q; combine P into Q. */
913 q
->full_size
+= p
->full_size
;
918 /* Either delete Q or advance past it. */
920 prev_q
->next
= q
->next
;
924 /* Either delete P or advance past it. */
928 prev_p
->next
= p
->next
;
930 temp_slots
= p
->next
;
937 /* Find the temp slot corresponding to the object at address X. */
939 static struct temp_slot
*
940 find_temp_slot_from_address (x
)
946 for (p
= temp_slots
; p
; p
= p
->next
)
951 else if (XEXP (p
->slot
, 0) == x
953 || (GET_CODE (x
) == PLUS
954 && XEXP (x
, 0) == virtual_stack_vars_rtx
955 && GET_CODE (XEXP (x
, 1)) == CONST_INT
956 && INTVAL (XEXP (x
, 1)) >= p
->base_offset
957 && INTVAL (XEXP (x
, 1)) < p
->base_offset
+ p
->full_size
))
960 else if (p
->address
!= 0 && GET_CODE (p
->address
) == EXPR_LIST
)
961 for (next
= p
->address
; next
; next
= XEXP (next
, 1))
962 if (XEXP (next
, 0) == x
)
966 /* If we have a sum involving a register, see if it points to a temp
968 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == REG
969 && (p
= find_temp_slot_from_address (XEXP (x
, 0))) != 0)
971 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
972 && (p
= find_temp_slot_from_address (XEXP (x
, 1))) != 0)
978 /* Indicate that NEW is an alternate way of referring to the temp slot
979 that previously was known by OLD. */
982 update_temp_slot_address (old
, new)
987 if (rtx_equal_p (old
, new))
990 p
= find_temp_slot_from_address (old
);
992 /* If we didn't find one, see if both OLD is a PLUS. If so, and NEW
993 is a register, see if one operand of the PLUS is a temporary
994 location. If so, NEW points into it. Otherwise, if both OLD and
995 NEW are a PLUS and if there is a register in common between them.
996 If so, try a recursive call on those values. */
999 if (GET_CODE (old
) != PLUS
)
1002 if (GET_CODE (new) == REG
)
1004 update_temp_slot_address (XEXP (old
, 0), new);
1005 update_temp_slot_address (XEXP (old
, 1), new);
1008 else if (GET_CODE (new) != PLUS
)
1011 if (rtx_equal_p (XEXP (old
, 0), XEXP (new, 0)))
1012 update_temp_slot_address (XEXP (old
, 1), XEXP (new, 1));
1013 else if (rtx_equal_p (XEXP (old
, 1), XEXP (new, 0)))
1014 update_temp_slot_address (XEXP (old
, 0), XEXP (new, 1));
1015 else if (rtx_equal_p (XEXP (old
, 0), XEXP (new, 1)))
1016 update_temp_slot_address (XEXP (old
, 1), XEXP (new, 0));
1017 else if (rtx_equal_p (XEXP (old
, 1), XEXP (new, 1)))
1018 update_temp_slot_address (XEXP (old
, 0), XEXP (new, 0));
1023 /* Otherwise add an alias for the temp's address. */
1024 else if (p
->address
== 0)
1028 if (GET_CODE (p
->address
) != EXPR_LIST
)
1029 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, p
->address
, NULL_RTX
);
1031 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, new, p
->address
);
1035 /* If X could be a reference to a temporary slot, mark the fact that its
1036 address was taken. */
1039 mark_temp_addr_taken (x
)
1042 struct temp_slot
*p
;
1047 /* If X is not in memory or is at a constant address, it cannot be in
1048 a temporary slot. */
1049 if (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1052 p
= find_temp_slot_from_address (XEXP (x
, 0));
1057 /* If X could be a reference to a temporary slot, mark that slot as
1058 belonging to the to one level higher than the current level. If X
1059 matched one of our slots, just mark that one. Otherwise, we can't
1060 easily predict which it is, so upgrade all of them. Kept slots
1061 need not be touched.
1063 This is called when an ({...}) construct occurs and a statement
1064 returns a value in memory. */
1067 preserve_temp_slots (x
)
1070 struct temp_slot
*p
= 0;
1072 /* If there is no result, we still might have some objects whose address
1073 were taken, so we need to make sure they stay around. */
1076 for (p
= temp_slots
; p
; p
= p
->next
)
1077 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1083 /* If X is a register that is being used as a pointer, see if we have
1084 a temporary slot we know it points to. To be consistent with
1085 the code below, we really should preserve all non-kept slots
1086 if we can't find a match, but that seems to be much too costly. */
1087 if (GET_CODE (x
) == REG
&& REGNO_POINTER_FLAG (REGNO (x
)))
1088 p
= find_temp_slot_from_address (x
);
1090 /* If X is not in memory or is at a constant address, it cannot be in
1091 a temporary slot, but it can contain something whose address was
1093 if (p
== 0 && (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0))))
1095 for (p
= temp_slots
; p
; p
= p
->next
)
1096 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1102 /* First see if we can find a match. */
1104 p
= find_temp_slot_from_address (XEXP (x
, 0));
1108 /* Move everything at our level whose address was taken to our new
1109 level in case we used its address. */
1110 struct temp_slot
*q
;
1112 if (p
->level
== temp_slot_level
)
1114 for (q
= temp_slots
; q
; q
= q
->next
)
1115 if (q
!= p
&& q
->addr_taken
&& q
->level
== p
->level
)
1124 /* Otherwise, preserve all non-kept slots at this level. */
1125 for (p
= temp_slots
; p
; p
= p
->next
)
1126 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
)
1130 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1131 with that RTL_EXPR, promote it into a temporary slot at the present
1132 level so it will not be freed when we free slots made in the
1136 preserve_rtl_expr_result (x
)
1139 struct temp_slot
*p
;
1141 /* If X is not in memory or is at a constant address, it cannot be in
1142 a temporary slot. */
1143 if (x
== 0 || GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1146 /* If we can find a match, move it to our level unless it is already at
1148 p
= find_temp_slot_from_address (XEXP (x
, 0));
1151 p
->level
= MIN (p
->level
, temp_slot_level
);
1158 /* Free all temporaries used so far. This is normally called at the end
1159 of generating code for a statement. Don't free any temporaries
1160 currently in use for an RTL_EXPR that hasn't yet been emitted.
1161 We could eventually do better than this since it can be reused while
1162 generating the same RTL_EXPR, but this is complex and probably not
1168 struct temp_slot
*p
;
1170 for (p
= temp_slots
; p
; p
= p
->next
)
1171 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
1172 && p
->rtl_expr
== 0)
1175 combine_temp_slots ();
1178 /* Free all temporary slots used in T, an RTL_EXPR node. */
1181 free_temps_for_rtl_expr (t
)
1184 struct temp_slot
*p
;
1186 for (p
= temp_slots
; p
; p
= p
->next
)
1187 if (p
->rtl_expr
== t
)
1190 combine_temp_slots ();
1193 /* Mark all temporaries ever allocated in this function as not suitable
1194 for reuse until the current level is exited. */
1197 mark_all_temps_used ()
1199 struct temp_slot
*p
;
1201 for (p
= temp_slots
; p
; p
= p
->next
)
1203 p
->in_use
= p
->keep
= 1;
1204 p
->level
= MIN (p
->level
, temp_slot_level
);
1208 /* Push deeper into the nesting level for stack temporaries. */
1216 /* Likewise, but save the new level as the place to allocate variables
1221 push_temp_slots_for_block ()
1225 var_temp_slot_level
= temp_slot_level
;
1228 /* Likewise, but save the new level as the place to allocate temporaries
1229 for TARGET_EXPRs. */
1232 push_temp_slots_for_target ()
1236 target_temp_slot_level
= temp_slot_level
;
1239 /* Set and get the value of target_temp_slot_level. The only
1240 permitted use of these functions is to save and restore this value. */
1243 get_target_temp_slot_level ()
1245 return target_temp_slot_level
;
1249 set_target_temp_slot_level (level
)
1252 target_temp_slot_level
= level
;
1256 /* Pop a temporary nesting level. All slots in use in the current level
1262 struct temp_slot
*p
;
1264 for (p
= temp_slots
; p
; p
= p
->next
)
1265 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->rtl_expr
== 0)
1268 combine_temp_slots ();
1273 /* Initialize temporary slots. */
1278 /* We have not allocated any temporaries yet. */
1280 temp_slot_level
= 0;
1281 var_temp_slot_level
= 0;
1282 target_temp_slot_level
= 0;
1285 /* Retroactively move an auto variable from a register to a stack slot.
1286 This is done when an address-reference to the variable is seen. */
1289 put_var_into_stack (decl
)
1293 enum machine_mode promoted_mode
, decl_mode
;
1294 struct function
*function
= 0;
1296 int can_use_addressof
;
1298 context
= decl_function_context (decl
);
1300 /* Get the current rtl used for this object and its original mode. */
1301 reg
= TREE_CODE (decl
) == SAVE_EXPR
? SAVE_EXPR_RTL (decl
) : DECL_RTL (decl
);
1303 /* No need to do anything if decl has no rtx yet
1304 since in that case caller is setting TREE_ADDRESSABLE
1305 and a stack slot will be assigned when the rtl is made. */
1309 /* Get the declared mode for this object. */
1310 decl_mode
= (TREE_CODE (decl
) == SAVE_EXPR
? TYPE_MODE (TREE_TYPE (decl
))
1311 : DECL_MODE (decl
));
1312 /* Get the mode it's actually stored in. */
1313 promoted_mode
= GET_MODE (reg
);
1315 /* If this variable comes from an outer function,
1316 find that function's saved context. */
1317 if (context
!= current_function_decl
&& context
!= inline_function_decl
)
1318 for (function
= outer_function_chain
; function
; function
= function
->next
)
1319 if (function
->decl
== context
)
1322 /* If this is a variable-size object with a pseudo to address it,
1323 put that pseudo into the stack, if the var is nonlocal. */
1324 if (DECL_NONLOCAL (decl
)
1325 && GET_CODE (reg
) == MEM
1326 && GET_CODE (XEXP (reg
, 0)) == REG
1327 && REGNO (XEXP (reg
, 0)) > LAST_VIRTUAL_REGISTER
)
1329 reg
= XEXP (reg
, 0);
1330 decl_mode
= promoted_mode
= GET_MODE (reg
);
1336 /* FIXME make it work for promoted modes too */
1337 && decl_mode
== promoted_mode
1338 #ifdef NON_SAVING_SETJMP
1339 && ! (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
1343 /* If we can't use ADDRESSOF, make sure we see through one we already
1345 if (! can_use_addressof
&& GET_CODE (reg
) == MEM
1346 && GET_CODE (XEXP (reg
, 0)) == ADDRESSOF
)
1347 reg
= XEXP (XEXP (reg
, 0), 0);
1349 /* Now we should have a value that resides in one or more pseudo regs. */
1351 if (GET_CODE (reg
) == REG
)
1353 /* If this variable lives in the current function and we don't need
1354 to put things in the stack for the sake of setjmp, try to keep it
1355 in a register until we know we actually need the address. */
1356 if (can_use_addressof
)
1357 gen_mem_addressof (reg
, decl
);
1359 put_reg_into_stack (function
, reg
, TREE_TYPE (decl
),
1360 promoted_mode
, decl_mode
,
1361 TREE_SIDE_EFFECTS (decl
), 0,
1362 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1365 else if (GET_CODE (reg
) == CONCAT
)
1367 /* A CONCAT contains two pseudos; put them both in the stack.
1368 We do it so they end up consecutive. */
1369 enum machine_mode part_mode
= GET_MODE (XEXP (reg
, 0));
1370 tree part_type
= TREE_TYPE (TREE_TYPE (decl
));
1371 #ifdef FRAME_GROWS_DOWNWARD
1372 /* Since part 0 should have a lower address, do it second. */
1373 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1374 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1375 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1377 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1378 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1379 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1382 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1383 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1384 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1386 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1387 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1388 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1392 /* Change the CONCAT into a combined MEM for both parts. */
1393 PUT_CODE (reg
, MEM
);
1394 MEM_VOLATILE_P (reg
) = MEM_VOLATILE_P (XEXP (reg
, 0));
1395 MEM_ALIAS_SET (reg
) = get_alias_set (decl
);
1397 /* The two parts are in memory order already.
1398 Use the lower parts address as ours. */
1399 XEXP (reg
, 0) = XEXP (XEXP (reg
, 0), 0);
1400 /* Prevent sharing of rtl that might lose. */
1401 if (GET_CODE (XEXP (reg
, 0)) == PLUS
)
1402 XEXP (reg
, 0) = copy_rtx (XEXP (reg
, 0));
1407 if (current_function_check_memory_usage
)
1408 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
1409 XEXP (reg
, 0), Pmode
,
1410 GEN_INT (GET_MODE_SIZE (GET_MODE (reg
))),
1411 TYPE_MODE (sizetype
),
1412 GEN_INT (MEMORY_USE_RW
),
1413 TYPE_MODE (integer_type_node
));
1416 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1417 into the stack frame of FUNCTION (0 means the current function).
1418 DECL_MODE is the machine mode of the user-level data type.
1419 PROMOTED_MODE is the machine mode of the register.
1420 VOLATILE_P is nonzero if this is for a "volatile" decl.
1421 USED_P is nonzero if this reg might have already been used in an insn. */
1424 put_reg_into_stack (function
, reg
, type
, promoted_mode
, decl_mode
, volatile_p
,
1425 original_regno
, used_p
, ht
)
1426 struct function
*function
;
1429 enum machine_mode promoted_mode
, decl_mode
;
1433 struct hash_table
*ht
;
1435 struct function
*func
= function
? function
: cfun
;
1437 int regno
= original_regno
;
1440 regno
= REGNO (reg
);
1442 if (regno
< func
->x_max_parm_reg
)
1443 new = func
->x_parm_reg_stack_loc
[regno
];
1445 new = assign_stack_local_1 (decl_mode
, GET_MODE_SIZE (decl_mode
), 0, func
);
1447 PUT_CODE (reg
, MEM
);
1448 PUT_MODE (reg
, decl_mode
);
1449 XEXP (reg
, 0) = XEXP (new, 0);
1450 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1451 MEM_VOLATILE_P (reg
) = volatile_p
;
1453 /* If this is a memory ref that contains aggregate components,
1454 mark it as such for cse and loop optimize. If we are reusing a
1455 previously generated stack slot, then we need to copy the bit in
1456 case it was set for other reasons. For instance, it is set for
1457 __builtin_va_alist. */
1458 MEM_SET_IN_STRUCT_P (reg
,
1459 AGGREGATE_TYPE_P (type
) || MEM_IN_STRUCT_P (new));
1460 MEM_ALIAS_SET (reg
) = get_alias_set (type
);
1462 /* Now make sure that all refs to the variable, previously made
1463 when it was a register, are fixed up to be valid again. */
1465 if (used_p
&& function
!= 0)
1467 struct var_refs_queue
*temp
;
1470 = (struct var_refs_queue
*) xmalloc (sizeof (struct var_refs_queue
));
1471 temp
->modified
= reg
;
1472 temp
->promoted_mode
= promoted_mode
;
1473 temp
->unsignedp
= TREE_UNSIGNED (type
);
1474 temp
->next
= function
->fixup_var_refs_queue
;
1475 function
->fixup_var_refs_queue
= temp
;
1478 /* Variable is local; fix it up now. */
1479 fixup_var_refs (reg
, promoted_mode
, TREE_UNSIGNED (type
), ht
);
1483 fixup_var_refs (var
, promoted_mode
, unsignedp
, ht
)
1485 enum machine_mode promoted_mode
;
1487 struct hash_table
*ht
;
1490 rtx first_insn
= get_insns ();
1491 struct sequence_stack
*stack
= seq_stack
;
1492 tree rtl_exps
= rtl_expr_chain
;
1494 /* Must scan all insns for stack-refs that exceed the limit. */
1495 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, first_insn
,
1497 /* If there's a hash table, it must record all uses of VAR. */
1501 /* Scan all pending sequences too. */
1502 for (; stack
; stack
= stack
->next
)
1504 push_to_sequence (stack
->first
);
1505 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
,
1506 stack
->first
, stack
->next
!= 0, 0);
1507 /* Update remembered end of sequence
1508 in case we added an insn at the end. */
1509 stack
->last
= get_last_insn ();
1513 /* Scan all waiting RTL_EXPRs too. */
1514 for (pending
= rtl_exps
; pending
; pending
= TREE_CHAIN (pending
))
1516 rtx seq
= RTL_EXPR_SEQUENCE (TREE_VALUE (pending
));
1517 if (seq
!= const0_rtx
&& seq
!= 0)
1519 push_to_sequence (seq
);
1520 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, seq
, 0,
1526 /* Scan the catch clauses for exception handling too. */
1527 push_to_sequence (catch_clauses
);
1528 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, catch_clauses
,
1533 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1534 some part of an insn. Return a struct fixup_replacement whose OLD
1535 value is equal to X. Allocate a new structure if no such entry exists. */
1537 static struct fixup_replacement
*
1538 find_fixup_replacement (replacements
, x
)
1539 struct fixup_replacement
**replacements
;
1542 struct fixup_replacement
*p
;
1544 /* See if we have already replaced this. */
1545 for (p
= *replacements
; p
!= 0 && ! rtx_equal_p (p
->old
, x
); p
= p
->next
)
1550 p
= (struct fixup_replacement
*) oballoc (sizeof (struct fixup_replacement
));
1553 p
->next
= *replacements
;
1560 /* Scan the insn-chain starting with INSN for refs to VAR
1561 and fix them up. TOPLEVEL is nonzero if this chain is the
1562 main chain of insns for the current function. */
1565 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, insn
, toplevel
, ht
)
1567 enum machine_mode promoted_mode
;
1571 struct hash_table
*ht
;
1574 rtx insn_list
= NULL_RTX
;
1576 /* If we already know which INSNs reference VAR there's no need
1577 to walk the entire instruction chain. */
1580 insn_list
= ((struct insns_for_mem_entry
*)
1581 hash_lookup (ht
, var
, /*create=*/0, /*copy=*/0))->insns
;
1582 insn
= insn_list
? XEXP (insn_list
, 0) : NULL_RTX
;
1583 insn_list
= XEXP (insn_list
, 1);
1588 rtx next
= NEXT_INSN (insn
);
1589 rtx set
, prev
, prev_set
;
1592 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
1594 /* Remember the notes in case we delete the insn. */
1595 note
= REG_NOTES (insn
);
1597 /* If this is a CLOBBER of VAR, delete it.
1599 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1600 and REG_RETVAL notes too. */
1601 if (GET_CODE (PATTERN (insn
)) == CLOBBER
1602 && (XEXP (PATTERN (insn
), 0) == var
1603 || (GET_CODE (XEXP (PATTERN (insn
), 0)) == CONCAT
1604 && (XEXP (XEXP (PATTERN (insn
), 0), 0) == var
1605 || XEXP (XEXP (PATTERN (insn
), 0), 1) == var
))))
1607 if ((note
= find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
)) != 0)
1608 /* The REG_LIBCALL note will go away since we are going to
1609 turn INSN into a NOTE, so just delete the
1610 corresponding REG_RETVAL note. */
1611 remove_note (XEXP (note
, 0),
1612 find_reg_note (XEXP (note
, 0), REG_RETVAL
,
1615 /* In unoptimized compilation, we shouldn't call delete_insn
1616 except in jump.c doing warnings. */
1617 PUT_CODE (insn
, NOTE
);
1618 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1619 NOTE_SOURCE_FILE (insn
) = 0;
1622 /* The insn to load VAR from a home in the arglist
1623 is now a no-op. When we see it, just delete it.
1624 Similarly if this is storing VAR from a register from which
1625 it was loaded in the previous insn. This will occur
1626 when an ADDRESSOF was made for an arglist slot. */
1628 && (set
= single_set (insn
)) != 0
1629 && SET_DEST (set
) == var
1630 /* If this represents the result of an insn group,
1631 don't delete the insn. */
1632 && find_reg_note (insn
, REG_RETVAL
, NULL_RTX
) == 0
1633 && (rtx_equal_p (SET_SRC (set
), var
)
1634 || (GET_CODE (SET_SRC (set
)) == REG
1635 && (prev
= prev_nonnote_insn (insn
)) != 0
1636 && (prev_set
= single_set (prev
)) != 0
1637 && SET_DEST (prev_set
) == SET_SRC (set
)
1638 && rtx_equal_p (SET_SRC (prev_set
), var
))))
1640 /* In unoptimized compilation, we shouldn't call delete_insn
1641 except in jump.c doing warnings. */
1642 PUT_CODE (insn
, NOTE
);
1643 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1644 NOTE_SOURCE_FILE (insn
) = 0;
1645 if (insn
== last_parm_insn
)
1646 last_parm_insn
= PREV_INSN (next
);
1650 struct fixup_replacement
*replacements
= 0;
1651 rtx next_insn
= NEXT_INSN (insn
);
1653 if (SMALL_REGISTER_CLASSES
)
1655 /* If the insn that copies the results of a CALL_INSN
1656 into a pseudo now references VAR, we have to use an
1657 intermediate pseudo since we want the life of the
1658 return value register to be only a single insn.
1660 If we don't use an intermediate pseudo, such things as
1661 address computations to make the address of VAR valid
1662 if it is not can be placed between the CALL_INSN and INSN.
1664 To make sure this doesn't happen, we record the destination
1665 of the CALL_INSN and see if the next insn uses both that
1668 if (call_dest
!= 0 && GET_CODE (insn
) == INSN
1669 && reg_mentioned_p (var
, PATTERN (insn
))
1670 && reg_mentioned_p (call_dest
, PATTERN (insn
)))
1672 rtx temp
= gen_reg_rtx (GET_MODE (call_dest
));
1674 emit_insn_before (gen_move_insn (temp
, call_dest
), insn
);
1676 PATTERN (insn
) = replace_rtx (PATTERN (insn
),
1680 if (GET_CODE (insn
) == CALL_INSN
1681 && GET_CODE (PATTERN (insn
)) == SET
)
1682 call_dest
= SET_DEST (PATTERN (insn
));
1683 else if (GET_CODE (insn
) == CALL_INSN
1684 && GET_CODE (PATTERN (insn
)) == PARALLEL
1685 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1686 call_dest
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 0));
1691 /* See if we have to do anything to INSN now that VAR is in
1692 memory. If it needs to be loaded into a pseudo, use a single
1693 pseudo for the entire insn in case there is a MATCH_DUP
1694 between two operands. We pass a pointer to the head of
1695 a list of struct fixup_replacements. If fixup_var_refs_1
1696 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1697 it will record them in this list.
1699 If it allocated a pseudo for any replacement, we copy into
1702 fixup_var_refs_1 (var
, promoted_mode
, &PATTERN (insn
), insn
,
1705 /* If this is last_parm_insn, and any instructions were output
1706 after it to fix it up, then we must set last_parm_insn to
1707 the last such instruction emitted. */
1708 if (insn
== last_parm_insn
)
1709 last_parm_insn
= PREV_INSN (next_insn
);
1711 while (replacements
)
1713 if (GET_CODE (replacements
->new) == REG
)
1718 /* OLD might be a (subreg (mem)). */
1719 if (GET_CODE (replacements
->old
) == SUBREG
)
1721 = fixup_memory_subreg (replacements
->old
, insn
, 0);
1724 = fixup_stack_1 (replacements
->old
, insn
);
1726 insert_before
= insn
;
1728 /* If we are changing the mode, do a conversion.
1729 This might be wasteful, but combine.c will
1730 eliminate much of the waste. */
1732 if (GET_MODE (replacements
->new)
1733 != GET_MODE (replacements
->old
))
1736 convert_move (replacements
->new,
1737 replacements
->old
, unsignedp
);
1738 seq
= gen_sequence ();
1742 seq
= gen_move_insn (replacements
->new,
1745 emit_insn_before (seq
, insert_before
);
1748 replacements
= replacements
->next
;
1752 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1753 But don't touch other insns referred to by reg-notes;
1754 we will get them elsewhere. */
1757 if (GET_CODE (note
) != INSN_LIST
)
1759 = walk_fixup_memory_subreg (XEXP (note
, 0), insn
, 1);
1760 note
= XEXP (note
, 1);
1768 insn
= XEXP (insn_list
, 0);
1769 insn_list
= XEXP (insn_list
, 1);
1776 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1777 See if the rtx expression at *LOC in INSN needs to be changed.
1779 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1780 contain a list of original rtx's and replacements. If we find that we need
1781 to modify this insn by replacing a memory reference with a pseudo or by
1782 making a new MEM to implement a SUBREG, we consult that list to see if
1783 we have already chosen a replacement. If none has already been allocated,
1784 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1785 or the SUBREG, as appropriate, to the pseudo. */
1788 fixup_var_refs_1 (var
, promoted_mode
, loc
, insn
, replacements
)
1790 enum machine_mode promoted_mode
;
1793 struct fixup_replacement
**replacements
;
1796 register rtx x
= *loc
;
1797 RTX_CODE code
= GET_CODE (x
);
1798 register const char *fmt
;
1799 register rtx tem
, tem1
;
1800 struct fixup_replacement
*replacement
;
1805 if (XEXP (x
, 0) == var
)
1807 /* Prevent sharing of rtl that might lose. */
1808 rtx sub
= copy_rtx (XEXP (var
, 0));
1810 if (! validate_change (insn
, loc
, sub
, 0))
1812 rtx y
= gen_reg_rtx (GET_MODE (sub
));
1815 /* We should be able to replace with a register or all is lost.
1816 Note that we can't use validate_change to verify this, since
1817 we're not caring for replacing all dups simultaneously. */
1818 if (! validate_replace_rtx (*loc
, y
, insn
))
1821 /* Careful! First try to recognize a direct move of the
1822 value, mimicking how things are done in gen_reload wrt
1823 PLUS. Consider what happens when insn is a conditional
1824 move instruction and addsi3 clobbers flags. */
1827 new_insn
= emit_insn (gen_rtx_SET (VOIDmode
, y
, sub
));
1828 seq
= gen_sequence ();
1831 if (recog_memoized (new_insn
) < 0)
1833 /* That failed. Fall back on force_operand and hope. */
1836 force_operand (sub
, y
);
1837 seq
= gen_sequence ();
1842 /* Don't separate setter from user. */
1843 if (PREV_INSN (insn
) && sets_cc0_p (PREV_INSN (insn
)))
1844 insn
= PREV_INSN (insn
);
1847 emit_insn_before (seq
, insn
);
1855 /* If we already have a replacement, use it. Otherwise,
1856 try to fix up this address in case it is invalid. */
1858 replacement
= find_fixup_replacement (replacements
, var
);
1859 if (replacement
->new)
1861 *loc
= replacement
->new;
1865 *loc
= replacement
->new = x
= fixup_stack_1 (x
, insn
);
1867 /* Unless we are forcing memory to register or we changed the mode,
1868 we can leave things the way they are if the insn is valid. */
1870 INSN_CODE (insn
) = -1;
1871 if (! flag_force_mem
&& GET_MODE (x
) == promoted_mode
1872 && recog_memoized (insn
) >= 0)
1875 *loc
= replacement
->new = gen_reg_rtx (promoted_mode
);
1879 /* If X contains VAR, we need to unshare it here so that we update
1880 each occurrence separately. But all identical MEMs in one insn
1881 must be replaced with the same rtx because of the possibility of
1884 if (reg_mentioned_p (var
, x
))
1886 replacement
= find_fixup_replacement (replacements
, x
);
1887 if (replacement
->new == 0)
1888 replacement
->new = copy_most_rtx (x
, var
);
1890 *loc
= x
= replacement
->new;
1906 /* Note that in some cases those types of expressions are altered
1907 by optimize_bit_field, and do not survive to get here. */
1908 if (XEXP (x
, 0) == var
1909 || (GET_CODE (XEXP (x
, 0)) == SUBREG
1910 && SUBREG_REG (XEXP (x
, 0)) == var
))
1912 /* Get TEM as a valid MEM in the mode presently in the insn.
1914 We don't worry about the possibility of MATCH_DUP here; it
1915 is highly unlikely and would be tricky to handle. */
1918 if (GET_CODE (tem
) == SUBREG
)
1920 if (GET_MODE_BITSIZE (GET_MODE (tem
))
1921 > GET_MODE_BITSIZE (GET_MODE (var
)))
1923 replacement
= find_fixup_replacement (replacements
, var
);
1924 if (replacement
->new == 0)
1925 replacement
->new = gen_reg_rtx (GET_MODE (var
));
1926 SUBREG_REG (tem
) = replacement
->new;
1929 tem
= fixup_memory_subreg (tem
, insn
, 0);
1932 tem
= fixup_stack_1 (tem
, insn
);
1934 /* Unless we want to load from memory, get TEM into the proper mode
1935 for an extract from memory. This can only be done if the
1936 extract is at a constant position and length. */
1938 if (! flag_force_mem
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
1939 && GET_CODE (XEXP (x
, 2)) == CONST_INT
1940 && ! mode_dependent_address_p (XEXP (tem
, 0))
1941 && ! MEM_VOLATILE_P (tem
))
1943 enum machine_mode wanted_mode
= VOIDmode
;
1944 enum machine_mode is_mode
= GET_MODE (tem
);
1945 HOST_WIDE_INT pos
= INTVAL (XEXP (x
, 2));
1948 if (GET_CODE (x
) == ZERO_EXTRACT
)
1951 = insn_data
[(int) CODE_FOR_extzv
].operand
[1].mode
;
1952 if (wanted_mode
== VOIDmode
)
1953 wanted_mode
= word_mode
;
1957 if (GET_CODE (x
) == SIGN_EXTRACT
)
1959 wanted_mode
= insn_data
[(int) CODE_FOR_extv
].operand
[1].mode
;
1960 if (wanted_mode
== VOIDmode
)
1961 wanted_mode
= word_mode
;
1964 /* If we have a narrower mode, we can do something. */
1965 if (wanted_mode
!= VOIDmode
1966 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
1968 HOST_WIDE_INT offset
= pos
/ BITS_PER_UNIT
;
1969 rtx old_pos
= XEXP (x
, 2);
1972 /* If the bytes and bits are counted differently, we
1973 must adjust the offset. */
1974 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
1975 offset
= (GET_MODE_SIZE (is_mode
)
1976 - GET_MODE_SIZE (wanted_mode
) - offset
);
1978 pos
%= GET_MODE_BITSIZE (wanted_mode
);
1980 newmem
= gen_rtx_MEM (wanted_mode
,
1981 plus_constant (XEXP (tem
, 0), offset
));
1982 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
1983 MEM_COPY_ATTRIBUTES (newmem
, tem
);
1985 /* Make the change and see if the insn remains valid. */
1986 INSN_CODE (insn
) = -1;
1987 XEXP (x
, 0) = newmem
;
1988 XEXP (x
, 2) = GEN_INT (pos
);
1990 if (recog_memoized (insn
) >= 0)
1993 /* Otherwise, restore old position. XEXP (x, 0) will be
1995 XEXP (x
, 2) = old_pos
;
1999 /* If we get here, the bitfield extract insn can't accept a memory
2000 reference. Copy the input into a register. */
2002 tem1
= gen_reg_rtx (GET_MODE (tem
));
2003 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
2010 if (SUBREG_REG (x
) == var
)
2012 /* If this is a special SUBREG made because VAR was promoted
2013 from a wider mode, replace it with VAR and call ourself
2014 recursively, this time saying that the object previously
2015 had its current mode (by virtue of the SUBREG). */
2017 if (SUBREG_PROMOTED_VAR_P (x
))
2020 fixup_var_refs_1 (var
, GET_MODE (var
), loc
, insn
, replacements
);
2024 /* If this SUBREG makes VAR wider, it has become a paradoxical
2025 SUBREG with VAR in memory, but these aren't allowed at this
2026 stage of the compilation. So load VAR into a pseudo and take
2027 a SUBREG of that pseudo. */
2028 if (GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (var
)))
2030 replacement
= find_fixup_replacement (replacements
, var
);
2031 if (replacement
->new == 0)
2032 replacement
->new = gen_reg_rtx (GET_MODE (var
));
2033 SUBREG_REG (x
) = replacement
->new;
2037 /* See if we have already found a replacement for this SUBREG.
2038 If so, use it. Otherwise, make a MEM and see if the insn
2039 is recognized. If not, or if we should force MEM into a register,
2040 make a pseudo for this SUBREG. */
2041 replacement
= find_fixup_replacement (replacements
, x
);
2042 if (replacement
->new)
2044 *loc
= replacement
->new;
2048 replacement
->new = *loc
= fixup_memory_subreg (x
, insn
, 0);
2050 INSN_CODE (insn
) = -1;
2051 if (! flag_force_mem
&& recog_memoized (insn
) >= 0)
2054 *loc
= replacement
->new = gen_reg_rtx (GET_MODE (x
));
2060 /* First do special simplification of bit-field references. */
2061 if (GET_CODE (SET_DEST (x
)) == SIGN_EXTRACT
2062 || GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
2063 optimize_bit_field (x
, insn
, 0);
2064 if (GET_CODE (SET_SRC (x
)) == SIGN_EXTRACT
2065 || GET_CODE (SET_SRC (x
)) == ZERO_EXTRACT
)
2066 optimize_bit_field (x
, insn
, NULL_PTR
);
2068 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2069 into a register and then store it back out. */
2070 if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
2071 && GET_CODE (XEXP (SET_DEST (x
), 0)) == SUBREG
2072 && SUBREG_REG (XEXP (SET_DEST (x
), 0)) == var
2073 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x
), 0)))
2074 > GET_MODE_SIZE (GET_MODE (var
))))
2076 replacement
= find_fixup_replacement (replacements
, var
);
2077 if (replacement
->new == 0)
2078 replacement
->new = gen_reg_rtx (GET_MODE (var
));
2080 SUBREG_REG (XEXP (SET_DEST (x
), 0)) = replacement
->new;
2081 emit_insn_after (gen_move_insn (var
, replacement
->new), insn
);
2084 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2085 insn into a pseudo and store the low part of the pseudo into VAR. */
2086 if (GET_CODE (SET_DEST (x
)) == SUBREG
2087 && SUBREG_REG (SET_DEST (x
)) == var
2088 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
2089 > GET_MODE_SIZE (GET_MODE (var
))))
2091 SET_DEST (x
) = tem
= gen_reg_rtx (GET_MODE (SET_DEST (x
)));
2092 emit_insn_after (gen_move_insn (var
, gen_lowpart (GET_MODE (var
),
2099 rtx dest
= SET_DEST (x
);
2100 rtx src
= SET_SRC (x
);
2102 rtx outerdest
= dest
;
2105 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
2106 || GET_CODE (dest
) == SIGN_EXTRACT
2107 || GET_CODE (dest
) == ZERO_EXTRACT
)
2108 dest
= XEXP (dest
, 0);
2110 if (GET_CODE (src
) == SUBREG
)
2111 src
= XEXP (src
, 0);
2113 /* If VAR does not appear at the top level of the SET
2114 just scan the lower levels of the tree. */
2116 if (src
!= var
&& dest
!= var
)
2119 /* We will need to rerecognize this insn. */
2120 INSN_CODE (insn
) = -1;
2123 if (GET_CODE (outerdest
) == ZERO_EXTRACT
&& dest
== var
)
2125 /* Since this case will return, ensure we fixup all the
2127 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 1),
2128 insn
, replacements
);
2129 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 2),
2130 insn
, replacements
);
2131 fixup_var_refs_1 (var
, promoted_mode
, &SET_SRC (x
),
2132 insn
, replacements
);
2134 tem
= XEXP (outerdest
, 0);
2136 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2137 that may appear inside a ZERO_EXTRACT.
2138 This was legitimate when the MEM was a REG. */
2139 if (GET_CODE (tem
) == SUBREG
2140 && SUBREG_REG (tem
) == var
)
2141 tem
= fixup_memory_subreg (tem
, insn
, 0);
2143 tem
= fixup_stack_1 (tem
, insn
);
2145 if (GET_CODE (XEXP (outerdest
, 1)) == CONST_INT
2146 && GET_CODE (XEXP (outerdest
, 2)) == CONST_INT
2147 && ! mode_dependent_address_p (XEXP (tem
, 0))
2148 && ! MEM_VOLATILE_P (tem
))
2150 enum machine_mode wanted_mode
;
2151 enum machine_mode is_mode
= GET_MODE (tem
);
2152 HOST_WIDE_INT pos
= INTVAL (XEXP (outerdest
, 2));
2154 wanted_mode
= insn_data
[(int) CODE_FOR_insv
].operand
[0].mode
;
2155 if (wanted_mode
== VOIDmode
)
2156 wanted_mode
= word_mode
;
2158 /* If we have a narrower mode, we can do something. */
2159 if (GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
2161 HOST_WIDE_INT offset
= pos
/ BITS_PER_UNIT
;
2162 rtx old_pos
= XEXP (outerdest
, 2);
2165 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
2166 offset
= (GET_MODE_SIZE (is_mode
)
2167 - GET_MODE_SIZE (wanted_mode
) - offset
);
2169 pos
%= GET_MODE_BITSIZE (wanted_mode
);
2171 newmem
= gen_rtx_MEM (wanted_mode
,
2172 plus_constant (XEXP (tem
, 0),
2174 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
2175 MEM_COPY_ATTRIBUTES (newmem
, tem
);
2177 /* Make the change and see if the insn remains valid. */
2178 INSN_CODE (insn
) = -1;
2179 XEXP (outerdest
, 0) = newmem
;
2180 XEXP (outerdest
, 2) = GEN_INT (pos
);
2182 if (recog_memoized (insn
) >= 0)
2185 /* Otherwise, restore old position. XEXP (x, 0) will be
2187 XEXP (outerdest
, 2) = old_pos
;
2191 /* If we get here, the bit-field store doesn't allow memory
2192 or isn't located at a constant position. Load the value into
2193 a register, do the store, and put it back into memory. */
2195 tem1
= gen_reg_rtx (GET_MODE (tem
));
2196 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
2197 emit_insn_after (gen_move_insn (tem
, tem1
), insn
);
2198 XEXP (outerdest
, 0) = tem1
;
2203 /* STRICT_LOW_PART is a no-op on memory references
2204 and it can cause combinations to be unrecognizable,
2207 if (dest
== var
&& GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2208 SET_DEST (x
) = XEXP (SET_DEST (x
), 0);
2210 /* A valid insn to copy VAR into or out of a register
2211 must be left alone, to avoid an infinite loop here.
2212 If the reference to VAR is by a subreg, fix that up,
2213 since SUBREG is not valid for a memref.
2214 Also fix up the address of the stack slot.
2216 Note that we must not try to recognize the insn until
2217 after we know that we have valid addresses and no
2218 (subreg (mem ...) ...) constructs, since these interfere
2219 with determining the validity of the insn. */
2221 if ((SET_SRC (x
) == var
2222 || (GET_CODE (SET_SRC (x
)) == SUBREG
2223 && SUBREG_REG (SET_SRC (x
)) == var
))
2224 && (GET_CODE (SET_DEST (x
)) == REG
2225 || (GET_CODE (SET_DEST (x
)) == SUBREG
2226 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
))
2227 && GET_MODE (var
) == promoted_mode
2228 && x
== single_set (insn
))
2232 replacement
= find_fixup_replacement (replacements
, SET_SRC (x
));
2233 if (replacement
->new)
2234 SET_SRC (x
) = replacement
->new;
2235 else if (GET_CODE (SET_SRC (x
)) == SUBREG
)
2236 SET_SRC (x
) = replacement
->new
2237 = fixup_memory_subreg (SET_SRC (x
), insn
, 0);
2239 SET_SRC (x
) = replacement
->new
2240 = fixup_stack_1 (SET_SRC (x
), insn
);
2242 if (recog_memoized (insn
) >= 0)
2245 /* INSN is not valid, but we know that we want to
2246 copy SET_SRC (x) to SET_DEST (x) in some way. So
2247 we generate the move and see whether it requires more
2248 than one insn. If it does, we emit those insns and
2249 delete INSN. Otherwise, we an just replace the pattern
2250 of INSN; we have already verified above that INSN has
2251 no other function that to do X. */
2253 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2254 if (GET_CODE (pat
) == SEQUENCE
)
2256 emit_insn_after (pat
, insn
);
2257 PUT_CODE (insn
, NOTE
);
2258 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2259 NOTE_SOURCE_FILE (insn
) = 0;
2262 PATTERN (insn
) = pat
;
2267 if ((SET_DEST (x
) == var
2268 || (GET_CODE (SET_DEST (x
)) == SUBREG
2269 && SUBREG_REG (SET_DEST (x
)) == var
))
2270 && (GET_CODE (SET_SRC (x
)) == REG
2271 || (GET_CODE (SET_SRC (x
)) == SUBREG
2272 && GET_CODE (SUBREG_REG (SET_SRC (x
))) == REG
))
2273 && GET_MODE (var
) == promoted_mode
2274 && x
== single_set (insn
))
2278 if (GET_CODE (SET_DEST (x
)) == SUBREG
)
2279 SET_DEST (x
) = fixup_memory_subreg (SET_DEST (x
), insn
, 0);
2281 SET_DEST (x
) = fixup_stack_1 (SET_DEST (x
), insn
);
2283 if (recog_memoized (insn
) >= 0)
2286 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2287 if (GET_CODE (pat
) == SEQUENCE
)
2289 emit_insn_after (pat
, insn
);
2290 PUT_CODE (insn
, NOTE
);
2291 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2292 NOTE_SOURCE_FILE (insn
) = 0;
2295 PATTERN (insn
) = pat
;
2300 /* Otherwise, storing into VAR must be handled specially
2301 by storing into a temporary and copying that into VAR
2302 with a new insn after this one. Note that this case
2303 will be used when storing into a promoted scalar since
2304 the insn will now have different modes on the input
2305 and output and hence will be invalid (except for the case
2306 of setting it to a constant, which does not need any
2307 change if it is valid). We generate extra code in that case,
2308 but combine.c will eliminate it. */
2313 rtx fixeddest
= SET_DEST (x
);
2315 /* STRICT_LOW_PART can be discarded, around a MEM. */
2316 if (GET_CODE (fixeddest
) == STRICT_LOW_PART
)
2317 fixeddest
= XEXP (fixeddest
, 0);
2318 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2319 if (GET_CODE (fixeddest
) == SUBREG
)
2321 fixeddest
= fixup_memory_subreg (fixeddest
, insn
, 0);
2322 promoted_mode
= GET_MODE (fixeddest
);
2325 fixeddest
= fixup_stack_1 (fixeddest
, insn
);
2327 temp
= gen_reg_rtx (promoted_mode
);
2329 emit_insn_after (gen_move_insn (fixeddest
,
2330 gen_lowpart (GET_MODE (fixeddest
),
2334 SET_DEST (x
) = temp
;
2342 /* Nothing special about this RTX; fix its operands. */
2344 fmt
= GET_RTX_FORMAT (code
);
2345 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2348 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (x
, i
), insn
, replacements
);
2349 else if (fmt
[i
] == 'E')
2352 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2353 fixup_var_refs_1 (var
, promoted_mode
, &XVECEXP (x
, i
, j
),
2354 insn
, replacements
);
2359 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2360 return an rtx (MEM:m1 newaddr) which is equivalent.
2361 If any insns must be emitted to compute NEWADDR, put them before INSN.
2363 UNCRITICAL nonzero means accept paradoxical subregs.
2364 This is used for subregs found inside REG_NOTES. */
2367 fixup_memory_subreg (x
, insn
, uncritical
)
2372 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
2373 rtx addr
= XEXP (SUBREG_REG (x
), 0);
2374 enum machine_mode mode
= GET_MODE (x
);
2377 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2378 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))
2382 if (BYTES_BIG_ENDIAN
)
2383 offset
+= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
2384 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
2385 addr
= plus_constant (addr
, offset
);
2386 if (!flag_force_addr
&& memory_address_p (mode
, addr
))
2387 /* Shortcut if no insns need be emitted. */
2388 return change_address (SUBREG_REG (x
), mode
, addr
);
2390 result
= change_address (SUBREG_REG (x
), mode
, addr
);
2391 emit_insn_before (gen_sequence (), insn
);
2396 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2397 Replace subexpressions of X in place.
2398 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2399 Otherwise return X, with its contents possibly altered.
2401 If any insns must be emitted to compute NEWADDR, put them before INSN.
2403 UNCRITICAL is as in fixup_memory_subreg. */
2406 walk_fixup_memory_subreg (x
, insn
, uncritical
)
2411 register enum rtx_code code
;
2412 register const char *fmt
;
2418 code
= GET_CODE (x
);
2420 if (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == MEM
)
2421 return fixup_memory_subreg (x
, insn
, uncritical
);
2423 /* Nothing special about this RTX; fix its operands. */
2425 fmt
= GET_RTX_FORMAT (code
);
2426 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2429 XEXP (x
, i
) = walk_fixup_memory_subreg (XEXP (x
, i
), insn
, uncritical
);
2430 else if (fmt
[i
] == 'E')
2433 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2435 = walk_fixup_memory_subreg (XVECEXP (x
, i
, j
), insn
, uncritical
);
2441 /* For each memory ref within X, if it refers to a stack slot
2442 with an out of range displacement, put the address in a temp register
2443 (emitting new insns before INSN to load these registers)
2444 and alter the memory ref to use that register.
2445 Replace each such MEM rtx with a copy, to avoid clobberage. */
2448 fixup_stack_1 (x
, insn
)
2453 register RTX_CODE code
= GET_CODE (x
);
2454 register const char *fmt
;
2458 register rtx ad
= XEXP (x
, 0);
2459 /* If we have address of a stack slot but it's not valid
2460 (displacement is too large), compute the sum in a register. */
2461 if (GET_CODE (ad
) == PLUS
2462 && GET_CODE (XEXP (ad
, 0)) == REG
2463 && ((REGNO (XEXP (ad
, 0)) >= FIRST_VIRTUAL_REGISTER
2464 && REGNO (XEXP (ad
, 0)) <= LAST_VIRTUAL_REGISTER
)
2465 || REGNO (XEXP (ad
, 0)) == FRAME_POINTER_REGNUM
2466 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2467 || REGNO (XEXP (ad
, 0)) == HARD_FRAME_POINTER_REGNUM
2469 || REGNO (XEXP (ad
, 0)) == STACK_POINTER_REGNUM
2470 || REGNO (XEXP (ad
, 0)) == ARG_POINTER_REGNUM
2471 || XEXP (ad
, 0) == current_function_internal_arg_pointer
)
2472 && GET_CODE (XEXP (ad
, 1)) == CONST_INT
)
2475 if (memory_address_p (GET_MODE (x
), ad
))
2479 temp
= copy_to_reg (ad
);
2480 seq
= gen_sequence ();
2482 emit_insn_before (seq
, insn
);
2483 return change_address (x
, VOIDmode
, temp
);
2488 fmt
= GET_RTX_FORMAT (code
);
2489 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2492 XEXP (x
, i
) = fixup_stack_1 (XEXP (x
, i
), insn
);
2493 else if (fmt
[i
] == 'E')
2496 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2497 XVECEXP (x
, i
, j
) = fixup_stack_1 (XVECEXP (x
, i
, j
), insn
);
2503 /* Optimization: a bit-field instruction whose field
2504 happens to be a byte or halfword in memory
2505 can be changed to a move instruction.
2507 We call here when INSN is an insn to examine or store into a bit-field.
2508 BODY is the SET-rtx to be altered.
2510 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2511 (Currently this is called only from function.c, and EQUIV_MEM
2515 optimize_bit_field (body
, insn
, equiv_mem
)
2520 register rtx bitfield
;
2523 enum machine_mode mode
;
2525 if (GET_CODE (SET_DEST (body
)) == SIGN_EXTRACT
2526 || GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
)
2527 bitfield
= SET_DEST (body
), destflag
= 1;
2529 bitfield
= SET_SRC (body
), destflag
= 0;
2531 /* First check that the field being stored has constant size and position
2532 and is in fact a byte or halfword suitably aligned. */
2534 if (GET_CODE (XEXP (bitfield
, 1)) == CONST_INT
2535 && GET_CODE (XEXP (bitfield
, 2)) == CONST_INT
2536 && ((mode
= mode_for_size (INTVAL (XEXP (bitfield
, 1)), MODE_INT
, 1))
2538 && INTVAL (XEXP (bitfield
, 2)) % INTVAL (XEXP (bitfield
, 1)) == 0)
2540 register rtx memref
= 0;
2542 /* Now check that the containing word is memory, not a register,
2543 and that it is safe to change the machine mode. */
2545 if (GET_CODE (XEXP (bitfield
, 0)) == MEM
)
2546 memref
= XEXP (bitfield
, 0);
2547 else if (GET_CODE (XEXP (bitfield
, 0)) == REG
2549 memref
= equiv_mem
[REGNO (XEXP (bitfield
, 0))];
2550 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2551 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == MEM
)
2552 memref
= SUBREG_REG (XEXP (bitfield
, 0));
2553 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2555 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == REG
)
2556 memref
= equiv_mem
[REGNO (SUBREG_REG (XEXP (bitfield
, 0)))];
2559 && ! mode_dependent_address_p (XEXP (memref
, 0))
2560 && ! MEM_VOLATILE_P (memref
))
2562 /* Now adjust the address, first for any subreg'ing
2563 that we are now getting rid of,
2564 and then for which byte of the word is wanted. */
2566 HOST_WIDE_INT offset
= INTVAL (XEXP (bitfield
, 2));
2569 /* Adjust OFFSET to count bits from low-address byte. */
2570 if (BITS_BIG_ENDIAN
!= BYTES_BIG_ENDIAN
)
2571 offset
= (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield
, 0)))
2572 - offset
- INTVAL (XEXP (bitfield
, 1)));
2574 /* Adjust OFFSET to count bytes from low-address byte. */
2575 offset
/= BITS_PER_UNIT
;
2576 if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
)
2578 offset
+= SUBREG_WORD (XEXP (bitfield
, 0)) * UNITS_PER_WORD
;
2579 if (BYTES_BIG_ENDIAN
)
2580 offset
-= (MIN (UNITS_PER_WORD
,
2581 GET_MODE_SIZE (GET_MODE (XEXP (bitfield
, 0))))
2582 - MIN (UNITS_PER_WORD
,
2583 GET_MODE_SIZE (GET_MODE (memref
))));
2587 memref
= change_address (memref
, mode
,
2588 plus_constant (XEXP (memref
, 0), offset
));
2589 insns
= get_insns ();
2591 emit_insns_before (insns
, insn
);
2593 /* Store this memory reference where
2594 we found the bit field reference. */
2598 validate_change (insn
, &SET_DEST (body
), memref
, 1);
2599 if (! CONSTANT_ADDRESS_P (SET_SRC (body
)))
2601 rtx src
= SET_SRC (body
);
2602 while (GET_CODE (src
) == SUBREG
2603 && SUBREG_WORD (src
) == 0)
2604 src
= SUBREG_REG (src
);
2605 if (GET_MODE (src
) != GET_MODE (memref
))
2606 src
= gen_lowpart (GET_MODE (memref
), SET_SRC (body
));
2607 validate_change (insn
, &SET_SRC (body
), src
, 1);
2609 else if (GET_MODE (SET_SRC (body
)) != VOIDmode
2610 && GET_MODE (SET_SRC (body
)) != GET_MODE (memref
))
2611 /* This shouldn't happen because anything that didn't have
2612 one of these modes should have got converted explicitly
2613 and then referenced through a subreg.
2614 This is so because the original bit-field was
2615 handled by agg_mode and so its tree structure had
2616 the same mode that memref now has. */
2621 rtx dest
= SET_DEST (body
);
2623 while (GET_CODE (dest
) == SUBREG
2624 && SUBREG_WORD (dest
) == 0
2625 && (GET_MODE_CLASS (GET_MODE (dest
))
2626 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest
))))
2627 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
2629 dest
= SUBREG_REG (dest
);
2631 validate_change (insn
, &SET_DEST (body
), dest
, 1);
2633 if (GET_MODE (dest
) == GET_MODE (memref
))
2634 validate_change (insn
, &SET_SRC (body
), memref
, 1);
2637 /* Convert the mem ref to the destination mode. */
2638 rtx newreg
= gen_reg_rtx (GET_MODE (dest
));
2641 convert_move (newreg
, memref
,
2642 GET_CODE (SET_SRC (body
)) == ZERO_EXTRACT
);
2646 validate_change (insn
, &SET_SRC (body
), newreg
, 1);
2650 /* See if we can convert this extraction or insertion into
2651 a simple move insn. We might not be able to do so if this
2652 was, for example, part of a PARALLEL.
2654 If we succeed, write out any needed conversions. If we fail,
2655 it is hard to guess why we failed, so don't do anything
2656 special; just let the optimization be suppressed. */
2658 if (apply_change_group () && seq
)
2659 emit_insns_before (seq
, insn
);
2664 /* These routines are responsible for converting virtual register references
2665 to the actual hard register references once RTL generation is complete.
2667 The following four variables are used for communication between the
2668 routines. They contain the offsets of the virtual registers from their
2669 respective hard registers. */
2671 static int in_arg_offset
;
2672 static int var_offset
;
2673 static int dynamic_offset
;
2674 static int out_arg_offset
;
2675 static int cfa_offset
;
2677 /* In most machines, the stack pointer register is equivalent to the bottom
2680 #ifndef STACK_POINTER_OFFSET
2681 #define STACK_POINTER_OFFSET 0
2684 /* If not defined, pick an appropriate default for the offset of dynamically
2685 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2686 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2688 #ifndef STACK_DYNAMIC_OFFSET
2690 #ifdef ACCUMULATE_OUTGOING_ARGS
2691 /* The bottom of the stack points to the actual arguments. If
2692 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2693 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
2694 stack space for register parameters is not pushed by the caller, but
2695 rather part of the fixed stack areas and hence not included in
2696 `current_function_outgoing_args_size'. Nevertheless, we must allow
2697 for it when allocating stack dynamic objects. */
2699 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2700 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2701 (current_function_outgoing_args_size \
2702 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2705 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2706 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2710 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2714 /* On a few machines, the CFA coincides with the arg pointer. */
2716 #ifndef ARG_POINTER_CFA_OFFSET
2717 #define ARG_POINTER_CFA_OFFSET 0
2721 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2722 its address taken. DECL is the decl for the object stored in the
2723 register, for later use if we do need to force REG into the stack.
2724 REG is overwritten by the MEM like in put_reg_into_stack. */
2727 gen_mem_addressof (reg
, decl
)
2731 tree type
= TREE_TYPE (decl
);
2732 rtx r
= gen_rtx_ADDRESSOF (Pmode
, gen_reg_rtx (GET_MODE (reg
)),
2735 /* If the original REG was a user-variable, then so is the REG whose
2736 address is being taken. Likewise for unchanging. */
2737 REG_USERVAR_P (XEXP (r
, 0)) = REG_USERVAR_P (reg
);
2738 RTX_UNCHANGING_P (XEXP (r
, 0)) = RTX_UNCHANGING_P (reg
);
2740 PUT_CODE (reg
, MEM
);
2741 PUT_MODE (reg
, DECL_MODE (decl
));
2743 MEM_VOLATILE_P (reg
) = TREE_SIDE_EFFECTS (decl
);
2744 MEM_SET_IN_STRUCT_P (reg
, AGGREGATE_TYPE_P (type
));
2745 MEM_ALIAS_SET (reg
) = get_alias_set (decl
);
2747 if (TREE_USED (decl
) || DECL_INITIAL (decl
) != 0)
2748 fixup_var_refs (reg
, GET_MODE (reg
), TREE_UNSIGNED (type
), 0);
2753 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2757 flush_addressof (decl
)
2760 if ((TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == VAR_DECL
)
2761 && DECL_RTL (decl
) != 0
2762 && GET_CODE (DECL_RTL (decl
)) == MEM
2763 && GET_CODE (XEXP (DECL_RTL (decl
), 0)) == ADDRESSOF
2764 && GET_CODE (XEXP (XEXP (DECL_RTL (decl
), 0), 0)) == REG
)
2765 put_addressof_into_stack (XEXP (DECL_RTL (decl
), 0), 0);
2769 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2772 put_addressof_into_stack (r
, ht
)
2774 struct hash_table
*ht
;
2776 tree decl
= ADDRESSOF_DECL (r
);
2777 rtx reg
= XEXP (r
, 0);
2779 if (GET_CODE (reg
) != REG
)
2782 put_reg_into_stack (0, reg
, TREE_TYPE (decl
), GET_MODE (reg
),
2783 DECL_MODE (decl
), TREE_SIDE_EFFECTS (decl
),
2784 ADDRESSOF_REGNO (r
),
2785 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0, ht
);
2788 /* List of replacements made below in purge_addressof_1 when creating
2789 bitfield insertions. */
2790 static rtx purge_bitfield_addressof_replacements
;
2792 /* List of replacements made below in purge_addressof_1 for patterns
2793 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2794 corresponding (ADDRESSOF (REG ...)) and value is a substitution for
2795 the all pattern. List PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2796 enough in complex cases, e.g. when some field values can be
2797 extracted by usage MEM with narrower mode. */
2798 static rtx purge_addressof_replacements
;
2800 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2801 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2802 the stack. If the function returns FALSE then the replacement could not
2806 purge_addressof_1 (loc
, insn
, force
, store
, ht
)
2810 struct hash_table
*ht
;
2816 boolean result
= true;
2818 /* Re-start here to avoid recursion in common cases. */
2825 code
= GET_CODE (x
);
2827 /* If we don't return in any of the cases below, we will recurse inside
2828 the RTX, which will normally result in any ADDRESSOF being forced into
2832 result
= purge_addressof_1 (&SET_DEST (x
), insn
, force
, 1, ht
);
2833 result
&= purge_addressof_1 (&SET_SRC (x
), insn
, force
, 0, ht
);
2837 else if (code
== ADDRESSOF
&& GET_CODE (XEXP (x
, 0)) == MEM
)
2839 /* We must create a copy of the rtx because it was created by
2840 overwriting a REG rtx which is always shared. */
2841 rtx sub
= copy_rtx (XEXP (XEXP (x
, 0), 0));
2844 if (validate_change (insn
, loc
, sub
, 0)
2845 || validate_replace_rtx (x
, sub
, insn
))
2849 sub
= force_operand (sub
, NULL_RTX
);
2850 if (! validate_change (insn
, loc
, sub
, 0)
2851 && ! validate_replace_rtx (x
, sub
, insn
))
2854 insns
= gen_sequence ();
2856 emit_insn_before (insns
, insn
);
2860 else if (code
== MEM
&& GET_CODE (XEXP (x
, 0)) == ADDRESSOF
&& ! force
)
2862 rtx sub
= XEXP (XEXP (x
, 0), 0);
2865 if (GET_CODE (sub
) == MEM
)
2867 sub2
= gen_rtx_MEM (GET_MODE (x
), copy_rtx (XEXP (sub
, 0)));
2868 MEM_COPY_ATTRIBUTES (sub2
, sub
);
2869 RTX_UNCHANGING_P (sub2
) = RTX_UNCHANGING_P (sub
);
2872 else if (GET_CODE (sub
) == REG
2873 && (MEM_VOLATILE_P (x
) || GET_MODE (x
) == BLKmode
))
2875 else if (GET_CODE (sub
) == REG
&& GET_MODE (x
) != GET_MODE (sub
))
2877 int size_x
, size_sub
;
2881 /* When processing REG_NOTES look at the list of
2882 replacements done on the insn to find the register that X
2886 for (tem
= purge_bitfield_addressof_replacements
;
2888 tem
= XEXP (XEXP (tem
, 1), 1))
2889 if (rtx_equal_p (x
, XEXP (tem
, 0)))
2891 *loc
= XEXP (XEXP (tem
, 1), 0);
2895 /* See comment for purge_addressof_replacements. */
2896 for (tem
= purge_addressof_replacements
;
2898 tem
= XEXP (XEXP (tem
, 1), 1))
2899 if (rtx_equal_p (XEXP (x
, 0), XEXP (tem
, 0)))
2901 rtx z
= XEXP (XEXP (tem
, 1), 0);
2903 if (GET_MODE (x
) == GET_MODE (z
)
2904 || (GET_CODE (XEXP (XEXP (tem
, 1), 0)) != REG
2905 && GET_CODE (XEXP (XEXP (tem
, 1), 0)) != SUBREG
))
2908 /* It can happen that the note may speak of things
2909 in a wider (or just different) mode than the
2910 code did. This is especially true of
2913 if (GET_CODE (z
) == SUBREG
&& SUBREG_WORD (z
) == 0)
2916 if (GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
2917 && (GET_MODE_SIZE (GET_MODE (x
))
2918 > GET_MODE_SIZE (GET_MODE (z
))))
2920 /* This can occur as a result in invalid
2921 pointer casts, e.g. float f; ...
2922 *(long long int *)&f.
2923 ??? We could emit a warning here, but
2924 without a line number that wouldn't be
2926 z
= gen_rtx_SUBREG (GET_MODE (x
), z
, 0);
2929 z
= gen_lowpart (GET_MODE (x
), z
);
2935 /* Sometimes we may not be able to find the replacement. For
2936 example when the original insn was a MEM in a wider mode,
2937 and the note is part of a sign extension of a narrowed
2938 version of that MEM. Gcc testcase compile/990829-1.c can
2939 generate an example of this siutation. Rather than complain
2940 we return false, which will prompt our caller to remove the
2945 size_x
= GET_MODE_BITSIZE (GET_MODE (x
));
2946 size_sub
= GET_MODE_BITSIZE (GET_MODE (sub
));
2948 /* Don't even consider working with paradoxical subregs,
2949 or the moral equivalent seen here. */
2950 if (size_x
<= size_sub
2951 && int_mode_for_mode (GET_MODE (sub
)) != BLKmode
)
2953 /* Do a bitfield insertion to mirror what would happen
2960 rtx p
= PREV_INSN (insn
);
2963 val
= gen_reg_rtx (GET_MODE (x
));
2964 if (! validate_change (insn
, loc
, val
, 0))
2966 /* Discard the current sequence and put the
2967 ADDRESSOF on stack. */
2971 seq
= gen_sequence ();
2973 emit_insn_before (seq
, insn
);
2974 compute_insns_for_mem (p
? NEXT_INSN (p
) : get_insns (),
2978 store_bit_field (sub
, size_x
, 0, GET_MODE (x
),
2979 val
, GET_MODE_SIZE (GET_MODE (sub
)),
2980 GET_MODE_SIZE (GET_MODE (sub
)));
2982 /* Make sure to unshare any shared rtl that store_bit_field
2983 might have created. */
2984 unshare_all_rtl_again (get_insns ());
2986 seq
= gen_sequence ();
2988 p
= emit_insn_after (seq
, insn
);
2989 if (NEXT_INSN (insn
))
2990 compute_insns_for_mem (NEXT_INSN (insn
),
2991 p
? NEXT_INSN (p
) : NULL_RTX
,
2996 rtx p
= PREV_INSN (insn
);
2999 val
= extract_bit_field (sub
, size_x
, 0, 1, NULL_RTX
,
3000 GET_MODE (x
), GET_MODE (x
),
3001 GET_MODE_SIZE (GET_MODE (sub
)),
3002 GET_MODE_SIZE (GET_MODE (sub
)));
3004 if (! validate_change (insn
, loc
, val
, 0))
3006 /* Discard the current sequence and put the
3007 ADDRESSOF on stack. */
3012 seq
= gen_sequence ();
3014 emit_insn_before (seq
, insn
);
3015 compute_insns_for_mem (p
? NEXT_INSN (p
) : get_insns (),
3019 /* Remember the replacement so that the same one can be done
3020 on the REG_NOTES. */
3021 purge_bitfield_addressof_replacements
3022 = gen_rtx_EXPR_LIST (VOIDmode
, x
,
3025 purge_bitfield_addressof_replacements
));
3027 /* We replaced with a reg -- all done. */
3032 else if (validate_change (insn
, loc
, sub
, 0))
3034 /* Remember the replacement so that the same one can be done
3035 on the REG_NOTES. */
3036 if (GET_CODE (sub
) == REG
|| GET_CODE (sub
) == SUBREG
)
3040 for (tem
= purge_addressof_replacements
;
3042 tem
= XEXP (XEXP (tem
, 1), 1))
3043 if (rtx_equal_p (XEXP (x
, 0), XEXP (tem
, 0)))
3045 XEXP (XEXP (tem
, 1), 0) = sub
;
3048 purge_addressof_replacements
3049 = gen_rtx (EXPR_LIST
, VOIDmode
, XEXP (x
, 0),
3050 gen_rtx_EXPR_LIST (VOIDmode
, sub
,
3051 purge_addressof_replacements
));
3057 /* else give up and put it into the stack */
3060 else if (code
== ADDRESSOF
)
3062 put_addressof_into_stack (x
, ht
);
3065 else if (code
== SET
)
3067 result
= purge_addressof_1 (&SET_DEST (x
), insn
, force
, 1, ht
);
3068 result
&= purge_addressof_1 (&SET_SRC (x
), insn
, force
, 0, ht
);
3072 /* Scan all subexpressions. */
3073 fmt
= GET_RTX_FORMAT (code
);
3074 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3077 result
&= purge_addressof_1 (&XEXP (x
, i
), insn
, force
, 0, ht
);
3078 else if (*fmt
== 'E')
3079 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3080 result
&= purge_addressof_1 (&XVECEXP (x
, i
, j
), insn
, force
, 0, ht
);
3086 /* Return a new hash table entry in HT. */
3088 static struct hash_entry
*
3089 insns_for_mem_newfunc (he
, ht
, k
)
3090 struct hash_entry
*he
;
3091 struct hash_table
*ht
;
3092 hash_table_key k ATTRIBUTE_UNUSED
;
3094 struct insns_for_mem_entry
*ifmhe
;
3098 ifmhe
= ((struct insns_for_mem_entry
*)
3099 hash_allocate (ht
, sizeof (struct insns_for_mem_entry
)));
3100 ifmhe
->insns
= NULL_RTX
;
3105 /* Return a hash value for K, a REG. */
3107 static unsigned long
3108 insns_for_mem_hash (k
)
3111 /* K is really a RTX. Just use the address as the hash value. */
3112 return (unsigned long) k
;
3115 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3118 insns_for_mem_comp (k1
, k2
)
3125 struct insns_for_mem_walk_info
{
3126 /* The hash table that we are using to record which INSNs use which
3128 struct hash_table
*ht
;
3130 /* The INSN we are currently proessing. */
3133 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3134 to find the insns that use the REGs in the ADDRESSOFs. */
3138 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3139 that might be used in an ADDRESSOF expression, record this INSN in
3140 the hash table given by DATA (which is really a pointer to an
3141 insns_for_mem_walk_info structure). */
3144 insns_for_mem_walk (r
, data
)
3148 struct insns_for_mem_walk_info
*ifmwi
3149 = (struct insns_for_mem_walk_info
*) data
;
3151 if (ifmwi
->pass
== 0 && *r
&& GET_CODE (*r
) == ADDRESSOF
3152 && GET_CODE (XEXP (*r
, 0)) == REG
)
3153 hash_lookup (ifmwi
->ht
, XEXP (*r
, 0), /*create=*/1, /*copy=*/0);
3154 else if (ifmwi
->pass
== 1 && *r
&& GET_CODE (*r
) == REG
)
3156 /* Lookup this MEM in the hashtable, creating it if necessary. */
3157 struct insns_for_mem_entry
*ifme
3158 = (struct insns_for_mem_entry
*) hash_lookup (ifmwi
->ht
,
3163 /* If we have not already recorded this INSN, do so now. Since
3164 we process the INSNs in order, we know that if we have
3165 recorded it it must be at the front of the list. */
3166 if (ifme
&& (!ifme
->insns
|| XEXP (ifme
->insns
, 0) != ifmwi
->insn
))
3168 /* We do the allocation on the same obstack as is used for
3169 the hash table since this memory will not be used once
3170 the hash table is deallocated. */
3171 push_obstacks (&ifmwi
->ht
->memory
, &ifmwi
->ht
->memory
);
3172 ifme
->insns
= gen_rtx_EXPR_LIST (VOIDmode
, ifmwi
->insn
,
3181 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3182 which REGs in HT. */
3185 compute_insns_for_mem (insns
, last_insn
, ht
)
3188 struct hash_table
*ht
;
3191 struct insns_for_mem_walk_info ifmwi
;
3194 for (ifmwi
.pass
= 0; ifmwi
.pass
< 2; ++ifmwi
.pass
)
3195 for (insn
= insns
; insn
!= last_insn
; insn
= NEXT_INSN (insn
))
3196 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
3199 for_each_rtx (&insn
, insns_for_mem_walk
, &ifmwi
);
3203 /* Helper function for purge_addressof called through for_each_rtx.
3204 Returns true iff the rtl is an ADDRESSOF. */
3206 is_addressof (rtl
, data
)
3208 void * data ATTRIBUTE_UNUSED
;
3210 return GET_CODE (* rtl
) == ADDRESSOF
;
3213 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3214 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3218 purge_addressof (insns
)
3222 struct hash_table ht
;
3224 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3225 requires a fixup pass over the instruction stream to correct
3226 INSNs that depended on the REG being a REG, and not a MEM. But,
3227 these fixup passes are slow. Furthermore, more MEMs are not
3228 mentioned in very many instructions. So, we speed up the process
3229 by pre-calculating which REGs occur in which INSNs; that allows
3230 us to perform the fixup passes much more quickly. */
3231 hash_table_init (&ht
,
3232 insns_for_mem_newfunc
,
3234 insns_for_mem_comp
);
3235 compute_insns_for_mem (insns
, NULL_RTX
, &ht
);
3237 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
3238 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
3239 || GET_CODE (insn
) == CALL_INSN
)
3241 if (! purge_addressof_1 (&PATTERN (insn
), insn
,
3242 asm_noperands (PATTERN (insn
)) > 0, 0, &ht
))
3243 /* If we could not replace the ADDRESSOFs in the insn,
3244 something is wrong. */
3247 if (! purge_addressof_1 (®_NOTES (insn
), NULL_RTX
, 0, 0, &ht
))
3249 /* If we could not replace the ADDRESSOFs in the insn's notes,
3250 we can just remove the offending notes instead. */
3253 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
3255 /* If we find a REG_RETVAL note then the insn is a libcall.
3256 Such insns must have REG_EQUAL notes as well, in order
3257 for later passes of the compiler to work. So it is not
3258 safe to delete the notes here, and instead we abort. */
3259 if (REG_NOTE_KIND (note
) == REG_RETVAL
)
3261 if (for_each_rtx (& note
, is_addressof
, NULL
))
3262 remove_note (insn
, note
);
3268 hash_table_free (&ht
);
3269 purge_bitfield_addressof_replacements
= 0;
3270 purge_addressof_replacements
= 0;
3273 /* Pass through the INSNS of function FNDECL and convert virtual register
3274 references to hard register references. */
3277 instantiate_virtual_regs (fndecl
, insns
)
3284 /* Compute the offsets to use for this function. */
3285 in_arg_offset
= FIRST_PARM_OFFSET (fndecl
);
3286 var_offset
= STARTING_FRAME_OFFSET
;
3287 dynamic_offset
= STACK_DYNAMIC_OFFSET (fndecl
);
3288 out_arg_offset
= STACK_POINTER_OFFSET
;
3289 cfa_offset
= ARG_POINTER_CFA_OFFSET
;
3291 /* Scan all variables and parameters of this function. For each that is
3292 in memory, instantiate all virtual registers if the result is a valid
3293 address. If not, we do it later. That will handle most uses of virtual
3294 regs on many machines. */
3295 instantiate_decls (fndecl
, 1);
3297 /* Initialize recognition, indicating that volatile is OK. */
3300 /* Scan through all the insns, instantiating every virtual register still
3302 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
3303 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
3304 || GET_CODE (insn
) == CALL_INSN
)
3306 instantiate_virtual_regs_1 (&PATTERN (insn
), insn
, 1);
3307 instantiate_virtual_regs_1 (®_NOTES (insn
), NULL_RTX
, 0);
3310 /* Instantiate the stack slots for the parm registers, for later use in
3311 addressof elimination. */
3312 for (i
= 0; i
< max_parm_reg
; ++i
)
3313 if (parm_reg_stack_loc
[i
])
3314 instantiate_virtual_regs_1 (&parm_reg_stack_loc
[i
], NULL_RTX
, 0);
3316 /* Now instantiate the remaining register equivalences for debugging info.
3317 These will not be valid addresses. */
3318 instantiate_decls (fndecl
, 0);
3320 /* Indicate that, from now on, assign_stack_local should use
3321 frame_pointer_rtx. */
3322 virtuals_instantiated
= 1;
3325 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3326 all virtual registers in their DECL_RTL's.
3328 If VALID_ONLY, do this only if the resulting address is still valid.
3329 Otherwise, always do it. */
3332 instantiate_decls (fndecl
, valid_only
)
3338 if (DECL_SAVED_INSNS (fndecl
))
3339 /* When compiling an inline function, the obstack used for
3340 rtl allocation is the maybepermanent_obstack. Calling
3341 `resume_temporary_allocation' switches us back to that
3342 obstack while we process this function's parameters. */
3343 resume_temporary_allocation ();
3345 /* Process all parameters of the function. */
3346 for (decl
= DECL_ARGUMENTS (fndecl
); decl
; decl
= TREE_CHAIN (decl
))
3348 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
3350 instantiate_decl (DECL_RTL (decl
), size
, valid_only
);
3352 /* If the parameter was promoted, then the incoming RTL mode may be
3353 larger than the declared type size. We must use the larger of
3355 size
= MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl
))), size
);
3356 instantiate_decl (DECL_INCOMING_RTL (decl
), size
, valid_only
);
3359 /* Now process all variables defined in the function or its subblocks. */
3360 instantiate_decls_1 (DECL_INITIAL (fndecl
), valid_only
);
3362 if (DECL_INLINE (fndecl
) || DECL_DEFER_OUTPUT (fndecl
))
3364 /* Save all rtl allocated for this function by raising the
3365 high-water mark on the maybepermanent_obstack. */
3367 /* All further rtl allocation is now done in the current_obstack. */
3368 rtl_in_current_obstack ();
3372 /* Subroutine of instantiate_decls: Process all decls in the given
3373 BLOCK node and all its subblocks. */
/* instantiate_decls_1: recursive helper for instantiate_decls.
   Instantiate virtual registers in the DECL_RTL of each variable of
   BLOCK node LET, then recurse over all of LET's subblocks, passing
   VALID_ONLY through unchanged.
   NOTE(review): the continuation of the instantiate_decl call at 3383
   (its third argument, presumably valid_only) and the surrounding
   declarations/braces were elided by the extraction.  */
3376 instantiate_decls_1 (let
, valid_only
)
3382 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
3383 instantiate_decl (DECL_RTL (t
), int_size_in_bytes (TREE_TYPE (t
)),
3386 /* Process all subblocks. */
3387 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= TREE_CHAIN (t
))
3388 instantiate_decls_1 (t
, valid_only
);
3391 /* Subroutine of the preceding procedures: Given RTL representing a
3392 decl and the size of the object, do any instantiation required.
3394 If VALID_ONLY is non-zero, it means that the RTL should only be
3395 changed if the new address is valid. */
/* instantiate_decl: given rtl X for a decl of SIZE bytes, instantiate any
   virtual registers appearing in its (MEM) address.  If VALID_ONLY is
   nonzero, the address is first copied so the change can be abandoned
   when the instantiated address is not memory_address_p for every
   integer and floating-point mode up to SIZE bytes.
   NOTE(review): several lines were elided by the extraction: the fetch
   of the address (addr = XEXP (x, 0), around 3410-3412), the early
   `return's after the guard tests, and the final store of the updated
   address back into the MEM (after 3451).  */
3398 instantiate_decl (x
, size
, valid_only
)
3403 enum machine_mode mode
;
3406 /* If this is not a MEM, no need to do anything. Similarly if the
3407 address is a constant or a register that is not a virtual register. */
3409 if (x
== 0 || GET_CODE (x
) != MEM
)
3413 if (CONSTANT_P (addr
)
3414 || (GET_CODE (addr
) == ADDRESSOF
&& GET_CODE (XEXP (addr
, 0)) == REG
)
3415 || (GET_CODE (addr
) == REG
3416 && (REGNO (addr
) < FIRST_VIRTUAL_REGISTER
3417 || REGNO (addr
) > LAST_VIRTUAL_REGISTER
)))
3420 /* If we should only do this if the address is valid, copy the address.
3421 We need to do this so we can undo any changes that might make the
3422 address invalid. This copy is unfortunate, but probably can't be
3426 addr
= copy_rtx (addr
);
3428 instantiate_virtual_regs_1 (&addr
, NULL_RTX
, 0);
3432 /* Now verify that the resulting address is valid for every integer or
3433 floating-point mode up to and including SIZE bytes long. We do this
3434 since the object might be accessed in any mode and frame addresses
3437 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3438 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= size
;
3439 mode
= GET_MODE_WIDER_MODE (mode
))
3440 if (! memory_address_p (mode
, addr
))
/* NOTE(review): the `return' on address-validation failure (3441) and
   the matching body of the MODE_FLOAT loop below were elided.  */
3443 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
3444 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= size
;
3445 mode
= GET_MODE_WIDER_MODE (mode
))
3446 if (! memory_address_p (mode
, addr
))
3450 /* Put back the address now that we have updated it and we either know
3451 it is valid or we don't care whether it is valid. */
3456 /* Given a pointer to a piece of rtx and an optional pointer to the
3457 containing object, instantiate any virtual registers present in it.
3459 If EXTRA_INSNS, we always do the replacement and generate
3460 any extra insns before OBJECT. If it zero, we do nothing if replacement
3463 Return 1 if we either had nothing to do or if we were able to do the
3464 needed replacement. Return 0 otherwise; we only return zero if
3465 EXTRA_INSNS is zero.
3467 We first try some simple transformations to avoid the creation of extra
/* instantiate_virtual_regs_1: core recursive walker.  Given a pointer LOC
   to a piece of rtl (inside insn OBJECT, if any), replace every virtual
   register (virtual_incoming_args_rtx, virtual_stack_vars_rtx,
   virtual_stack_dynamic_rtx, virtual_outgoing_args_rtx, virtual_cfa_rtx)
   with the corresponding real register plus its offset (in_arg_offset,
   var_offset, dynamic_offset, out_arg_offset, cfa_offset).  If
   EXTRA_INSNS is nonzero, extra insns may be emitted before OBJECT to
   effect the replacement; otherwise return 0 when a replacement would
   not yield a valid insn.  Changes are tried with validate_change /
   validate_replace_rtx so invalid substitutions can be rejected.
   NOTE(review): the extraction dropped this function's declarations,
   the switch statement itself, most `case' labels, braces, `return's,
   and the start_sequence/end_sequence pairs around force_operand calls.
   The section comments below mark where each elided case begins.  */
3471 instantiate_virtual_regs_1 (loc
, object
, extra_insns
)
3479 HOST_WIDE_INT offset
= 0;
3485 /* Re-start here to avoid recursion in common cases. */
3492 code
= GET_CODE (x
);
3494 /* Check for some special cases. */
/* --- SET handling (case label elided).  A SET whose destination is a
   virtual register stores "source minus offset" into the real reg.  */
3511 /* We are allowed to set the virtual registers. This means that
3512 the actual register should receive the source minus the
3513 appropriate offset. This is used, for example, in the handling
3514 of non-local gotos. */
3515 if (SET_DEST (x
) == virtual_incoming_args_rtx
)
3516 new = arg_pointer_rtx
, offset
= - in_arg_offset
;
3517 else if (SET_DEST (x
) == virtual_stack_vars_rtx
)
3518 new = frame_pointer_rtx
, offset
= - var_offset
;
3519 else if (SET_DEST (x
) == virtual_stack_dynamic_rtx
)
3520 new = stack_pointer_rtx
, offset
= - dynamic_offset
;
3521 else if (SET_DEST (x
) == virtual_outgoing_args_rtx
)
3522 new = stack_pointer_rtx
, offset
= - out_arg_offset
;
3523 else if (SET_DEST (x
) == virtual_cfa_rtx
)
3524 new = arg_pointer_rtx
, offset
= - cfa_offset
;
3528 rtx src
= SET_SRC (x
);
3530 instantiate_virtual_regs_1 (&src
, NULL_RTX
, 0);
3532 /* The only valid sources here are PLUS or REG. Just do
3533 the simplest possible thing to handle them. */
3534 if (GET_CODE (src
) != REG
&& GET_CODE (src
) != PLUS
)
3538 if (GET_CODE (src
) != REG
)
3539 temp
= force_operand (src
, NULL_RTX
)
3542 temp
= force_operand (plus_constant (temp
, offset
), NULL_RTX
);
3546 emit_insns_before (seq
, object
);
3549 if (! validate_change (object
, &SET_SRC (x
), temp
, 0)
3556 instantiate_virtual_regs_1 (&SET_DEST (x
), object
, extra_insns
);
/* --- PLUS handling (case label elided).  */
3561 /* Handle special case of virtual register plus constant. */
3562 if (CONSTANT_P (XEXP (x
, 1)))
3564 rtx old
, new_offset
;
3566 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3567 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
3569 rtx inner
= XEXP (XEXP (x
, 0), 0);
3571 if (inner
== virtual_incoming_args_rtx
)
3572 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3573 else if (inner
== virtual_stack_vars_rtx
)
3574 new = frame_pointer_rtx
, offset
= var_offset
;
3575 else if (inner
== virtual_stack_dynamic_rtx
)
3576 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3577 else if (inner
== virtual_outgoing_args_rtx
)
3578 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3579 else if (inner
== virtual_cfa_rtx
)
3580 new = arg_pointer_rtx
, offset
= cfa_offset
;
3587 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 1), object
,
3589 new = gen_rtx_PLUS (Pmode
, new, XEXP (XEXP (x
, 0), 1));
3592 else if (XEXP (x
, 0) == virtual_incoming_args_rtx
)
3593 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3594 else if (XEXP (x
, 0) == virtual_stack_vars_rtx
)
3595 new = frame_pointer_rtx
, offset
= var_offset
;
3596 else if (XEXP (x
, 0) == virtual_stack_dynamic_rtx
)
3597 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3598 else if (XEXP (x
, 0) == virtual_outgoing_args_rtx
)
3599 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3600 else if (XEXP (x
, 0) == virtual_cfa_rtx
)
3601 new = arg_pointer_rtx
, offset
= cfa_offset
;
3604 /* We know the second operand is a constant. Unless the
3605 first operand is a REG (which has been already checked),
3606 it needs to be checked. */
3607 if (GET_CODE (XEXP (x
, 0)) != REG
)
3615 new_offset
= plus_constant (XEXP (x
, 1), offset
);
3617 /* If the new constant is zero, try to replace the sum with just
3619 if (new_offset
== const0_rtx
3620 && validate_change (object
, loc
, new, 0))
3623 /* Next try to replace the register and new offset.
3624 There are two changes to validate here and we can't assume that
3625 in the case of old offset equals new just changing the register
3626 will yield a valid insn. In the interests of a little efficiency,
3627 however, we only call validate change once (we don't queue up the
3628 changes and then call apply_change_group). */
/* NOTE(review): the condition head of this ?: (original lines
   3629-3631, saving `old' and testing it) was elided.  */
3632 ? ! validate_change (object
, &XEXP (x
, 0), new, 0)
3633 : (XEXP (x
, 0) = new,
3634 ! validate_change (object
, &XEXP (x
, 1), new_offset
, 0)))
3642 /* Otherwise copy the new constant into a register and replace
3643 constant with that register. */
3644 temp
= gen_reg_rtx (Pmode
);
3646 if (validate_change (object
, &XEXP (x
, 1), temp
, 0))
3647 emit_insn_before (gen_move_insn (temp
, new_offset
), object
);
3650 /* If that didn't work, replace this expression with a
3651 register containing the sum. */
3654 new = gen_rtx_PLUS (Pmode
, new, new_offset
);
3657 temp
= force_operand (new, NULL_RTX
);
3661 emit_insns_before (seq
, object
);
3662 if (! validate_change (object
, loc
, temp
, 0)
3663 && ! validate_replace_rtx (x
, temp
, object
))
3671 /* Fall through to generic two-operand expression case. */
/* --- generic binary/comparison operators: only the second operand
   needs an explicit recursive call; the first falls through below.  */
3677 case DIV
: case UDIV
:
3678 case MOD
: case UMOD
:
3679 case AND
: case IOR
: case XOR
:
3680 case ROTATERT
: case ROTATE
:
3681 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
3683 case GE
: case GT
: case GEU
: case GTU
:
3684 case LE
: case LT
: case LEU
: case LTU
:
3685 if (XEXP (x
, 1) && ! CONSTANT_P (XEXP (x
, 1)))
3686 instantiate_virtual_regs_1 (&XEXP (x
, 1), object
, extra_insns
);
/* --- MEM handling (case label elided).  */
3691 /* Most cases of MEM that convert to valid addresses have already been
3692 handled by our scan of decls. The only special handling we
3693 need here is to make a copy of the rtx to ensure it isn't being
3694 shared if we have to change it to a pseudo.
3696 If the rtx is a simple reference to an address via a virtual register,
3697 it can potentially be shared. In such cases, first try to make it
3698 a valid address, which can also be shared. Otherwise, copy it and
3701 First check for common cases that need no processing. These are
3702 usually due to instantiation already being done on a previous instance
3706 if (CONSTANT_ADDRESS_P (temp
)
3707 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3708 || temp
== arg_pointer_rtx
3710 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3711 || temp
== hard_frame_pointer_rtx
3713 || temp
== frame_pointer_rtx
)
3716 if (GET_CODE (temp
) == PLUS
3717 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
3718 && (XEXP (temp
, 0) == frame_pointer_rtx
3719 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3720 || XEXP (temp
, 0) == hard_frame_pointer_rtx
3722 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3723 || XEXP (temp
, 0) == arg_pointer_rtx
3728 if (temp
== virtual_stack_vars_rtx
3729 || temp
== virtual_incoming_args_rtx
3730 || (GET_CODE (temp
) == PLUS
3731 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
3732 && (XEXP (temp
, 0) == virtual_stack_vars_rtx
3733 || XEXP (temp
, 0) == virtual_incoming_args_rtx
)))
3735 /* This MEM may be shared. If the substitution can be done without
3736 the need to generate new pseudos, we want to do it in place
3737 so all copies of the shared rtx benefit. The call below will
3738 only make substitutions if the resulting address is still
3741 Note that we cannot pass X as the object in the recursive call
3742 since the insn being processed may not allow all valid
3743 addresses. However, if we were not passed on object, we can
3744 only modify X without copying it if X will have a valid
3747 ??? Also note that this can still lose if OBJECT is an insn that
3748 has less restrictions on an address that some other insn.
3749 In that case, we will modify the shared address. This case
3750 doesn't seem very likely, though. One case where this could
3751 happen is in the case of a USE or CLOBBER reference, but we
3752 take care of that below. */
3754 if (instantiate_virtual_regs_1 (&XEXP (x
, 0),
3755 object
? object
: x
, 0))
3758 /* Otherwise make a copy and process that copy. We copy the entire
3759 RTL expression since it might be a PLUS which could also be
3761 *loc
= x
= copy_rtx (x
);
3764 /* Fall through to generic unary operation case. */
/* --- single-operand rtxs: recurse into operand 0 via goto restart
   (the restart jump itself was elided).  */
3766 case STRICT_LOW_PART
:
3768 case PRE_DEC
: case PRE_INC
: case POST_DEC
: case POST_INC
:
3769 case SIGN_EXTEND
: case ZERO_EXTEND
:
3770 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
3771 case FLOAT
: case FIX
:
3772 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
3776 /* These case either have just one operand or we know that we need not
3777 check the rest of the operands. */
/* --- USE/CLOBBER handling (case labels elided, per comment 3751).  */
3783 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3784 go ahead and make the invalid one, but do it to a copy. For a REG,
3785 just make the recursive call, since there's no chance of a problem. */
3787 if ((GET_CODE (XEXP (x
, 0)) == MEM
3788 && instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), XEXP (x
, 0),
3790 || (GET_CODE (XEXP (x
, 0)) == REG
3791 && instantiate_virtual_regs_1 (&XEXP (x
, 0), object
, 0)))
3794 XEXP (x
, 0) = copy_rtx (XEXP (x
, 0));
/* --- REG handling (case label elided): a bare virtual register.  */
3799 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3800 in front of this insn and substitute the temporary. */
3801 if (x
== virtual_incoming_args_rtx
)
3802 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3803 else if (x
== virtual_stack_vars_rtx
)
3804 new = frame_pointer_rtx
, offset
= var_offset
;
3805 else if (x
== virtual_stack_dynamic_rtx
)
3806 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3807 else if (x
== virtual_outgoing_args_rtx
)
3808 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3809 else if (x
== virtual_cfa_rtx
)
3810 new = arg_pointer_rtx
, offset
= cfa_offset
;
3814 temp
= plus_constant (new, offset
);
3815 if (!validate_change (object
, loc
, temp
, 0))
3821 temp
= force_operand (temp
, NULL_RTX
);
3825 emit_insns_before (seq
, object
);
3826 if (! validate_change (object
, loc
, temp
, 0)
3827 && ! validate_replace_rtx (x
, temp
, object
))
/* --- ADDRESSOF handling (case label elided, per comment 3840).  */
3835 if (GET_CODE (XEXP (x
, 0)) == REG
)
3838 else if (GET_CODE (XEXP (x
, 0)) == MEM
)
3840 /* If we have a (addressof (mem ..)), do any instantiation inside
3841 since we know we'll be making the inside valid when we finally
3842 remove the ADDRESSOF. */
3843 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), NULL_RTX
, 0);
/* --- default: walk every subexpression by rtx format string.  */
3852 /* Scan all subexpressions. */
3853 fmt
= GET_RTX_FORMAT (code
);
3854 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3857 if (!instantiate_virtual_regs_1 (&XEXP (x
, i
), object
, extra_insns
))
3860 else if (*fmt
== 'E')
3861 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3862 if (! instantiate_virtual_regs_1 (&XVECEXP (x
, i
, j
), object
,
3869 /* Optimization: assuming this function does not receive nonlocal gotos,
3870 delete the handlers for such, as well as the insns to establish
3871 and disestablish them. */
/* Interior of delete_handlers (function header elided by the extraction;
   the comment at 3869-3871 describes it).  Scans every insn: for
   CODE_LABELs it clears LABEL_PRESERVE_P so jump optimization may delete
   them, and unlinks the label from the nonlocal_labels list; for INSNs
   it checks whether the insn mentions a nonlocal goto handler slot or
   the nonlocal goto stack level (the actual delete_insn calls were
   elided).  */
3877 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3879 /* Delete the handler by turning off the flag that would
3880 prevent jump_optimize from deleting it.
3881 Also permit deletion of the nonlocal labels themselves
3882 if nothing local refers to them. */
3883 if (GET_CODE (insn
) == CODE_LABEL
)
3887 LABEL_PRESERVE_P (insn
) = 0;
3889 /* Remove it from the nonlocal_label list, to avoid confusing
3891 for (t
= nonlocal_labels
, last_t
= 0; t
;
3892 last_t
= t
, t
= TREE_CHAIN (t
))
3893 if (DECL_RTL (TREE_VALUE (t
)) == insn
)
/* List unlink: drop the head when last_t is 0, else splice t out.  */
3898 nonlocal_labels
= TREE_CHAIN (nonlocal_labels
);
3900 TREE_CHAIN (last_t
) = TREE_CHAIN (t
);
3903 if (GET_CODE (insn
) == INSN
)
3907 for (t
= nonlocal_goto_handler_slots
; t
!= 0; t
= XEXP (t
, 1))
3908 if (reg_mentioned_p (t
, PATTERN (insn
)))
3914 || (nonlocal_goto_stack_level
!= 0
3915 && reg_mentioned_p (nonlocal_goto_stack_level
,
/* Return statement of a tiny accessor (presumably max_parm_reg_num;
   its header was elided by the extraction) that exposes the
   max_parm_reg counter.  */
3925 return max_parm_reg
;
3928 /* Return the first insn following those generated by `assign_parms'. */
/* get_first_nonparm_insn: return the insn after the last insn generated
   by assign_parms, or the very first insn if none were recorded.
   NOTE(review): the `if (last_parm_insn)' guard (original line 3933)
   was elided; NEXT_INSN is only taken when last_parm_insn is set.  */
3931 get_first_nonparm_insn ()
3934 return NEXT_INSN (last_parm_insn
);
3935 return get_insns ();
3938 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3939 Crash if there is none. */
/* get_first_block_beg: scan forward from the first non-parameter insn
   for the first NOTE_INSN_BLOCK_BEG note.  Returns it (the return,
   around original line 3950, was elided); aborts if no such note
   exists.  */
3942 get_first_block_beg ()
3944 register rtx searcher
;
3945 register rtx insn
= get_first_nonparm_insn ();
3947 for (searcher
= insn
; searcher
; searcher
= NEXT_INSN (searcher
))
3948 if (GET_CODE (searcher
) == NOTE
3949 && NOTE_LINE_NUMBER (searcher
) == NOTE_INSN_BLOCK_BEG
)
3952 abort (); /* Invalid call to this function. (See comments above.) */
3956 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3957 This means a type for which function calls must pass an address to the
3958 function or get an address back from the function.
3959 EXP may be a type node or an expression (whose type is tested). */
/* aggregate_value_p: return 1 if EXP (a type node or an expression)
   must be returned in memory rather than in registers; 0 otherwise.
   Memory is required when RETURN_IN_MEMORY says so, when the type is
   TREE_ADDRESSABLE, when -fpcc-struct-return is in effect for an
   aggregate, when hard_function_value yields something other than a
   plain REG, or when any register the value would occupy is not
   call-clobbered.
   NOTE(review): the `return 1/0' statements and the declarations of
   `type' and `reg' were elided by the extraction.  */
3962 aggregate_value_p (exp
)
3965 int i
, regno
, nregs
;
3968 if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 't')
3971 type
= TREE_TYPE (exp
);
3973 if (RETURN_IN_MEMORY (type
))
3975 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3976 and thus can't be returned in registers. */
3977 if (TREE_ADDRESSABLE (type
))
3979 if (flag_pcc_struct_return
&& AGGREGATE_TYPE_P (type
))
3981 /* Make sure we have suitable call-clobbered regs to return
3982 the value in; if not, we must return it in memory. */
3983 reg
= hard_function_value (type
, 0, 0);
3985 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3987 if (GET_CODE (reg
) != REG
)
3990 regno
= REGNO (reg
);
3991 nregs
= HARD_REGNO_NREGS (regno
, TYPE_MODE (type
));
3992 for (i
= 0; i
< nregs
; i
++)
3993 if (! call_used_regs
[regno
+ i
])
3998 /* Assign RTL expressions to the function's parameters.
3999 This may involve copying them into registers and using
4000 those registers as the RTL for them. */
4003 assign_parms (fndecl
)
4007 register rtx entry_parm
= 0;
4008 register rtx stack_parm
= 0;
4009 CUMULATIVE_ARGS args_so_far
;
4010 enum machine_mode promoted_mode
, passed_mode
;
4011 enum machine_mode nominal_mode
, promoted_nominal_mode
;
4013 /* Total space needed so far for args on the stack,
4014 given as a constant and a tree-expression. */
4015 struct args_size stack_args_size
;
4016 tree fntype
= TREE_TYPE (fndecl
);
4017 tree fnargs
= DECL_ARGUMENTS (fndecl
);
4018 /* This is used for the arg pointer when referring to stack args. */
4019 rtx internal_arg_pointer
;
4020 /* This is a dummy PARM_DECL that we used for the function result if
4021 the function returns a structure. */
4022 tree function_result_decl
= 0;
4023 #ifdef SETUP_INCOMING_VARARGS
4024 int varargs_setup
= 0;
4026 rtx conversion_insns
= 0;
4027 struct args_size alignment_pad
;
4029 /* Nonzero if the last arg is named `__builtin_va_alist',
4030 which is used on some machines for old-fashioned non-ANSI varargs.h;
4031 this should be stuck onto the stack as if it had arrived there. */
4033 = (current_function_varargs
4035 && (parm
= tree_last (fnargs
)) != 0
4037 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm
)),
4038 "__builtin_va_alist")));
4040 /* Nonzero if function takes extra anonymous args.
4041 This means the last named arg must be on the stack
4042 right before the anonymous ones. */
4044 = (TYPE_ARG_TYPES (fntype
) != 0
4045 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
4046 != void_type_node
));
4048 current_function_stdarg
= stdarg
;
4050 /* If the reg that the virtual arg pointer will be translated into is
4051 not a fixed reg or is the stack pointer, make a copy of the virtual
4052 arg pointer, and address parms via the copy. The frame pointer is
4053 considered fixed even though it is not marked as such.
4055 The second time through, simply use ap to avoid generating rtx. */
4057 if ((ARG_POINTER_REGNUM
== STACK_POINTER_REGNUM
4058 || ! (fixed_regs
[ARG_POINTER_REGNUM
]
4059 || ARG_POINTER_REGNUM
== FRAME_POINTER_REGNUM
)))
4060 internal_arg_pointer
= copy_to_reg (virtual_incoming_args_rtx
);
4062 internal_arg_pointer
= virtual_incoming_args_rtx
;
4063 current_function_internal_arg_pointer
= internal_arg_pointer
;
4065 stack_args_size
.constant
= 0;
4066 stack_args_size
.var
= 0;
4068 /* If struct value address is treated as the first argument, make it so. */
4069 if (aggregate_value_p (DECL_RESULT (fndecl
))
4070 && ! current_function_returns_pcc_struct
4071 && struct_value_incoming_rtx
== 0)
4073 tree type
= build_pointer_type (TREE_TYPE (fntype
));
4075 function_result_decl
= build_decl (PARM_DECL
, NULL_TREE
, type
);
4077 DECL_ARG_TYPE (function_result_decl
) = type
;
4078 TREE_CHAIN (function_result_decl
) = fnargs
;
4079 fnargs
= function_result_decl
;
4082 max_parm_reg
= LAST_VIRTUAL_REGISTER
+ 1;
4083 parm_reg_stack_loc
= (rtx
*) xcalloc (max_parm_reg
, sizeof (rtx
));
4085 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4086 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far
, fntype
, NULL_RTX
);
4088 INIT_CUMULATIVE_ARGS (args_so_far
, fntype
, NULL_RTX
, 0);
4091 /* We haven't yet found an argument that we must push and pretend the
4093 current_function_pretend_args_size
= 0;
4095 for (parm
= fnargs
; parm
; parm
= TREE_CHAIN (parm
))
4097 int aggregate
= AGGREGATE_TYPE_P (TREE_TYPE (parm
));
4098 struct args_size stack_offset
;
4099 struct args_size arg_size
;
4100 int passed_pointer
= 0;
4101 int did_conversion
= 0;
4102 tree passed_type
= DECL_ARG_TYPE (parm
);
4103 tree nominal_type
= TREE_TYPE (parm
);
4106 /* Set LAST_NAMED if this is last named arg before some
4108 int last_named
= ((TREE_CHAIN (parm
) == 0
4109 || DECL_NAME (TREE_CHAIN (parm
)) == 0)
4110 && (stdarg
|| current_function_varargs
));
4111 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4112 most machines, if this is a varargs/stdarg function, then we treat
4113 the last named arg as if it were anonymous too. */
4114 int named_arg
= STRICT_ARGUMENT_NAMING
? 1 : ! last_named
;
4116 if (TREE_TYPE (parm
) == error_mark_node
4117 /* This can happen after weird syntax errors
4118 or if an enum type is defined among the parms. */
4119 || TREE_CODE (parm
) != PARM_DECL
4120 || passed_type
== NULL
)
4122 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
)
4123 = gen_rtx_MEM (BLKmode
, const0_rtx
);
4124 TREE_USED (parm
) = 1;
4128 /* For varargs.h function, save info about regs and stack space
4129 used by the individual args, not including the va_alist arg. */
4130 if (hide_last_arg
&& last_named
)
4131 current_function_args_info
= args_so_far
;
4133 /* Find mode of arg as it is passed, and mode of arg
4134 as it should be during execution of this function. */
4135 passed_mode
= TYPE_MODE (passed_type
);
4136 nominal_mode
= TYPE_MODE (nominal_type
);
4138 /* If the parm's mode is VOID, its value doesn't matter,
4139 and avoid the usual things like emit_move_insn that could crash. */
4140 if (nominal_mode
== VOIDmode
)
4142 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
) = const0_rtx
;
4146 /* If the parm is to be passed as a transparent union, use the
4147 type of the first field for the tests below. We have already
4148 verified that the modes are the same. */
4149 if (DECL_TRANSPARENT_UNION (parm
)
4150 || TYPE_TRANSPARENT_UNION (passed_type
))
4151 passed_type
= TREE_TYPE (TYPE_FIELDS (passed_type
));
4153 /* See if this arg was passed by invisible reference. It is if
4154 it is an object whose size depends on the contents of the
4155 object itself or if the machine requires these objects be passed
4158 if ((TREE_CODE (TYPE_SIZE (passed_type
)) != INTEGER_CST
4159 && contains_placeholder_p (TYPE_SIZE (passed_type
)))
4160 || TREE_ADDRESSABLE (passed_type
)
4161 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4162 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, passed_mode
,
4163 passed_type
, named_arg
)
4167 passed_type
= nominal_type
= build_pointer_type (passed_type
);
4169 passed_mode
= nominal_mode
= Pmode
;
4172 promoted_mode
= passed_mode
;
4174 #ifdef PROMOTE_FUNCTION_ARGS
4175 /* Compute the mode in which the arg is actually extended to. */
4176 unsignedp
= TREE_UNSIGNED (passed_type
);
4177 promoted_mode
= promote_mode (passed_type
, promoted_mode
, &unsignedp
, 1);
4180 /* Let machine desc say which reg (if any) the parm arrives in.
4181 0 means it arrives on the stack. */
4182 #ifdef FUNCTION_INCOMING_ARG
4183 entry_parm
= FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
4184 passed_type
, named_arg
);
4186 entry_parm
= FUNCTION_ARG (args_so_far
, promoted_mode
,
4187 passed_type
, named_arg
);
4190 if (entry_parm
== 0)
4191 promoted_mode
= passed_mode
;
4193 #ifdef SETUP_INCOMING_VARARGS
4194 /* If this is the last named parameter, do any required setup for
4195 varargs or stdargs. We need to know about the case of this being an
4196 addressable type, in which case we skip the registers it
4197 would have arrived in.
4199 For stdargs, LAST_NAMED will be set for two parameters, the one that
4200 is actually the last named, and the dummy parameter. We only
4201 want to do this action once.
4203 Also, indicate when RTL generation is to be suppressed. */
4204 if (last_named
&& !varargs_setup
)
4206 SETUP_INCOMING_VARARGS (args_so_far
, promoted_mode
, passed_type
,
4207 current_function_pretend_args_size
, 0);
4212 /* Determine parm's home in the stack,
4213 in case it arrives in the stack or we should pretend it did.
4215 Compute the stack position and rtx where the argument arrives
4218 There is one complexity here: If this was a parameter that would
4219 have been passed in registers, but wasn't only because it is
4220 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4221 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4222 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4223 0 as it was the previous time. */
4225 pretend_named
= named_arg
|| PRETEND_OUTGOING_VARARGS_NAMED
;
4226 locate_and_pad_parm (promoted_mode
, passed_type
,
4227 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4230 #ifdef FUNCTION_INCOMING_ARG
4231 FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
4233 pretend_named
) != 0,
4235 FUNCTION_ARG (args_so_far
, promoted_mode
,
4237 pretend_named
) != 0,
4240 fndecl
, &stack_args_size
, &stack_offset
, &arg_size
,
4244 rtx offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
4246 if (offset_rtx
== const0_rtx
)
4247 stack_parm
= gen_rtx_MEM (promoted_mode
, internal_arg_pointer
);
4249 stack_parm
= gen_rtx_MEM (promoted_mode
,
4250 gen_rtx_PLUS (Pmode
,
4251 internal_arg_pointer
,
4254 /* If this is a memory ref that contains aggregate components,
4255 mark it as such for cse and loop optimize. Likewise if it
4257 MEM_SET_IN_STRUCT_P (stack_parm
, aggregate
);
4258 RTX_UNCHANGING_P (stack_parm
) = TREE_READONLY (parm
);
4259 MEM_ALIAS_SET (stack_parm
) = get_alias_set (parm
);
4262 /* If this parameter was passed both in registers and in the stack,
4263 use the copy on the stack. */
4264 if (MUST_PASS_IN_STACK (promoted_mode
, passed_type
))
4267 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4268 /* If this parm was passed part in regs and part in memory,
4269 pretend it arrived entirely in memory
4270 by pushing the register-part onto the stack.
4272 In the special case of a DImode or DFmode that is split,
4273 we could put it together in a pseudoreg directly,
4274 but for now that's not worth bothering with. */
4278 int nregs
= FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, promoted_mode
,
4279 passed_type
, named_arg
);
4283 current_function_pretend_args_size
4284 = (((nregs
* UNITS_PER_WORD
) + (PARM_BOUNDARY
/ BITS_PER_UNIT
) - 1)
4285 / (PARM_BOUNDARY
/ BITS_PER_UNIT
)
4286 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
4288 /* Handle calls that pass values in multiple non-contiguous
4289 locations. The Irix 6 ABI has examples of this. */
4290 if (GET_CODE (entry_parm
) == PARALLEL
)
4291 emit_group_store (validize_mem (stack_parm
), entry_parm
,
4292 int_size_in_bytes (TREE_TYPE (parm
)),
4293 (TYPE_ALIGN (TREE_TYPE (parm
))
4296 move_block_from_reg (REGNO (entry_parm
),
4297 validize_mem (stack_parm
), nregs
,
4298 int_size_in_bytes (TREE_TYPE (parm
)));
4300 entry_parm
= stack_parm
;
4305 /* If we didn't decide this parm came in a register,
4306 by default it came on the stack. */
4307 if (entry_parm
== 0)
4308 entry_parm
= stack_parm
;
4310 /* Record permanently how this parm was passed. */
4311 DECL_INCOMING_RTL (parm
) = entry_parm
;
4313 /* If there is actually space on the stack for this parm,
4314 count it in stack_args_size; otherwise set stack_parm to 0
4315 to indicate there is no preallocated stack slot for the parm. */
4317 if (entry_parm
== stack_parm
4318 || (GET_CODE (entry_parm
) == PARALLEL
4319 && XEXP (XVECEXP (entry_parm
, 0, 0), 0) == NULL_RTX
)
4320 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4321 /* On some machines, even if a parm value arrives in a register
4322 there is still an (uninitialized) stack slot allocated for it.
4324 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4325 whether this parameter already has a stack slot allocated,
4326 because an arg block exists only if current_function_args_size
4327 is larger than some threshold, and we haven't calculated that
4328 yet. So, for now, we just assume that stack slots never exist
4330 || REG_PARM_STACK_SPACE (fndecl
) > 0
4334 stack_args_size
.constant
+= arg_size
.constant
;
4336 ADD_PARM_SIZE (stack_args_size
, arg_size
.var
);
4339 /* No stack slot was pushed for this parm. */
4342 /* Update info on where next arg arrives in registers. */
4344 FUNCTION_ARG_ADVANCE (args_so_far
, promoted_mode
,
4345 passed_type
, named_arg
);
4347 /* If we can't trust the parm stack slot to be aligned enough
4348 for its ultimate type, don't use that slot after entry.
4349 We'll make another stack slot, if we need one. */
4351 unsigned int thisparm_boundary
4352 = FUNCTION_ARG_BOUNDARY (promoted_mode
, passed_type
);
4354 if (GET_MODE_ALIGNMENT (nominal_mode
) > thisparm_boundary
)
4358 /* If parm was passed in memory, and we need to convert it on entry,
4359 don't store it back in that same slot. */
4361 && nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
)
4365 /* Now adjust STACK_PARM to the mode and precise location
4366 where this parameter should live during execution,
4367 if we discover that it must live in the stack during execution.
4368 To make debuggers happier on big-endian machines, we store
4369 the value in the last bytes of the space available. */
4371 if (nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
4376 if (BYTES_BIG_ENDIAN
4377 && GET_MODE_SIZE (nominal_mode
) < UNITS_PER_WORD
)
4378 stack_offset
.constant
+= (GET_MODE_SIZE (passed_mode
)
4379 - GET_MODE_SIZE (nominal_mode
));
4381 offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
4382 if (offset_rtx
== const0_rtx
)
4383 stack_parm
= gen_rtx_MEM (nominal_mode
, internal_arg_pointer
);
4385 stack_parm
= gen_rtx_MEM (nominal_mode
,
4386 gen_rtx_PLUS (Pmode
,
4387 internal_arg_pointer
,
4390 /* If this is a memory ref that contains aggregate components,
4391 mark it as such for cse and loop optimize. */
4392 MEM_SET_IN_STRUCT_P (stack_parm
, aggregate
);
4396 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4397 in the mode in which it arrives.
4398 STACK_PARM is an RTX for a stack slot where the parameter can live
4399 during the function (in case we want to put it there).
4400 STACK_PARM is 0 if no stack slot was pushed for it.
4402 Now output code if necessary to convert ENTRY_PARM to
4403 the type in which this function declares it,
4404 and store that result in an appropriate place,
4405 which may be a pseudo reg, may be STACK_PARM,
4406 or may be a local stack slot if STACK_PARM is 0.
4408 Set DECL_RTL to that place. */
4410 if (nominal_mode
== BLKmode
|| GET_CODE (entry_parm
) == PARALLEL
)
4412 /* If a BLKmode arrives in registers, copy it to a stack slot.
4413 Handle calls that pass values in multiple non-contiguous
4414 locations. The Irix 6 ABI has examples of this. */
4415 if (GET_CODE (entry_parm
) == REG
4416 || GET_CODE (entry_parm
) == PARALLEL
)
4419 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm
)),
4422 /* Note that we will be storing an integral number of words.
4423 So we have to be careful to ensure that we allocate an
4424 integral number of words. We do this below in the
4425 assign_stack_local if space was not allocated in the argument
4426 list. If it was, this will not work if PARM_BOUNDARY is not
4427 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4428 if it becomes a problem. */
4430 if (stack_parm
== 0)
4433 = assign_stack_local (GET_MODE (entry_parm
),
4436 /* If this is a memory ref that contains aggregate
4437 components, mark it as such for cse and loop optimize. */
4438 MEM_SET_IN_STRUCT_P (stack_parm
, aggregate
);
4441 else if (PARM_BOUNDARY
% BITS_PER_WORD
!= 0)
4444 if (TREE_READONLY (parm
))
4445 RTX_UNCHANGING_P (stack_parm
) = 1;
4447 /* Handle calls that pass values in multiple non-contiguous
4448 locations. The Irix 6 ABI has examples of this. */
4449 if (GET_CODE (entry_parm
) == PARALLEL
)
4450 emit_group_store (validize_mem (stack_parm
), entry_parm
,
4451 int_size_in_bytes (TREE_TYPE (parm
)),
4452 (TYPE_ALIGN (TREE_TYPE (parm
))
4455 move_block_from_reg (REGNO (entry_parm
),
4456 validize_mem (stack_parm
),
4457 size_stored
/ UNITS_PER_WORD
,
4458 int_size_in_bytes (TREE_TYPE (parm
)));
4460 DECL_RTL (parm
) = stack_parm
;
4462 else if (! ((! optimize
4463 && ! DECL_REGISTER (parm
)
4464 && ! DECL_INLINE (fndecl
))
4465 /* layout_decl may set this. */
4466 || TREE_ADDRESSABLE (parm
)
4467 || TREE_SIDE_EFFECTS (parm
)
4468 /* If -ffloat-store specified, don't put explicit
4469 float variables into registers. */
4470 || (flag_float_store
4471 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
))
4472 /* Always assign pseudo to structure return or item passed
4473 by invisible reference. */
4474 || passed_pointer
|| parm
== function_result_decl
)
4476 /* Store the parm in a pseudoregister during the function, but we
4477 may need to do it in a wider mode. */
4479 register rtx parmreg
;
4480 int regno
, regnoi
= 0, regnor
= 0;
4482 unsignedp
= TREE_UNSIGNED (TREE_TYPE (parm
));
4484 promoted_nominal_mode
4485 = promote_mode (TREE_TYPE (parm
), nominal_mode
, &unsignedp
, 0);
4487 parmreg
= gen_reg_rtx (promoted_nominal_mode
);
4488 mark_user_reg (parmreg
);
4490 /* If this was an item that we received a pointer to, set DECL_RTL
4495 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type
)), parmreg
);
4496 MEM_SET_IN_STRUCT_P (DECL_RTL (parm
), aggregate
);
4499 DECL_RTL (parm
) = parmreg
;
4501 /* Copy the value into the register. */
4502 if (nominal_mode
!= passed_mode
4503 || promoted_nominal_mode
!= promoted_mode
)
4506 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4507 mode, by the caller. We now have to convert it to
4508 NOMINAL_MODE, if different. However, PARMREG may be in
4509 a different mode than NOMINAL_MODE if it is being stored
4512 If ENTRY_PARM is a hard register, it might be in a register
4513 not valid for operating in its mode (e.g., an odd-numbered
4514 register for a DFmode). In that case, moves are the only
4515 thing valid, so we can't do a convert from there. This
4516 occurs when the calling sequence allow such misaligned
4519 In addition, the conversion may involve a call, which could
4520 clobber parameters which haven't been copied to pseudo
4521 registers yet. Therefore, we must first copy the parm to
4522 a pseudo reg here, and save the conversion until after all
4523 parameters have been moved. */
4525 rtx tempreg
= gen_reg_rtx (GET_MODE (entry_parm
));
4527 emit_move_insn (tempreg
, validize_mem (entry_parm
));
4529 push_to_sequence (conversion_insns
);
4530 tempreg
= convert_to_mode (nominal_mode
, tempreg
, unsignedp
);
4532 /* TREE_USED gets set erroneously during expand_assignment. */
4533 save_tree_used
= TREE_USED (parm
);
4534 expand_assignment (parm
,
4535 make_tree (nominal_type
, tempreg
), 0, 0);
4536 TREE_USED (parm
) = save_tree_used
;
4537 conversion_insns
= get_insns ();
4542 emit_move_insn (parmreg
, validize_mem (entry_parm
));
4544 /* If we were passed a pointer but the actual value
4545 can safely live in a register, put it in one. */
4546 if (passed_pointer
&& TYPE_MODE (TREE_TYPE (parm
)) != BLKmode
4548 && ! DECL_REGISTER (parm
)
4549 && ! DECL_INLINE (fndecl
))
4550 /* layout_decl may set this. */
4551 || TREE_ADDRESSABLE (parm
)
4552 || TREE_SIDE_EFFECTS (parm
)
4553 /* If -ffloat-store specified, don't put explicit
4554 float variables into registers. */
4555 || (flag_float_store
4556 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
)))
4558 /* We can't use nominal_mode, because it will have been set to
4559 Pmode above. We must use the actual mode of the parm. */
4560 parmreg
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm
)));
4561 mark_user_reg (parmreg
);
4562 emit_move_insn (parmreg
, DECL_RTL (parm
));
4563 DECL_RTL (parm
) = parmreg
;
4564 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4568 #ifdef FUNCTION_ARG_CALLEE_COPIES
4569 /* If we are passed an arg by reference and it is our responsibility
4570 to make a copy, do it now.
4571 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4572 original argument, so we must recreate them in the call to
4573 FUNCTION_ARG_CALLEE_COPIES. */
4574 /* ??? Later add code to handle the case that if the argument isn't
4575 modified, don't do the copy. */
4577 else if (passed_pointer
4578 && FUNCTION_ARG_CALLEE_COPIES (args_so_far
,
4579 TYPE_MODE (DECL_ARG_TYPE (parm
)),
4580 DECL_ARG_TYPE (parm
),
4582 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm
)))
4585 tree type
= DECL_ARG_TYPE (parm
);
4587 /* This sequence may involve a library call perhaps clobbering
4588 registers that haven't been copied to pseudos yet. */
4590 push_to_sequence (conversion_insns
);
4592 if (TYPE_SIZE (type
) == 0
4593 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
4594 /* This is a variable sized object. */
4595 copy
= gen_rtx_MEM (BLKmode
,
4596 allocate_dynamic_stack_space
4597 (expr_size (parm
), NULL_RTX
,
4598 TYPE_ALIGN (type
)));
4600 copy
= assign_stack_temp (TYPE_MODE (type
),
4601 int_size_in_bytes (type
), 1);
4602 MEM_SET_IN_STRUCT_P (copy
, AGGREGATE_TYPE_P (type
));
4603 RTX_UNCHANGING_P (copy
) = TREE_READONLY (parm
);
4605 store_expr (parm
, copy
, 0);
4606 emit_move_insn (parmreg
, XEXP (copy
, 0));
4607 if (current_function_check_memory_usage
)
4608 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
4609 XEXP (copy
, 0), Pmode
,
4610 GEN_INT (int_size_in_bytes (type
)),
4611 TYPE_MODE (sizetype
),
4612 GEN_INT (MEMORY_USE_RW
),
4613 TYPE_MODE (integer_type_node
));
4614 conversion_insns
= get_insns ();
4618 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4620 /* In any case, record the parm's desired stack location
4621 in case we later discover it must live in the stack.
4623 If it is a COMPLEX value, store the stack location for both
4626 if (GET_CODE (parmreg
) == CONCAT
)
4627 regno
= MAX (REGNO (XEXP (parmreg
, 0)), REGNO (XEXP (parmreg
, 1)));
4629 regno
= REGNO (parmreg
);
4631 if (regno
>= max_parm_reg
)
4634 int old_max_parm_reg
= max_parm_reg
;
4636 /* It's slow to expand this one register at a time,
4637 but it's also rare and we need max_parm_reg to be
4638 precisely correct. */
4639 max_parm_reg
= regno
+ 1;
4640 new = (rtx
*) xrealloc (parm_reg_stack_loc
,
4641 max_parm_reg
* sizeof (rtx
));
4642 bzero ((char *) (new + old_max_parm_reg
),
4643 (max_parm_reg
- old_max_parm_reg
) * sizeof (rtx
));
4644 parm_reg_stack_loc
= new;
4647 if (GET_CODE (parmreg
) == CONCAT
)
4649 enum machine_mode submode
= GET_MODE (XEXP (parmreg
, 0));
4651 regnor
= REGNO (gen_realpart (submode
, parmreg
));
4652 regnoi
= REGNO (gen_imagpart (submode
, parmreg
));
4654 if (stack_parm
!= 0)
4656 parm_reg_stack_loc
[regnor
]
4657 = gen_realpart (submode
, stack_parm
);
4658 parm_reg_stack_loc
[regnoi
]
4659 = gen_imagpart (submode
, stack_parm
);
4663 parm_reg_stack_loc
[regnor
] = 0;
4664 parm_reg_stack_loc
[regnoi
] = 0;
4668 parm_reg_stack_loc
[REGNO (parmreg
)] = stack_parm
;
4670 /* Mark the register as eliminable if we did no conversion
4671 and it was copied from memory at a fixed offset,
4672 and the arg pointer was not copied to a pseudo-reg.
4673 If the arg pointer is a pseudo reg or the offset formed
4674 an invalid address, such memory-equivalences
4675 as we make here would screw up life analysis for it. */
4676 if (nominal_mode
== passed_mode
4679 && GET_CODE (stack_parm
) == MEM
4680 && stack_offset
.var
== 0
4681 && reg_mentioned_p (virtual_incoming_args_rtx
,
4682 XEXP (stack_parm
, 0)))
4684 rtx linsn
= get_last_insn ();
4687 /* Mark complex types separately. */
4688 if (GET_CODE (parmreg
) == CONCAT
)
4689 /* Scan backwards for the set of the real and
4691 for (sinsn
= linsn
; sinsn
!= 0;
4692 sinsn
= prev_nonnote_insn (sinsn
))
4694 set
= single_set (sinsn
);
4696 && SET_DEST (set
) == regno_reg_rtx
[regnoi
])
4698 = gen_rtx_EXPR_LIST (REG_EQUIV
,
4699 parm_reg_stack_loc
[regnoi
],
4702 && SET_DEST (set
) == regno_reg_rtx
[regnor
])
4704 = gen_rtx_EXPR_LIST (REG_EQUIV
,
4705 parm_reg_stack_loc
[regnor
],
4708 else if ((set
= single_set (linsn
)) != 0
4709 && SET_DEST (set
) == parmreg
)
4711 = gen_rtx_EXPR_LIST (REG_EQUIV
,
4712 stack_parm
, REG_NOTES (linsn
));
4715 /* For pointer data type, suggest pointer register. */
4716 if (POINTER_TYPE_P (TREE_TYPE (parm
)))
4717 mark_reg_pointer (parmreg
,
4718 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm
)))
4723 /* Value must be stored in the stack slot STACK_PARM
4724 during function execution. */
4726 if (promoted_mode
!= nominal_mode
)
4728 /* Conversion is required. */
4729 rtx tempreg
= gen_reg_rtx (GET_MODE (entry_parm
));
4731 emit_move_insn (tempreg
, validize_mem (entry_parm
));
4733 push_to_sequence (conversion_insns
);
4734 entry_parm
= convert_to_mode (nominal_mode
, tempreg
,
4735 TREE_UNSIGNED (TREE_TYPE (parm
)));
4738 /* ??? This may need a big-endian conversion on sparc64. */
4739 stack_parm
= change_address (stack_parm
, nominal_mode
,
4742 conversion_insns
= get_insns ();
4747 if (entry_parm
!= stack_parm
)
4749 if (stack_parm
== 0)
4752 = assign_stack_local (GET_MODE (entry_parm
),
4753 GET_MODE_SIZE (GET_MODE (entry_parm
)), 0);
4754 /* If this is a memory ref that contains aggregate components,
4755 mark it as such for cse and loop optimize. */
4756 MEM_SET_IN_STRUCT_P (stack_parm
, aggregate
);
4759 if (promoted_mode
!= nominal_mode
)
4761 push_to_sequence (conversion_insns
);
4762 emit_move_insn (validize_mem (stack_parm
),
4763 validize_mem (entry_parm
));
4764 conversion_insns
= get_insns ();
4768 emit_move_insn (validize_mem (stack_parm
),
4769 validize_mem (entry_parm
));
4771 if (current_function_check_memory_usage
)
4773 push_to_sequence (conversion_insns
);
4774 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
4775 XEXP (stack_parm
, 0), Pmode
,
4776 GEN_INT (GET_MODE_SIZE (GET_MODE
4778 TYPE_MODE (sizetype
),
4779 GEN_INT (MEMORY_USE_RW
),
4780 TYPE_MODE (integer_type_node
));
4782 conversion_insns
= get_insns ();
4785 DECL_RTL (parm
) = stack_parm
;
4788 /* If this "parameter" was the place where we are receiving the
4789 function's incoming structure pointer, set up the result. */
4790 if (parm
== function_result_decl
)
4792 tree result
= DECL_RESULT (fndecl
);
4793 tree restype
= TREE_TYPE (result
);
4796 = gen_rtx_MEM (DECL_MODE (result
), DECL_RTL (parm
));
4798 MEM_SET_IN_STRUCT_P (DECL_RTL (result
),
4799 AGGREGATE_TYPE_P (restype
));
4802 if (TREE_THIS_VOLATILE (parm
))
4803 MEM_VOLATILE_P (DECL_RTL (parm
)) = 1;
4804 if (TREE_READONLY (parm
))
4805 RTX_UNCHANGING_P (DECL_RTL (parm
)) = 1;
4808 /* Output all parameter conversion instructions (possibly including calls)
4809 now that all parameters have been copied out of hard registers. */
4810 emit_insns (conversion_insns
);
4812 last_parm_insn
= get_last_insn ();
4814 current_function_args_size
= stack_args_size
.constant
;
4816 /* Adjust function incoming argument size for alignment and
4819 #ifdef REG_PARM_STACK_SPACE
4820 #ifndef MAYBE_REG_PARM_STACK_SPACE
4821 current_function_args_size
= MAX (current_function_args_size
,
4822 REG_PARM_STACK_SPACE (fndecl
));
4826 #ifdef STACK_BOUNDARY
4827 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4829 current_function_args_size
4830 = ((current_function_args_size
+ STACK_BYTES
- 1)
4831 / STACK_BYTES
) * STACK_BYTES
;
4834 #ifdef ARGS_GROW_DOWNWARD
4835 current_function_arg_offset_rtx
4836 = (stack_args_size
.var
== 0 ? GEN_INT (-stack_args_size
.constant
)
4837 : expand_expr (size_binop (MINUS_EXPR
, stack_args_size
.var
,
4838 size_int (-stack_args_size
.constant
)),
4839 NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_BAD
));
4841 current_function_arg_offset_rtx
= ARGS_SIZE_RTX (stack_args_size
);
4844 /* See how many bytes, if any, of its args a function should try to pop
4847 current_function_pops_args
= RETURN_POPS_ARGS (fndecl
, TREE_TYPE (fndecl
),
4848 current_function_args_size
);
4850 /* For stdarg.h function, save info about
4851 regs and stack space used by the named args. */
4854 current_function_args_info
= args_so_far
;
4856 /* Set the rtx used for the function return value. Put this in its
4857 own variable so any optimizers that need this information don't have
4858 to include tree.h. Do this here so it gets done when an inlined
4859 function gets output. */
4861 current_function_return_rtx
= DECL_RTL (DECL_RESULT (fndecl
));
4864 /* Indicate whether REGNO is an incoming argument to the current function
4865 that was promoted to a wider mode. If so, return the RTX for the
4866 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4867 that REGNO is promoted from and whether the promotion was signed or
4870 #ifdef PROMOTE_FUNCTION_ARGS
4873 promoted_input_arg (regno
, pmode
, punsignedp
)
4875 enum machine_mode
*pmode
;
4880 for (arg
= DECL_ARGUMENTS (current_function_decl
); arg
;
4881 arg
= TREE_CHAIN (arg
))
4882 if (GET_CODE (DECL_INCOMING_RTL (arg
)) == REG
4883 && REGNO (DECL_INCOMING_RTL (arg
)) == regno
4884 && TYPE_MODE (DECL_ARG_TYPE (arg
)) == TYPE_MODE (TREE_TYPE (arg
)))
4886 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg
));
4887 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (arg
));
4889 mode
= promote_mode (TREE_TYPE (arg
), mode
, &unsignedp
, 1);
4890 if (mode
== GET_MODE (DECL_INCOMING_RTL (arg
))
4891 && mode
!= DECL_MODE (arg
))
4893 *pmode
= DECL_MODE (arg
);
4894 *punsignedp
= unsignedp
;
4895 return DECL_INCOMING_RTL (arg
);
4904 /* Compute the size and offset from the start of the stacked arguments for a
4905 parm passed in mode PASSED_MODE and with type TYPE.
4907 INITIAL_OFFSET_PTR points to the current offset into the stacked
4910 The starting offset and size for this parm are returned in *OFFSET_PTR
4911 and *ARG_SIZE_PTR, respectively.
4913 IN_REGS is non-zero if the argument will be passed in registers. It will
4914 never be set if REG_PARM_STACK_SPACE is not defined.
4916 FNDECL is the function in which the argument was defined.
4918 There are two types of rounding that are done. The first, controlled by
4919 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4920 list to be aligned to the specific boundary (in bits). This rounding
4921 affects the initial and starting offsets, but not the argument size.
4923 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4924 optionally rounds the size of the parm to PARM_BOUNDARY. The
4925 initial offset is not affected by this rounding, while the size always
4926 is and the starting offset may be. */
/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
   initial_offset_ptr is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   initial_offset_ptr.  arg_size_ptr is always positive.  */
4934 locate_and_pad_parm (passed_mode
, type
, in_regs
, fndecl
,
4935 initial_offset_ptr
, offset_ptr
, arg_size_ptr
,
4937 enum machine_mode passed_mode
;
4939 int in_regs ATTRIBUTE_UNUSED
;
4940 tree fndecl ATTRIBUTE_UNUSED
;
4941 struct args_size
*initial_offset_ptr
;
4942 struct args_size
*offset_ptr
;
4943 struct args_size
*arg_size_ptr
;
4944 struct args_size
*alignment_pad
;
4948 = type
? size_in_bytes (type
) : size_int (GET_MODE_SIZE (passed_mode
));
4949 enum direction where_pad
= FUNCTION_ARG_PADDING (passed_mode
, type
);
4950 int boundary
= FUNCTION_ARG_BOUNDARY (passed_mode
, type
);
4952 #ifdef REG_PARM_STACK_SPACE
4953 /* If we have found a stack parm before we reach the end of the
4954 area reserved for registers, skip that area. */
4957 int reg_parm_stack_space
= 0;
4959 #ifdef MAYBE_REG_PARM_STACK_SPACE
4960 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
4962 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
4964 if (reg_parm_stack_space
> 0)
4966 if (initial_offset_ptr
->var
)
4968 initial_offset_ptr
->var
4969 = size_binop (MAX_EXPR
, ARGS_SIZE_TREE (*initial_offset_ptr
),
4970 size_int (reg_parm_stack_space
));
4971 initial_offset_ptr
->constant
= 0;
4973 else if (initial_offset_ptr
->constant
< reg_parm_stack_space
)
4974 initial_offset_ptr
->constant
= reg_parm_stack_space
;
4977 #endif /* REG_PARM_STACK_SPACE */
4979 arg_size_ptr
->var
= 0;
4980 arg_size_ptr
->constant
= 0;
4982 #ifdef ARGS_GROW_DOWNWARD
4983 if (initial_offset_ptr
->var
)
4985 offset_ptr
->constant
= 0;
4986 offset_ptr
->var
= size_binop (MINUS_EXPR
, integer_zero_node
,
4987 initial_offset_ptr
->var
);
4991 offset_ptr
->constant
= - initial_offset_ptr
->constant
;
4992 offset_ptr
->var
= 0;
4994 if (where_pad
!= none
4995 && (TREE_CODE (sizetree
) != INTEGER_CST
4996 || ((TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)))
4997 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4998 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
4999 if (where_pad
!= downward
)
5000 pad_to_arg_alignment (offset_ptr
, boundary
, alignment_pad
);
5001 if (initial_offset_ptr
->var
)
5003 arg_size_ptr
->var
= size_binop (MINUS_EXPR
,
5004 size_binop (MINUS_EXPR
,
5006 initial_offset_ptr
->var
),
5011 arg_size_ptr
->constant
= (- initial_offset_ptr
->constant
5012 - offset_ptr
->constant
);
5014 #else /* !ARGS_GROW_DOWNWARD */
5015 pad_to_arg_alignment (initial_offset_ptr
, boundary
, alignment_pad
);
5016 *offset_ptr
= *initial_offset_ptr
;
5018 #ifdef PUSH_ROUNDING
5019 if (passed_mode
!= BLKmode
)
5020 sizetree
= size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree
)));
5023 /* Pad_below needs the pre-rounded size to know how much to pad below
5024 so this must be done before rounding up. */
5025 if (where_pad
== downward
5026 /* However, BLKmode args passed in regs have their padding done elsewhere.
5027 The stack slot must be able to hold the entire register. */
5028 && !(in_regs
&& passed_mode
== BLKmode
))
5029 pad_below (offset_ptr
, passed_mode
, sizetree
);
5031 if (where_pad
!= none
5032 && (TREE_CODE (sizetree
) != INTEGER_CST
5033 || ((TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)))
5034 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
5036 ADD_PARM_SIZE (*arg_size_ptr
, sizetree
);
5037 #endif /* ARGS_GROW_DOWNWARD */
5040 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5041 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5044 pad_to_arg_alignment (offset_ptr
, boundary
, alignment_pad
)
5045 struct args_size
*offset_ptr
;
5047 struct args_size
*alignment_pad
;
5049 tree save_var
= NULL_TREE
;
5050 HOST_WIDE_INT save_constant
= 0;
5052 int boundary_in_bytes
= boundary
/ BITS_PER_UNIT
;
5054 if (boundary
> PARM_BOUNDARY
&& boundary
> STACK_BOUNDARY
)
5056 save_var
= offset_ptr
->var
;
5057 save_constant
= offset_ptr
->constant
;
5060 alignment_pad
->var
= NULL_TREE
;
5061 alignment_pad
->constant
= 0;
5063 if (boundary
> BITS_PER_UNIT
)
5065 if (offset_ptr
->var
)
5068 #ifdef ARGS_GROW_DOWNWARD
5073 (ARGS_SIZE_TREE (*offset_ptr
),
5074 boundary
/ BITS_PER_UNIT
);
5075 offset_ptr
->constant
= 0; /*?*/
5076 if (boundary
> PARM_BOUNDARY
&& boundary
> STACK_BOUNDARY
)
5077 alignment_pad
->var
= size_binop (MINUS_EXPR
, offset_ptr
->var
, save_var
);
5081 offset_ptr
->constant
=
5082 #ifdef ARGS_GROW_DOWNWARD
5083 FLOOR_ROUND (offset_ptr
->constant
, boundary_in_bytes
);
5085 CEIL_ROUND (offset_ptr
->constant
, boundary_in_bytes
);
5087 if (boundary
> PARM_BOUNDARY
&& boundary
> STACK_BOUNDARY
)
5088 alignment_pad
->constant
= offset_ptr
->constant
- save_constant
;
5093 #ifndef ARGS_GROW_DOWNWARD
5095 pad_below (offset_ptr
, passed_mode
, sizetree
)
5096 struct args_size
*offset_ptr
;
5097 enum machine_mode passed_mode
;
5100 if (passed_mode
!= BLKmode
)
5102 if (GET_MODE_BITSIZE (passed_mode
) % PARM_BOUNDARY
)
5103 offset_ptr
->constant
5104 += (((GET_MODE_BITSIZE (passed_mode
) + PARM_BOUNDARY
- 1)
5105 / PARM_BOUNDARY
* PARM_BOUNDARY
/ BITS_PER_UNIT
)
5106 - GET_MODE_SIZE (passed_mode
));
5110 if (TREE_CODE (sizetree
) != INTEGER_CST
5111 || (TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)
5113 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5114 tree s2
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
5116 ADD_PARM_SIZE (*offset_ptr
, s2
);
5117 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
5123 #ifdef ARGS_GROW_DOWNWARD
5125 round_down (value
, divisor
)
5129 return size_binop (MULT_EXPR
,
5130 size_binop (FLOOR_DIV_EXPR
, value
, size_int (divisor
)),
5131 size_int (divisor
));
5135 /* Walk the tree of blocks describing the binding levels within a function
5136 and warn about uninitialized variables.
5137 This is done after calling flow_analysis and before global_alloc
5138 clobbers the pseudo-regs to hard regs. */
5141 uninitialized_vars_warning (block
)
5144 register tree decl
, sub
;
5145 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
5147 if (warn_uninitialized
5148 && TREE_CODE (decl
) == VAR_DECL
5149 /* These warnings are unreliable for and aggregates
5150 because assigning the fields one by one can fail to convince
5151 flow.c that the entire aggregate was initialized.
5152 Unions are troublesome because members may be shorter. */
5153 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl
))
5154 && DECL_RTL (decl
) != 0
5155 && GET_CODE (DECL_RTL (decl
)) == REG
5156 /* Global optimizations can make it difficult to determine if a
5157 particular variable has been initialized. However, a VAR_DECL
5158 with a nonzero DECL_INITIAL had an initializer, so do not
5159 claim it is potentially uninitialized.
5161 We do not care about the actual value in DECL_INITIAL, so we do
5162 not worry that it may be a dangling pointer. */
5163 && DECL_INITIAL (decl
) == NULL_TREE
5164 && regno_uninitialized (REGNO (DECL_RTL (decl
))))
5165 warning_with_decl (decl
,
5166 "`%s' might be used uninitialized in this function");
5168 && TREE_CODE (decl
) == VAR_DECL
5169 && DECL_RTL (decl
) != 0
5170 && GET_CODE (DECL_RTL (decl
)) == REG
5171 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
5172 warning_with_decl (decl
,
5173 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5175 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
5176 uninitialized_vars_warning (sub
);
5179 /* Do the appropriate part of uninitialized_vars_warning
5180 but for arguments instead of local variables. */
5183 setjmp_args_warning ()
5186 for (decl
= DECL_ARGUMENTS (current_function_decl
);
5187 decl
; decl
= TREE_CHAIN (decl
))
5188 if (DECL_RTL (decl
) != 0
5189 && GET_CODE (DECL_RTL (decl
)) == REG
5190 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
5191 warning_with_decl (decl
, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5194 /* If this function call setjmp, put all vars into the stack
5195 unless they were declared `register'. */
5198 setjmp_protect (block
)
5201 register tree decl
, sub
;
5202 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
5203 if ((TREE_CODE (decl
) == VAR_DECL
5204 || TREE_CODE (decl
) == PARM_DECL
)
5205 && DECL_RTL (decl
) != 0
5206 && (GET_CODE (DECL_RTL (decl
)) == REG
5207 || (GET_CODE (DECL_RTL (decl
)) == MEM
5208 && GET_CODE (XEXP (DECL_RTL (decl
), 0)) == ADDRESSOF
))
5209 /* If this variable came from an inline function, it must be
5210 that its life doesn't overlap the setjmp. If there was a
5211 setjmp in the function, it would already be in memory. We
5212 must exclude such variable because their DECL_RTL might be
5213 set to strange things such as virtual_stack_vars_rtx. */
5214 && ! DECL_FROM_INLINE (decl
)
5216 #ifdef NON_SAVING_SETJMP
5217 /* If longjmp doesn't restore the registers,
5218 don't put anything in them. */
5222 ! DECL_REGISTER (decl
)))
5223 put_var_into_stack (decl
);
5224 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
5225 setjmp_protect (sub
);
5228 /* Like the previous function, but for args instead of local variables. */
5231 setjmp_protect_args ()
5234 for (decl
= DECL_ARGUMENTS (current_function_decl
);
5235 decl
; decl
= TREE_CHAIN (decl
))
5236 if ((TREE_CODE (decl
) == VAR_DECL
5237 || TREE_CODE (decl
) == PARM_DECL
)
5238 && DECL_RTL (decl
) != 0
5239 && (GET_CODE (DECL_RTL (decl
)) == REG
5240 || (GET_CODE (DECL_RTL (decl
)) == MEM
5241 && GET_CODE (XEXP (DECL_RTL (decl
), 0)) == ADDRESSOF
))
5243 /* If longjmp doesn't restore the registers,
5244 don't put anything in them. */
5245 #ifdef NON_SAVING_SETJMP
5249 ! DECL_REGISTER (decl
)))
5250 put_var_into_stack (decl
);
5253 /* Return the context-pointer register corresponding to DECL,
5254 or 0 if it does not need one. */
5257 lookup_static_chain (decl
)
5260 tree context
= decl_function_context (decl
);
5264 || (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_NO_STATIC_CHAIN (decl
)))
5267 /* We treat inline_function_decl as an alias for the current function
5268 because that is the inline function whose vars, types, etc.
5269 are being merged into the current function.
5270 See expand_inline_function. */
5271 if (context
== current_function_decl
|| context
== inline_function_decl
)
5272 return virtual_stack_vars_rtx
;
5274 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
5275 if (TREE_PURPOSE (link
) == context
)
5276 return RTL_EXPR_RTL (TREE_VALUE (link
));
5281 /* Convert a stack slot address ADDR for variable VAR
5282 (from a containing function)
5283 into an address valid in this function (using a static chain). */
5286 fix_lexical_addr (addr
, var
)
5291 HOST_WIDE_INT displacement
;
5292 tree context
= decl_function_context (var
);
5293 struct function
*fp
;
5296 /* If this is the present function, we need not do anything. */
5297 if (context
== current_function_decl
|| context
== inline_function_decl
)
5300 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
5301 if (fp
->decl
== context
)
5307 if (GET_CODE (addr
) == ADDRESSOF
&& GET_CODE (XEXP (addr
, 0)) == MEM
)
5308 addr
= XEXP (XEXP (addr
, 0), 0);
5310 /* Decode given address as base reg plus displacement. */
5311 if (GET_CODE (addr
) == REG
)
5312 basereg
= addr
, displacement
= 0;
5313 else if (GET_CODE (addr
) == PLUS
&& GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
5314 basereg
= XEXP (addr
, 0), displacement
= INTVAL (XEXP (addr
, 1));
5318 /* We accept vars reached via the containing function's
5319 incoming arg pointer and via its stack variables pointer. */
5320 if (basereg
== fp
->internal_arg_pointer
)
5322 /* If reached via arg pointer, get the arg pointer value
5323 out of that function's stack frame.
5325 There are two cases: If a separate ap is needed, allocate a
5326 slot in the outer function for it and dereference it that way.
5327 This is correct even if the real ap is actually a pseudo.
5328 Otherwise, just adjust the offset from the frame pointer to
5331 #ifdef NEED_SEPARATE_AP
5334 if (fp
->x_arg_pointer_save_area
== 0)
5335 fp
->x_arg_pointer_save_area
5336 = assign_stack_local_1 (Pmode
, GET_MODE_SIZE (Pmode
), 0, fp
);
5338 addr
= fix_lexical_addr (XEXP (fp
->x_arg_pointer_save_area
, 0), var
);
5339 addr
= memory_address (Pmode
, addr
);
5341 base
= copy_to_reg (gen_rtx_MEM (Pmode
, addr
));
5343 displacement
+= (FIRST_PARM_OFFSET (context
) - STARTING_FRAME_OFFSET
);
5344 base
= lookup_static_chain (var
);
5348 else if (basereg
== virtual_stack_vars_rtx
)
5350 /* This is the same code as lookup_static_chain, duplicated here to
5351 avoid an extra call to decl_function_context. */
5354 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
5355 if (TREE_PURPOSE (link
) == context
)
5357 base
= RTL_EXPR_RTL (TREE_VALUE (link
));
5365 /* Use same offset, relative to appropriate static chain or argument
5367 return plus_constant (base
, displacement
);
5370 /* Return the address of the trampoline for entering nested fn FUNCTION.
5371 If necessary, allocate a trampoline (in the stack frame)
5372 and emit rtl to initialize its contents (at entry to this function). */
5375 trampoline_address (function
)
5381 struct function
*fp
;
5384 /* Find an existing trampoline and return it. */
5385 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
5386 if (TREE_PURPOSE (link
) == function
)
5388 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0));
5390 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
5391 for (link
= fp
->x_trampoline_list
; link
; link
= TREE_CHAIN (link
))
5392 if (TREE_PURPOSE (link
) == function
)
5394 tramp
= fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0),
5396 return round_trampoline_addr (tramp
);
5399 /* None exists; we must make one. */
5401 /* Find the `struct function' for the function containing FUNCTION. */
5403 fn_context
= decl_function_context (function
);
5404 if (fn_context
!= current_function_decl
5405 && fn_context
!= inline_function_decl
)
5406 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
5407 if (fp
->decl
== fn_context
)
5410 /* Allocate run-time space for this trampoline
5411 (usually in the defining function's stack frame). */
5412 #ifdef ALLOCATE_TRAMPOLINE
5413 tramp
= ALLOCATE_TRAMPOLINE (fp
);
5415 /* If rounding needed, allocate extra space
5416 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5417 #ifdef TRAMPOLINE_ALIGNMENT
5418 #define TRAMPOLINE_REAL_SIZE \
5419 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5421 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5423 tramp
= assign_stack_local_1 (BLKmode
, TRAMPOLINE_REAL_SIZE
, 0,
5427 /* Record the trampoline for reuse and note it for later initialization
5428 by expand_function_end. */
5431 push_obstacks (fp
->function_maybepermanent_obstack
,
5432 fp
->function_maybepermanent_obstack
);
5433 rtlexp
= make_node (RTL_EXPR
);
5434 RTL_EXPR_RTL (rtlexp
) = tramp
;
5435 fp
->x_trampoline_list
= tree_cons (function
, rtlexp
,
5436 fp
->x_trampoline_list
);
5441 /* Make the RTL_EXPR node temporary, not momentary, so that the
5442 trampoline_list doesn't become garbage. */
5443 int momentary
= suspend_momentary ();
5444 rtlexp
= make_node (RTL_EXPR
);
5445 resume_momentary (momentary
);
5447 RTL_EXPR_RTL (rtlexp
) = tramp
;
5448 trampoline_list
= tree_cons (function
, rtlexp
, trampoline_list
);
5451 tramp
= fix_lexical_addr (XEXP (tramp
, 0), function
);
5452 return round_trampoline_addr (tramp
);
5455 /* Given a trampoline address,
5456 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5459 round_trampoline_addr (tramp
)
5462 #ifdef TRAMPOLINE_ALIGNMENT
5463 /* Round address up to desired boundary. */
5464 rtx temp
= gen_reg_rtx (Pmode
);
5465 temp
= expand_binop (Pmode
, add_optab
, tramp
,
5466 GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1),
5467 temp
, 0, OPTAB_LIB_WIDEN
);
5468 tramp
= expand_binop (Pmode
, and_optab
, temp
,
5469 GEN_INT (- TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
),
5470 temp
, 0, OPTAB_LIB_WIDEN
);
5475 /* The functions identify_blocks and reorder_blocks provide a way to
5476 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5477 duplicate portions of the RTL code. Call identify_blocks before
5478 changing the RTL, and call reorder_blocks after. */
5480 /* Put all this function's BLOCK nodes including those that are chained
5481 onto the first block into a vector, and return it.
5482 Also store in each NOTE for the beginning or end of a block
5483 the index of that block in the vector.
5484 The arguments are BLOCK, the chain of top-level blocks of the function,
5485 and INSNS, the insn chain of the function. */
5488 identify_blocks (block
, insns
)
5496 int current_block_number
= 1;
5502 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5503 depth-first order. */
5504 n_blocks
= all_blocks (block
, 0);
5505 block_vector
= (tree
*) xmalloc (n_blocks
* sizeof (tree
));
5506 all_blocks (block
, block_vector
);
5508 block_stack
= (tree
*) xmalloc (n_blocks
* sizeof (tree
));
5510 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5511 if (GET_CODE (insn
) == NOTE
)
5513 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
5517 /* If there are more block notes than BLOCKs, something
5519 if (current_block_number
== n_blocks
)
5522 b
= block_vector
[current_block_number
++];
5523 NOTE_BLOCK (insn
) = b
;
5524 block_stack
[depth
++] = b
;
5526 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
5529 /* There are more NOTE_INSN_BLOCK_ENDs that
5530 NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
5533 NOTE_BLOCK (insn
) = block_stack
[--depth
];
5537 /* In whole-function mode, we might not have seen the whole function
5538 yet, so we might not use up all the blocks. */
5539 if (n_blocks
!= current_block_number
5540 && !cfun
->x_whole_function_mode_p
)
5543 free (block_vector
);
5547 /* Given a revised instruction chain, rebuild the tree structure of
5548 BLOCK nodes to correspond to the new order of RTL. The new block
5549 tree is inserted below TOP_BLOCK. Returns the current top-level
5553 reorder_blocks (block
, insns
)
5557 tree current_block
= block
;
5560 if (block
== NULL_TREE
)
5563 /* Prune the old trees away, so that it doesn't get in the way. */
5564 BLOCK_SUBBLOCKS (current_block
) = 0;
5565 BLOCK_CHAIN (current_block
) = 0;
5567 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5568 if (GET_CODE (insn
) == NOTE
)
5570 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
5572 tree block
= NOTE_BLOCK (insn
);
5573 /* If we have seen this block before, copy it. */
5574 if (TREE_ASM_WRITTEN (block
))
5575 block
= copy_node (block
);
5576 BLOCK_SUBBLOCKS (block
) = 0;
5577 TREE_ASM_WRITTEN (block
) = 1;
5578 BLOCK_SUPERCONTEXT (block
) = current_block
;
5579 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (current_block
);
5580 BLOCK_SUBBLOCKS (current_block
) = block
;
5581 current_block
= block
;
5583 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
5585 BLOCK_SUBBLOCKS (current_block
)
5586 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
5587 current_block
= BLOCK_SUPERCONTEXT (current_block
);
5591 BLOCK_SUBBLOCKS (current_block
)
5592 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
5593 return current_block
;
5596 /* Reverse the order of elements in the chain T of blocks,
5597 and return the new head of the chain (old last element). */
5603 register tree prev
= 0, decl
, next
;
5604 for (decl
= t
; decl
; decl
= next
)
5606 next
= BLOCK_CHAIN (decl
);
5607 BLOCK_CHAIN (decl
) = prev
;
5613 /* Count the subblocks of the list starting with BLOCK, and list them
5614 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5618 all_blocks (block
, vector
)
5626 TREE_ASM_WRITTEN (block
) = 0;
5628 /* Record this block. */
5630 vector
[n_blocks
] = block
;
5634 /* Record the subblocks, and their subblocks... */
5635 n_blocks
+= all_blocks (BLOCK_SUBBLOCKS (block
),
5636 vector
? vector
+ n_blocks
: 0);
5637 block
= BLOCK_CHAIN (block
);
5643 /* Allocate a function structure and reset its contents to the defaults. */
5645 prepare_function_start ()
5647 cfun
= (struct function
*) xcalloc (1, sizeof (struct function
));
5649 init_stmt_for_function ();
5650 init_eh_for_function ();
5652 cse_not_expected
= ! optimize
;
5654 /* Caller save not needed yet. */
5655 caller_save_needed
= 0;
5657 /* No stack slots have been made yet. */
5658 stack_slot_list
= 0;
5660 current_function_has_nonlocal_label
= 0;
5661 current_function_has_nonlocal_goto
= 0;
5663 /* There is no stack slot for handling nonlocal gotos. */
5664 nonlocal_goto_handler_slots
= 0;
5665 nonlocal_goto_stack_level
= 0;
5667 /* No labels have been declared for nonlocal use. */
5668 nonlocal_labels
= 0;
5669 nonlocal_goto_handler_labels
= 0;
5671 /* No function calls so far in this function. */
5672 function_call_count
= 0;
5674 /* No parm regs have been allocated.
5675 (This is important for output_inline_function.) */
5676 max_parm_reg
= LAST_VIRTUAL_REGISTER
+ 1;
5678 /* Initialize the RTL mechanism. */
5681 /* Initialize the queue of pending postincrement and postdecrements,
5682 and some other info in expr.c. */
5685 /* We haven't done register allocation yet. */
5688 init_varasm_status (cfun
);
5690 /* Clear out data used for inlining. */
5691 cfun
->inlinable
= 0;
5692 cfun
->original_decl_initial
= 0;
5693 cfun
->original_arg_vector
= 0;
5695 cfun
->stack_alignment_needed
= 0;
5696 #ifdef STACK_BOUNDARY
5697 cfun
->preferred_stack_boundary
= STACK_BOUNDARY
;
5700 /* Set if a call to setjmp is seen. */
5701 current_function_calls_setjmp
= 0;
5703 /* Set if a call to longjmp is seen. */
5704 current_function_calls_longjmp
= 0;
5706 current_function_calls_alloca
= 0;
5707 current_function_contains_functions
= 0;
5708 current_function_is_leaf
= 0;
5709 current_function_sp_is_unchanging
= 0;
5710 current_function_uses_only_leaf_regs
= 0;
5711 current_function_has_computed_jump
= 0;
5712 current_function_is_thunk
= 0;
5714 current_function_returns_pcc_struct
= 0;
5715 current_function_returns_struct
= 0;
5716 current_function_epilogue_delay_list
= 0;
5717 current_function_uses_const_pool
= 0;
5718 current_function_uses_pic_offset_table
= 0;
5719 current_function_cannot_inline
= 0;
5721 /* We have not yet needed to make a label to jump to for tail-recursion. */
5722 tail_recursion_label
= 0;
5724 /* We haven't had a need to make a save area for ap yet. */
5725 arg_pointer_save_area
= 0;
5727 /* No stack slots allocated yet. */
5730 /* No SAVE_EXPRs in this function yet. */
5733 /* No RTL_EXPRs in this function yet. */
5736 /* Set up to allocate temporaries. */
5739 /* Indicate that we need to distinguish between the return value of the
5740 present function and the return value of a function being called. */
5741 rtx_equal_function_value_matters
= 1;
5743 /* Indicate that we have not instantiated virtual registers yet. */
5744 virtuals_instantiated
= 0;
5746 /* Indicate we have no need of a frame pointer yet. */
5747 frame_pointer_needed
= 0;
5749 /* By default assume not varargs or stdarg. */
5750 current_function_varargs
= 0;
5751 current_function_stdarg
= 0;
5753 /* We haven't made any trampolines for this function yet. */
5754 trampoline_list
= 0;
5756 init_pending_stack_adjust ();
5757 inhibit_defer_pop
= 0;
5759 current_function_outgoing_args_size
= 0;
5761 if (init_lang_status
)
5762 (*init_lang_status
) (cfun
);
5763 if (init_machine_status
)
5764 (*init_machine_status
) (cfun
);
5767 /* Initialize the rtl expansion mechanism so that we can do simple things
5768 like generate sequences. This is used to provide a context during global
5769 initialization of some passes. */
5771 init_dummy_function_start ()
5773 prepare_function_start ();
5776 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5777 and initialize static variables for generating RTL for the statements
5781 init_function_start (subr
, filename
, line
)
5786 prepare_function_start ();
5788 /* Remember this function for later. */
5789 cfun
->next_global
= all_functions
;
5790 all_functions
= cfun
;
5792 current_function_name
= (*decl_printable_name
) (subr
, 2);
5795 /* Nonzero if this is a nested function that uses a static chain. */
5797 current_function_needs_context
5798 = (decl_function_context (current_function_decl
) != 0
5799 && ! DECL_NO_STATIC_CHAIN (current_function_decl
));
5801 /* Within function body, compute a type's size as soon it is laid out. */
5802 immediate_size_expand
++;
5804 /* Prevent ever trying to delete the first instruction of a function.
5805 Also tell final how to output a linenum before the function prologue.
5806 Note linenums could be missing, e.g. when compiling a Java .class file. */
5808 emit_line_note (filename
, line
);
5810 /* Make sure first insn is a note even if we don't want linenums.
5811 This makes sure the first insn will never be deleted.
5812 Also, final expects a note to appear there. */
5813 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
5815 /* Set flags used by final.c. */
5816 if (aggregate_value_p (DECL_RESULT (subr
)))
5818 #ifdef PCC_STATIC_STRUCT_RETURN
5819 current_function_returns_pcc_struct
= 1;
5821 current_function_returns_struct
= 1;
5824 /* Warn if this value is an aggregate type,
5825 regardless of which calling convention we are using for it. */
5826 if (warn_aggregate_return
5827 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
))))
5828 warning ("function returns an aggregate");
5830 current_function_returns_pointer
5831 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr
)));
5834 /* Make sure all values used by the optimization passes have sane
5837 init_function_for_compilation ()
5840 /* No prologue/epilogue insns yet. */
5841 prologue
= epilogue
= 0;
5844 /* Indicate that the current function uses extra args
5845 not explicitly mentioned in the argument list in any fashion. */
5850 current_function_varargs
= 1;
5853 /* Expand a call to __main at the beginning of a possible main function. */
5855 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5856 #undef HAS_INIT_SECTION
5857 #define HAS_INIT_SECTION
5861 expand_main_function ()
5863 #if !defined (HAS_INIT_SECTION)
5864 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, NAME__MAIN
), 0,
5866 #endif /* not HAS_INIT_SECTION */
5869 extern struct obstack permanent_obstack
;
5871 /* Start the RTL for a new function, and set variables used for
5873 SUBR is the FUNCTION_DECL node.
5874 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5875 the function's parameters, which must be run at any return statement. */
5878 expand_function_start (subr
, parms_have_cleanups
)
5880 int parms_have_cleanups
;
5883 rtx last_ptr
= NULL_RTX
;
5885 /* Make sure volatile mem refs aren't considered
5886 valid operands of arithmetic insns. */
5887 init_recog_no_volatile ();
5889 /* Set this before generating any memory accesses. */
5890 current_function_check_memory_usage
5891 = (flag_check_memory_usage
5892 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl
));
5894 current_function_instrument_entry_exit
5895 = (flag_instrument_function_entry_exit
5896 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr
));
5898 current_function_limit_stack
5899 = (stack_limit_rtx
!= NULL_RTX
&& ! DECL_NO_LIMIT_STACK (subr
));
5901 /* If function gets a static chain arg, store it in the stack frame.
5902 Do this first, so it gets the first stack slot offset. */
5903 if (current_function_needs_context
)
5905 last_ptr
= assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0);
5907 /* Delay copying static chain if it is not a register to avoid
5908 conflicts with regs used for parameters. */
5909 if (! SMALL_REGISTER_CLASSES
5910 || GET_CODE (static_chain_incoming_rtx
) == REG
)
5911 emit_move_insn (last_ptr
, static_chain_incoming_rtx
);
5914 /* If the parameters of this function need cleaning up, get a label
5915 for the beginning of the code which executes those cleanups. This must
5916 be done before doing anything with return_label. */
5917 if (parms_have_cleanups
)
5918 cleanup_label
= gen_label_rtx ();
5922 /* Make the label for return statements to jump to, if this machine
5923 does not have a one-instruction return and uses an epilogue,
5924 or if it returns a structure, or if it has parm cleanups. */
5926 if (cleanup_label
== 0 && HAVE_return
5927 && ! current_function_instrument_entry_exit
5928 && ! current_function_returns_pcc_struct
5929 && ! (current_function_returns_struct
&& ! optimize
))
5932 return_label
= gen_label_rtx ();
5934 return_label
= gen_label_rtx ();
5937 /* Initialize rtx used to return the value. */
5938 /* Do this before assign_parms so that we copy the struct value address
5939 before any library calls that assign parms might generate. */
5941 /* Decide whether to return the value in memory or in a register. */
5942 if (aggregate_value_p (DECL_RESULT (subr
)))
5944 /* Returning something that won't go in a register. */
5945 register rtx value_address
= 0;
5947 #ifdef PCC_STATIC_STRUCT_RETURN
5948 if (current_function_returns_pcc_struct
)
5950 int size
= int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr
)));
5951 value_address
= assemble_static_space (size
);
5956 /* Expect to be passed the address of a place to store the value.
5957 If it is passed as an argument, assign_parms will take care of
5959 if (struct_value_incoming_rtx
)
5961 value_address
= gen_reg_rtx (Pmode
);
5962 emit_move_insn (value_address
, struct_value_incoming_rtx
);
5967 DECL_RTL (DECL_RESULT (subr
))
5968 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr
)), value_address
);
5969 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr
)),
5970 AGGREGATE_TYPE_P (TREE_TYPE
5975 else if (DECL_MODE (DECL_RESULT (subr
)) == VOIDmode
)
5976 /* If return mode is void, this decl rtl should not be used. */
5977 DECL_RTL (DECL_RESULT (subr
)) = 0;
5978 else if (parms_have_cleanups
|| current_function_instrument_entry_exit
)
5980 /* If function will end with cleanup code for parms,
5981 compute the return values into a pseudo reg,
5982 which we will copy into the true return register
5983 after the cleanups are done. */
5985 enum machine_mode mode
= DECL_MODE (DECL_RESULT (subr
));
5987 #ifdef PROMOTE_FUNCTION_RETURN
5988 tree type
= TREE_TYPE (DECL_RESULT (subr
));
5989 int unsignedp
= TREE_UNSIGNED (type
);
5991 mode
= promote_mode (type
, mode
, &unsignedp
, 1);
5994 DECL_RTL (DECL_RESULT (subr
)) = gen_reg_rtx (mode
);
5997 /* Scalar, returned in a register. */
5999 #ifdef FUNCTION_OUTGOING_VALUE
6000 DECL_RTL (DECL_RESULT (subr
))
6001 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr
)), subr
);
6003 DECL_RTL (DECL_RESULT (subr
))
6004 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr
)), subr
);
6007 /* Mark this reg as the function's return value. */
6008 if (GET_CODE (DECL_RTL (DECL_RESULT (subr
))) == REG
)
6010 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr
))) = 1;
6011 /* Needed because we may need to move this to memory
6012 in case it's a named return value whose address is taken. */
6013 DECL_REGISTER (DECL_RESULT (subr
)) = 1;
6017 /* Initialize rtx for parameters and local variables.
6018 In some cases this requires emitting insns. */
6020 assign_parms (subr
);
6022 /* Copy the static chain now if it wasn't a register. The delay is to
6023 avoid conflicts with the parameter passing registers. */
6025 if (SMALL_REGISTER_CLASSES
&& current_function_needs_context
)
6026 if (GET_CODE (static_chain_incoming_rtx
) != REG
)
6027 emit_move_insn (last_ptr
, static_chain_incoming_rtx
);
6029 /* The following was moved from init_function_start.
6030 The move is supposed to make sdb output more accurate. */
6031 /* Indicate the beginning of the function body,
6032 as opposed to parm setup. */
6033 emit_note (NULL_PTR
, NOTE_INSN_FUNCTION_BEG
);
6035 if (GET_CODE (get_last_insn ()) != NOTE
)
6036 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
6037 parm_birth_insn
= get_last_insn ();
6039 context_display
= 0;
6040 if (current_function_needs_context
)
6042 /* Fetch static chain values for containing functions. */
6043 tem
= decl_function_context (current_function_decl
);
6044 /* Copy the static chain pointer into a pseudo. If we have
6045 small register classes, copy the value from memory if
6046 static_chain_incoming_rtx is a REG. */
6049 /* If the static chain originally came in a register, put it back
6050 there, then move it out in the next insn. The reason for
6051 this peculiar code is to satisfy function integration. */
6052 if (SMALL_REGISTER_CLASSES
6053 && GET_CODE (static_chain_incoming_rtx
) == REG
)
6054 emit_move_insn (static_chain_incoming_rtx
, last_ptr
);
6055 last_ptr
= copy_to_reg (static_chain_incoming_rtx
);
6060 tree rtlexp
= make_node (RTL_EXPR
);
6062 RTL_EXPR_RTL (rtlexp
) = last_ptr
;
6063 context_display
= tree_cons (tem
, rtlexp
, context_display
);
6064 tem
= decl_function_context (tem
);
6067 /* Chain thru stack frames, assuming pointer to next lexical frame
6068 is found at the place we always store it. */
6069 #ifdef FRAME_GROWS_DOWNWARD
6070 last_ptr
= plus_constant (last_ptr
, - GET_MODE_SIZE (Pmode
));
6072 last_ptr
= copy_to_reg (gen_rtx_MEM (Pmode
,
6073 memory_address (Pmode
,
6076 /* If we are not optimizing, ensure that we know that this
6077 piece of context is live over the entire function. */
6079 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, last_ptr
,
6084 if (current_function_instrument_entry_exit
)
6086 rtx fun
= DECL_RTL (current_function_decl
);
6087 if (GET_CODE (fun
) == MEM
)
6088 fun
= XEXP (fun
, 0);
6091 emit_library_call (profile_function_entry_libfunc
, 0, VOIDmode
, 2,
6093 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS
,
6095 hard_frame_pointer_rtx
),
6099 /* After the display initializations is where the tail-recursion label
6100 should go, if we end up needing one. Ensure we have a NOTE here
6101 since some things (like trampolines) get placed before this. */
6102 tail_recursion_reentry
= emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
6104 /* Evaluate now the sizes of any types declared among the arguments. */
6105 for (tem
= nreverse (get_pending_sizes ()); tem
; tem
= TREE_CHAIN (tem
))
6107 expand_expr (TREE_VALUE (tem
), const0_rtx
, VOIDmode
,
6108 EXPAND_MEMORY_USE_BAD
);
6109 /* Flush the queue in case this parameter declaration has
6114 /* Make sure there is a line number after the function entry setup code. */
6115 force_next_line_note ();
6118 /* Undo the effects of init_dummy_function_start. */
6120 expand_dummy_function_end ()
6122 /* End any sequences that failed to be closed due to syntax errors. */
6123 while (in_sequence_p ())
6126 /* Outside function body, can't compute type's actual size
6127 until next function's body starts. */
6129 free_after_parsing (cfun
);
6130 free_after_compilation (cfun
);
6135 /* Call DOIT for each hard register used as a return value from
6136 the current function. */
6139 diddle_return_value (doit
, arg
)
6140 void (*doit
) PARAMS ((rtx
, void *));
6143 rtx outgoing
= current_function_return_rtx
;
6148 if (GET_CODE (outgoing
) == REG
6149 && REGNO (outgoing
) >= FIRST_PSEUDO_REGISTER
)
6151 tree type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
6152 #ifdef FUNCTION_OUTGOING_VALUE
6153 outgoing
= FUNCTION_OUTGOING_VALUE (type
, current_function_decl
);
6155 outgoing
= FUNCTION_VALUE (type
, current_function_decl
);
6157 /* If this is a BLKmode structure being returned in registers, then use
6158 the mode computed in expand_return. */
6159 if (GET_MODE (outgoing
) == BLKmode
)
6161 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl
))));
6164 if (GET_CODE (outgoing
) == REG
)
6165 (*doit
) (outgoing
, arg
);
6166 else if (GET_CODE (outgoing
) == PARALLEL
)
6170 for (i
= 0; i
< XVECLEN (outgoing
, 0); i
++)
6172 rtx x
= XEXP (XVECEXP (outgoing
, 0, i
), 0);
6174 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
6181 do_clobber_return_reg (reg
, arg
)
6183 void *arg ATTRIBUTE_UNUSED
;
6185 emit_insn (gen_rtx_CLOBBER (VOIDmode
, reg
));
6189 clobber_return_register ()
6191 diddle_return_value (do_clobber_return_reg
, NULL
);
6195 do_use_return_reg (reg
, arg
)
6197 void *arg ATTRIBUTE_UNUSED
;
6199 emit_insn (gen_rtx_USE (VOIDmode
, reg
));
6203 use_return_register ()
6205 diddle_return_value (do_use_return_reg
, NULL
);
6208 /* Generate RTL for the end of the current function.
6209 FILENAME and LINE are the current position in the source file.
6211 It is up to language-specific callers to do cleanups for parameters--
6212 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6215 expand_function_end (filename
, line
, end_bindings
)
6222 #ifdef TRAMPOLINE_TEMPLATE
6223 static rtx initial_trampoline
;
6226 finish_expr_for_function ();
6228 #ifdef NON_SAVING_SETJMP
6229 /* Don't put any variables in registers if we call setjmp
6230 on a machine that fails to restore the registers. */
6231 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
6233 if (DECL_INITIAL (current_function_decl
) != error_mark_node
)
6234 setjmp_protect (DECL_INITIAL (current_function_decl
));
6236 setjmp_protect_args ();
6240 /* Save the argument pointer if a save area was made for it. */
6241 if (arg_pointer_save_area
)
6243 /* arg_pointer_save_area may not be a valid memory address, so we
6244 have to check it and fix it if necessary. */
6247 emit_move_insn (validize_mem (arg_pointer_save_area
),
6248 virtual_incoming_args_rtx
);
6249 seq
= gen_sequence ();
6251 emit_insn_before (seq
, tail_recursion_reentry
);
6254 /* Initialize any trampolines required by this function. */
6255 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
6257 tree function
= TREE_PURPOSE (link
);
6258 rtx context ATTRIBUTE_UNUSED
= lookup_static_chain (function
);
6259 rtx tramp
= RTL_EXPR_RTL (TREE_VALUE (link
));
6260 #ifdef TRAMPOLINE_TEMPLATE
6265 #ifdef TRAMPOLINE_TEMPLATE
6266 /* First make sure this compilation has a template for
6267 initializing trampolines. */
6268 if (initial_trampoline
== 0)
6270 end_temporary_allocation ();
6272 = gen_rtx_MEM (BLKmode
, assemble_trampoline_template ());
6273 resume_temporary_allocation ();
6275 ggc_add_rtx_root (&initial_trampoline
, 1);
6279 /* Generate insns to initialize the trampoline. */
6281 tramp
= round_trampoline_addr (XEXP (tramp
, 0));
6282 #ifdef TRAMPOLINE_TEMPLATE
6283 blktramp
= change_address (initial_trampoline
, BLKmode
, tramp
);
6284 emit_block_move (blktramp
, initial_trampoline
,
6285 GEN_INT (TRAMPOLINE_SIZE
),
6286 TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
6288 INITIALIZE_TRAMPOLINE (tramp
, XEXP (DECL_RTL (function
), 0), context
);
6292 /* Put those insns at entry to the containing function (this one). */
6293 emit_insns_before (seq
, tail_recursion_reentry
);
6296 /* If we are doing stack checking and this function makes calls,
6297 do a stack probe at the start of the function to ensure we have enough
6298 space for another stack frame. */
6299 if (flag_stack_check
&& ! STACK_CHECK_BUILTIN
)
6303 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
6304 if (GET_CODE (insn
) == CALL_INSN
)
6307 probe_stack_range (STACK_CHECK_PROTECT
,
6308 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE
));
6311 emit_insns_before (seq
, tail_recursion_reentry
);
6316 /* Warn about unused parms if extra warnings were specified. */
6317 if (warn_unused
&& extra_warnings
)
6321 for (decl
= DECL_ARGUMENTS (current_function_decl
);
6322 decl
; decl
= TREE_CHAIN (decl
))
6323 if (! TREE_USED (decl
) && TREE_CODE (decl
) == PARM_DECL
6324 && DECL_NAME (decl
) && ! DECL_ARTIFICIAL (decl
))
6325 warning_with_decl (decl
, "unused parameter `%s'");
6328 /* Delete handlers for nonlocal gotos if nothing uses them. */
6329 if (nonlocal_goto_handler_slots
!= 0
6330 && ! current_function_has_nonlocal_label
)
6333 /* End any sequences that failed to be closed due to syntax errors. */
6334 while (in_sequence_p ())
6337 /* Outside function body, can't compute type's actual size
6338 until next function's body starts. */
6339 immediate_size_expand
--;
6341 clear_pending_stack_adjust ();
6342 do_pending_stack_adjust ();
6344 /* Mark the end of the function body.
6345 If control reaches this insn, the function can drop through
6346 without returning a value. */
6347 emit_note (NULL_PTR
, NOTE_INSN_FUNCTION_END
);
6349 /* Must mark the last line number note in the function, so that the test
6350 coverage code can avoid counting the last line twice. This just tells
6351 the code to ignore the immediately following line note, since there
6352 already exists a copy of this note somewhere above. This line number
6353 note is still needed for debugging though, so we can't delete it. */
6354 if (flag_test_coverage
)
6355 emit_note (NULL_PTR
, NOTE_REPEATED_LINE_NUMBER
);
6357 /* Output a linenumber for the end of the function.
6358 SDB depends on this. */
6359 emit_line_note_force (filename
, line
);
6361 /* Output the label for the actual return from the function,
6362 if one is expected. This happens either because a function epilogue
6363 is used instead of a return instruction, or because a return was done
6364 with a goto in order to run local cleanups, or because of pcc-style
6365 structure returning. */
6369 /* Before the return label, clobber the return registers so that
6370 they are not propogated live to the rest of the function. This
6371 can only happen with functions that drop through; if there had
6372 been a return statement, there would have either been a return
6373 rtx, or a jump to the return label. */
6374 clobber_return_register ();
6376 emit_label (return_label
);
6379 /* C++ uses this. */
6381 expand_end_bindings (0, 0, 0);
6383 /* Now handle any leftover exception regions that may have been
6384 created for the parameters. */
6386 rtx last
= get_last_insn ();
6389 expand_leftover_cleanups ();
6391 /* If there are any catch_clauses remaining, output them now. */
6392 emit_insns (catch_clauses
);
6393 catch_clauses
= NULL_RTX
;
6394 /* If the above emitted any code, may sure we jump around it. */
6395 if (last
!= get_last_insn ())
6397 label
= gen_label_rtx ();
6398 last
= emit_jump_insn_after (gen_jump (label
), last
);
6399 last
= emit_barrier_after (last
);
6404 if (current_function_instrument_entry_exit
)
6406 rtx fun
= DECL_RTL (current_function_decl
);
6407 if (GET_CODE (fun
) == MEM
)
6408 fun
= XEXP (fun
, 0);
6411 emit_library_call (profile_function_exit_libfunc
, 0, VOIDmode
, 2,
6413 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS
,
6415 hard_frame_pointer_rtx
),
6419 /* If we had calls to alloca, and this machine needs
6420 an accurate stack pointer to exit the function,
6421 insert some code to save and restore the stack pointer. */
6422 #ifdef EXIT_IGNORE_STACK
6423 if (! EXIT_IGNORE_STACK
)
6425 if (current_function_calls_alloca
)
6429 emit_stack_save (SAVE_FUNCTION
, &tem
, parm_birth_insn
);
6430 emit_stack_restore (SAVE_FUNCTION
, tem
, NULL_RTX
);
6433 /* If scalar return value was computed in a pseudo-reg,
6434 copy that to the hard return register. */
6435 if (DECL_RTL (DECL_RESULT (current_function_decl
)) != 0
6436 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl
))) == REG
6437 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl
)))
6438 >= FIRST_PSEUDO_REGISTER
))
6440 rtx real_decl_result
;
6442 #ifdef FUNCTION_OUTGOING_VALUE
6444 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl
)),
6445 current_function_decl
);
6448 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl
)),
6449 current_function_decl
);
6451 REG_FUNCTION_VALUE_P (real_decl_result
) = 1;
6452 /* If this is a BLKmode structure being returned in registers, then use
6453 the mode computed in expand_return. */
6454 if (GET_MODE (real_decl_result
) == BLKmode
)
6455 PUT_MODE (real_decl_result
,
6456 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl
))));
6457 emit_move_insn (real_decl_result
,
6458 DECL_RTL (DECL_RESULT (current_function_decl
)));
6460 /* The delay slot scheduler assumes that current_function_return_rtx
6461 holds the hard register containing the return value, not a temporary
6463 current_function_return_rtx
= real_decl_result
;
6466 /* If returning a structure, arrange to return the address of the value
6467 in a place where debuggers expect to find it.
6469 If returning a structure PCC style,
6470 the caller also depends on this value.
6471 And current_function_returns_pcc_struct is not necessarily set. */
6472 if (current_function_returns_struct
6473 || current_function_returns_pcc_struct
)
6475 rtx value_address
= XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6476 tree type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
6477 #ifdef FUNCTION_OUTGOING_VALUE
6479 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type
),
6480 current_function_decl
);
6483 = FUNCTION_VALUE (build_pointer_type (type
),
6484 current_function_decl
);
6487 /* Mark this as a function return value so integrate will delete the
6488 assignment and USE below when inlining this function. */
6489 REG_FUNCTION_VALUE_P (outgoing
) = 1;
6491 emit_move_insn (outgoing
, value_address
);
6494 /* ??? This should no longer be necessary since stupid is no longer with
6495 us, but there are some parts of the compiler (eg reload_combine, and
6496 sh mach_dep_reorg) that still try and compute their own lifetime info
6497 instead of using the general framework. */
6498 use_return_register ();
6500 /* If this is an implementation of __throw, do what's necessary to
6501 communicate between __builtin_eh_return and the epilogue. */
6502 expand_eh_return ();
6504 /* Output a return insn if we are using one.
6505 Otherwise, let the rtl chain end here, to drop through
6506 into the epilogue. */
6511 emit_jump_insn (gen_return ());
6516 /* Fix up any gotos that jumped out to the outermost
6517 binding level of the function.
6518 Must follow emitting RETURN_LABEL. */
6520 /* If you have any cleanups to do at this point,
6521 and they need to create temporary variables,
6522 then you will lose. */
6523 expand_fixups (get_insns ());
6526 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6527 or a single insn). */
6530 record_insns (insns
)
6535 if (GET_CODE (insns
) == SEQUENCE
)
6537 int len
= XVECLEN (insns
, 0);
6538 vec
= (int *) oballoc ((len
+ 1) * sizeof (int));
6541 vec
[len
] = INSN_UID (XVECEXP (insns
, 0, len
));
6545 vec
= (int *) oballoc (2 * sizeof (int));
6546 vec
[0] = INSN_UID (insns
);
6552 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6555 contains (insn
, vec
)
6561 if (GET_CODE (insn
) == INSN
6562 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
6565 for (i
= XVECLEN (PATTERN (insn
), 0) - 1; i
>= 0; i
--)
6566 for (j
= 0; vec
[j
]; j
++)
6567 if (INSN_UID (XVECEXP (PATTERN (insn
), 0, i
)) == vec
[j
])
6573 for (j
= 0; vec
[j
]; j
++)
6574 if (INSN_UID (insn
) == vec
[j
])
6581 prologue_epilogue_contains (insn
)
6584 if (prologue
&& contains (insn
, prologue
))
6586 if (epilogue
&& contains (insn
, epilogue
))
6592 /* Insert gen_return at the end of block BB. This also means updating
6593 block_for_insn appropriately. */
6596 emit_return_into_block (bb
)
6601 end
= emit_jump_insn_after (gen_return (), bb
->end
);
6602 p
= NEXT_INSN (bb
->end
);
6605 set_block_for_insn (p
, bb
);
6612 #endif /* HAVE_return */
6614 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6615 this into place with notes indicating where the prologue ends and where
6616 the epilogue begins. Update the basic block information when possible. */
6619 thread_prologue_and_epilogue_insns (f
)
6620 rtx f ATTRIBUTE_UNUSED
;
6626 #ifdef HAVE_prologue
6632 seq
= gen_prologue();
6635 /* Retain a map of the prologue insns. */
6636 if (GET_CODE (seq
) != SEQUENCE
)
6638 prologue
= record_insns (seq
);
6639 emit_note (NULL
, NOTE_INSN_PROLOGUE_END
);
6641 /* GDB handles `break f' by setting a breakpoint on the first
6642 line note *after* the prologue. That means that we should
6643 insert a line note here; otherwise, if the next line note
6644 comes part way into the next block, GDB will skip all the way
6646 insn
= next_nonnote_insn (f
);
6649 if (GET_CODE (insn
) == NOTE
6650 && NOTE_LINE_NUMBER (insn
) >= 0)
6652 emit_line_note_force (NOTE_SOURCE_FILE (insn
),
6653 NOTE_LINE_NUMBER (insn
));
6657 insn
= PREV_INSN (insn
);
6660 seq
= gen_sequence ();
6663 /* If optimization is off, and perhaps in an empty function,
6664 the entry block will have no successors. */
6665 if (ENTRY_BLOCK_PTR
->succ
)
6667 /* Can't deal with multiple successsors of the entry block. */
6668 if (ENTRY_BLOCK_PTR
->succ
->succ_next
)
6671 insert_insn_on_edge (seq
, ENTRY_BLOCK_PTR
->succ
);
6675 emit_insn_after (seq
, f
);
6679 /* If the exit block has no non-fake predecessors, we don't need
6681 for (e
= EXIT_BLOCK_PTR
->pred
; e
; e
= e
->pred_next
)
6682 if ((e
->flags
& EDGE_FAKE
) == 0)
6688 if (optimize
&& HAVE_return
)
6690 /* If we're allowed to generate a simple return instruction,
6691 then by definition we don't need a full epilogue. Examine
6692 the block that falls through to EXIT. If it does not
6693 contain any code, examine its predecessors and try to
6694 emit (conditional) return instructions. */
6700 for (e
= EXIT_BLOCK_PTR
->pred
; e
; e
= e
->pred_next
)
6701 if (e
->flags
& EDGE_FALLTHRU
)
6707 /* Verify that there are no active instructions in the last block. */
6709 while (label
&& GET_CODE (label
) != CODE_LABEL
)
6711 if (active_insn_p (label
))
6713 label
= PREV_INSN (label
);
6716 if (last
->head
== label
&& GET_CODE (label
) == CODE_LABEL
)
6718 for (e
= last
->pred
; e
; e
= e_next
)
6720 basic_block bb
= e
->src
;
6723 e_next
= e
->pred_next
;
6724 if (bb
== ENTRY_BLOCK_PTR
)
6728 if ((GET_CODE (jump
) != JUMP_INSN
) || JUMP_LABEL (jump
) != label
)
6731 /* If we have an unconditional jump, we can replace that
6732 with a simple return instruction. */
6733 if (simplejump_p (jump
))
6735 emit_return_into_block (bb
);
6736 flow_delete_insn (jump
);
6739 /* If we have a conditional jump, we can try to replace
6740 that with a conditional return instruction. */
6741 else if (condjump_p (jump
))
6745 ret
= SET_SRC (PATTERN (jump
));
6746 if (GET_CODE (XEXP (ret
, 1)) == LABEL_REF
)
6747 loc
= &XEXP (ret
, 1);
6749 loc
= &XEXP (ret
, 2);
6750 ret
= gen_rtx_RETURN (VOIDmode
);
6752 if (! validate_change (jump
, loc
, ret
, 0))
6754 if (JUMP_LABEL (jump
))
6755 LABEL_NUSES (JUMP_LABEL (jump
))--;
6757 /* If this block has only one successor, it both jumps
6758 and falls through to the fallthru block, so we can't
6760 if (bb
->succ
->succ_next
== NULL
)
6766 /* Fix up the CFG for the successful change we just made. */
6768 make_edge (NULL
, bb
, EXIT_BLOCK_PTR
, 0);
6771 /* Emit a return insn for the exit fallthru block. Whether
6772 this is still reachable will be determined later. */
6774 emit_barrier_after (last
->end
);
6775 emit_return_into_block (last
);
6779 /* The exit block wasn't empty. We have to use insert_insn_on_edge,
6780 as it may be the exit block can go elsewhere as well
6783 emit_jump_insn (gen_return ());
6784 seq
= gen_sequence ();
6786 insert_insn_on_edge (seq
, e
);
6792 #ifdef HAVE_epilogue
6795 /* Find the edge that falls through to EXIT. Other edges may exist
6796 due to RETURN instructions, but those don't need epilogues.
6797 There really shouldn't be a mixture -- either all should have
6798 been converted or none, however... */
6800 for (e
= EXIT_BLOCK_PTR
->pred
; e
; e
= e
->pred_next
)
6801 if (e
->flags
& EDGE_FALLTHRU
)
6807 emit_note (NULL
, NOTE_INSN_EPILOGUE_BEG
);
6809 seq
= gen_epilogue ();
6810 emit_jump_insn (seq
);
6812 /* Retain a map of the epilogue insns. */
6813 if (GET_CODE (seq
) != SEQUENCE
)
6815 epilogue
= record_insns (seq
);
6817 seq
= gen_sequence ();
6820 insert_insn_on_edge (seq
, e
);
6827 commit_edge_insertions ();
6830 /* Reposition the prologue-end and epilogue-begin notes after instruction
6831 scheduling and delayed branch scheduling. */
/* NOTE(review): This region is a whitespace-mangled extraction of a K&R-C
   function.  The fused numbers (e.g. "6834") are the original file's own
   line numbers, and several interior lines (braces, local declarations)
   were dropped by the extractor.  All code tokens below are byte-identical
   to the extracted text; only comments have been added.  */
/* reposition_prologue_and_epilogue_notes: after instruction scheduling and
   delayed-branch scheduling have shuffled insns, move the
   NOTE_INSN_PROLOGUE_END note to just after the last prologue insn and the
   NOTE_INSN_EPILOGUE_BEG note to just before the first epilogue insn.
   The prologue/epilogue insns are recognized via the recorded `prologue'
   and `epilogue' insn arrays, using contains().  Compiled out entirely
   unless the target defines HAVE_prologue or HAVE_epilogue.  */
6834 reposition_prologue_and_epilogue_notes (f
)
6835 rtx f ATTRIBUTE_UNUSED
;
6837 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6838 /* Reposition the prologue and epilogue notes. */
6845 register rtx insn
, note
= 0;
6847 /* Scan from the beginning until we reach the last prologue insn.
6848 We apparently can't depend on basic_block_{head,end} after
/* `len' counts the recorded prologue insns (the array is terminated by a
   null entry); it is decremented by contains() as each is passed.  */
6850 for (len
= 0; prologue
[len
]; len
++)
6852 for (insn
= f
; len
&& insn
; insn
= NEXT_INSN (insn
))
6854 if (GET_CODE (insn
) == NOTE
)
/* Remember the prologue-end note if we happen to walk over it.  */
6856 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_PROLOGUE_END
)
6859 else if ((len
-= contains (insn
, prologue
)) == 0)
6862 /* Find the prologue-end note if we haven't already, and
6863 move it to just after the last prologue insn. */
6866 for (note
= insn
; (note
= NEXT_INSN (note
));)
6867 if (GET_CODE (note
) == NOTE
6868 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_PROLOGUE_END
)
6872 next
= NEXT_INSN (note
);
6874 /* Whether or not we can depend on BLOCK_HEAD,
6875 attempt to keep it up-to-date. */
6876 if (BLOCK_HEAD (0) == note
)
6877 BLOCK_HEAD (0) = next
;
/* NOTE(review): the note must have been unlinked somewhere in the dropped
   lines before being re-added here -- confirm against the original file.  */
6880 add_insn_after (note
, insn
);
/* Second half: mirror of the above for the epilogue, scanning backwards
   from the last insn toward the first epilogue insn.  */
6887 register rtx insn
, note
= 0;
6889 /* Scan from the end until we reach the first epilogue insn.
6890 We apparently can't depend on basic_block_{head,end} after
6892 for (len
= 0; epilogue
[len
]; len
++)
6894 for (insn
= get_last_insn (); len
&& insn
; insn
= PREV_INSN (insn
))
6896 if (GET_CODE (insn
) == NOTE
)
6898 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EPILOGUE_BEG
)
6901 else if ((len
-= contains (insn
, epilogue
)) == 0)
6903 /* Find the epilogue-begin note if we haven't already, and
6904 move it to just before the first epilogue insn. */
6907 for (note
= insn
; (note
= PREV_INSN (note
));)
6908 if (GET_CODE (note
) == NOTE
6909 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_EPILOGUE_BEG
)
6913 /* Whether or not we can depend on BLOCK_HEAD,
6914 attempt to keep it up-to-date. */
/* Here the epilogue note lands at the head of the last basic block.  */
6916 && BLOCK_HEAD (n_basic_blocks
-1) == insn
)
6917 BLOCK_HEAD (n_basic_blocks
-1) = note
;
6920 add_insn_before (note
, insn
);
6925 #endif /* HAVE_prologue or HAVE_epilogue */
/* NOTE(review): mangled extraction -- the `mark_temp_slot' function-header
   line (orig. line ~6931) was dropped by the extractor; only the parameter
   declaration and body fragments remain.  Code tokens are byte-identical;
   comments only.  */
6928 /* Mark T for GC. */
/* Marks the rtx/tree members of a struct temp_slot so the garbage
   collector keeps them alive: the slot rtx, its cached address rtx, and
   the owning RTL_EXPR tree.  Presumably iterates the whole ->next chain;
   the loop line is not visible here -- TODO confirm against the original.  */
6932 struct temp_slot
*t
;
6936 ggc_mark_rtx (t
->slot
);
6937 ggc_mark_rtx (t
->address
);
6938 ggc_mark_tree (t
->rtl_expr
);
/* NOTE(review): mangled extraction -- return type, braces, the loop body
   marking the parm_reg_stack_loc array (orig. lines ~6960-6962), and the
   fixup_var_refs_queue loop header were dropped.  Code tokens are
   byte-identical; comments only.  */
6944 /* Mark P for GC. */
/* Marks every GC-managed object reachable from a struct function P:
   argument bookkeeping rtx, labels, stack slot list, nonlocal-goto state,
   temp slots (via mark_temp_slot) and the pending variable-fixup queue.  */
6947 mark_function_status (p
)
6956 ggc_mark_rtx (p
->arg_offset_rtx
);
/* Mark each entry of the parm-register stack-slot array; the loop body
   is in the dropped lines -- presumably marks *r for x_max_parm_reg
   entries (TODO confirm).  */
6958 if (p
->x_parm_reg_stack_loc
)
6959 for (i
= p
->x_max_parm_reg
, r
= p
->x_parm_reg_stack_loc
;
6963 ggc_mark_rtx (p
->return_rtx
);
6964 ggc_mark_rtx (p
->x_cleanup_label
);
6965 ggc_mark_rtx (p
->x_return_label
);
6966 ggc_mark_rtx (p
->x_save_expr_regs
);
6967 ggc_mark_rtx (p
->x_stack_slot_list
);
6968 ggc_mark_rtx (p
->x_parm_birth_insn
);
6969 ggc_mark_rtx (p
->x_tail_recursion_label
);
6970 ggc_mark_rtx (p
->x_tail_recursion_reentry
);
6971 ggc_mark_rtx (p
->internal_arg_pointer
);
6972 ggc_mark_rtx (p
->x_arg_pointer_save_area
);
6973 ggc_mark_tree (p
->x_rtl_expr_chain
);
6974 ggc_mark_rtx (p
->x_last_parm_insn
);
6975 ggc_mark_tree (p
->x_context_display
);
6976 ggc_mark_tree (p
->x_trampoline_list
);
6977 ggc_mark_rtx (p
->epilogue_delay_list
);
/* Delegate the temp-slot chain to its own marker.  */
6979 mark_temp_slot (p
->x_temp_slots
);
/* Walk the queue of variables awaiting reference fixup, marking each
   modified rtx (the loop construct is in the dropped lines).  */
6982 struct var_refs_queue
*q
= p
->fixup_var_refs_queue
;
6985 ggc_mark_rtx (q
->modified
);
6990 ggc_mark_rtx (p
->x_nonlocal_goto_handler_slots
);
6991 ggc_mark_rtx (p
->x_nonlocal_goto_handler_labels
);
6992 ggc_mark_rtx (p
->x_nonlocal_goto_stack_level
);
6993 ggc_mark_tree (p
->x_nonlocal_labels
);
/* NOTE(review): mangled extraction -- the storage-class/return-type line
   and the K&R parameter declaration (presumably `void *arg;' given the
   cast below -- TODO confirm) were dropped.  Code tokens byte-identical;
   comments only.  */
6996 /* Mark the function chain ARG (which is really a struct function **)
/* GC root marker: walks the global chain of struct function objects via
   ->next_global, marking each function's decl tree and delegating the
   per-subsystem state (eh/stmt/expr/emit/varasm) to dedicated markers.  */
7000 mark_function_chain (arg
)
7003 struct function
*f
= *(struct function
**) arg
;
7005 for (; f
; f
= f
->next_global
)
7007 ggc_mark_tree (f
->decl
);
7009 mark_function_status (f
);
7010 mark_eh_status (f
->eh
);
7011 mark_stmt_status (f
->stmt
);
7012 mark_expr_status (f
->expr
);
7013 mark_emit_status (f
->emit
);
7014 mark_varasm_status (f
->varasm
);
/* Optional hooks: target- and language-specific markers are function
   pointers that may be null.  */
7016 if (mark_machine_status
)
7017 (*mark_machine_status
) (f
);
7018 if (mark_lang_status
)
7019 (*mark_lang_status
) (f
);
7021 if (f
->original_arg_vector
)
7022 ggc_mark_rtvec ((rtvec
) f
->original_arg_vector
);
7023 if (f
->original_decl_initial
)
7024 ggc_mark_tree (f
->original_decl_initial
);
/* NOTE(review): mangled extraction -- the return-type line and braces
   were dropped; the single statement is intact.  Comments only.  */
7028 /* Called once, at initialization, to initialize function.c. */
/* Registers `all_functions' as a garbage-collection root;
   mark_function_chain is the callback used to mark the chained
   struct function objects it points to.  */
7031 init_function_once ()
7033 ggc_add_root (&all_functions
, 1, sizeof all_functions
,
7034 mark_function_chain
);