1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 /* This file handles the generation of rtl code from tree structure
22 at the level of the function as a whole.
23 It creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
26 `expand_function_start' is called at the beginning of a function,
27 before the function body is parsed, and `expand_function_end' is
28 called after parsing the body.
30 Call `assign_stack_local' to allocate a stack slot for a local variable.
31 This is usually done during the RTL generation for the function body,
32 but it can also be done in the reload pass when a pseudo-register does
33 not get a hard register.
35 Call `put_var_into_stack' when you learn, belatedly, that a variable
36 previously given a pseudo-register must in fact go in the stack.
37 This function changes the DECL_RTL to be a stack slot instead of a reg
38 then scans all the RTL instructions so far generated to correct them. */
48 #include "insn-flags.h"
50 #include "insn-codes.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
76 /* Number of bytes of args popped by function being compiled on its return.
77 Zero if no bytes are to be popped.
78 May affect compilation of return insn or of function epilogue. */
80 int current_function_pops_args
;
82 /* Nonzero if function being compiled needs to be given an address
83 where the value should be stored. */
85 int current_function_returns_struct
;
87 /* Nonzero if function being compiled needs to
88 return the address of where it has put a structure value. */
90 int current_function_returns_pcc_struct
;
92 /* Nonzero if function being compiled needs to be passed a static chain. */
94 int current_function_needs_context
;
96 /* Nonzero if function being compiled can call setjmp. */
98 int current_function_calls_setjmp
;
100 /* Nonzero if function being compiled can call longjmp. */
102 int current_function_calls_longjmp
;
104 /* Nonzero if function being compiled receives nonlocal gotos
105 from nested functions. */
107 int current_function_has_nonlocal_label
;
109 /* Nonzero if function being compiled contains nested functions. */
111 int current_function_contains_functions
;
113 /* Nonzero if function being compiled can call alloca,
114 either as a subroutine or builtin. */
116 int current_function_calls_alloca
;
118 /* Nonzero if the current function returns a pointer type */
120 int current_function_returns_pointer
;
122 /* If some insns can be deferred to the delay slots of the epilogue, the
123 delay list for them is recorded here. */
125 rtx current_function_epilogue_delay_list
;
127 /* If function's args have a fixed size, this is that size, in bytes.
129 May affect compilation of return insn or of function epilogue. */
131 int current_function_args_size
;
133 /* # bytes the prologue should push and pretend that the caller pushed them.
134 The prologue must do this, but only if parms can be passed in registers. */
136 int current_function_pretend_args_size
;
138 /* # of bytes of outgoing arguments required to be pushed by the prologue.
139 If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
140 and no stack adjusts will be done on function calls. */
142 int current_function_outgoing_args_size
;
144 /* This is the offset from the arg pointer to the place where the first
145 anonymous arg can be found, if there is one. */
147 rtx current_function_arg_offset_rtx
;
149 /* Nonzero if current function uses varargs.h or equivalent.
150 Zero for functions that use stdarg.h. */
152 int current_function_varargs
;
154 /* Quantities of various kinds of registers
155 used for the current function's args. */
157 CUMULATIVE_ARGS current_function_args_info
;
159 /* Name of function now being compiled. */
161 char *current_function_name
;
163 /* If non-zero, an RTL expression for that location at which the current
164 function returns its result. Always equal to
165 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
166 independently of the tree structures. */
168 rtx current_function_return_rtx
;
170 /* Nonzero if the current function uses the constant pool. */
172 int current_function_uses_const_pool
;
174 /* Nonzero if the current function uses pic_offset_table_rtx. */
175 int current_function_uses_pic_offset_table
;
177 /* The arg pointer hard register, or the pseudo into which it was copied. */
178 rtx current_function_internal_arg_pointer
;
180 /* The FUNCTION_DECL for an inline function currently being expanded. */
181 tree inline_function_decl
;
183 /* Number of function calls seen so far in current function. */
185 int function_call_count
;
187 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
188 (labels to which there can be nonlocal gotos from nested functions)
191 tree nonlocal_labels
;
193 /* RTX for stack slot that holds the current handler for nonlocal gotos.
194 Zero when function does not have nonlocal labels. */
196 rtx nonlocal_goto_handler_slot
;
198 /* RTX for stack slot that holds the stack pointer value to restore
200 Zero when function does not have nonlocal labels. */
202 rtx nonlocal_goto_stack_level
;
204 /* Label that will go on parm cleanup code, if any.
205 Jumping to this label runs cleanup code for parameters, if
206 such code must be run. Following this code is the logical return label. */
210 /* Label that will go on function epilogue.
211 Jumping to this label serves as a "return" instruction
212 on machines which require execution of the epilogue on all returns. */
216 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
217 So we can mark them all live at the end of the function, if nonopt. */
220 /* List (chain of EXPR_LISTs) of all stack slots in this function.
221 Made for the sake of unshare_all_rtl. */
224 /* Chain of all RTL_EXPRs that have insns in them. */
227 /* Label to jump back to for tail recursion, or 0 if we have
228 not yet needed one for this function. */
229 rtx tail_recursion_label
;
231 /* Place after which to insert the tail_recursion_label if we need one. */
232 rtx tail_recursion_reentry
;
234 /* Location at which to save the argument pointer if it will need to be
235 referenced. There are two cases where this is done: if nonlocal gotos
236 exist, or if vars stored at an offset from the argument pointer will be
237 needed by inner routines. */
239 rtx arg_pointer_save_area
;
241 /* Offset to end of allocated area of stack frame.
242 If stack grows down, this is the address of the last stack slot allocated.
243 If stack grows up, this is the address for the next slot. */
246 /* List (chain of TREE_LISTs) of static chains for containing functions.
247 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
248 in an RTL_EXPR in the TREE_VALUE. */
249 static tree context_display
;
251 /* List (chain of TREE_LISTs) of trampolines for nested functions.
252 The trampoline sets up the static chain and jumps to the function.
253 We supply the trampoline's address when the function's address is requested.
255 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
256 in an RTL_EXPR in the TREE_VALUE. */
257 static tree trampoline_list
;
259 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
260 static rtx parm_birth_insn
;
263 /* Nonzero if a stack slot has been generated whose address is not
264 actually valid. It means that the generated rtl must all be scanned
265 to detect and correct the invalid addresses where they occur. */
266 static int invalid_stack_slot
;
269 /* Last insn of those whose job was to put parms into their nominal homes. */
270 static rtx last_parm_insn
;
272 /* 1 + last pseudo register number used for loading a copy
273 of a parameter of this function. */
274 static int max_parm_reg
;
276 /* Vector indexed by REGNO, containing location on stack in which
277 to put the parm which is nominally in pseudo register REGNO,
278 if we discover that that parm must go in the stack. */
279 static rtx
*parm_reg_stack_loc
;
281 #if 0 /* Turned off because 0 seems to work just as well. */
282 /* Cleanup lists are required for binding levels regardless of whether
283 that binding level has cleanups or not. This node serves as the
284 cleanup list whenever an empty list is required. */
285 static tree empty_cleanup_list
;
288 /* Nonzero once virtual register instantiation has been done.
289 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
290 static int virtuals_instantiated
;
292 /* Nonzero if we need to distinguish between the return value of this function
293 and the return value of a function called by this function. This helps
296 extern int rtx_equal_function_value_matters
;
300 static tree
round_down ();
301 static rtx
round_trampoline_addr ();
302 static rtx
fixup_stack_1 ();
303 static void fixup_var_refs ();
304 static void fixup_var_refs_insns ();
305 static void fixup_var_refs_1 ();
306 static void optimize_bit_field ();
307 static void instantiate_decls ();
308 static void instantiate_decls_1 ();
309 static int instantiate_virtual_regs_1 ();
310 static rtx
fixup_memory_subreg ();
311 static rtx
walk_fixup_memory_subreg ();
313 /* In order to evaluate some expressions, such as function calls returning
314 structures in memory, we need to temporarily allocate stack locations.
315 We record each allocated temporary in the following structure.
317 Associated with each temporary slot is a nesting level. When we pop up
318 one level, all temporaries associated with the previous level are freed.
319 Normally, all temporaries are freed after the execution of the statement
320 in which they were created. However, if we are inside a ({...}) grouping,
321 the result may be in a temporary and hence must be preserved. If the
322 result could be in a temporary, we preserve it if we can determine which
323 one it is in. If we cannot determine which temporary may contain the
324 result, all temporaries are preserved. A temporary is preserved by
325 pretending it was allocated at the previous nesting level.
327 Automatic variables are also assigned temporary slots, at the nesting
328 level where they are defined. They are marked a "kept" so that
329 free_temp_slots will not free them. */
333 /* Points to next temporary slot. */
334 struct temp_slot
*next
;
335 /* The rtx to used to reference the slot. */
337 /* The size, in units, of the slot. */
339 /* Non-zero if this temporary is currently in use. */
341 /* Nesting level at which this slot is being used. */
343 /* Non-zero if this should survive a call to free_temp_slots. */
347 /* List of all temporaries allocated, both available and in use. */
349 struct temp_slot
*temp_slots
;
351 /* Current nesting level for temporaries. */
355 /* Pointer to chain of `struct function' for containing functions. */
356 struct function
*outer_function_chain
;
358 /* Given a function decl for a containing function,
359 return the `struct function' for it. */
362 find_function_data (decl
)
366 for (p
= outer_function_chain
; p
; p
= p
->next
)
372 /* Save the current context for compilation of a nested function.
373 This is called from language-specific code.
374 The caller is responsible for saving any language-specific status,
375 since this function knows only about language-independent variables. */
378 push_function_context ()
380 struct function
*p
= (struct function
*) xmalloc (sizeof (struct function
));
382 p
->next
= outer_function_chain
;
383 outer_function_chain
= p
;
385 p
->name
= current_function_name
;
386 p
->decl
= current_function_decl
;
387 p
->pops_args
= current_function_pops_args
;
388 p
->returns_struct
= current_function_returns_struct
;
389 p
->returns_pcc_struct
= current_function_returns_pcc_struct
;
390 p
->needs_context
= current_function_needs_context
;
391 p
->calls_setjmp
= current_function_calls_setjmp
;
392 p
->calls_longjmp
= current_function_calls_longjmp
;
393 p
->calls_alloca
= current_function_calls_alloca
;
394 p
->has_nonlocal_label
= current_function_has_nonlocal_label
;
395 p
->args_size
= current_function_args_size
;
396 p
->pretend_args_size
= current_function_pretend_args_size
;
397 p
->arg_offset_rtx
= current_function_arg_offset_rtx
;
398 p
->uses_const_pool
= current_function_uses_const_pool
;
399 p
->uses_pic_offset_table
= current_function_uses_pic_offset_table
;
400 p
->internal_arg_pointer
= current_function_internal_arg_pointer
;
401 p
->max_parm_reg
= max_parm_reg
;
402 p
->parm_reg_stack_loc
= parm_reg_stack_loc
;
403 p
->outgoing_args_size
= current_function_outgoing_args_size
;
404 p
->return_rtx
= current_function_return_rtx
;
405 p
->nonlocal_goto_handler_slot
= nonlocal_goto_handler_slot
;
406 p
->nonlocal_goto_stack_level
= nonlocal_goto_stack_level
;
407 p
->nonlocal_labels
= nonlocal_labels
;
408 p
->cleanup_label
= cleanup_label
;
409 p
->return_label
= return_label
;
410 p
->save_expr_regs
= save_expr_regs
;
411 p
->stack_slot_list
= stack_slot_list
;
412 p
->parm_birth_insn
= parm_birth_insn
;
413 p
->frame_offset
= frame_offset
;
414 p
->tail_recursion_label
= tail_recursion_label
;
415 p
->tail_recursion_reentry
= tail_recursion_reentry
;
416 p
->arg_pointer_save_area
= arg_pointer_save_area
;
417 p
->rtl_expr_chain
= rtl_expr_chain
;
418 p
->last_parm_insn
= last_parm_insn
;
419 p
->context_display
= context_display
;
420 p
->trampoline_list
= trampoline_list
;
421 p
->function_call_count
= function_call_count
;
422 p
->temp_slots
= temp_slots
;
423 p
->temp_slot_level
= temp_slot_level
;
424 p
->fixup_var_refs_queue
= 0;
426 save_tree_status (p
);
427 save_storage_status (p
);
428 save_emit_status (p
);
430 save_expr_status (p
);
431 save_stmt_status (p
);
434 /* Restore the last saved context, at the end of a nested function.
435 This function is called from language-specific code. */
438 pop_function_context ()
440 struct function
*p
= outer_function_chain
;
442 outer_function_chain
= p
->next
;
444 current_function_name
= p
->name
;
445 current_function_decl
= p
->decl
;
446 current_function_pops_args
= p
->pops_args
;
447 current_function_returns_struct
= p
->returns_struct
;
448 current_function_returns_pcc_struct
= p
->returns_pcc_struct
;
449 current_function_needs_context
= p
->needs_context
;
450 current_function_calls_setjmp
= p
->calls_setjmp
;
451 current_function_calls_longjmp
= p
->calls_longjmp
;
452 current_function_calls_alloca
= p
->calls_alloca
;
453 current_function_has_nonlocal_label
= p
->has_nonlocal_label
;
454 current_function_contains_functions
= 1;
455 current_function_args_size
= p
->args_size
;
456 current_function_pretend_args_size
= p
->pretend_args_size
;
457 current_function_arg_offset_rtx
= p
->arg_offset_rtx
;
458 current_function_uses_const_pool
= p
->uses_const_pool
;
459 current_function_uses_pic_offset_table
= p
->uses_pic_offset_table
;
460 current_function_internal_arg_pointer
= p
->internal_arg_pointer
;
461 max_parm_reg
= p
->max_parm_reg
;
462 parm_reg_stack_loc
= p
->parm_reg_stack_loc
;
463 current_function_outgoing_args_size
= p
->outgoing_args_size
;
464 current_function_return_rtx
= p
->return_rtx
;
465 nonlocal_goto_handler_slot
= p
->nonlocal_goto_handler_slot
;
466 nonlocal_goto_stack_level
= p
->nonlocal_goto_stack_level
;
467 nonlocal_labels
= p
->nonlocal_labels
;
468 cleanup_label
= p
->cleanup_label
;
469 return_label
= p
->return_label
;
470 save_expr_regs
= p
->save_expr_regs
;
471 stack_slot_list
= p
->stack_slot_list
;
472 parm_birth_insn
= p
->parm_birth_insn
;
473 frame_offset
= p
->frame_offset
;
474 tail_recursion_label
= p
->tail_recursion_label
;
475 tail_recursion_reentry
= p
->tail_recursion_reentry
;
476 arg_pointer_save_area
= p
->arg_pointer_save_area
;
477 rtl_expr_chain
= p
->rtl_expr_chain
;
478 last_parm_insn
= p
->last_parm_insn
;
479 context_display
= p
->context_display
;
480 trampoline_list
= p
->trampoline_list
;
481 function_call_count
= p
->function_call_count
;
482 temp_slots
= p
->temp_slots
;
483 temp_slot_level
= p
->temp_slot_level
;
485 restore_tree_status (p
);
486 restore_storage_status (p
);
487 restore_expr_status (p
);
488 restore_emit_status (p
);
489 restore_stmt_status (p
);
491 /* Finish doing put_var_into_stack for any of our variables
492 which became addressable during the nested function. */
494 struct var_refs_queue
*queue
= p
->fixup_var_refs_queue
;
495 for (; queue
; queue
= queue
->next
)
496 fixup_var_refs (queue
->modified
);
501 /* Reset variables that have known state during rtx generation. */
502 rtx_equal_function_value_matters
= 1;
503 virtuals_instantiated
= 0;
506 /* Allocate fixed slots in the stack frame of the current function. */
508 /* Return size needed for stack frame based on slots so far allocated.
509 This size counts from zero. It is not rounded to STACK_BOUNDARY;
510 the caller may have to do that. */
515 #ifdef FRAME_GROWS_DOWNWARD
516 return -frame_offset
;
522 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
523 with machine mode MODE.
525 ALIGN controls the amount of alignment for the address of the slot:
526 0 means according to MODE,
527 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
528 positive specifies alignment boundary in bits.
530 We do not round to stack_boundary here. */
533 assign_stack_local (mode
, size
, align
)
534 enum machine_mode mode
;
538 register rtx x
, addr
;
539 int bigend_correction
= 0;
544 alignment
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
546 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
548 else if (align
== -1)
550 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
551 size
= CEIL_ROUND (size
, alignment
);
554 alignment
= align
/ BITS_PER_UNIT
;
556 /* Round frame offset to that alignment.
557 We must be careful here, since FRAME_OFFSET might be negative and
558 division with a negative dividend isn't as well defined as we might
559 like. So we instead assume that ALIGNMENT is a power of two and
560 use logical operations which are unambiguous. */
561 #ifdef FRAME_GROWS_DOWNWARD
562 frame_offset
= FLOOR_ROUND (frame_offset
, alignment
);
564 frame_offset
= CEIL_ROUND (frame_offset
, alignment
);
567 /* On a big-endian machine, if we are allocating more space than we will use,
568 use the least significant bytes of those that are allocated. */
571 bigend_correction
= size
- GET_MODE_SIZE (mode
);
574 #ifdef FRAME_GROWS_DOWNWARD
575 frame_offset
-= size
;
578 /* If we have already instantiated virtual registers, return the actual
579 address relative to the frame pointer. */
580 if (virtuals_instantiated
)
581 addr
= plus_constant (frame_pointer_rtx
,
582 (frame_offset
+ bigend_correction
583 + STARTING_FRAME_OFFSET
));
585 addr
= plus_constant (virtual_stack_vars_rtx
,
586 frame_offset
+ bigend_correction
);
588 #ifndef FRAME_GROWS_DOWNWARD
589 frame_offset
+= size
;
592 x
= gen_rtx (MEM
, mode
, addr
);
594 stack_slot_list
= gen_rtx (EXPR_LIST
, VOIDmode
, x
, stack_slot_list
);
599 /* Assign a stack slot in a containing function.
600 First three arguments are same as in preceding function.
601 The last argument specifies the function to allocate in. */
604 assign_outer_stack_local (mode
, size
, align
, function
)
605 enum machine_mode mode
;
608 struct function
*function
;
610 register rtx x
, addr
;
611 int bigend_correction
= 0;
614 /* Allocate in the memory associated with the function in whose frame
616 push_obstacks (function
->function_obstack
,
617 function
->function_maybepermanent_obstack
);
621 alignment
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
623 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
625 else if (align
== -1)
627 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
628 size
= CEIL_ROUND (size
, alignment
);
631 alignment
= align
/ BITS_PER_UNIT
;
633 /* Round frame offset to that alignment. */
634 #ifdef FRAME_GROWS_DOWNWARD
635 frame_offset
= FLOOR_ROUND (frame_offset
, alignment
);
637 frame_offset
= CEIL_ROUND (frame_offset
, alignment
);
640 /* On a big-endian machine, if we are allocating more space than we will use,
641 use the least significant bytes of those that are allocated. */
644 bigend_correction
= size
- GET_MODE_SIZE (mode
);
647 #ifdef FRAME_GROWS_DOWNWARD
648 function
->frame_offset
-= size
;
650 addr
= plus_constant (virtual_stack_vars_rtx
,
651 function
->frame_offset
+ bigend_correction
);
652 #ifndef FRAME_GROWS_DOWNWARD
653 function
->frame_offset
+= size
;
656 x
= gen_rtx (MEM
, mode
, addr
);
658 function
->stack_slot_list
659 = gen_rtx (EXPR_LIST
, VOIDmode
, x
, function
->stack_slot_list
);
666 /* Allocate a temporary stack slot and record it for possible later
669 MODE is the machine mode to be given to the returned rtx.
671 SIZE is the size in units of the space required. We do no rounding here
672 since assign_stack_local will do any required rounding.
674 KEEP is non-zero if this slot is to be retained after a call to
675 free_temp_slots. Automatic variables for a block are allocated with this
679 assign_stack_temp (mode
, size
, keep
)
680 enum machine_mode mode
;
684 struct temp_slot
*p
, *best_p
= 0;
686 /* First try to find an available, already-allocated temporary that is the
687 exact size we require. */
688 for (p
= temp_slots
; p
; p
= p
->next
)
689 if (p
->size
== size
&& GET_MODE (p
->slot
) == mode
&& ! p
->in_use
)
692 /* If we didn't find, one, try one that is larger than what we want. We
693 find the smallest such. */
695 for (p
= temp_slots
; p
; p
= p
->next
)
696 if (p
->size
> size
&& GET_MODE (p
->slot
) == mode
&& ! p
->in_use
697 && (best_p
== 0 || best_p
->size
> p
->size
))
700 /* Make our best, if any, the one to use. */
704 /* If we still didn't find one, make a new temporary. */
707 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
709 /* If the temp slot mode doesn't indicate the alignment,
710 use the largest possible, so no one will be disappointed. */
711 p
->slot
= assign_stack_local (mode
, size
, mode
== BLKmode
? -1 : 0);
712 p
->next
= temp_slots
;
717 p
->level
= temp_slot_level
;
722 /* If X could be a reference to a temporary slot, mark that slot as belonging
723 to the to one level higher. If X matched one of our slots, just mark that
724 one. Otherwise, we can't easily predict which it is, so upgrade all of
725 them. Kept slots need not be touched.
727 This is called when an ({...}) construct occurs and a statement
728 returns a value in memory. */
731 preserve_temp_slots (x
)
736 /* If X is not in memory or is at a constant address, it cannot be in
738 if (x
== 0 || GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
741 /* First see if we can find a match. */
742 for (p
= temp_slots
; p
; p
= p
->next
)
743 if (p
->in_use
&& x
== p
->slot
)
749 /* Otherwise, preserve all non-kept slots at this level. */
750 for (p
= temp_slots
; p
; p
= p
->next
)
751 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
)
755 /* Free all temporaries used so far. This is normally called at the end
756 of generating code for a statement. */
763 for (p
= temp_slots
; p
; p
= p
->next
)
764 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
)
768 /* Push deeper into the nesting level for stack temporaries. */
773 /* For GNU C++, we must allow a sequence to be emitted anywhere in
774 the level where the sequence was started. By not changing levels
775 when the compiler is inside a sequence, the temporaries for the
776 sequence and the temporaries will not unwittingly conflict with
777 the temporaries for other sequences and/or code at that level. */
778 if (in_sequence_p ())
784 /* Pop a temporary nesting level. All slots in use in the current level
792 /* See comment in push_temp_slots about why we don't change levels
794 if (in_sequence_p ())
797 for (p
= temp_slots
; p
; p
= p
->next
)
798 if (p
->in_use
&& p
->level
== temp_slot_level
)
804 /* Retroactively move an auto variable from a register to a stack slot.
805 This is done when an address-reference to the variable is seen. */
808 put_var_into_stack (decl
)
812 register rtx
new = 0;
813 struct function
*function
= 0;
814 tree context
= decl_function_context (decl
);
816 /* Get the current rtl used for this object. */
817 reg
= TREE_CODE (decl
) == SAVE_EXPR
? SAVE_EXPR_RTL (decl
) : DECL_RTL (decl
);
819 /* If this variable comes from an outer function,
820 find that function's saved context. */
821 if (context
!= current_function_decl
)
822 for (function
= outer_function_chain
; function
; function
= function
->next
)
823 if (function
->decl
== context
)
826 /* No need to do anything if decl has no rtx yet
827 since in that case caller is setting TREE_ADDRESSABLE
828 and a stack slot will be assigned when the rtl is made. */
832 /* If this is a variable-size object with a pseudo to address it,
833 put that pseudo into the stack, if the var is nonlocal. */
834 if (TREE_NONLOCAL (decl
)
835 && GET_CODE (reg
) == MEM
836 && GET_CODE (XEXP (reg
, 0)) == REG
837 && REGNO (XEXP (reg
, 0)) > LAST_VIRTUAL_REGISTER
)
839 if (GET_CODE (reg
) != REG
)
844 if (REGNO (reg
) < function
->max_parm_reg
)
845 new = function
->parm_reg_stack_loc
[REGNO (reg
)];
847 new = assign_outer_stack_local (GET_MODE (reg
),
848 GET_MODE_SIZE (GET_MODE (reg
)),
853 if (REGNO (reg
) < max_parm_reg
)
854 new = parm_reg_stack_loc
[REGNO (reg
)];
856 new = assign_stack_local (GET_MODE (reg
),
857 GET_MODE_SIZE (GET_MODE (reg
)),
861 XEXP (reg
, 0) = XEXP (new, 0);
862 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
863 REG_USERVAR_P (reg
) = 0;
866 /* If this is a memory ref that contains aggregate components,
867 mark it as such for cse and loop optimize. */
868 MEM_IN_STRUCT_P (reg
)
869 = (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
870 || TREE_CODE (TREE_TYPE (decl
)) == RECORD_TYPE
871 || TREE_CODE (TREE_TYPE (decl
)) == UNION_TYPE
);
873 /* Now make sure that all refs to the variable, previously made
874 when it was a register, are fixed up to be valid again. */
877 struct var_refs_queue
*temp
;
879 /* Variable is inherited; fix it up when we get back to its function. */
880 push_obstacks (function
->function_obstack
,
881 function
->function_maybepermanent_obstack
);
883 = (struct var_refs_queue
*) oballoc (sizeof (struct var_refs_queue
));
884 temp
->modified
= reg
;
885 temp
->next
= function
->fixup_var_refs_queue
;
886 function
->fixup_var_refs_queue
= temp
;
890 /* Variable is local; fix it up now. */
891 fixup_var_refs (reg
);
899 rtx first_insn
= get_insns ();
900 struct sequence_stack
*stack
= sequence_stack
;
901 tree rtl_exps
= rtl_expr_chain
;
903 /* Must scan all insns for stack-refs that exceed the limit. */
904 fixup_var_refs_insns (var
, first_insn
, stack
== 0);
906 /* Scan all pending sequences too. */
907 for (; stack
; stack
= stack
->next
)
909 push_to_sequence (stack
->first
);
910 fixup_var_refs_insns (var
, stack
->first
, stack
->next
!= 0);
911 /* Update remembered end of sequence
912 in case we added an insn at the end. */
913 stack
->last
= get_last_insn ();
917 /* Scan all waiting RTL_EXPRs too. */
918 for (pending
= rtl_exps
; pending
; pending
= TREE_CHAIN (pending
))
920 rtx seq
= RTL_EXPR_SEQUENCE (TREE_VALUE (pending
));
921 if (seq
!= const0_rtx
&& seq
!= 0)
923 push_to_sequence (seq
);
924 fixup_var_refs_insns (var
, seq
, 0);
930 /* This structure is used by the following two functions to record MEMs or
931 pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
932 VAR as an address. We need to maintain this list in case two operands of
933 an insn were required to match; in that case we must ensure we use the
936 struct fixup_replacement
940 struct fixup_replacement
*next
;
943 /* REPLACEMENTS is a pointer to a list of the above structures and X is
944 some part of an insn. Return a struct fixup_replacement whose OLD
945 value is equal to X. Allocate a new structure if no such entry exists. */
947 static struct fixup_replacement
*
948 find_replacement (replacements
, x
)
949 struct fixup_replacement
**replacements
;
952 struct fixup_replacement
*p
;
954 /* See if we have already replaced this. */
955 for (p
= *replacements
; p
&& p
->old
!= x
; p
= p
->next
)
960 p
= (struct fixup_replacement
*) oballoc (sizeof (struct fixup_replacement
));
963 p
->next
= *replacements
;
970 /* Scan the insn-chain starting with INSN for refs to VAR
971 and fix them up. TOPLEVEL is nonzero if this chain is the
972 main chain of insns for the current function. */
975 fixup_var_refs_insns (var
, insn
, toplevel
)
982 rtx next
= NEXT_INSN (insn
);
984 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
985 || GET_CODE (insn
) == JUMP_INSN
)
987 /* The insn to load VAR from a home in the arglist
988 is now a no-op. When we see it, just delete it. */
990 && GET_CODE (PATTERN (insn
)) == SET
991 && SET_DEST (PATTERN (insn
)) == var
992 && rtx_equal_p (SET_SRC (PATTERN (insn
)), var
))
994 next
= delete_insn (insn
);
995 if (insn
== last_parm_insn
)
996 last_parm_insn
= PREV_INSN (next
);
1000 /* See if we have to do anything to INSN now that VAR is in
1001 memory. If it needs to be loaded into a pseudo, use a single
1002 pseudo for the entire insn in case there is a MATCH_DUP
1003 between two operands. We pass a pointer to the head of
1004 a list of struct fixup_replacements. If fixup_var_refs_1
1005 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1006 it will record them in this list.
1008 If it allocated a pseudo for any replacement, we copy into
1011 struct fixup_replacement
*replacements
= 0;
1013 fixup_var_refs_1 (var
, &PATTERN (insn
), insn
, &replacements
);
1015 while (replacements
)
1017 if (GET_CODE (replacements
->new) == REG
)
1021 /* OLD might be a (subreg (mem)). */
1022 if (GET_CODE (replacements
->old
) == SUBREG
)
1024 = fixup_memory_subreg (replacements
->old
, insn
, 0);
1027 = fixup_stack_1 (replacements
->old
, insn
);
1029 /* We can not separate USE insns from the CALL_INSN
1030 that they belong to. If this is a CALL_INSN, insert
1031 the move insn before the USE insns preceding it
1032 instead of immediately before the insn. */
1033 if (GET_CODE (insn
) == CALL_INSN
)
1035 insert_before
= insn
;
1036 while (GET_CODE (PREV_INSN (insert_before
)) == INSN
1037 && GET_CODE (PATTERN (PREV_INSN (insert_before
))) == USE
)
1038 insert_before
= PREV_INSN (insert_before
);
1041 insert_before
= insn
;
1043 emit_insn_before (gen_move_insn (replacements
->new,
1048 replacements
= replacements
->next
;
1052 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1053 But don't touch other insns referred to by reg-notes;
1054 we will get them elsewhere. */
1055 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1056 if (GET_CODE (note
) != INSN_LIST
)
1057 XEXP (note
, 0) = walk_fixup_memory_subreg (XEXP (note
, 0), insn
);
1063 /* VAR is a MEM that used to be a pseudo register. See if the rtx expression
1064 at *LOC in INSN needs to be changed.
1066 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1067 contain a list of original rtx's and replacements. If we find that we need
1068 to modify this insn by replacing a memory reference with a pseudo or by
1069 making a new MEM to implement a SUBREG, we consult that list to see if
1070 we have already chosen a replacement. If none has already been allocated,
1071 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1072 or the SUBREG, as appropriate, to the pseudo. */
1075 fixup_var_refs_1 (var
, loc
, insn
, replacements
)
1079 struct fixup_replacement
**replacements
;
1082 register rtx x
= *loc
;
1083 RTX_CODE code
= GET_CODE (x
);
1085 register rtx tem
, tem1
;
1086 struct fixup_replacement
*replacement
;
1093 /* If we already have a replacement, use it. Otherwise,
1094 try to fix up this address in case it is invalid. */
1096 replacement
= find_replacement (replacements
, var
);
1097 if (replacement
->new)
1099 *loc
= replacement
->new;
1103 *loc
= replacement
->new = x
= fixup_stack_1 (x
, insn
);
1105 /* Unless we are forcing memory to register, we can leave things
1106 the way they are if the insn is valid. */
1108 INSN_CODE (insn
) = -1;
1109 if (! flag_force_mem
&& recog_memoized (insn
) >= 0)
1112 *loc
= replacement
->new = gen_reg_rtx (GET_MODE (x
));
1116 /* If X contains VAR, we need to unshare it here so that we update
1117 each occurrence separately. But all identical MEMs in one insn
1118 must be replaced with the same rtx because of the possibility of
1121 if (reg_mentioned_p (var
, x
))
1123 replacement
= find_replacement (replacements
, x
);
1124 if (replacement
->new == 0)
1125 replacement
->new = copy_most_rtx (x
, var
);
1127 *loc
= x
= replacement
->new;
1143 /* Note that in some cases those types of expressions are altered
1144 by optimize_bit_field, and do not survive to get here. */
1145 if (XEXP (x
, 0) == var
1146 || (GET_CODE (XEXP (x
, 0)) == SUBREG
1147 && SUBREG_REG (XEXP (x
, 0)) == var
))
1149 /* Get TEM as a valid MEM in the mode presently in the insn.
1151 We don't worry about the possibility of MATCH_DUP here; it
1152 is highly unlikely and would be tricky to handle. */
1155 if (GET_CODE (tem
) == SUBREG
)
1156 tem
= fixup_memory_subreg (tem
, insn
, 1);
1157 tem
= fixup_stack_1 (tem
, insn
);
1159 /* Unless we want to load from memory, get TEM into the proper mode
1160 for an extract from memory. This can only be done if the
1161 extract is at a constant position and length. */
1163 if (! flag_force_mem
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
1164 && GET_CODE (XEXP (x
, 2)) == CONST_INT
1165 && ! mode_dependent_address_p (XEXP (tem
, 0))
1166 && ! MEM_VOLATILE_P (tem
))
1168 enum machine_mode wanted_mode
= VOIDmode
;
1169 enum machine_mode is_mode
= GET_MODE (tem
);
1170 int width
= INTVAL (XEXP (x
, 1));
1171 int pos
= INTVAL (XEXP (x
, 2));
1174 if (GET_CODE (x
) == ZERO_EXTRACT
)
1175 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][1];
1178 if (GET_CODE (x
) == SIGN_EXTRACT
)
1179 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][1];
1181 /* If we have a narrower mode, we can do something. */
1182 if (wanted_mode
!= VOIDmode
1183 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
1185 int offset
= pos
/ BITS_PER_UNIT
;
1186 rtx old_pos
= XEXP (x
, 2);
1189 /* If the bytes and bits are counted differently, we
1190 must adjust the offset. */
1191 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
1192 offset
= (GET_MODE_SIZE (is_mode
)
1193 - GET_MODE_SIZE (wanted_mode
) - offset
);
1196 pos
%= GET_MODE_BITSIZE (wanted_mode
);
1198 newmem
= gen_rtx (MEM
, wanted_mode
,
1199 plus_constant (XEXP (tem
, 0), offset
));
1200 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
1201 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (tem
);
1202 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (tem
);
1204 /* Make the change and see if the insn remains valid. */
1205 INSN_CODE (insn
) = -1;
1206 XEXP (x
, 0) = newmem
;
1207 XEXP (x
, 2) = gen_rtx (CONST_INT
, VOIDmode
, pos
);
1209 if (recog_memoized (insn
) >= 0)
1212 /* Otherwise, restore old position. XEXP (x, 0) will be
1214 XEXP (x
, 2) = old_pos
;
1218 /* If we get here, the bitfield extract insn can't accept a memory
1219 reference. Copy the input into a register. */
1221 tem1
= gen_reg_rtx (GET_MODE (tem
));
1222 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
1229 if (SUBREG_REG (x
) == var
)
1231 /* If this SUBREG makes VAR wider, it has become a paradoxical
1232 SUBREG with VAR in memory, but these aren't allowed at this
1233 stage of the compilation. So load VAR into a pseudo and take
1234 a SUBREG of that pseudo. */
1235 if (GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (var
)))
1237 replacement
= find_replacement (replacements
, var
);
1238 if (replacement
->new == 0)
1239 replacement
->new = gen_reg_rtx (GET_MODE (var
));
1240 SUBREG_REG (x
) = replacement
->new;
1244 /* See if we have already found a replacement for this SUBREG.
1245 If so, use it. Otherwise, make a MEM and see if the insn
1246 is recognized. If not, or if we should force MEM into a register,
1247 make a pseudo for this SUBREG. */
1248 replacement
= find_replacement (replacements
, x
);
1249 if (replacement
->new)
1251 *loc
= replacement
->new;
1255 replacement
->new = *loc
= fixup_memory_subreg (x
, insn
, 0);
1257 if (! flag_force_mem
&& recog_memoized (insn
) >= 0)
1260 *loc
= replacement
->new = gen_reg_rtx (GET_MODE (x
));
1266 /* First do special simplification of bit-field references. */
1267 if (GET_CODE (SET_DEST (x
)) == SIGN_EXTRACT
1268 || GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
1269 optimize_bit_field (x
, insn
, 0);
1270 if (GET_CODE (SET_SRC (x
)) == SIGN_EXTRACT
1271 || GET_CODE (SET_SRC (x
)) == ZERO_EXTRACT
)
1272 optimize_bit_field (x
, insn
, 0);
1274 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1275 insn into a pseudo and store the low part of the pseudo into VAR. */
1276 if (GET_CODE (SET_DEST (x
)) == SUBREG
1277 && SUBREG_REG (SET_DEST (x
)) == var
1278 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
1279 > GET_MODE_SIZE (GET_MODE (var
))))
1281 SET_DEST (x
) = tem
= gen_reg_rtx (GET_MODE (SET_DEST (x
)));
1282 emit_insn_after (gen_move_insn (var
, gen_lowpart (GET_MODE (var
),
1289 rtx dest
= SET_DEST (x
);
1290 rtx src
= SET_SRC (x
);
1291 rtx outerdest
= dest
;
1293 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
1294 || GET_CODE (dest
) == SIGN_EXTRACT
1295 || GET_CODE (dest
) == ZERO_EXTRACT
)
1296 dest
= XEXP (dest
, 0);
1298 if (GET_CODE (src
) == SUBREG
)
1299 src
= XEXP (src
, 0);
1301 /* If VAR does not appear at the top level of the SET
1302 just scan the lower levels of the tree. */
1304 if (src
!= var
&& dest
!= var
)
1307 /* We will need to rerecognize this insn. */
1308 INSN_CODE (insn
) = -1;
1311 if (GET_CODE (outerdest
) == ZERO_EXTRACT
&& dest
== var
)
1313 /* Since this case will return, ensure we fixup all the
1315 fixup_var_refs_1 (var
, &XEXP (outerdest
, 1), insn
, replacements
);
1316 fixup_var_refs_1 (var
, &XEXP (outerdest
, 2), insn
, replacements
);
1317 fixup_var_refs_1 (var
, &SET_SRC (x
), insn
, replacements
);
1319 tem
= XEXP (outerdest
, 0);
1321 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1322 that may appear inside a ZERO_EXTRACT.
1323 This was legitimate when the MEM was a REG. */
1324 if (GET_CODE (tem
) == SUBREG
1325 && SUBREG_REG (tem
) == var
)
1326 tem
= fixup_memory_subreg (tem
, insn
, 1);
1328 tem
= fixup_stack_1 (tem
, insn
);
1330 if (GET_CODE (XEXP (outerdest
, 1)) == CONST_INT
1331 && GET_CODE (XEXP (outerdest
, 2)) == CONST_INT
1332 && ! mode_dependent_address_p (XEXP (tem
, 0))
1333 && ! MEM_VOLATILE_P (tem
))
1335 enum machine_mode wanted_mode
1336 = insn_operand_mode
[(int) CODE_FOR_insv
][0];
1337 enum machine_mode is_mode
= GET_MODE (tem
);
1338 int width
= INTVAL (XEXP (outerdest
, 1));
1339 int pos
= INTVAL (XEXP (outerdest
, 2));
1341 /* If we have a narrower mode, we can do something. */
1342 if (GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
1344 int offset
= pos
/ BITS_PER_UNIT
;
1345 rtx old_pos
= XEXP (outerdest
, 2);
1348 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
1349 offset
= (GET_MODE_SIZE (is_mode
)
1350 - GET_MODE_SIZE (wanted_mode
) - offset
);
1353 pos
%= GET_MODE_BITSIZE (wanted_mode
);
1355 newmem
= gen_rtx (MEM
, wanted_mode
,
1356 plus_constant (XEXP (tem
, 0), offset
));
1357 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
1358 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (tem
);
1359 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (tem
);
1361 /* Make the change and see if the insn remains valid. */
1362 INSN_CODE (insn
) = -1;
1363 XEXP (outerdest
, 0) = newmem
;
1364 XEXP (outerdest
, 2) = gen_rtx (CONST_INT
, VOIDmode
, pos
);
1366 if (recog_memoized (insn
) >= 0)
1369 /* Otherwise, restore old position. XEXP (x, 0) will be
1371 XEXP (outerdest
, 2) = old_pos
;
1375 /* If we get here, the bit-field store doesn't allow memory
1376 or isn't located at a constant position. Load the value into
1377 a register, do the store, and put it back into memory. */
1379 tem1
= gen_reg_rtx (GET_MODE (tem
));
1380 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
1381 emit_insn_after (gen_move_insn (tem
, tem1
), insn
);
1382 XEXP (outerdest
, 0) = tem1
;
1387 /* STRICT_LOW_PART is a no-op on memory references
1388 and it can cause combinations to be unrecognizable,
1391 if (dest
== var
&& GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
1392 SET_DEST (x
) = XEXP (SET_DEST (x
), 0);
1394 /* A valid insn to copy VAR into or out of a register
1395 must be left alone, to avoid an infinite loop here.
1396 If the reference to VAR is by a subreg, fix that up,
1397 since SUBREG is not valid for a memref.
1398 Also fix up the address of the stack slot. */
1400 if ((SET_SRC (x
) == var
1401 || (GET_CODE (SET_SRC (x
)) == SUBREG
1402 && SUBREG_REG (SET_SRC (x
)) == var
))
1403 && (GET_CODE (SET_DEST (x
)) == REG
1404 || (GET_CODE (SET_DEST (x
)) == SUBREG
1405 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
))
1406 && recog_memoized (insn
) >= 0)
1408 replacement
= find_replacement (replacements
, SET_SRC (x
));
1409 if (replacement
->new)
1411 SET_SRC (x
) = replacement
->new;
1414 else if (GET_CODE (SET_SRC (x
)) == SUBREG
)
1415 SET_SRC (x
) = replacement
->new
1416 = fixup_memory_subreg (SET_SRC (x
), insn
, 0);
1418 SET_SRC (x
) = replacement
->new
1419 = fixup_stack_1 (SET_SRC (x
), insn
);
1423 if ((SET_DEST (x
) == var
1424 || (GET_CODE (SET_DEST (x
)) == SUBREG
1425 && SUBREG_REG (SET_DEST (x
)) == var
))
1426 && (GET_CODE (SET_SRC (x
)) == REG
1427 || (GET_CODE (SET_SRC (x
)) == SUBREG
1428 && GET_CODE (SUBREG_REG (SET_SRC (x
))) == REG
))
1429 && recog_memoized (insn
) >= 0)
1431 if (GET_CODE (SET_DEST (x
)) == SUBREG
)
1432 SET_DEST (x
) = fixup_memory_subreg (SET_DEST (x
), insn
, 0);
1434 SET_DEST (x
) = fixup_stack_1 (SET_DEST (x
), insn
);
1438 /* Otherwise, storing into VAR must be handled specially
1439 by storing into a temporary and copying that into VAR
1440 with a new insn after this one. */
1447 /* STRICT_LOW_PART can be discarded, around a MEM. */
1448 if (GET_CODE (tem
) == STRICT_LOW_PART
)
1449 tem
= XEXP (tem
, 0);
1450 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
1451 if (GET_CODE (tem
) == SUBREG
)
1452 fixeddest
= fixup_memory_subreg (tem
, insn
, 0);
1454 fixeddest
= fixup_stack_1 (tem
, insn
);
1456 temp
= gen_reg_rtx (GET_MODE (tem
));
1457 emit_insn_after (gen_move_insn (fixeddest
, temp
), insn
);
1458 SET_DEST (x
) = temp
;
1463 /* Nothing special about this RTX; fix its operands. */
1465 fmt
= GET_RTX_FORMAT (code
);
1466 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1469 fixup_var_refs_1 (var
, &XEXP (x
, i
), insn
, replacements
);
1473 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1474 fixup_var_refs_1 (var
, &XVECEXP (x
, i
, j
), insn
, replacements
);
1479 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
1480 return an rtx (MEM:m1 newaddr) which is equivalent.
1481 If any insns must be emitted to compute NEWADDR, put them before INSN.
1483 UNCRITICAL nonzero means accept paradoxical subregs.
1484 This is used for subregs found inside of ZERO_EXTRACTs. */
1487 fixup_memory_subreg (x
, insn
, uncritical
)
1492 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
1493 rtx addr
= XEXP (SUBREG_REG (x
), 0);
1494 enum machine_mode mode
= GET_MODE (x
);
1497 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
1498 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))
1502 #if BYTES_BIG_ENDIAN
1503 offset
+= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
1504 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
1506 addr
= plus_constant (addr
, offset
);
1507 if (!flag_force_addr
&& memory_address_p (mode
, addr
))
1508 /* Shortcut if no insns need be emitted. */
1509 return change_address (SUBREG_REG (x
), mode
, addr
);
1511 result
= change_address (SUBREG_REG (x
), mode
, addr
);
1512 emit_insn_before (gen_sequence (), insn
);
1517 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
1518 Replace subexpressions of X in place.
1519 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
1520 Otherwise return X, with its contents possibly altered.
1522 If any insns must be emitted to compute NEWADDR, put them before INSN. */
1525 walk_fixup_memory_subreg (x
, insn
)
1529 register enum rtx_code code
;
1536 code
= GET_CODE (x
);
1538 if (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == MEM
)
1539 return fixup_memory_subreg (x
, insn
, 0);
1541 /* Nothing special about this RTX; fix its operands. */
1543 fmt
= GET_RTX_FORMAT (code
);
1544 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1547 XEXP (x
, i
) = walk_fixup_memory_subreg (XEXP (x
, i
), insn
);
1551 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1553 = walk_fixup_memory_subreg (XVECEXP (x
, i
, j
), insn
);
1560 /* Fix up any references to stack slots that are invalid memory addresses
1561 because they exceed the maximum range of a displacement. */
1564 fixup_stack_slots ()
1568 /* Did we generate a stack slot that is out of range
1569 or otherwise has an invalid address? */
1570 if (invalid_stack_slot
)
1572 /* Yes. Must scan all insns for stack-refs that exceed the limit. */
1573 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1574 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
1575 || GET_CODE (insn
) == JUMP_INSN
)
1576 fixup_stack_1 (PATTERN (insn
), insn
);
1581 /* For each memory ref within X, if it refers to a stack slot
1582 with an out of range displacement, put the address in a temp register
1583 (emitting new insns before INSN to load these registers)
1584 and alter the memory ref to use that register.
1585 Replace each such MEM rtx with a copy, to avoid clobberage. */
1588 fixup_stack_1 (x
, insn
)
1593 register RTX_CODE code
= GET_CODE (x
);
1598 register rtx ad
= XEXP (x
, 0);
1599 /* If we have address of a stack slot but it's not valid
1600 (displacement is too large), compute the sum in a register. */
1601 if (GET_CODE (ad
) == PLUS
1602 && GET_CODE (XEXP (ad
, 0)) == REG
1603 && REGNO (XEXP (ad
, 0)) >= FIRST_VIRTUAL_REGISTER
1604 && REGNO (XEXP (ad
, 0)) <= LAST_VIRTUAL_REGISTER
1605 && GET_CODE (XEXP (ad
, 1)) == CONST_INT
)
1608 if (memory_address_p (GET_MODE (x
), ad
))
1612 temp
= copy_to_reg (ad
);
1613 seq
= gen_sequence ();
1615 emit_insn_before (seq
, insn
);
1616 return change_address (x
, VOIDmode
, temp
);
1621 fmt
= GET_RTX_FORMAT (code
);
1622 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1625 XEXP (x
, i
) = fixup_stack_1 (XEXP (x
, i
), insn
);
1629 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1630 XVECEXP (x
, i
, j
) = fixup_stack_1 (XVECEXP (x
, i
, j
), insn
);
1636 /* Optimization: a bit-field instruction whose field
1637 happens to be a byte or halfword in memory
1638 can be changed to a move instruction.
1640 We call here when INSN is an insn to examine or store into a bit-field.
1641 BODY is the SET-rtx to be altered.
1643 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
1644 (Currently this is called only from function.c, and EQUIV_MEM
1648 optimize_bit_field (body
, insn
, equiv_mem
)
1653 register rtx bitfield
;
1656 enum machine_mode mode
;
1658 if (GET_CODE (SET_DEST (body
)) == SIGN_EXTRACT
1659 || GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
)
1660 bitfield
= SET_DEST (body
), destflag
= 1;
1662 bitfield
= SET_SRC (body
), destflag
= 0;
1664 /* First check that the field being stored has constant size and position
1665 and is in fact a byte or halfword suitably aligned. */
1667 if (GET_CODE (XEXP (bitfield
, 1)) == CONST_INT
1668 && GET_CODE (XEXP (bitfield
, 2)) == CONST_INT
1669 && ((mode
= mode_for_size (INTVAL (XEXP (bitfield
, 1)), MODE_INT
, 1))
1671 && INTVAL (XEXP (bitfield
, 2)) % INTVAL (XEXP (bitfield
, 1)) == 0)
1673 register rtx memref
= 0;
1675 /* Now check that the containing word is memory, not a register,
1676 and that it is safe to change the machine mode. */
1678 if (GET_CODE (XEXP (bitfield
, 0)) == MEM
)
1679 memref
= XEXP (bitfield
, 0);
1680 else if (GET_CODE (XEXP (bitfield
, 0)) == REG
1682 memref
= equiv_mem
[REGNO (XEXP (bitfield
, 0))];
1683 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
1684 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == MEM
)
1685 memref
= SUBREG_REG (XEXP (bitfield
, 0));
1686 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
1688 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == REG
)
1689 memref
= equiv_mem
[REGNO (SUBREG_REG (XEXP (bitfield
, 0)))];
1692 && ! mode_dependent_address_p (XEXP (memref
, 0))
1693 && ! MEM_VOLATILE_P (memref
))
1695 /* Now adjust the address, first for any subreg'ing
1696 that we are now getting rid of,
1697 and then for which byte of the word is wanted. */
1699 register int offset
= INTVAL (XEXP (bitfield
, 2));
1700 /* Adjust OFFSET to count bits from low-address byte. */
1701 #if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
1702 offset
= (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield
, 0)))
1703 - offset
- INTVAL (XEXP (bitfield
, 1)));
1705 /* Adjust OFFSET to count bytes from low-address byte. */
1706 offset
/= BITS_PER_UNIT
;
1707 if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
)
1709 offset
+= SUBREG_WORD (XEXP (bitfield
, 0)) * UNITS_PER_WORD
;
1710 #if BYTES_BIG_ENDIAN
1711 offset
-= (MIN (UNITS_PER_WORD
,
1712 GET_MODE_SIZE (GET_MODE (XEXP (bitfield
, 0))))
1713 - MIN (UNITS_PER_WORD
,
1714 GET_MODE_SIZE (GET_MODE (memref
))));
1718 memref
= change_address (memref
, mode
,
1719 plus_constant (XEXP (memref
, 0), offset
));
1721 /* Store this memory reference where
1722 we found the bit field reference. */
1726 validate_change (insn
, &SET_DEST (body
), memref
, 1);
1727 if (! CONSTANT_ADDRESS_P (SET_SRC (body
)))
1729 rtx src
= SET_SRC (body
);
1730 while (GET_CODE (src
) == SUBREG
1731 && SUBREG_WORD (src
) == 0)
1732 src
= SUBREG_REG (src
);
1733 if (GET_MODE (src
) != GET_MODE (memref
))
1734 src
= gen_lowpart (GET_MODE (memref
), SET_SRC (body
));
1735 validate_change (insn
, &SET_SRC (body
), src
, 1);
1737 else if (GET_MODE (SET_SRC (body
)) != VOIDmode
1738 && GET_MODE (SET_SRC (body
)) != GET_MODE (memref
))
1739 /* This shouldn't happen because anything that didn't have
1740 one of these modes should have got converted explicitly
1741 and then referenced through a subreg.
1742 This is so because the original bit-field was
1743 handled by agg_mode and so its tree structure had
1744 the same mode that memref now has. */
1749 rtx dest
= SET_DEST (body
);
1751 while (GET_CODE (dest
) == SUBREG
1752 && SUBREG_WORD (dest
) == 0)
1753 dest
= SUBREG_REG (dest
);
1755 validate_change (insn
, &SET_DEST (body
), dest
, 1);
1757 if (GET_MODE (dest
) == GET_MODE (memref
))
1758 validate_change (insn
, &SET_SRC (body
), memref
, 1);
1761 /* Convert the mem ref to the destination mode. */
1762 rtx newreg
= gen_reg_rtx (GET_MODE (dest
));
1765 convert_move (newreg
, memref
,
1766 GET_CODE (SET_SRC (body
)) == ZERO_EXTRACT
);
1770 validate_change (insn
, &SET_SRC (body
), newreg
, 1);
1774 /* See if we can convert this extraction or insertion into
1775 a simple move insn. We might not be able to do so if this
1776 was, for example, part of a PARALLEL.
1778 If we succeed, write out any needed conversions. If we fail,
1779 it is hard to guess why we failed, so don't do anything
1780 special; just let the optimization be suppressed. */
1782 if (apply_change_group () && seq
)
1783 emit_insns_before (seq
, insn
);
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;	/* virtual_incoming_args_rtx offset */
static int var_offset;		/* virtual_stack_vars_rtx offset */
static int dynamic_offset;	/* virtual_stack_dynamic_rtx offset */
static int out_arg_offset;	/* virtual_outgoing_args_rtx offset */

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET	0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif
1837 /* Pass through the INSNS of function FNDECL and convert virtual register
1838 references to hard register references. */
1841 instantiate_virtual_regs (fndecl
, insns
)
1847 /* Compute the offsets to use for this function. */
1848 in_arg_offset
= FIRST_PARM_OFFSET (fndecl
);
1849 var_offset
= STARTING_FRAME_OFFSET
;
1850 dynamic_offset
= STACK_DYNAMIC_OFFSET (fndecl
);
1851 out_arg_offset
= STACK_POINTER_OFFSET
;
1853 /* Scan all variables and parameters of this function. For each that is
1854 in memory, instantiate all virtual registers if the result is a valid
1855 address. If not, we do it later. That will handle most uses of virtual
1856 regs on many machines. */
1857 instantiate_decls (fndecl
, 1);
1859 /* Initialize recognition, indicating that volatile is OK. */
1862 /* Scan through all the insns, instantiating every virtual register still
1864 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1865 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
1866 || GET_CODE (insn
) == CALL_INSN
)
1868 instantiate_virtual_regs_1 (&PATTERN (insn
), insn
, 1);
1869 instantiate_virtual_regs_1 (®_NOTES (insn
), 0, 0);
1872 /* Now instantiate the remaining register equivalences for debugging info.
1873 These will not be valid addresses. */
1874 instantiate_decls (fndecl
, 0);
1876 /* Indicate that, from now on, assign_stack_local should use
1877 frame_pointer_rtx. */
1878 virtuals_instantiated
= 1;
1881 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1882 all virtual registers in their DECL_RTL's.
1884 If VALID_ONLY, do this only if the resulting address is still valid.
1885 Otherwise, always do it. */
1888 instantiate_decls (fndecl
, valid_only
)
1894 if (TREE_INLINE (fndecl
))
1895 /* When compiling an inline function, the obstack used for
1896 rtl allocation is the maybepermanent_obstack. Calling
1897 `resume_temporary_allocation' switches us back to that
1898 obstack while we process this function's parameters. */
1899 resume_temporary_allocation ();
1901 /* Process all parameters of the function. */
1902 for (decl
= DECL_ARGUMENTS (fndecl
); decl
; decl
= TREE_CHAIN (decl
))
1904 if (DECL_RTL (decl
) && GET_CODE (DECL_RTL (decl
)) == MEM
)
1905 instantiate_virtual_regs_1 (&XEXP (DECL_RTL (decl
), 0),
1906 valid_only
? DECL_RTL (decl
) : 0, 0);
1907 #if 1 /* This is probably correct, but it seems to require fixes
1908 elsewhere in order to work. Let's fix them in 2.1. */
1909 if (DECL_INCOMING_RTL (decl
)
1910 && GET_CODE (DECL_INCOMING_RTL (decl
)) == MEM
)
1911 instantiate_virtual_regs_1 (&XEXP (DECL_INCOMING_RTL (decl
), 0),
1912 valid_only
? DECL_INCOMING_RTL (decl
) : 0,
1917 /* Now process all variables defined in the function or its subblocks. */
1918 instantiate_decls_1 (DECL_INITIAL (fndecl
), valid_only
);
1920 if (TREE_INLINE (fndecl
))
1922 /* Save all rtl allocated for this function by raising the
1923 high-water mark on the maybepermanent_obstack. */
1925 /* All further rtl allocation is now done in the current_obstack. */
1926 rtl_in_current_obstack ();
1930 /* Subroutine of instantiate_decls: Process all decls in the given
1931 BLOCK node and all its subblocks. */
1934 instantiate_decls_1 (let
, valid_only
)
1940 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
1941 if (DECL_RTL (t
) && GET_CODE (DECL_RTL (t
)) == MEM
)
1942 instantiate_virtual_regs_1 (& XEXP (DECL_RTL (t
), 0),
1943 valid_only
? DECL_RTL (t
) : 0, 0);
1945 /* Process all subblocks. */
1946 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= TREE_CHAIN (t
))
1947 instantiate_decls_1 (t
, valid_only
);
1950 /* Given a pointer to a piece of rtx and an optional pointer to the
1951 containing object, instantiate any virtual registers present in it.
1953 If EXTRA_INSNS, we always do the replacement and generate
1954 any extra insns before OBJECT. If it zero, we do nothing if replacement
1957 Return 1 if we either had nothing to do or if we were able to do the
1958 needed replacement. Return 0 otherwise; we only return zero if
1959 EXTRA_INSNS is zero.
1961 We first try some simple transformations to avoid the creation of extra
1965 instantiate_virtual_regs_1 (loc
, object
, extra_insns
)
1979 /* Re-start here to avoid recursion in common cases. */
1986 code
= GET_CODE (x
);
1988 /* Check for some special cases. */
2005 /* We are allowed to set the virtual registers. This means that
2006 that the actual register should receive the source minus the
2007 appropriate offset. This is used, for example, in the handling
2008 of non-local gotos. */
2009 if (SET_DEST (x
) == virtual_incoming_args_rtx
)
2010 new = arg_pointer_rtx
, offset
= - in_arg_offset
;
2011 else if (SET_DEST (x
) == virtual_stack_vars_rtx
)
2012 new = frame_pointer_rtx
, offset
= - var_offset
;
2013 else if (SET_DEST (x
) == virtual_stack_dynamic_rtx
)
2014 new = stack_pointer_rtx
, offset
= - dynamic_offset
;
2015 else if (SET_DEST (x
) == virtual_outgoing_args_rtx
)
2016 new = stack_pointer_rtx
, offset
= - out_arg_offset
;
2020 /* The only valid sources here are PLUS or REG. Just do
2021 the simplest possible thing to handle them. */
2022 if (GET_CODE (SET_SRC (x
)) != REG
2023 && GET_CODE (SET_SRC (x
)) != PLUS
)
2027 if (GET_CODE (SET_SRC (x
)) != REG
)
2028 temp
= force_operand (SET_SRC (x
), 0);
2031 temp
= force_operand (plus_constant (temp
, offset
), 0);
2035 emit_insns_before (seq
, object
);
2038 if (!validate_change (object
, &SET_SRC (x
), temp
, 0)
2045 instantiate_virtual_regs_1 (&SET_DEST (x
), object
, extra_insns
);
2050 /* Handle special case of virtual register plus constant. */
2051 if (CONSTANT_P (XEXP (x
, 1)))
2055 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2056 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
2058 rtx inner
= XEXP (XEXP (x
, 0), 0);
2060 if (inner
== virtual_incoming_args_rtx
)
2061 new = arg_pointer_rtx
, offset
= in_arg_offset
;
2062 else if (inner
== virtual_stack_vars_rtx
)
2063 new = frame_pointer_rtx
, offset
= var_offset
;
2064 else if (inner
== virtual_stack_dynamic_rtx
)
2065 new = stack_pointer_rtx
, offset
= dynamic_offset
;
2066 else if (inner
== virtual_outgoing_args_rtx
)
2067 new = stack_pointer_rtx
, offset
= out_arg_offset
;
2074 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 1), object
,
2076 new = gen_rtx (PLUS
, Pmode
, new, XEXP (XEXP (x
, 0), 1));
2079 else if (XEXP (x
, 0) == virtual_incoming_args_rtx
)
2080 new = arg_pointer_rtx
, offset
= in_arg_offset
;
2081 else if (XEXP (x
, 0) == virtual_stack_vars_rtx
)
2082 new = frame_pointer_rtx
, offset
= var_offset
;
2083 else if (XEXP (x
, 0) == virtual_stack_dynamic_rtx
)
2084 new = stack_pointer_rtx
, offset
= dynamic_offset
;
2085 else if (XEXP (x
, 0) == virtual_outgoing_args_rtx
)
2086 new = stack_pointer_rtx
, offset
= out_arg_offset
;
2089 /* We know the second operand is a constant. Unless the
2090 first operand is a REG (which has been already checked),
2091 it needs to be checked. */
2092 if (GET_CODE (XEXP (x
, 0)) != REG
)
2102 new = plus_constant (XEXP (x
, 1), offset
);
2104 /* If the new constant is zero, try to replace the sum with its
2106 if (new == const0_rtx
2107 && validate_change (object
, loc
, XEXP (x
, 0), 0))
2110 /* Next try to replace constant with new one. */
2111 if (!validate_change (object
, &XEXP (x
, 1), new, 0))
2119 /* Otherwise copy the new constant into a register and replace
2120 constant with that register. */
2121 temp
= gen_reg_rtx (Pmode
);
2122 if (validate_change (object
, &XEXP (x
, 1), temp
, 0))
2123 emit_insn_before (gen_move_insn (temp
, new), object
);
2126 /* If that didn't work, replace this expression with a
2127 register containing the sum. */
2129 new = gen_rtx (PLUS
, Pmode
, XEXP (x
, 0), new);
2133 temp
= force_operand (new, 0);
2137 emit_insns_before (seq
, object
);
2138 if (! validate_change (object
, loc
, temp
, 0)
2139 && ! validate_replace_rtx (x
, temp
, object
))
2147 /* Fall through to generic two-operand expression case. */
2153 case DIV
: case UDIV
:
2154 case MOD
: case UMOD
:
2155 case AND
: case IOR
: case XOR
:
2156 case LSHIFT
: case ASHIFT
: case ROTATE
:
2157 case ASHIFTRT
: case LSHIFTRT
: case ROTATERT
:
2159 case GE
: case GT
: case GEU
: case GTU
:
2160 case LE
: case LT
: case LEU
: case LTU
:
2161 if (XEXP (x
, 1) && ! CONSTANT_P (XEXP (x
, 1)))
2162 instantiate_virtual_regs_1 (&XEXP (x
, 1), object
, extra_insns
);
2167 /* Most cases of MEM that convert to valid addresses have already been
2168 handled by our scan of regno_reg_rtx. The only special handling we
2169 need here is to make a copy of the rtx to ensure it isn't being
2170 shared if we have to change it to a pseudo.
2172 If the rtx is a simple reference to an address via a virtual register,
2173 it can potentially be shared. In such cases, first try to make it
2174 a valid address, which can also be shared. Otherwise, copy it and
2177 First check for common cases that need no processing. These are
2178 usually due to instantiation already being done on a previous instance
2182 if (CONSTANT_ADDRESS_P (temp
)
2183 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2184 || temp
== arg_pointer_rtx
2186 || temp
== frame_pointer_rtx
)
2189 if (GET_CODE (temp
) == PLUS
2190 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
2191 && (XEXP (temp
, 0) == frame_pointer_rtx
2192 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2193 || XEXP (temp
, 0) == arg_pointer_rtx
2198 if (temp
== virtual_stack_vars_rtx
2199 || temp
== virtual_incoming_args_rtx
2200 || (GET_CODE (temp
) == PLUS
2201 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
2202 && (XEXP (temp
, 0) == virtual_stack_vars_rtx
2203 || XEXP (temp
, 0) == virtual_incoming_args_rtx
)))
2205 /* This MEM may be shared. If the substitution can be done without
2206 the need to generate new pseudos, we want to do it in place
2207 so all copies of the shared rtx benefit. The call below will
2208 only make substitutions if the resulting address is still
2211 Note that we cannot pass X as the object in the recursive call
2212 since the insn being processed may not allow all valid
2213 addresses. However, if we were not passed on object, we can
2214 only modify X without copying it if X will have a valid
2217 ??? Also note that this can still lose if OBJECT is an insn that
2218 has less restrictions on an address that some other insn.
2219 In that case, we will modify the shared address. This case
2220 doesn't seem very likely, though. */
2222 if (instantiate_virtual_regs_1 (&XEXP (x
, 0),
2223 object
? object
: x
, 0))
2226 /* Otherwise make a copy and process that copy. We copy the entire
2227 RTL expression since it might be a PLUS which could also be
2229 *loc
= x
= copy_rtx (x
);
2232 /* Fall through to generic unary operation case. */
2236 case STRICT_LOW_PART
:
2238 case PRE_DEC
: case PRE_INC
: case POST_DEC
: case POST_INC
:
2239 case SIGN_EXTEND
: case ZERO_EXTEND
:
2240 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
2241 case FLOAT
: case FIX
:
2242 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
2246 /* These case either have just one operand or we know that we need not
2247 check the rest of the operands. */
2252 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2253 in front of this insn and substitute the temporary. */
2254 if (x
== virtual_incoming_args_rtx
)
2255 new = arg_pointer_rtx
, offset
= in_arg_offset
;
2256 else if (x
== virtual_stack_vars_rtx
)
2257 new = frame_pointer_rtx
, offset
= var_offset
;
2258 else if (x
== virtual_stack_dynamic_rtx
)
2259 new = stack_pointer_rtx
, offset
= dynamic_offset
;
2260 else if (x
== virtual_outgoing_args_rtx
)
2261 new = stack_pointer_rtx
, offset
= out_arg_offset
;
2265 temp
= plus_constant (new, offset
);
2266 if (!validate_change (object
, loc
, temp
, 0))
2272 temp
= force_operand (temp
, 0);
2276 emit_insns_before (seq
, object
);
2277 if (! validate_change (object
, loc
, temp
, 0)
2278 && ! validate_replace_rtx (x
, temp
, object
))
2286 /* Scan all subexpressions. */
2287 fmt
= GET_RTX_FORMAT (code
);
2288 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
2291 if (!instantiate_virtual_regs_1 (&XEXP (x
, i
), object
, extra_insns
))
2294 else if (*fmt
== 'E')
2295 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2296 if (! instantiate_virtual_regs_1 (&XVECEXP (x
, i
, j
), object
,
2303 /* Optimization: assuming this function does not receive nonlocal gotos,
2304 delete the handlers for such, as well as the insns to establish
2305 and disestablish them. */
2311 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2313 /* Delete the handler by turning off the flag that would
2314 prevent jump_optimize from deleting it.
2315 Also permit deletion of the nonlocal labels themselves
2316 if nothing local refers to them. */
2317 if (GET_CODE (insn
) == CODE_LABEL
)
2318 LABEL_PRESERVE_P (insn
) = 0;
2319 if (GET_CODE (insn
) == INSN
2320 && ((nonlocal_goto_handler_slot
!= 0
2321 && reg_mentioned_p (nonlocal_goto_handler_slot
, PATTERN (insn
)))
2322 || (nonlocal_goto_stack_level
!= 0
2323 && reg_mentioned_p (nonlocal_goto_stack_level
,
2329 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2330 of the current function. */
2333 nonlocal_label_rtx_list ()
2338 for (t
= nonlocal_labels
; t
; t
= TREE_CHAIN (t
))
2339 x
= gen_rtx (EXPR_LIST
, VOIDmode
, label_rtx (TREE_VALUE (t
)), x
);
2344 /* Output a USE for any register use in RTL.
2345 This is used with -noreg to mark the extent of lifespan
2346 of any registers used in a user-visible variable's DECL_RTL. */
2352 if (GET_CODE (rtl
) == REG
)
2353 /* This is a register variable. */
2354 emit_insn (gen_rtx (USE
, VOIDmode
, rtl
));
2355 else if (GET_CODE (rtl
) == MEM
2356 && GET_CODE (XEXP (rtl
, 0)) == REG
2357 && (REGNO (XEXP (rtl
, 0)) < FIRST_VIRTUAL_REGISTER
2358 || REGNO (XEXP (rtl
, 0)) > LAST_VIRTUAL_REGISTER
)
2359 && XEXP (rtl
, 0) != current_function_internal_arg_pointer
)
2360 /* This is a variable-sized structure. */
2361 emit_insn (gen_rtx (USE
, VOIDmode
, XEXP (rtl
, 0)));
2364 /* Like use_variable except that it outputs the USEs after INSN
2365 instead of at the end of the insn-chain. */
2368 use_variable_after (rtl
, insn
)
2371 if (GET_CODE (rtl
) == REG
)
2372 /* This is a register variable. */
2373 emit_insn_after (gen_rtx (USE
, VOIDmode
, rtl
), insn
);
2374 else if (GET_CODE (rtl
) == MEM
2375 && GET_CODE (XEXP (rtl
, 0)) == REG
2376 && (REGNO (XEXP (rtl
, 0)) < FIRST_VIRTUAL_REGISTER
2377 || REGNO (XEXP (rtl
, 0)) > LAST_VIRTUAL_REGISTER
)
2378 && XEXP (rtl
, 0) != current_function_internal_arg_pointer
)
2379 /* This is a variable-sized structure. */
2380 emit_insn_after (gen_rtx (USE
, VOIDmode
, XEXP (rtl
, 0)), insn
);
2386 return max_parm_reg
;
2389 /* Return the first insn following those generated by `assign_parms'. */
2392 get_first_nonparm_insn ()
2395 return NEXT_INSN (last_parm_insn
);
2396 return get_insns ();
2399 /* Return 1 if EXP returns an aggregate value, for which an address
2400 must be passed to the function or returned by the function. */
2403 aggregate_value_p (exp
)
2406 if (TYPE_MODE (TREE_TYPE (exp
)) == BLKmode
)
2408 if (RETURN_IN_MEMORY (TREE_TYPE (exp
)))
2410 if (flag_pcc_struct_return
2411 && (TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
2412 || TREE_CODE (TREE_TYPE (exp
)) == UNION_TYPE
))
2417 /* Assign RTL expressions to the function's parameters.
2418 This may involve copying them into registers and using
2419 those registers as the RTL for them.
2421 If SECOND_TIME is non-zero it means that this function is being
2422 called a second time. This is done by integrate.c when a function's
2423 compilation is deferred. We need to come back here in case the
2424 FUNCTION_ARG macro computes items needed for the rest of the compilation
2425 (such as changing which registers are fixed or caller-saved). But suppress
2426 writing any insns or setting DECL_RTL of anything in this case. */
2429 assign_parms (fndecl
, second_time
)
2434 register rtx entry_parm
= 0;
2435 register rtx stack_parm
= 0;
2436 CUMULATIVE_ARGS args_so_far
;
2437 enum machine_mode passed_mode
, nominal_mode
;
2438 /* Total space needed so far for args on the stack,
2439 given as a constant and a tree-expression. */
2440 struct args_size stack_args_size
;
2441 tree fntype
= TREE_TYPE (fndecl
);
2442 tree fnargs
= DECL_ARGUMENTS (fndecl
);
2443 /* This is used for the arg pointer when referring to stack args. */
2444 rtx internal_arg_pointer
;
2445 /* This is a dummy PARM_DECL that we used for the function result if
2446 the function returns a structure. */
2447 tree function_result_decl
= 0;
2448 int nparmregs
= list_length (fnargs
) + LAST_VIRTUAL_REGISTER
+ 1;
2449 int varargs_setup
= 0;
2451 /* Nonzero if the last arg is named `__builtin_va_alist',
2452 which is used on some machines for old-fashioned non-ANSI varargs.h;
2453 this should be stuck onto the stack as if it had arrived there. */
2456 && (parm
= tree_last (fnargs
)) != 0
2458 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm
)),
2459 "__builtin_va_alist")));
2461 /* Nonzero if function takes extra anonymous args.
2462 This means the last named arg must be on the stack
2463 right before the anonymous ones. */
2465 = (TYPE_ARG_TYPES (fntype
) != 0
2466 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
2467 != void_type_node
));
2469 /* If the reg that the virtual arg pointer will be translated into is
2470 not a fixed reg or is the stack pointer, make a copy of the virtual
2471 arg pointer, and address parms via the copy. The frame pointer is
2472 considered fixed even though it is not marked as such.
2474 The second time through, simply use ap to avoid generating rtx. */
2476 if ((ARG_POINTER_REGNUM
== STACK_POINTER_REGNUM
2477 || ! (fixed_regs
[ARG_POINTER_REGNUM
]
2478 || ARG_POINTER_REGNUM
== FRAME_POINTER_REGNUM
))
2480 internal_arg_pointer
= copy_to_reg (virtual_incoming_args_rtx
);
2482 internal_arg_pointer
= virtual_incoming_args_rtx
;
2483 current_function_internal_arg_pointer
= internal_arg_pointer
;
2485 stack_args_size
.constant
= 0;
2486 stack_args_size
.var
= 0;
2488 /* If struct value address is treated as the first argument, make it so. */
2489 if (aggregate_value_p (DECL_RESULT (fndecl
))
2490 && ! current_function_returns_pcc_struct
2491 && struct_value_incoming_rtx
== 0)
2493 tree type
= build_pointer_type (fntype
);
2495 function_result_decl
= build_decl (PARM_DECL
, 0, type
);
2497 DECL_ARG_TYPE (function_result_decl
) = type
;
2498 TREE_CHAIN (function_result_decl
) = fnargs
;
2499 fnargs
= function_result_decl
;
2502 parm_reg_stack_loc
= (rtx
*) oballoc (nparmregs
* sizeof (rtx
));
2503 bzero (parm_reg_stack_loc
, nparmregs
* sizeof (rtx
));
2505 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2506 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far
, fntype
, 0);
2508 INIT_CUMULATIVE_ARGS (args_so_far
, fntype
, 0);
2511 /* We haven't yet found an argument that we must push and pretend the
2513 current_function_pretend_args_size
= 0;
2515 for (parm
= fnargs
; parm
; parm
= TREE_CHAIN (parm
))
2518 = (TREE_CODE (TREE_TYPE (parm
)) == ARRAY_TYPE
2519 || TREE_CODE (TREE_TYPE (parm
)) == RECORD_TYPE
2520 || TREE_CODE (TREE_TYPE (parm
)) == UNION_TYPE
);
2521 struct args_size stack_offset
;
2522 struct args_size arg_size
;
2523 int passed_pointer
= 0;
2524 tree passed_type
= DECL_ARG_TYPE (parm
);
2526 /* Set LAST_NAMED if this is last named arg before some
2527 anonymous args. We treat it as if it were anonymous too. */
2528 int last_named
= ((TREE_CHAIN (parm
) == 0
2529 || DECL_NAME (TREE_CHAIN (parm
)) == 0)
2530 && (vararg
|| stdarg
));
2532 if (TREE_TYPE (parm
) == error_mark_node
2533 /* This can happen after weird syntax errors
2534 or if an enum type is defined among the parms. */
2535 || TREE_CODE (parm
) != PARM_DECL
2536 || passed_type
== NULL
)
2538 DECL_RTL (parm
) = gen_rtx (MEM
, BLKmode
, const0_rtx
);
2539 TREE_USED (parm
) = 1;
2543 /* For varargs.h function, save info about regs and stack space
2544 used by the individual args, not including the va_alist arg. */
2545 if (vararg
&& last_named
)
2546 current_function_args_info
= args_so_far
;
2548 /* Find mode of arg as it is passed, and mode of arg
2549 as it should be during execution of this function. */
2550 passed_mode
= TYPE_MODE (passed_type
);
2551 nominal_mode
= TYPE_MODE (TREE_TYPE (parm
));
2553 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2554 /* See if this arg was passed by invisible reference. */
2555 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, passed_mode
,
2556 passed_type
, ! last_named
))
2558 passed_type
= build_pointer_type (passed_type
);
2560 passed_mode
= nominal_mode
= Pmode
;
2564 /* Let machine desc say which reg (if any) the parm arrives in.
2565 0 means it arrives on the stack. */
2566 #ifdef FUNCTION_INCOMING_ARG
2567 entry_parm
= FUNCTION_INCOMING_ARG (args_so_far
, passed_mode
,
2568 passed_type
, ! last_named
);
2570 entry_parm
= FUNCTION_ARG (args_so_far
, passed_mode
,
2571 passed_type
, ! last_named
);
2574 #ifdef SETUP_INCOMING_VARARGS
2575 /* If this is the last named parameter, do any required setup for
2576 varargs or stdargs. We need to know about the case of this being an
2577 addressable type, in which case we skip the registers it
2578 would have arrived in.
2580 For stdargs, LAST_NAMED will be set for two parameters, the one that
2581 is actually the last named, and the dummy parameter. We only
2582 want to do this action once.
2584 Also, indicate when RTL generation is to be suppressed. */
2585 if (last_named
&& !varargs_setup
)
2587 SETUP_INCOMING_VARARGS (args_so_far
, passed_mode
, passed_type
,
2588 current_function_pretend_args_size
,
2594 /* Determine parm's home in the stack,
2595 in case it arrives in the stack or we should pretend it did.
2597 Compute the stack position and rtx where the argument arrives
2600 There is one complexity here: If this was a parameter that would
2601 have been passed in registers, but wasn't only because it is
2602 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2603 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2604 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2605 0 as it was the previous time. */
2607 locate_and_pad_parm (passed_mode
, passed_type
,
2608 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2611 #ifdef FUNCTION_INCOMING_ARG
2612 FUNCTION_INCOMING_ARG (args_so_far
, passed_mode
,
2615 || varargs_setup
)) != 0,
2617 FUNCTION_ARG (args_so_far
, passed_mode
,
2619 ! last_named
|| varargs_setup
) != 0,
2622 fndecl
, &stack_args_size
, &stack_offset
, &arg_size
);
2626 rtx offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
2628 if (offset_rtx
== const0_rtx
)
2629 stack_parm
= gen_rtx (MEM
, passed_mode
, internal_arg_pointer
);
2631 stack_parm
= gen_rtx (MEM
, passed_mode
,
2632 gen_rtx (PLUS
, Pmode
,
2633 internal_arg_pointer
, offset_rtx
));
2635 /* If this is a memory ref that contains aggregate components,
2636 mark it as such for cse and loop optimize. */
2637 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
2640 /* If this parameter was passed both in registers and in the stack,
2641 use the copy on the stack. */
2642 if (MUST_PASS_IN_STACK (passed_mode
, passed_type
))
2645 /* If this parm was passed part in regs and part in memory,
2646 pretend it arrived entirely in memory
2647 by pushing the register-part onto the stack.
2649 In the special case of a DImode or DFmode that is split,
2650 we could put it together in a pseudoreg directly,
2651 but for now that's not worth bothering with. */
2656 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2657 nregs
= FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, passed_mode
,
2658 passed_type
, ! last_named
);
2663 current_function_pretend_args_size
2664 = (((nregs
* UNITS_PER_WORD
) + (PARM_BOUNDARY
/ BITS_PER_UNIT
) - 1)
2665 / (PARM_BOUNDARY
/ BITS_PER_UNIT
)
2666 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
2669 move_block_from_reg (REGNO (entry_parm
),
2670 validize_mem (stack_parm
), nregs
);
2671 entry_parm
= stack_parm
;
2675 /* If we didn't decide this parm came in a register,
2676 by default it came on the stack. */
2677 if (entry_parm
== 0)
2678 entry_parm
= stack_parm
;
2680 /* Record permanently how this parm was passed. */
2682 DECL_INCOMING_RTL (parm
) = entry_parm
;
2684 /* If there is actually space on the stack for this parm,
2685 count it in stack_args_size; otherwise set stack_parm to 0
2686 to indicate there is no preallocated stack slot for the parm. */
2688 if (entry_parm
== stack_parm
2689 #ifdef REG_PARM_STACK_SPACE
2690 /* On some machines, even if a parm value arrives in a register
2691 there is still an (uninitialized) stack slot allocated for it. */
2692 || REG_PARM_STACK_SPACE (fndecl
) > 0
2696 stack_args_size
.constant
+= arg_size
.constant
;
2698 ADD_PARM_SIZE (stack_args_size
, arg_size
.var
);
2701 /* No stack slot was pushed for this parm. */
2704 /* Update info on where next arg arrives in registers. */
2706 FUNCTION_ARG_ADVANCE (args_so_far
, passed_mode
,
2707 passed_type
, ! last_named
);
2709 /* If this is our second time through, we are done with this parm. */
2713 /* Now adjust STACK_PARM to the mode and precise location
2714 where this parameter should live during execution,
2715 if we discover that it must live in the stack during execution.
2716 To make debuggers happier on big-endian machines, we store
2717 the value in the last bytes of the space available. */
2719 if (nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
2724 #if BYTES_BIG_ENDIAN
2725 if (GET_MODE_SIZE (nominal_mode
) < UNITS_PER_WORD
)
2726 stack_offset
.constant
+= (GET_MODE_SIZE (passed_mode
)
2727 - GET_MODE_SIZE (nominal_mode
));
2730 offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
2731 if (offset_rtx
== const0_rtx
)
2732 stack_parm
= gen_rtx (MEM
, nominal_mode
, internal_arg_pointer
);
2734 stack_parm
= gen_rtx (MEM
, nominal_mode
,
2735 gen_rtx (PLUS
, Pmode
,
2736 internal_arg_pointer
, offset_rtx
));
2738 /* If this is a memory ref that contains aggregate components,
2739 mark it as such for cse and loop optimize. */
2740 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
2743 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2744 in the mode in which it arrives.
2745 STACK_PARM is an RTX for a stack slot where the parameter can live
2746 during the function (in case we want to put it there).
2747 STACK_PARM is 0 if no stack slot was pushed for it.
2749 Now output code if necessary to convert ENTRY_PARM to
2750 the type in which this function declares it,
2751 and store that result in an appropriate place,
2752 which may be a pseudo reg, may be STACK_PARM,
2753 or may be a local stack slot if STACK_PARM is 0.
2755 Set DECL_RTL to that place. */
2757 if (nominal_mode
== BLKmode
)
2759 /* If a BLKmode arrives in registers, copy it to a stack slot. */
2760 if (GET_CODE (entry_parm
) == REG
)
2762 int size_stored
= CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm
)),
2765 /* Note that we will be storing an integral number of words.
2766 So we have to be careful to ensure that we allocate an
2767 integral number of words. We do this below in the
2768 assign_stack_local if space was not allocated in the argument
2769 list. If it was, this will not work if PARM_BOUNDARY is not
2770 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2771 if it becomes a problem. */
2773 if (stack_parm
== 0)
2775 = assign_stack_local (GET_MODE (entry_parm
), size_stored
, 0);
2776 else if (PARM_BOUNDARY
% BITS_PER_WORD
!= 0)
2779 move_block_from_reg (REGNO (entry_parm
),
2780 validize_mem (stack_parm
),
2781 size_stored
/ UNITS_PER_WORD
);
2783 DECL_RTL (parm
) = stack_parm
;
2786 #if 0 /* This change was turned off because it makes compilation bigger. */
2788 #else /* It's not clear why the following was replaced. */
2789 /* Obsoleted by preceding line. */
2790 (obey_regdecls
&& ! TREE_REGDECL (parm
)
2791 && ! TREE_INLINE (fndecl
))
2793 /* layout_decl may set this. */
2794 || TREE_ADDRESSABLE (parm
)
2795 || TREE_SIDE_EFFECTS (parm
)
2796 /* If -ffloat-store specified, don't put explicit
2797 float variables into registers. */
2798 || (flag_float_store
2799 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
))
2800 /* Always assign pseudo to structure return or item passed
2801 by invisible reference. */
2802 || passed_pointer
|| parm
== function_result_decl
)
2804 /* Store the parm in a pseudoregister during the function. */
2805 register rtx parmreg
= gen_reg_rtx (nominal_mode
);
2807 REG_USERVAR_P (parmreg
) = 1;
2809 /* If this was an item that we received a pointer to, set DECL_RTL
2813 DECL_RTL (parm
) = gen_rtx (MEM
, TYPE_MODE (TREE_TYPE (passed_type
)), parmreg
);
2814 MEM_IN_STRUCT_P (DECL_RTL (parm
)) = aggregate
;
2817 DECL_RTL (parm
) = parmreg
;
2819 /* Copy the value into the register. */
2820 if (GET_MODE (parmreg
) != GET_MODE (entry_parm
))
2822 /* If ENTRY_PARM is a hard register, it might be in a register
2823 not valid for operating in its mode (e.g., an odd-numbered
2824 register for a DFmode). In that case, moves are the only
2825 thing valid, so we can't do a convert from there. This
2826 occurs when the calling sequence allow such misaligned
2828 if (GET_CODE (entry_parm
) == REG
2829 && REGNO (entry_parm
) < FIRST_PSEUDO_REGISTER
2830 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm
),
2831 GET_MODE (entry_parm
)))
2832 convert_move (parmreg
, copy_to_reg (entry_parm
));
2834 convert_move (parmreg
, validize_mem (entry_parm
), 0);
2837 emit_move_insn (parmreg
, validize_mem (entry_parm
));
2839 /* In any case, record the parm's desired stack location
2840 in case we later discover it must live in the stack. */
2841 if (REGNO (parmreg
) >= nparmregs
)
2844 nparmregs
= REGNO (parmreg
) + 5;
2845 new = (rtx
*) oballoc (nparmregs
* sizeof (rtx
));
2846 bcopy (parm_reg_stack_loc
, new, nparmregs
* sizeof (rtx
));
2847 parm_reg_stack_loc
= new;
2849 parm_reg_stack_loc
[REGNO (parmreg
)] = stack_parm
;
2851 /* Mark the register as eliminable if we did no conversion
2852 and it was copied from memory at a fixed offset,
2853 and the arg pointer was not copied to a pseudo-reg.
2854 If the arg pointer is a pseudo reg or the offset formed
2855 an invalid address, such memory-equivalences
2856 as we make here would screw up life analysis for it. */
2857 if (nominal_mode
== passed_mode
2858 && GET_CODE (entry_parm
) == MEM
2859 && stack_offset
.var
== 0
2860 && reg_mentioned_p (virtual_incoming_args_rtx
,
2861 XEXP (entry_parm
, 0)))
2862 REG_NOTES (get_last_insn ())
2863 = gen_rtx (EXPR_LIST
, REG_EQUIV
,
2864 entry_parm
, REG_NOTES (get_last_insn ()));
2866 /* For pointer data type, suggest pointer register. */
2867 if (TREE_CODE (TREE_TYPE (parm
)) == POINTER_TYPE
)
2868 mark_reg_pointer (parmreg
);
2872 /* Value must be stored in the stack slot STACK_PARM
2873 during function execution. */
2875 if (passed_mode
!= nominal_mode
)
2877 /* Conversion is required. */
2878 if (GET_CODE (entry_parm
) == REG
2879 && REGNO (entry_parm
) < FIRST_PSEUDO_REGISTER
2880 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm
), passed_mode
))
2881 entry_parm
= copy_to_reg (entry_parm
);
2883 entry_parm
= convert_to_mode (nominal_mode
, entry_parm
, 0);
2886 if (entry_parm
!= stack_parm
)
2888 if (stack_parm
== 0)
2889 stack_parm
= assign_stack_local (GET_MODE (entry_parm
),
2890 GET_MODE_SIZE (GET_MODE (entry_parm
)), 0);
2891 emit_move_insn (validize_mem (stack_parm
),
2892 validize_mem (entry_parm
));
2895 DECL_RTL (parm
) = stack_parm
;
2898 /* If this "parameter" was the place where we are receiving the
2899 function's incoming structure pointer, set up the result. */
2900 if (parm
== function_result_decl
)
2901 DECL_RTL (DECL_RESULT (fndecl
))
2902 = gen_rtx (MEM
, DECL_MODE (DECL_RESULT (fndecl
)), DECL_RTL (parm
));
2904 if (TREE_THIS_VOLATILE (parm
))
2905 MEM_VOLATILE_P (DECL_RTL (parm
)) = 1;
2906 if (TREE_READONLY (parm
))
2907 RTX_UNCHANGING_P (DECL_RTL (parm
)) = 1;
2910 max_parm_reg
= max_reg_num ();
2911 last_parm_insn
= get_last_insn ();
2913 current_function_args_size
= stack_args_size
.constant
;
2915 /* Adjust function incoming argument size for alignment and
2918 #ifdef REG_PARM_STACK_SPACE
2919 #ifndef MAYBE_REG_PARM_STACK_SPACE
2920 current_function_args_size
= MAX (current_function_args_size
,
2921 REG_PARM_STACK_SPACE (fndecl
));
2925 #ifdef STACK_BOUNDARY
2926 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2928 current_function_args_size
2929 = ((current_function_args_size
+ STACK_BYTES
- 1)
2930 / STACK_BYTES
) * STACK_BYTES
;
2933 #ifdef ARGS_GROW_DOWNWARD
2934 current_function_arg_offset_rtx
2935 = (stack_args_size
.var
== 0 ? gen_rtx (CONST_INT
, VOIDmode
,
2936 -stack_args_size
.constant
)
2937 : expand_expr (size_binop (MINUS_EXPR
, stack_args_size
.var
,
2938 size_int (-stack_args_size
.constant
)),
2941 current_function_arg_offset_rtx
= ARGS_SIZE_RTX (stack_args_size
);
2944 /* See how many bytes, if any, of its args a function should try to pop
2947 current_function_pops_args
= RETURN_POPS_ARGS (TREE_TYPE (fndecl
),
2948 current_function_args_size
);
2950 /* For stdarg.h function, save info about regs and stack space
2951 used by the named args. */
2954 current_function_args_info
= args_so_far
;
2956 /* Set the rtx used for the function return value. Put this in its
2957 own variable so any optimizers that need this information don't have
2958 to include tree.h. Do this here so it gets done when an inlined
2959 function gets output. */
2961 current_function_return_rtx
= DECL_RTL (DECL_RESULT (fndecl
));
2964 /* Compute the size and offset from the start of the stacked arguments for a
2965 parm passed in mode PASSED_MODE and with type TYPE.
2967 INITIAL_OFFSET_PTR points to the current offset into the stacked
2970 The starting offset and size for this parm are returned in *OFFSET_PTR
2971 and *ARG_SIZE_PTR, respectively.
2973 IN_REGS is non-zero if the argument will be passed in registers. It will
2974 never be set if REG_PARM_STACK_SPACE is not defined.
2976 FNDECL is the function in which the argument was defined.
2978 There are two types of rounding that are done. The first, controlled by
2979 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
2980 list to be aligned to the specific boundary (in bits). This rounding
2981 affects the initial and starting offsets, but not the argument size.
2983 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
2984 optionally rounds the size of the parm to PARM_BOUNDARY. The
2985 initial offset is not affected by this rounding, while the size always
2986 is and the starting offset may be. */
2988 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
2989 initial_offset_ptr is positive because locate_and_pad_parm's
2990 callers pass in the total size of args so far as
2991 initial_offset_ptr. arg_size_ptr is always positive.*/
2993 static void pad_to_arg_alignment (), pad_below ();
2996 locate_and_pad_parm (passed_mode
, type
, in_regs
, fndecl
,
2997 initial_offset_ptr
, offset_ptr
, arg_size_ptr
)
2998 enum machine_mode passed_mode
;
3002 struct args_size
*initial_offset_ptr
;
3003 struct args_size
*offset_ptr
;
3004 struct args_size
*arg_size_ptr
;
3007 = type
? size_in_bytes (type
) : size_int (GET_MODE_SIZE (passed_mode
));
3008 enum direction where_pad
= FUNCTION_ARG_PADDING (passed_mode
, type
);
3009 int boundary
= FUNCTION_ARG_BOUNDARY (passed_mode
, type
);
3010 int boundary_in_bytes
= boundary
/ BITS_PER_UNIT
;
3011 int reg_parm_stack_space
= 0;
3013 #ifdef REG_PARM_STACK_SPACE
3014 /* If we have found a stack parm before we reach the end of the
3015 area reserved for registers, skip that area. */
3018 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
3019 if (reg_parm_stack_space
> 0)
3021 if (initial_offset_ptr
->var
)
3023 initial_offset_ptr
->var
3024 = size_binop (MAX_EXPR
, ARGS_SIZE_TREE (*initial_offset_ptr
),
3025 size_int (reg_parm_stack_space
));
3026 initial_offset_ptr
->constant
= 0;
3028 else if (initial_offset_ptr
->constant
< reg_parm_stack_space
)
3029 initial_offset_ptr
->constant
= reg_parm_stack_space
;
3032 #endif /* REG_PARM_STACK_SPACE */
3034 arg_size_ptr
->var
= 0;
3035 arg_size_ptr
->constant
= 0;
3037 #ifdef ARGS_GROW_DOWNWARD
3038 if (initial_offset_ptr
->var
)
3040 offset_ptr
->constant
= 0;
3041 offset_ptr
->var
= size_binop (MINUS_EXPR
, integer_zero_node
,
3042 initial_offset_ptr
->var
);
3046 offset_ptr
->constant
= - initial_offset_ptr
->constant
;
3047 offset_ptr
->var
= 0;
3049 if (where_pad
== upward
3050 && (TREE_CODE (sizetree
) != INTEGER_CST
3051 || ((TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)))
3052 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
3053 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
3054 pad_to_arg_alignment (offset_ptr
, boundary
);
3055 if (initial_offset_ptr
->var
)
3057 arg_size_ptr
->var
= size_binop (MINUS_EXPR
,
3058 size_binop (MINUS_EXPR
,
3060 initial_offset_ptr
->var
),
3065 arg_size_ptr
->constant
= (- initial_offset_ptr
->constant
-
3066 offset_ptr
->constant
);
3068 /* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3069 if (where_pad
== downward
)
3070 pad_below (arg_size_ptr
, passed_mode
, sizetree
);
3071 #else /* !ARGS_GROW_DOWNWARD */
3072 pad_to_arg_alignment (initial_offset_ptr
, boundary
);
3073 *offset_ptr
= *initial_offset_ptr
;
3074 if (where_pad
== downward
)
3075 pad_below (offset_ptr
, passed_mode
, sizetree
);
3077 #ifdef PUSH_ROUNDING
3078 if (passed_mode
!= BLKmode
)
3079 sizetree
= size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree
)));
3082 if (where_pad
!= none
3083 && (TREE_CODE (sizetree
) != INTEGER_CST
3084 || ((TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)))
3085 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
3087 ADD_PARM_SIZE (*arg_size_ptr
, sizetree
);
3088 #endif /* ARGS_GROW_DOWNWARD */
3092 pad_to_arg_alignment (offset_ptr
, boundary
)
3093 struct args_size
*offset_ptr
;
3096 int boundary_in_bytes
= boundary
/ BITS_PER_UNIT
;
3098 if (boundary
> BITS_PER_UNIT
)
3100 if (offset_ptr
->var
)
3103 #ifdef ARGS_GROW_DOWNWARD
3108 (ARGS_SIZE_TREE (*offset_ptr
),
3109 boundary
/ BITS_PER_UNIT
);
3110 offset_ptr
->constant
= 0; /*?*/
3113 offset_ptr
->constant
=
3114 #ifdef ARGS_GROW_DOWNWARD
3115 FLOOR_ROUND (offset_ptr
->constant
, boundary_in_bytes
);
3117 CEIL_ROUND (offset_ptr
->constant
, boundary_in_bytes
);
3123 pad_below (offset_ptr
, passed_mode
, sizetree
)
3124 struct args_size
*offset_ptr
;
3125 enum machine_mode passed_mode
;
3128 if (passed_mode
!= BLKmode
)
3130 if (GET_MODE_BITSIZE (passed_mode
) % PARM_BOUNDARY
)
3131 offset_ptr
->constant
3132 += (((GET_MODE_BITSIZE (passed_mode
) + PARM_BOUNDARY
- 1)
3133 / PARM_BOUNDARY
* PARM_BOUNDARY
/ BITS_PER_UNIT
)
3134 - GET_MODE_SIZE (passed_mode
));
3138 if (TREE_CODE (sizetree
) != INTEGER_CST
3139 || (TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)
3141 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3142 tree s2
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
3144 ADD_PARM_SIZE (*offset_ptr
, s2
);
3145 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
3151 round_down (value
, divisor
)
3155 return size_binop (MULT_EXPR
,
3156 size_binop (FLOOR_DIV_EXPR
, value
, size_int (divisor
)),
3157 size_int (divisor
));
3160 /* Walk the tree of blocks describing the binding levels within a function
3161 and warn about uninitialized variables.
3162 This is done after calling flow_analysis and before global_alloc
3163 clobbers the pseudo-regs to hard regs. */
3166 uninitialized_vars_warning (block
)
3169 register tree decl
, sub
;
3170 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
3172 if (TREE_CODE (decl
) == VAR_DECL
3173 /* These warnings are unreliable for and aggregates
3174 because assigning the fields one by one can fail to convince
3175 flow.c that the entire aggregate was initialized.
3176 Unions are troublesome because members may be shorter. */
3177 && TREE_CODE (TREE_TYPE (decl
)) != RECORD_TYPE
3178 && TREE_CODE (TREE_TYPE (decl
)) != UNION_TYPE
3179 && TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
3180 && DECL_RTL (decl
) != 0
3181 && GET_CODE (DECL_RTL (decl
)) == REG
3182 && regno_uninitialized (REGNO (DECL_RTL (decl
))))
3183 warning_with_decl (decl
,
3184 "`%s' may be used uninitialized in this function");
3185 if (TREE_CODE (decl
) == VAR_DECL
3186 && DECL_RTL (decl
) != 0
3187 && GET_CODE (DECL_RTL (decl
)) == REG
3188 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
3189 warning_with_decl (decl
,
3190 "variable `%s' may be clobbered by `longjmp'");
3192 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
3193 uninitialized_vars_warning (sub
);
3196 /* Do the appropriate part of uninitialized_vars_warning
3197 but for arguments instead of local variables. */
3200 setjmp_args_warning (block
)
3204 for (decl
= DECL_ARGUMENTS (current_function_decl
);
3205 decl
; decl
= TREE_CHAIN (decl
))
3206 if (DECL_RTL (decl
) != 0
3207 && GET_CODE (DECL_RTL (decl
)) == REG
3208 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
3209 warning_with_decl (decl
, "argument `%s' may be clobbered by `longjmp'");
3212 /* If this function call setjmp, put all vars into the stack
3213 unless they were declared `register'. */
3216 setjmp_protect (block
)
3219 register tree decl
, sub
;
3220 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
3221 if ((TREE_CODE (decl
) == VAR_DECL
3222 || TREE_CODE (decl
) == PARM_DECL
)
3223 && DECL_RTL (decl
) != 0
3224 && GET_CODE (DECL_RTL (decl
)) == REG
3225 /* If this variable came from an inline function, it must be
3226 that it's life doesn't overlap the setjmp. If there was a
3227 setjmp in the function, it would already be in memory. We
3228 must exclude such variable because their DECL_RTL might be
3229 set to strange things such as virtual_stack_vars_rtx. */
3230 && ! DECL_FROM_INLINE (decl
)
3232 #ifdef NON_SAVING_SETJMP
3233 /* If longjmp doesn't restore the registers,
3234 don't put anything in them. */
3238 ! TREE_REGDECL (decl
)))
3239 put_var_into_stack (decl
);
3240 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
3241 setjmp_protect (sub
);
3244 /* Like the previous function, but for args instead of local variables. */
3247 setjmp_protect_args ()
3249 register tree decl
, sub
;
3250 for (decl
= DECL_ARGUMENTS (current_function_decl
);
3251 decl
; decl
= TREE_CHAIN (decl
))
3252 if ((TREE_CODE (decl
) == VAR_DECL
3253 || TREE_CODE (decl
) == PARM_DECL
)
3254 && DECL_RTL (decl
) != 0
3255 && GET_CODE (DECL_RTL (decl
)) == REG
3257 /* If longjmp doesn't restore the registers,
3258 don't put anything in them. */
3259 #ifdef NON_SAVING_SETJMP
3263 ! TREE_REGDECL (decl
)))
3264 put_var_into_stack (decl
);
3267 /* Return the context-pointer register corresponding to DECL,
3268 or 0 if it does not need one. */
3271 lookup_static_chain (decl
)
3274 tree context
= decl_function_context (decl
);
3280 /* We treat inline_function_decl as an alias for the current function
3281 because that is the inline function whose vars, types, etc.
3282 are being merged into the current function.
3283 See expand_inline_function. */
3284 if (context
== current_function_decl
|| context
== inline_function_decl
)
3285 return virtual_stack_vars_rtx
;
3287 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
3288 if (TREE_PURPOSE (link
) == context
)
3289 return RTL_EXPR_RTL (TREE_VALUE (link
));
3294 /* Convert a stack slot address ADDR for variable VAR
3295 (from a containing function)
3296 into an address valid in this function (using a static chain). */
3299 fix_lexical_addr (addr
, var
)
3305 tree context
= decl_function_context (var
);
3306 struct function
*fp
;
3309 /* If this is the present function, we need not do anything. */
3310 if (context
== current_function_decl
|| context
== inline_function_decl
)
3313 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
3314 if (fp
->decl
== context
)
3320 /* Decode given address as base reg plus displacement. */
3321 if (GET_CODE (addr
) == REG
)
3322 basereg
= addr
, displacement
= 0;
3323 else if (GET_CODE (addr
) == PLUS
&& GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
3324 basereg
= XEXP (addr
, 0), displacement
= INTVAL (XEXP (addr
, 1));
3328 /* We accept vars reached via the containing function's
3329 incoming arg pointer and via its stack variables pointer. */
3330 if (basereg
== fp
->internal_arg_pointer
)
3332 /* If reached via arg pointer, get the arg pointer value
3333 out of that function's stack frame.
3335 There are two cases: If a separate ap is needed, allocate a
3336 slot in the outer function for it and dereference it that way.
3337 This is correct even if the real ap is actually a pseudo.
3338 Otherwise, just adjust the offset from the frame pointer to
3341 #ifdef NEED_SEPARATE_AP
3344 if (fp
->arg_pointer_save_area
== 0)
3345 fp
->arg_pointer_save_area
3346 = assign_outer_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0, fp
);
3348 addr
= fix_lexical_addr (XEXP (fp
->arg_pointer_save_area
, 0), var
);
3349 addr
= memory_address (Pmode
, addr
);
3351 base
= copy_to_reg (gen_rtx (MEM
, Pmode
, addr
));
3353 displacement
+= (FIRST_PARM_OFFSET (context
) - STARTING_FRAME_OFFSET
);
3354 base
= lookup_static_chain (var
);
3358 else if (basereg
== virtual_stack_vars_rtx
)
3360 /* This is the same code as lookup_static_chain, duplicated here to
3361 avoid an extra call to decl_function_context. */
3364 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
3365 if (TREE_PURPOSE (link
) == context
)
3367 base
= RTL_EXPR_RTL (TREE_VALUE (link
));
3375 /* Use same offset, relative to appropriate static chain or argument
3377 return plus_constant (base
, displacement
);
3380 /* Return the address of the trampoline for entering nested fn FUNCTION.
3381 If necessary, allocate a trampoline (in the stack frame)
3382 and emit rtl to initialize its contents (at entry to this function). */
3385 trampoline_address (function
)
3391 struct function
*fp
;
3394 /* Find an existing trampoline and return it. */
3395 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
3396 if (TREE_PURPOSE (link
) == function
)
3397 return XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0);
3398 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
3399 for (link
= fp
->trampoline_list
; link
; link
= TREE_CHAIN (link
))
3400 if (TREE_PURPOSE (link
) == function
)
3402 tramp
= fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0),
3404 return round_trampoline_addr (tramp
);
3407 /* None exists; we must make one. */
3409 /* Find the `struct function' for the function containing FUNCTION. */
3411 fn_context
= decl_function_context (function
);
3412 if (fn_context
!= current_function_decl
)
3413 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
3414 if (fp
->decl
== fn_context
)
3417 /* Allocate run-time space for this trampoline
3418 (usually in the defining function's stack frame). */
3419 #ifdef ALLOCATE_TRAMPOLINE
3420 tramp
= ALLOCATE_TRAMPOLINE (fp
);
3422 /* If rounding needed, allocate extra space
3423 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
3424 #ifdef TRAMPOLINE_ALIGNMENT
3425 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
3427 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
3430 tramp
= assign_outer_stack_local (BLKmode
, TRAMPOLINE_REAL_SIZE
, 0, fp
);
3432 tramp
= assign_stack_local (BLKmode
, TRAMPOLINE_REAL_SIZE
, 0);
3435 /* Record the trampoline for reuse and note it for later initialization
3436 by expand_function_end. */
3439 push_obstacks (fp
->current_obstack
, fp
->function_maybepermanent_obstack
);
3440 rtlexp
= make_node (RTL_EXPR
);
3441 RTL_EXPR_RTL (rtlexp
) = tramp
;
3442 fp
->trampoline_list
= tree_cons (function
, rtlexp
, fp
->trampoline_list
);
3447 /* Make the RTL_EXPR node temporary, not momentary, so that the
3448 trampoline_list doesn't become garbage. */
3449 int momentary
= suspend_momentary ();
3450 rtlexp
= make_node (RTL_EXPR
);
3451 resume_momentary (momentary
);
3453 RTL_EXPR_RTL (rtlexp
) = tramp
;
3454 trampoline_list
= tree_cons (function
, rtlexp
, trampoline_list
);
3457 tramp
= fix_lexical_addr (XEXP (tramp
, 0), function
);
3458 return round_trampoline_addr (tramp
);
3461 /* Given a trampoline address,
3462 round it to multiple of TRAMPOLINE_ALIGNMENT. */
3465 round_trampoline_addr (tramp
)
3468 #ifdef TRAMPOLINE_ALIGNMENT
3469 /* Round address up to desired boundary. */
3470 rtx temp
= gen_reg_rtx (Pmode
);
3471 temp
= expand_binop (Pmode
, add_optab
, tramp
,
3472 gen_rtx (CONST_INT
, VOIDmode
, TRAMPOLINE_ALIGNMENT
- 1),
3473 temp
, 0, OPTAB_LIB_WIDEN
);
3474 tramp
= expand_binop (Pmode
, and_optab
, temp
,
3475 gen_rtx (CONST_INT
, VOIDmode
, - TRAMPOLINE_ALIGNMENT
),
3476 temp
, 0, OPTAB_LIB_WIDEN
);
3481 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3482 and initialize static variables for generating RTL for the statements
3486 init_function_start (subr
, filename
, line
)
3493 init_stmt_for_function ();
3495 cse_not_expected
= ! optimize
;
3497 /* Caller save not needed yet. */
3498 caller_save_needed
= 0;
3500 /* No stack slots have been made yet. */
3501 stack_slot_list
= 0;
3503 /* There is no stack slot for handling nonlocal gotos. */
3504 nonlocal_goto_handler_slot
= 0;
3505 nonlocal_goto_stack_level
= 0;
3507 /* No labels have been declared for nonlocal use. */
3508 nonlocal_labels
= 0;
3510 /* No function calls so far in this function. */
3511 function_call_count
= 0;
3513 /* No parm regs have been allocated.
3514 (This is important for output_inline_function.) */
3515 max_parm_reg
= LAST_VIRTUAL_REGISTER
+ 1;
3517 /* Initialize the RTL mechanism. */
3520 /* Initialize the queue of pending postincrement and postdecrements,
3521 and some other info in expr.c. */
3524 /* We haven't done register allocation yet. */
3527 init_const_rtx_hash_table ();
3529 current_function_name
= (*decl_printable_name
) (subr
, &junk
);
3531 /* Nonzero if this is a nested function that uses a static chain. */
3533 current_function_needs_context
3534 = (decl_function_context (current_function_decl
) != 0);
3536 /* Set if a call to setjmp is seen. */
3537 current_function_calls_setjmp
= 0;
3539 /* Set if a call to longjmp is seen. */
3540 current_function_calls_longjmp
= 0;
3542 current_function_calls_alloca
= 0;
3543 current_function_has_nonlocal_label
= 0;
3544 current_function_contains_functions
= 0;
3546 current_function_returns_pcc_struct
= 0;
3547 current_function_returns_struct
= 0;
3548 current_function_epilogue_delay_list
= 0;
3549 current_function_uses_const_pool
= 0;
3550 current_function_uses_pic_offset_table
= 0;
3552 /* We have not yet needed to make a label to jump to for tail-recursion. */
3553 tail_recursion_label
= 0;
3555 /* We haven't had a need to make a save area for ap yet. */
3557 arg_pointer_save_area
= 0;
3559 /* No stack slots allocated yet. */
3562 /* No SAVE_EXPRs in this function yet. */
3565 /* No RTL_EXPRs in this function yet. */
3568 /* We have not allocated any temporaries yet. */
3570 temp_slot_level
= 0;
3572 /* Within function body, compute a type's size as soon it is laid out. */
3573 immediate_size_expand
++;
3575 init_pending_stack_adjust ();
3576 inhibit_defer_pop
= 0;
3578 current_function_outgoing_args_size
= 0;
3580 /* Initialize the insn lengths. */
3581 init_insn_lengths ();
3583 /* Prevent ever trying to delete the first instruction of a function.
3584 Also tell final how to output a linenum before the function prologue. */
3585 emit_line_note (filename
, line
);
3587 /* Make sure first insn is a note even if we don't want linenums.
3588 This makes sure the first insn will never be deleted.
3589 Also, final expects a note to appear there. */
3590 emit_note (0, NOTE_INSN_DELETED
);
3592 /* Set flags used by final.c. */
3593 if (aggregate_value_p (DECL_RESULT (subr
)))
3595 #ifdef PCC_STATIC_STRUCT_RETURN
3596 if (flag_pcc_struct_return
)
3597 current_function_returns_pcc_struct
= 1;
3600 current_function_returns_struct
= 1;
3603 /* Warn if this value is an aggregate type,
3604 regardless of which calling convention we are using for it. */
3605 if (warn_aggregate_return
3606 && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr
))) == RECORD_TYPE
3607 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr
))) == UNION_TYPE
3608 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr
))) == ARRAY_TYPE
))
3609 warning ("function returns an aggregate");
3611 current_function_returns_pointer
3612 = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr
))) == POINTER_TYPE
);
3614 /* Indicate that we need to distinguish between the return value of the
3615 present function and the return value of a function being called. */
3616 rtx_equal_function_value_matters
= 1;
3618 /* Indicate that we have not instantiated virtual registers yet. */
3619 virtuals_instantiated
= 0;
3621 /* Indicate we have no need of a frame pointer yet. */
3622 frame_pointer_needed
= 0;
3624 /* By default assume not varargs. */
3625 current_function_varargs
= 0;
3628 /* Indicate that the current function uses extra args
3629 not explicitly mentioned in the argument list in any fashion. */
3634 current_function_varargs
= 1;
3637 /* Expand a call to __main at the beginning of a possible main function. */
3640 expand_main_function ()
3642 #if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
3643 emit_library_call (gen_rtx (SYMBOL_REF
, Pmode
, "__main"), 0,
3645 #endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
3648 /* Start the RTL for a new function, and set variables used for
3650 SUBR is the FUNCTION_DECL node.
3651 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
3652 the function's parameters, which must be run at any return statement. */
3655 expand_function_start (subr
, parms_have_cleanups
)
3657 int parms_have_cleanups
;
3663 /* Make sure volatile mem refs aren't considered
3664 valid operands of arithmetic insns. */
3665 init_recog_no_volatile ();
3667 /* If function gets a static chain arg, store it in the stack frame.
3668 Do this first, so it gets the first stack slot offset. */
3669 if (current_function_needs_context
)
3670 emit_move_insn (assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0),
3671 static_chain_incoming_rtx
);
3673 /* If the parameters of this function need cleaning up, get a label
3674 for the beginning of the code which executes those cleanups. This must
3675 be done before doing anything with return_label. */
3676 if (parms_have_cleanups
)
3677 cleanup_label
= gen_label_rtx ();
3681 /* Make the label for return statements to jump to, if this machine
3682 does not have a one-instruction return and uses an epilogue,
3683 or if it returns a structure, or if it has parm cleanups. */
3685 if (cleanup_label
== 0 && HAVE_return
3686 && ! current_function_returns_pcc_struct
3687 && ! (current_function_returns_struct
&& ! optimize
))
3690 return_label
= gen_label_rtx ();
3692 return_label
= gen_label_rtx ();
3695 /* Initialize rtx used to return the value. */
3696 /* Do this before assign_parms so that we copy the struct value address
3697 before any library calls that assign parms might generate. */
3699 /* Decide whether to return the value in memory or in a register. */
3700 if (aggregate_value_p (DECL_RESULT (subr
)))
3702 /* Returning something that won't go in a register. */
3703 register rtx value_address
;
3705 #ifdef PCC_STATIC_STRUCT_RETURN
3706 if (current_function_returns_pcc_struct
)
3708 int size
= int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr
)));
3709 value_address
= assemble_static_space (size
);
3714 /* Expect to be passed the address of a place to store the value.
3715 If it is passed as an argument, assign_parms will take care of
3717 if (struct_value_incoming_rtx
)
3719 value_address
= gen_reg_rtx (Pmode
);
3720 emit_move_insn (value_address
, struct_value_incoming_rtx
);
3724 DECL_RTL (DECL_RESULT (subr
))
3725 = gen_rtx (MEM
, DECL_MODE (DECL_RESULT (subr
)),
3728 else if (DECL_MODE (DECL_RESULT (subr
)) == VOIDmode
)
3729 /* If return mode is void, this decl rtl should not be used. */
3730 DECL_RTL (DECL_RESULT (subr
)) = 0;
3731 else if (parms_have_cleanups
)
3732 /* If function will end with cleanup code for parms,
3733 compute the return values into a pseudo reg,
3734 which we will copy into the true return register
3735 after the cleanups are done. */
3736 DECL_RTL (DECL_RESULT (subr
))
3737 = gen_reg_rtx (DECL_MODE (DECL_RESULT (subr
)));
3739 /* Scalar, returned in a register. */
3741 #ifdef FUNCTION_OUTGOING_VALUE
3742 DECL_RTL (DECL_RESULT (subr
))
3743 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr
)), subr
);
3745 DECL_RTL (DECL_RESULT (subr
))
3746 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr
)), subr
);
3749 /* Mark this reg as the function's return value. */
3750 if (GET_CODE (DECL_RTL (DECL_RESULT (subr
))) == REG
)
3752 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr
))) = 1;
3753 /* Needed because we may need to move this to memory
3754 in case it's a named return value whose address is taken. */
3755 TREE_REGDECL (DECL_RESULT (subr
)) = 1;
3759 /* Initialize rtx for parameters and local variables.
3760 In some cases this requires emitting insns. */
3762 assign_parms (subr
, 0);
3764 /* The following was moved from init_function_start.
3765 The move is supposed to make sdb output more accurate. */
3766 /* Indicate the beginning of the function body,
3767 as opposed to parm setup. */
3768 emit_note (0, NOTE_INSN_FUNCTION_BEG
);
3770 /* If doing stupid allocation, mark parms as born here. */
3772 if (GET_CODE (get_last_insn ()) != NOTE
)
3773 emit_note (0, NOTE_INSN_DELETED
);
3774 parm_birth_insn
= get_last_insn ();
3778 for (i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_parm_reg
; i
++)
3779 use_variable (regno_reg_rtx
[i
]);
3781 if (current_function_internal_arg_pointer
!= virtual_incoming_args_rtx
)
3782 use_variable (current_function_internal_arg_pointer
);
3785 /* Fetch static chain values for containing functions. */
3786 tem
= decl_function_context (current_function_decl
);
3788 last_ptr
= copy_to_reg (static_chain_incoming_rtx
);
3789 context_display
= 0;
3792 tree rtlexp
= make_node (RTL_EXPR
);
3794 RTL_EXPR_RTL (rtlexp
) = last_ptr
;
3795 context_display
= tree_cons (tem
, rtlexp
, context_display
);
3796 tem
= decl_function_context (tem
);
3799 /* Chain thru stack frames, assuming pointer to next lexical frame
3800 is found at the place we always store it. */
3801 #ifdef FRAME_GROWS_DOWNWARD
3802 last_ptr
= plus_constant (last_ptr
, - GET_MODE_SIZE (Pmode
));
3804 last_ptr
= copy_to_reg (gen_rtx (MEM
, Pmode
,
3805 memory_address (Pmode
, last_ptr
)));
3808 /* After the display initializations is where the tail-recursion label
3809 should go, if we end up needing one. Ensure we have a NOTE here
3810 since some things (like trampolines) get placed before this. */
3811 tail_recursion_reentry
= emit_note (0, NOTE_INSN_DELETED
);
3813 /* Evaluate now the sizes of any types declared among the arguments. */
3814 for (tem
= nreverse (get_pending_sizes ()); tem
; tem
= TREE_CHAIN (tem
))
3815 expand_expr (TREE_VALUE (tem
), 0, VOIDmode
, 0);
3817 /* Make sure there is a line number after the function entry setup code. */
3818 force_next_line_note ();
3821 /* Generate RTL for the end of the current function.
3822 FILENAME and LINE are the current position in the source file. */
3824 /* It is up to language-specific callers to do cleanups for parameters. */
3827 expand_function_end (filename
, line
)
3834 static rtx initial_trampoline
;
3836 #ifdef NON_SAVING_SETJMP
3837 /* Don't put any variables in registers if we call setjmp
3838 on a machine that fails to restore the registers. */
3839 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
3841 setjmp_protect (DECL_INITIAL (current_function_decl
));
3842 setjmp_protect_args ();
3846 /* Save the argument pointer if a save area was made for it. */
3847 if (arg_pointer_save_area
)
3849 rtx x
= gen_move_insn (arg_pointer_save_area
, virtual_incoming_args_rtx
);
3850 emit_insn_before (x
, tail_recursion_reentry
);
3853 /* Initialize any trampolines required by this function. */
3854 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
3856 tree function
= TREE_PURPOSE (link
);
3857 rtx context
= lookup_static_chain (function
);
3858 rtx tramp
= RTL_EXPR_RTL (TREE_VALUE (link
));
3861 /* First make sure this compilation has a template for
3862 initializing trampolines. */
3863 if (initial_trampoline
== 0)
3865 end_temporary_allocation ();
3867 = gen_rtx (MEM
, BLKmode
, assemble_trampoline_template ());
3868 resume_temporary_allocation ();
3871 /* Generate insns to initialize the trampoline. */
3873 tramp
= change_address (initial_trampoline
, BLKmode
,
3874 round_trampoline_addr (XEXP (tramp
, 0)));
3875 emit_block_move (tramp
, initial_trampoline
,
3876 gen_rtx (CONST_INT
, VOIDmode
, TRAMPOLINE_SIZE
),
3877 FUNCTION_BOUNDARY
/ BITS_PER_UNIT
);
3878 INITIALIZE_TRAMPOLINE (XEXP (tramp
, 0),
3879 XEXP (DECL_RTL (function
), 0), context
);
3883 /* Put those insns at entry to the containing function (this one). */
3884 emit_insns_before (seq
, tail_recursion_reentry
);
3886 /* Clear the trampoline_list for the next function. */
3887 trampoline_list
= 0;
3889 #if 0 /* I think unused parms are legitimate enough. */
3890 /* Warn about unused parms. */
3895 for (decl
= DECL_ARGUMENTS (current_function_decl
);
3896 decl
; decl
= TREE_CHAIN (decl
))
3897 if (! TREE_USED (decl
) && TREE_CODE (decl
) == VAR_DECL
)
3898 warning_with_decl (decl
, "unused parameter `%s'");
3902 /* Delete handlers for nonlocal gotos if nothing uses them. */
3903 if (nonlocal_goto_handler_slot
!= 0 && !current_function_has_nonlocal_label
)
3906 /* End any sequences that failed to be closed due to syntax errors. */
3907 while (in_sequence_p ())
3910 /* Outside function body, can't compute type's actual size
3911 until next function's body starts. */
3912 immediate_size_expand
--;
3914 /* If doing stupid register allocation,
3915 mark register parms as dying here. */
3920 for (i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_parm_reg
; i
++)
3921 use_variable (regno_reg_rtx
[i
]);
3923 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
3925 for (tem
= save_expr_regs
; tem
; tem
= XEXP (tem
, 1))
3927 use_variable (XEXP (tem
, 0));
3928 use_variable_after (XEXP (tem
, 0), parm_birth_insn
);
3931 if (current_function_internal_arg_pointer
!= virtual_incoming_args_rtx
)
3932 use_variable (current_function_internal_arg_pointer
);
3935 clear_pending_stack_adjust ();
3936 do_pending_stack_adjust ();
3938 /* Mark the end of the function body.
3939 If control reaches this insn, the function can drop through
3940 without returning a value. */
3941 emit_note (0, NOTE_INSN_FUNCTION_END
);
3943 /* Output a linenumber for the end of the function.
3944 SDB depends on this. */
3945 emit_line_note_force (filename
, line
);
3947 /* Output the label for the actual return from the function,
3948 if one is expected. This happens either because a function epilogue
3949 is used instead of a return instruction, or because a return was done
3950 with a goto in order to run local cleanups, or because of pcc-style
3951 structure returning. */
3954 emit_label (return_label
);
3956 /* If we had calls to alloca, and this machine needs
3957 an accurate stack pointer to exit the function,
3958 insert some code to save and restore the stack pointer. */
3959 #ifdef EXIT_IGNORE_STACK
3960 if (! EXIT_IGNORE_STACK
)
3962 if (current_function_calls_alloca
)
3966 emit_stack_save (SAVE_FUNCTION
, &tem
, parm_birth_insn
);
3967 emit_stack_restore (SAVE_FUNCTION
, tem
, 0);
3970 /* If scalar return value was computed in a pseudo-reg,
3971 copy that to the hard return register. */
3972 if (DECL_RTL (DECL_RESULT (current_function_decl
)) != 0
3973 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl
))) == REG
3974 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl
)))
3975 >= FIRST_PSEUDO_REGISTER
))
3977 rtx real_decl_result
;
3979 #ifdef FUNCTION_OUTGOING_VALUE
3981 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl
)),
3982 current_function_decl
);
3985 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl
)),
3986 current_function_decl
);
3988 REG_FUNCTION_VALUE_P (real_decl_result
) = 1;
3989 emit_move_insn (real_decl_result
,
3990 DECL_RTL (DECL_RESULT (current_function_decl
)));
3991 emit_insn (gen_rtx (USE
, VOIDmode
, real_decl_result
));
3994 /* If returning a structure, arrange to return the address of the value
3995 in a place where debuggers expect to find it.
3997 If returning a structure PCC style,
3998 the caller also depends on this value.
3999 And current_function_returns_pcc_struct is not necessarily set. */
4000 if (current_function_returns_struct
4001 || current_function_returns_pcc_struct
)
4003 rtx value_address
= XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
4004 tree type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
4005 #ifdef FUNCTION_OUTGOING_VALUE
4007 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type
),
4008 current_function_decl
);
4011 = FUNCTION_VALUE (build_pointer_type (type
),
4012 current_function_decl
);
4015 /* Mark this as a function return value so integrate will delete the
4016 assignment and USE below when inlining this function. */
4017 REG_FUNCTION_VALUE_P (outgoing
) = 1;
4019 emit_move_insn (outgoing
, value_address
);
4020 use_variable (outgoing
);
4023 /* Output a return insn if we are using one.
4024 Otherwise, let the rtl chain end here, to drop through
4025 into the epilogue. */
4030 emit_jump_insn (gen_return ());
4035 /* Fix up any gotos that jumped out to the outermost
4036 binding level of the function.
4037 Must follow emitting RETURN_LABEL. */
4039 /* If you have any cleanups to do at this point,
4040 and they need to create temporary variables,
4041 then you will lose. */
4042 fixup_gotos (0, 0, 0, get_insns (), 0);