/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
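
/* Illustrative call order (a sketch, not the precise interface; the
   exact signatures live in the front ends and in stmt.c):

	expand_function_start (fndecl, ...);
	  ... expand each statement, allocating fixed slots with
	      assign_stack_local and temporaries with assign_stack_temp ...
	expand_function_end (...);

   Reload may call assign_stack_local again, much later, for pseudos
   that did not get hard registers.  */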
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
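
/* Worked example (illustrative only, not part of the original source):
   with ALIGN == 8, FLOOR_ROUND (13, 8) == 8, FLOOR_ROUND (-13, 8) == -16,
   and CEIL_ROUND (13, 8) == 16.  Masking with ~(ALIGN - 1) clears the low
   bits, which in two's complement rounds toward minus infinity even for
   negative VALUEs; adding ALIGN - 1 first turns that floor into a
   ceiling.  */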
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;
/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;
/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */

rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */

rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */

tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */

rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */

rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */

static int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */

static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */

static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */

static rtx parm_birth_insn;
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) ();
void (*restore_machine_status) ();
/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
extern tree bc_runtime_type_code ();
extern rtx bc_build_calldesc ();
extern char *bc_emit_trampoline ();
extern char *bc_end_function ();
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
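
/* A sketch of the discipline described above (illustrative only):

	push_temp_slots ();
	t = assign_stack_temp (BLKmode, size, 0);
	... expand the statement using T ...
	preserve_temp_slots (result);   // only inside a ({...}) grouping
	free_temp_slots ();
	pop_temp_slots ();

   preserve_temp_slots works by decrementing the slot's nesting level,
   i.e. by pretending the slot was allocated one level up.  */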
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;
/* Current nesting level for temporaries.  */

int temp_slot_level;
/* The FUNCTION_DECL node for the current function.  */
static tree this_function_decl;

/* Callinfo pointer for the current function.  */
static rtx this_function_callinfo;

/* The label in the bytecode file of this function's actual bytecode.  */
static char *this_function_bytecode;

/* The call description vector for the current function.  */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function.  */
static int local_vars_size;

/* Current depth of the bytecode evaluation stack.  */
static int stack_depth;

/* Maximum depth of the evaluation stack in this function.  */
static int max_stack_depth;

/* Current depth in statement expressions.  */
static int stmt_expr_depth;
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address  PROTO((rtx));
static void put_reg_into_stack  PROTO((struct function *, rtx, tree,
                                       enum machine_mode, enum machine_mode));
static void fixup_var_refs      PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement       PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1    PROTO((rtx, enum machine_mode, rtx *, rtx,
                                       struct fixup_replacement **));
static rtx fixup_memory_subreg  PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg  PROTO((rtx, rtx, int));
static rtx fixup_stack_1        PROTO((rtx, rtx));
static void optimize_bit_field  PROTO((rtx, rtx, rtx *));
static void instantiate_decls   PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl    PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers     PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below           PROTO((struct args_size *, enum machine_mode,
                                       tree));
static tree round_down          PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse     PROTO((tree));
static int all_blocks           PROTO((tree, tree *));
static int *record_insns        PROTO((rtx));
static int contains             PROTO((rtx, int *));
/* Pointer to chain of `struct function' for containing functions.  */

struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);

  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
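
/* Example use (illustrative only): spilling a DImode pseudo might do

	rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   The returned MEM addresses the slot via virtual_stack_vars_rtx until
   virtual registers are instantiated, after which new slots are based
   directly on frame_pointer_rtx.  */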
/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2, if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->slot = gen_rtx (MEM, BLKmode,
                                 plus_constant (XEXP (best_p->slot, 0),
                                                rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
                                         stack_slot_list);

              best_p->size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Otherwise combine_temp_slots won't think that
         adjacent slots really are adjacent.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }
  return p->slot;
}
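
/* Example use (illustrative only): a call returning a structure in
   memory might grab its return buffer with

	rtx buf = assign_stack_temp (BLKmode, int_size_in_bytes (type), 0);

   and the slot becomes reusable again once the enclosing statement
   calls free_temp_slots.  int_size_in_bytes is the tree.c helper;
   whether a particular caller uses it here is incidental to the
   example.  */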
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (rtx_equal_p (plus_constant (XEXP (p->slot, 0), p->size),
                                 XEXP (q->slot, 0)))
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    delete_q = 1;
                  }
                else if (rtx_equal_p (plus_constant (XEXP (q->slot, 0), q->size),
                                      XEXP (p->slot, 0)))
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    delete_p = 1;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;
      else if (XEXP (p->slot, 0) == x
               || p->address == x)
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher.  If X matched one of our slots, just
   mark that one.  Otherwise, we can't easily predict which it is, so
   upgrade all of them.  Kept slots need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      for (q = temp_slots; q; q = q->next)
        if (q != p && q->addr_taken && q->level == p->level)
          q->level--;

      p->level--;
      p->addr_taken = 0;
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     the upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;

  if (output_bytecode)
    return;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    put_reg_into_stack (function, reg, TREE_TYPE (decl),
                        promoted_mode, decl_mode);
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef STACK_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1),
                          part_type, part_mode, part_mode);
      put_reg_into_stack (function, XEXP (reg, 0),
                          part_type, part_mode, part_mode);
#else
      put_reg_into_stack (function, XEXP (reg, 0),
                          part_type, part_mode, part_mode);
      put_reg_into_stack (function, XEXP (reg, 1),
                          part_type, part_mode, part_mode);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
}
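
/* Illustrative before/after (not taken from a real dump): a variable
   whose DECL_RTL was

	(reg:SI 42)

   ends up, after put_var_into_stack, as something like

	(mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int -8)))

   and fixup_var_refs then rewrites every insn generated so far that
   mentioned (reg:SI 42).  The offset shown is made up.  */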
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
{
  rtx new = 0;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;

      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && XEXP (PATTERN (insn), 0) == var)
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          else if (toplevel
                   && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

#ifdef SMALL_REGISTER_CLASSES
              /* If the insn that copies the results of a CALL_INSN
                 into a pseudo now references VAR, we have to use an
                 intermediate pseudo since we want the life of the
                 return value register to be only a single insn.

                 If we don't use an intermediate pseudo, such things as
                 address computations to make the address of VAR valid
                 if it is not can be placed between the CALL_INSN and INSN.

                 To make sure this doesn't happen, we record the destination
                 of the CALL_INSN and see if the next insn uses both that
                 and VAR.  */

              if (call_dest != 0 && GET_CODE (insn) == INSN
                  && reg_mentioned_p (var, PATTERN (insn))
                  && reg_mentioned_p (call_dest, PATTERN (insn)))
                {
                  rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                  emit_insn_before (gen_move_insn (temp, call_dest), insn);

                  PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                call_dest, temp);
                }

              if (GET_CODE (insn) == CALL_INSN
                  && GET_CODE (PATTERN (insn)) == SET)
                call_dest = SET_DEST (PATTERN (insn));
              else if (GET_CODE (insn) == CALL_INSN
                       && GET_CODE (PATTERN (insn)) == PARALLEL
                       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
              else
                call_dest = 0;
#endif

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0)
                = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
        }
      insn = next;
    }
}
1576 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1577 See if the rtx expression at *LOC in INSN needs to be changed.
1579 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1580 contain a list of original rtx's and replacements. If we find that we need
1581 to modify this insn by replacing a memory reference with a pseudo or by
1582 making a new MEM to implement a SUBREG, we consult that list to see if
1583 we have already chosen a replacement. If none has already been allocated,
1584 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1585 or the SUBREG, as appropriate, to the pseudo. */
1588 fixup_var_refs_1 (var
, promoted_mode
, loc
, insn
, replacements
)
1590 enum machine_mode promoted_mode
;
1593 struct fixup_replacement
**replacements
;
1596 register rtx x
= *loc
;
1597 RTX_CODE code
= GET_CODE (x
);
1599 register rtx tem
, tem1
;
1600 struct fixup_replacement
*replacement
;
1607 /* If we already have a replacement, use it. Otherwise,
1608 try to fix up this address in case it is invalid. */
1610 replacement
= find_fixup_replacement (replacements
, var
);
1611 if (replacement
->new)
1613 *loc
= replacement
->new;
1617 *loc
= replacement
->new = x
= fixup_stack_1 (x
, insn
);
1619 /* Unless we are forcing memory to register or we changed the mode,
1620 we can leave things the way they are if the insn is valid. */
1622 INSN_CODE (insn
) = -1;
1623 if (! flag_force_mem
&& GET_MODE (x
) == promoted_mode
1624 && recog_memoized (insn
) >= 0)
1627 *loc
= replacement
->new = gen_reg_rtx (promoted_mode
);
1631 /* If X contains VAR, we need to unshare it here so that we update
1632 each occurrence separately. But all identical MEMs in one insn
1633 must be replaced with the same rtx because of the possibility of
1636 if (reg_mentioned_p (var
, x
))
1638 replacement
= find_fixup_replacement (replacements
, x
);
1639 if (replacement
->new == 0)
1640 replacement
->new = copy_most_rtx (x
, var
);
1642 *loc
= x
= replacement
->new;
1658 /* Note that in some cases those types of expressions are altered
1659 by optimize_bit_field, and do not survive to get here. */
1660 if (XEXP (x
, 0) == var
1661 || (GET_CODE (XEXP (x
, 0)) == SUBREG
1662 && SUBREG_REG (XEXP (x
, 0)) == var
))
1664 /* Get TEM as a valid MEM in the mode presently in the insn.
1666 We don't worry about the possibility of MATCH_DUP here; it
1667 is highly unlikely and would be tricky to handle. */
1670 if (GET_CODE (tem
) == SUBREG
)
1671 tem
= fixup_memory_subreg (tem
, insn
, 1);
1672 tem
= fixup_stack_1 (tem
, insn
);
1674 /* Unless we want to load from memory, get TEM into the proper mode
1675 for an extract from memory. This can only be done if the
1676 extract is at a constant position and length. */
1678 if (! flag_force_mem
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
1679 && GET_CODE (XEXP (x
, 2)) == CONST_INT
1680 && ! mode_dependent_address_p (XEXP (tem
, 0))
1681 && ! MEM_VOLATILE_P (tem
))
1683 enum machine_mode wanted_mode
= VOIDmode
;
1684 enum machine_mode is_mode
= GET_MODE (tem
);
1685 int width
= INTVAL (XEXP (x
, 1));
1686 int pos
= INTVAL (XEXP (x
, 2));
1689 if (GET_CODE (x
) == ZERO_EXTRACT
)
1690 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][1];
1693 if (GET_CODE (x
) == SIGN_EXTRACT
)
1694 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][1];
1696 /* If we have a narrower mode, we can do something. */
1697 if (wanted_mode
!= VOIDmode
1698 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
1700 int offset
= pos
/ BITS_PER_UNIT
;
1701 rtx old_pos
= XEXP (x
, 2);
1704 /* If the bytes and bits are counted differently, we
1705 must adjust the offset. */
1706 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
1707 offset
= (GET_MODE_SIZE (is_mode
)
1708 - GET_MODE_SIZE (wanted_mode
) - offset
);
1710 pos
%= GET_MODE_BITSIZE (wanted_mode
);
1712 newmem
= gen_rtx (MEM
, wanted_mode
,
1713 plus_constant (XEXP (tem
, 0), offset
));
1714 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
1715 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (tem
);
1716 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (tem
);
1718 /* Make the change and see if the insn remains valid. */
1719 INSN_CODE (insn
) = -1;
1720 XEXP (x
, 0) = newmem
;
1721 XEXP (x
, 2) = GEN_INT (pos
);
1723 if (recog_memoized (insn
) >= 0)
1726 /* Otherwise, restore old position. XEXP (x, 0) will be
1728 XEXP (x
, 2) = old_pos
;
1732 /* If we get here, the bitfield extract insn can't accept a memory
1733 reference. Copy the input into a register. */
1735 tem1
= gen_reg_rtx (GET_MODE (tem
));
1736 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
1743 if (SUBREG_REG (x
) == var
)
1745 /* If this is a special SUBREG made because VAR was promoted
1746 from a wider mode, replace it with VAR and call ourself
1747 recursively, this time saying that the object previously
1748 had its current mode (by virtue of the SUBREG). */
1750 if (SUBREG_PROMOTED_VAR_P (x
))
1753 fixup_var_refs_1 (var
, GET_MODE (var
), loc
, insn
, replacements
);
1757 /* If this SUBREG makes VAR wider, it has become a paradoxical
1758 SUBREG with VAR in memory, but these aren't allowed at this
1759 stage of the compilation. So load VAR into a pseudo and take
1760 a SUBREG of that pseudo. */
1761 if (GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (var
)))
1763 replacement
= find_fixup_replacement (replacements
, var
);
1764 if (replacement
->new == 0)
1765 replacement
->new = gen_reg_rtx (GET_MODE (var
));
1766 SUBREG_REG (x
) = replacement
->new;
1770 /* See if we have already found a replacement for this SUBREG.
1771 If so, use it. Otherwise, make a MEM and see if the insn
1772 is recognized. If not, or if we should force MEM into a register,
1773 make a pseudo for this SUBREG. */
1774 replacement
= find_fixup_replacement (replacements
, x
);
1775 if (replacement
->new)
1777 *loc
= replacement
->new;
1781 replacement
->new = *loc
= fixup_memory_subreg (x
, insn
, 0);
1783 INSN_CODE (insn
) = -1;
1784 if (! flag_force_mem
&& recog_memoized (insn
) >= 0)
1787 *loc
= replacement
->new = gen_reg_rtx (GET_MODE (x
));
1793 /* First do special simplification of bit-field references. */
1794 if (GET_CODE (SET_DEST (x
)) == SIGN_EXTRACT
1795 || GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
1796 optimize_bit_field (x
, insn
, 0);
1797 if (GET_CODE (SET_SRC (x
)) == SIGN_EXTRACT
1798 || GET_CODE (SET_SRC (x
)) == ZERO_EXTRACT
)
1799 optimize_bit_field (x
, insn
, NULL_PTR
);
1801 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1802 insn into a pseudo and store the low part of the pseudo into VAR. */
1803 if (GET_CODE (SET_DEST (x
)) == SUBREG
1804 && SUBREG_REG (SET_DEST (x
)) == var
1805 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
1806 > GET_MODE_SIZE (GET_MODE (var
))))
1808 SET_DEST (x
) = tem
= gen_reg_rtx (GET_MODE (SET_DEST (x
)));
1809 emit_insn_after (gen_move_insn (var
, gen_lowpart (GET_MODE (var
),
1816 rtx dest
= SET_DEST (x
);
1817 rtx src
= SET_SRC (x
);
1818 rtx outerdest
= dest
;
1820 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
1821 || GET_CODE (dest
) == SIGN_EXTRACT
1822 || GET_CODE (dest
) == ZERO_EXTRACT
)
1823 dest
= XEXP (dest
, 0);
1825 if (GET_CODE (src
) == SUBREG
)
1826 src
= XEXP (src
, 0);
1828 /* If VAR does not appear at the top level of the SET
1829 just scan the lower levels of the tree. */
1831 if (src
!= var
&& dest
!= var
)
1834 /* We will need to rerecognize this insn. */
1835 INSN_CODE (insn
) = -1;
1838 if (GET_CODE (outerdest
) == ZERO_EXTRACT
&& dest
== var
)
1840 /* Since this case will return, ensure we fixup all the
1842 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 1),
1843 insn
, replacements
);
1844 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 2),
1845 insn
, replacements
);
1846 fixup_var_refs_1 (var
, promoted_mode
, &SET_SRC (x
),
1847 insn
, replacements
);
1849 tem
= XEXP (outerdest
, 0);
	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, 1);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode
		  = insn_operand_mode[(int) CODE_FOR_insv][0];
		enum machine_mode is_mode = GET_MODE (tem);
		int width = INTVAL (XEXP (outerdest, 1));
		int pos = INTVAL (XEXP (outerdest, 2));

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    int offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

		    if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		      offset = (GET_MODE_SIZE (is_mode)
				- GET_MODE_SIZE (wanted_mode) - offset);

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = gen_rtx (MEM, wanted_mode,
				      plus_constant (XEXP (tem, 0), offset));
		    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = GEN_INT (pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (x, 0) will be
		       restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }
#endif
	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.

	   Note that we must not try to recognize the insn until
	   after we know that we have valid addresses and no
	   (subreg (mem ...) ...) constructs, since these interfere
	   with determining the validity of the insn.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && x == single_set (PATTERN (insn)))
	  {
	    rtx pat;

	    replacement = find_fixup_replacement (replacements, SET_SRC (x));
	    if (replacement->new)
	      SET_SRC (x) = replacement->new;
	    else if (GET_CODE (SET_SRC (x)) == SUBREG)
	      SET_SRC (x) = replacement->new
		= fixup_memory_subreg (SET_SRC (x), insn, 0);
	    else
	      SET_SRC (x) = replacement->new
		= fixup_stack_1 (SET_SRC (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    /* INSN is not valid, but we know that we want to
	       copy SET_SRC (x) to SET_DEST (x) in some way.  So
	       we generate the move and see whether it requires more
	       than one insn.  If it does, we emit those insns and
	       delete INSN.  Otherwise, we can just replace the pattern
	       of INSN; we have already verified above that INSN has
	       no other function than to do X.  */

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }
	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && x == single_set (PATTERN (insn)))
	  {
	    rtx pat;

	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }
	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  Note that this case
	   will be used when storing into a promoted scalar since
	   the insn will now have different modes on the input
	   and output and hence will be invalid (except for the case
	   of setting it to a constant, which does not need any
	   change if it is valid).  We generate extra code in that case,
	   but combine.c will eliminate it.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest = SET_DEST (x);

	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
	      fixeddest = XEXP (fixeddest, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (fixeddest) == SUBREG)
	      fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
	    else
	      fixeddest = fixup_stack_1 (fixeddest, insn);

	    temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode
				? GET_MODE (fixeddest)
				: GET_MODE (SET_SRC (x)));

	    emit_insn_after (gen_move_insn (fixeddest,
					    gen_lowpart (GET_MODE (fixeddest),
							 temp)),
			     insn);

	    SET_DEST (x) = temp;
	  }
      }
      break;
    }
  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
			      insn, replacements);
	}
    }
}
/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
   return an rtx (MEM:m1 newaddr) which is equivalent.
   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES.  */

static rtx
fixup_memory_subreg (x, insn, uncritical)
     rtx x;
     rtx insn;
     int uncritical;
{
  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
  rtx addr = XEXP (SUBREG_REG (x), 0);
  enum machine_mode mode = GET_MODE (x);
  rtx result;

  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
      && ! uncritical)
    abort ();

  if (BYTES_BIG_ENDIAN)
    offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
  addr = plus_constant (addr, offset);
  if (!flag_force_addr && memory_address_p (mode, addr))
    /* Shortcut if no insns need be emitted.  */
    return change_address (SUBREG_REG (x), mode, addr);
  start_sequence ();
  result = change_address (SUBREG_REG (x), mode, addr);
  emit_insn_before (gen_sequence (), insn);
  end_sequence ();
  return result;
}
/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL is as in fixup_memory_subreg.  */

static rtx
walk_fixup_memory_subreg (x, insn, uncritical)
     register rtx x;
     rtx insn;
     int uncritical;
{
  register enum rtx_code code;
  register char *fmt;
  register int i;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    return fixup_memory_subreg (x, insn, uncritical);

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j)
	      = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
	}
    }
  return x;
}
/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
	 (displacement is too large), compute the sum in a register.  */
      if (GET_CODE (ad) == PLUS
	  && GET_CODE (XEXP (ad, 0)) == REG
	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
	{
	  rtx temp, seq;
	  if (memory_address_p (GET_MODE (x), ad))
	    return x;

	  start_sequence ();
	  temp = copy_to_reg (ad);
	  seq = gen_sequence ();
	  end_sequence ();
	  emit_insn_before (seq, insn);
	  return change_address (x, VOIDmode, temp);
	}
      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
	}
    }
  return x;
}
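/* As an illustration (a sketch, assuming a machine whose load/store
   displacements are limited to 13 bits): for a stack reference such as

       (mem:SI (plus (reg virtual-stack-vars) (const_int 40000)))

   the displacement is out of range, so fixup_stack_1 emits, before the
   insn being fixed,

       (set (reg TMP) (plus (reg virtual-stack-vars) (const_int 40000)))

   and returns a fresh copy (mem:SI (reg TMP)), where TMP stands for a
   new pseudo register.  */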
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */

static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;
  rtx seq = 0;
  enum machine_mode mode;

  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
	  != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && ! MEM_VOLATILE_P (memref))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  register int offset = INTVAL (XEXP (bitfield, 2));
	  rtx insns;

	  /* Adjust OFFSET to count bits from low-address byte.  */
	  if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
		      - offset - INTVAL (XEXP (bitfield, 1)));

	  /* Adjust OFFSET to count bytes from low-address byte.  */
	  offset /= BITS_PER_UNIT;
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
	      if (BYTES_BIG_ENDIAN)
		offset -= (MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			   - MIN (UNITS_PER_WORD,
				  GET_MODE_SIZE (GET_MODE (memref))));
	    }

	  start_sequence ();
	  memref = change_address (memref, mode,
				   plus_constant (XEXP (memref, 0), offset));
	  insns = get_insns ();
	  end_sequence ();
	  emit_insns_before (insns, insn);

	  /* Store this memory reference where
	     we found the bit field reference.  */

	  if (destflag)
	    {
	      validate_change (insn, &SET_DEST (body), memref, 1);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_WORD (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  validate_change (insn, &SET_SRC (body), src, 1);
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      rtx dest = SET_DEST (body);

	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_WORD (dest) == 0
		     && (GET_MODE_CLASS (GET_MODE (dest))
			 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
		dest = SUBREG_REG (dest);

	      validate_change (insn, &SET_DEST (body), dest, 1);

	      if (GET_MODE (dest) == GET_MODE (memref))
		validate_change (insn, &SET_SRC (body), memref, 1);
	      else
		{
		  /* Convert the mem ref to the destination mode.  */
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));

		  start_sequence ();
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  seq = get_insns ();
		  end_sequence ();

		  validate_change (insn, &SET_SRC (body), newreg, 1);
		}
	    }

	  /* See if we can convert this extraction or insertion into
	     a simple move insn.  We might not be able to do so if this
	     was, for example, part of a PARALLEL.

	     If we succeed, write out any needed conversions.  If we fail,
	     it is hard to guess why we failed, so don't do anything
	     special; just let the optimization be suppressed.  */

	  if (apply_change_group () && seq)
	    emit_insns_before (seq, insn);
	}
    }
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif
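/* For example (assuming ACCUMULATE_OUTGOING_ARGS, 32 bytes of outgoing
   argument space, a 16-byte REG_PARM_STACK_SPACE with no
   OUTGOING_REG_PARM_STACK_SPACE, and a STACK_POINTER_OFFSET of 0), the
   first default above yields

       STACK_DYNAMIC_OFFSET (fndecl) == 32 + 16 + 0 == 48

   so dynamically allocated objects start 48 bytes above the stack
   pointer, past the outgoing arguments and the register-parameter
   save area.  */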
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

void
instantiate_virtual_regs (fndecl, insns)
     tree fndecl;
     rtx insns;
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (fndecl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
  out_arg_offset = STACK_POINTER_OFFSET;

  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (fndecl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
	instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
      }

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (fndecl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
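/* Concretely (a sketch, assuming STARTING_FRAME_OFFSET is -8): once
   var_offset is computed, a reference such as

       (mem:SI (plus (reg virtual-stack-vars) (const_int 12)))

   is instantiated in place as

       (mem:SI (plus (reg fp) (const_int 4)))

   by folding var_offset into the displacement; no new insns are needed
   when the resulting address is valid.  */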
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */

static void
instantiate_decls (fndecl, valid_only)
     tree fndecl;
     int valid_only;
{
  tree decl;

  if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
    /* When compiling an inline function, the obstack used for
       rtl allocation is the maybepermanent_obstack.  Calling
       `resume_temporary_allocation' switches us back to that
       obstack while we process this function's parameters.  */
    resume_temporary_allocation ();

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
			valid_only);
      instantiate_decl (DECL_INCOMING_RTL (decl),
			int_size_in_bytes (TREE_TYPE (decl)), valid_only);
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);

  if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
    {
      /* Save all rtl allocated for this function by raising the
	 high-water mark on the maybepermanent_obstack.  */
      preserve_data ();
      /* All further rtl allocation is now done in the current_obstack.  */
      rtl_in_current_obstack ();
    }
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (let, valid_only)
     tree let;
     int valid_only;
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
		      valid_only);

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    instantiate_decls_1 (t, valid_only);
}
/* Subroutine of the preceding procedures: Given RTL representing a
   decl and the size of the object, do any instantiation required.

   If VALID_ONLY is non-zero, it means that the RTL should only be
   changed if the new address is valid.  */

static void
instantiate_decl (x, size, valid_only)
     rtx x;
     int size;
     int valid_only;
{
  enum machine_mode mode;
  rtx addr;

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */

  if (x == 0 || GET_CODE (x) != MEM)
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (GET_CODE (addr) == REG
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */

  if (valid_only)
    addr = copy_rtx (addr);

  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);

  if (! valid_only)
    return;

  /* Now verify that the resulting address is valid for every integer or
     floating-point mode up to and including SIZE bytes long.  We do this
     since the object might be accessed in any mode and frame addresses
     are shared.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
       mode = GET_MODE_WIDER_MODE (mode))
    if (! memory_address_p (mode, addr))
      return;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
       mode = GET_MODE_WIDER_MODE (mode))
    if (! memory_address_p (mode, addr))
      return;

  /* Otherwise, put back the address, now that we have updated it and we
     know it is valid.  */

  XEXP (x, 0) = addr;
}
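/* For instance (a sketch): an 8-byte stack slot may later be accessed
   through SImode or DFmode references to the same shared address, so
   the loops above insist that the instantiated address be valid in
   every integer and floating mode of up to 8 bytes before it is
   stored back.  */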
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if replacement
   is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */

static int
instantiate_virtual_regs_1 (loc, object, extra_insns)
     register rtx *loc;
     rtx object;
     int extra_insns;
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;
  int offset;
  rtx temp;
  rtx seq;
  register int i, j;
  char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
	 that the actual register should receive the source minus the
	 appropriate offset.  This is used, for example, in the handling
	 of non-local gotos.  */
      if (SET_DEST (x) == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = - in_arg_offset;
      else if (SET_DEST (x) == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = - var_offset;
      else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = - dynamic_offset;
      else if (SET_DEST (x) == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = - out_arg_offset;

      if (new)
	{
	  /* The only valid sources here are PLUS or REG.  Just do
	     the simplest possible thing to handle them.  */
	  if (GET_CODE (SET_SRC (x)) != REG
	      && GET_CODE (SET_SRC (x)) != PLUS)
	    abort ();

	  start_sequence ();
	  if (GET_CODE (SET_SRC (x)) != REG)
	    temp = force_operand (SET_SRC (x), NULL_RTX);
	  else
	    temp = SET_SRC (x);
	  temp = force_operand (plus_constant (temp, offset), NULL_RTX);
	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, object);
	  SET_DEST (x) = new;

	  if (!validate_change (object, &SET_SRC (x), temp, 0)
	      || ! extra_insns)
	    abort ();

	  return 1;
	}

      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      loc = &SET_SRC (x);
      goto restart;

    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx old, new_offset;

	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
	  if (GET_CODE (XEXP (x, 0)) == PLUS)
	    {
	      rtx inner = XEXP (XEXP (x, 0), 0);

	      if (inner == virtual_incoming_args_rtx)
		new = arg_pointer_rtx, offset = in_arg_offset;
	      else if (inner == virtual_stack_vars_rtx)
		new = frame_pointer_rtx, offset = var_offset;
	      else if (inner == virtual_stack_dynamic_rtx)
		new = stack_pointer_rtx, offset = dynamic_offset;
	      else if (inner == virtual_outgoing_args_rtx)
		new = stack_pointer_rtx, offset = out_arg_offset;
	      else
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}

	      instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
					  extra_insns);
	      new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
	    }

	  else if (XEXP (x, 0) == virtual_incoming_args_rtx)
	    new = arg_pointer_rtx, offset = in_arg_offset;
	  else if (XEXP (x, 0) == virtual_stack_vars_rtx)
	    new = frame_pointer_rtx, offset = var_offset;
	  else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
	    new = stack_pointer_rtx, offset = dynamic_offset;
	  else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
	    new = stack_pointer_rtx, offset = out_arg_offset;
	  else
	    {
	      /* We know the second operand is a constant.  Unless the
		 first operand is a REG (which has been already checked),
		 it needs to be checked.  */
	      if (GET_CODE (XEXP (x, 0)) != REG)
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}
	      return 1;
	    }

	  new_offset = plus_constant (XEXP (x, 1), offset);

	  /* If the new constant is zero, try to replace the sum with just
	     the register.  */
	  if (new_offset == const0_rtx
	      && validate_change (object, loc, new, 0))
	    return 1;

	  /* Next try to replace the register and new offset.
	     There are two changes to validate here and we can't assume that
	     in the case of old offset equals new just changing the register
	     will yield a valid insn.  In the interests of a little efficiency,
	     however, we only call validate change once (we don't queue up the
	     changes and then call apply_change_group).  */

	  old = XEXP (x, 0);
	  if (offset == 0
	      ? ! validate_change (object, &XEXP (x, 0), new, 0)
	      : (XEXP (x, 0) = new,
		 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
	    {
	      if (! extra_insns)
		{
		  XEXP (x, 0) = old;
		  return 0;
		}

	      /* Otherwise copy the new constant into a register and replace
		 constant with that register.  */
	      temp = gen_reg_rtx (Pmode);
	      XEXP (x, 0) = new;
	      if (validate_change (object, &XEXP (x, 1), temp, 0))
		emit_insn_before (gen_move_insn (temp, new_offset), object);
	      else
		{
		  /* If that didn't work, replace this expression with a
		     register containing the sum.  */

		  XEXP (x, 0) = old;
		  new = gen_rtx (PLUS, Pmode, new, new_offset);

		  start_sequence ();
		  temp = force_operand (new, NULL_RTX);
		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, object);
		  if (! validate_change (object, loc, temp, 0)
		      && ! validate_replace_rtx (x, temp, object))
		    abort ();
		}
	    }

	  return 1;
	}

      /* Fall through to generic two-operand expression case.  */
    case EXPR_LIST:
    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
      loc = &XEXP (x, 0);
      goto restart;

    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
	 handled by our scan of regno_reg_rtx.  The only special handling we
	 need here is to make a copy of the rtx to ensure it isn't being
	 shared if we have to change it to a pseudo.

	 If the rtx is a simple reference to an address via a virtual register,
	 it can potentially be shared.  In such cases, first try to make it
	 a valid address, which can also be shared.  Otherwise, copy it and
	 proceed normally.

	 First check for common cases that need no processing.  These are
	 usually due to instantiation already being done on a previous instance
	 of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  || temp == arg_pointer_rtx
#endif
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  || temp == hard_frame_pointer_rtx
#endif
	  || temp == frame_pointer_rtx)
	return 1;

      if (GET_CODE (temp) == PLUS
	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	  && (XEXP (temp, 0) == frame_pointer_rtx
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	      || XEXP (temp, 0) == hard_frame_pointer_rtx
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || XEXP (temp, 0) == arg_pointer_rtx
#endif
	      ))
	return 1;

      if (temp == virtual_stack_vars_rtx
	  || temp == virtual_incoming_args_rtx
	  || (GET_CODE (temp) == PLUS
	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
	{
	  /* This MEM may be shared.  If the substitution can be done without
	     the need to generate new pseudos, we want to do it in place
	     so all copies of the shared rtx benefit.  The call below will
	     only make substitutions if the resulting address is still
	     valid.

	     Note that we cannot pass X as the object in the recursive call
	     since the insn being processed may not allow all valid
	     addresses.  However, if we were not passed an object, we can
	     only modify X without copying it if X will have a valid
	     address.

	     ??? Also note that this can still lose if OBJECT is an insn that
	     has less restrictions on an address than some other insn.
	     In that case, we will modify the shared address.  This case
	     doesn't seem very likely, though.  */

	  if (instantiate_virtual_regs_1 (&XEXP (x, 0),
					  object ? object : x, 0))
	    return 1;

	  /* Otherwise make a copy and process that copy.  We copy the entire
	     RTL expression since it might be a PLUS which could also be
	     shared.  */
	  *loc = x = copy_rtx (x);
	}

      /* Fall through to generic unary operation case.  */
    case USE:
    case CLOBBER:
    case SUBREG:
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case PRE_DEC:      case PRE_INC:      case POST_DEC:    case POST_INC:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      /* These cases either have just one operand or we know that we need not
	 check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case REG:
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
	 in front of this insn and substitute the temporary.  */
      if (x == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = in_arg_offset;
      else if (x == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = var_offset;
      else if (x == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = dynamic_offset;
      else if (x == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = out_arg_offset;

      if (new)
	{
	  temp = plus_constant (new, offset);
	  if (!validate_change (object, loc, temp, 0))
	    {
	      if (! extra_insns)
		return 0;

	      start_sequence ();
	      temp = force_operand (temp, NULL_RTX);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insns_before (seq, object);
	      if (! validate_change (object, loc, temp, 0)
		  && ! validate_replace_rtx (x, temp, object))
		abort ();
	    }
	}

      return 1;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
	if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
	  return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
					  extra_insns))
	  return 0;

  return 1;
}
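/* To make the PLUS case concrete (a sketch, assuming in_arg_offset is
   16): the address

       (plus (reg virtual-incoming-args) (const_int 4))

   first becomes (plus (reg ap) (const_int 20)) by folding the offset
   into the displacement; only if that form is not valid in the
   containing insn do we fall back to loading the constant into a
   register or computing the sum with extra insns before OBJECT.  */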
/* Optimization: assuming this function does not receive nonlocal gotos,
   delete the handlers for such, as well as the insns to establish
   and disestablish them.  */

void
delete_handlers ()
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Delete the handler by turning off the flag that would
	 prevent jump_optimize from deleting it.
	 Also permit deletion of the nonlocal labels themselves
	 if nothing local refers to them.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  tree t, last_t;

	  LABEL_PRESERVE_P (insn) = 0;

	  /* Remove it from the nonlocal_label list, to avoid confusing
	     flow.  */
	  for (t = nonlocal_labels, last_t = 0; t;
	       last_t = t, t = TREE_CHAIN (t))
	    if (DECL_RTL (TREE_VALUE (t)) == insn)
	      break;
	  if (t)
	    {
	      if (! last_t)
		nonlocal_labels = TREE_CHAIN (nonlocal_labels);
	      else
		TREE_CHAIN (last_t) = TREE_CHAIN (t);
	    }
	}
      if (GET_CODE (insn) == INSN
	  && ((nonlocal_goto_handler_slot != 0
	       && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
	      || (nonlocal_goto_stack_level != 0
		  && reg_mentioned_p (nonlocal_goto_stack_level,
				      PATTERN (insn)))))
	delete_insn (insn);
    }
}
/* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
   of the current function.  */

rtx
nonlocal_label_rtx_list ()
{
  tree t;
  rtx x = 0;

  for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
    x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);

  return x;
}
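/* For example, with nonlocal labels L1 and L2 on the nonlocal_labels
   list (in that order), the value built up is

       (expr_list:VOIDmode L2 (expr_list:VOIDmode L1 (nil)))

   where L1 and L2 stand for the labels' CODE_LABEL rtxs; note that the
   chain ends up in the reverse order of the tree list.  */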
/* Output a USE for any register use in RTL.
   This is used with -noreg to mark the extent of lifespan
   of any registers used in a user-visible variable's DECL_RTL.  */

void
use_variable (rtl)
     rtx rtl;
{
  if (GET_CODE (rtl) == REG)
    /* This is a register variable.  */
    emit_insn (gen_rtx (USE, VOIDmode, rtl));
  else if (GET_CODE (rtl) == MEM
	   && GET_CODE (XEXP (rtl, 0)) == REG
	   && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
	       || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
	   && XEXP (rtl, 0) != current_function_internal_arg_pointer)
    /* This is a variable-sized structure.  */
    emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
}
/* Like use_variable except that it outputs the USEs after INSN
   instead of at the end of the insn-chain.  */

void
use_variable_after (rtl, insn)
     rtx rtl, insn;
{
  if (GET_CODE (rtl) == REG)
    /* This is a register variable.  */
    emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
  else if (GET_CODE (rtl) == MEM
	   && GET_CODE (XEXP (rtl, 0)) == REG
	   && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
	       || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
	   && XEXP (rtl, 0) != current_function_internal_arg_pointer)
    /* This is a variable-sized structure.  */
    emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
}
int
max_parm_reg_num ()
{
  return max_parm_reg;
}

/* Return the first insn following those generated by `assign_parms'.  */

rtx
get_first_nonparm_insn ()
{
  if (last_parm_insn)
    return NEXT_INSN (last_parm_insn);
  return get_insns ();
}
/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
   Crash if there is none.  */

rtx
get_first_block_beg ()
{
  register rtx searcher;
  register rtx insn = get_first_nonparm_insn ();

  for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
    if (GET_CODE (searcher) == NOTE
	&& NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
      return searcher;

  abort ();	/* Invalid call to this function.  (See comments above.)  */
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (exp)
     tree exp;
{
  int i, regno, nregs;
  rtx reg;
  tree type;

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
    type = exp;
  else
    type = TREE_TYPE (exp);

  if (RETURN_IN_MEMORY (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0);
  regno = REGNO (reg);
  nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}
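/* As an example (a sketch, assuming a 32-bit target whose
   RETURN_IN_MEMORY rejects small records): a value of type

       struct point { int x, y; };

   can come back in registers, so aggregate_value_p returns 0 for it,
   while a type for which RETURN_IN_MEMORY is true, any aggregate under
   -fpcc-struct-return, or a value needing a call-saved return register
   makes it return 1, forcing an address to be passed for the result.  */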
/* Assign RTL expressions to the function's parameters.
   This may involve copying them into registers and using
   those registers as the RTL for them.

   If SECOND_TIME is non-zero it means that this function is being
   called a second time.  This is done by integrate.c when a function's
   compilation is deferred.  We need to come back here in case the
   FUNCTION_ARG macro computes items needed for the rest of the compilation
   (such as changing which registers are fixed or caller-saved).  But suppress
   writing any insns or setting DECL_RTL of anything in this case.  */

void
assign_parms (fndecl, second_time)
     tree fndecl;
     int second_time;
{
  register tree parm;
  register rtx entry_parm = 0;
  register rtx stack_parm = 0;
  CUMULATIVE_ARGS args_so_far;
  enum machine_mode promoted_mode, passed_mode;
  enum machine_mode nominal_mode, promoted_nominal_mode;
  int unsignedp;
  /* Total space needed so far for args on the stack,
     given as a constant and a tree-expression.  */
  struct args_size stack_args_size;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);
  /* This is used for the arg pointer when referring to stack args.  */
  rtx internal_arg_pointer;
  /* This is a dummy PARM_DECL that we used for the function result if
     the function returns a structure.  */
  tree function_result_decl = 0;
  int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
  int varargs_setup = 0;
  rtx conversion_insns = 0;
  /* FUNCTION_ARG may look at this variable.  Since this is not
     expanding a call it will always be zero in this function.  */
  int current_call_is_indirect = 0;

  /* Nonzero if the last arg is named `__builtin_va_alist',
     which is used on some machines for old-fashioned non-ANSI varargs.h;
     this should be stuck onto the stack as if it had arrived there.  */
  int hide_last_arg
    = (current_function_varargs
       && fnargs
       && (parm = tree_last (fnargs)) != 0
       && DECL_NAME (parm)
       && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
		     "__builtin_va_alist")));

  /* Nonzero if function takes extra anonymous args.
     This means the last named arg must be on the stack
     right before the anonymous ones.  */
  int stdarg
    = (TYPE_ARG_TYPES (fntype) != 0
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
	   != void_type_node));
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.

     The second time through, simply use ap to avoid generating rtx.  */

  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
      && ! second_time)
    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
  else
    internal_arg_pointer = virtual_incoming_args_rtx;
  current_function_internal_arg_pointer = internal_arg_pointer;

  stack_args_size.constant = 0;
  stack_args_size.var = 0;

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl))
      && ! current_function_returns_pcc_struct
      && struct_value_incoming_rtx == 0)
    {
      tree type = build_pointer_type (fntype);

      function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);

      DECL_ARG_TYPE (function_result_decl) = type;
      TREE_CHAIN (function_result_decl) = fnargs;
      fnargs = function_result_decl;
    }

  parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
  bzero ((char *) parm_reg_stack_loc, nparmregs * sizeof (rtx));

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX);
#endif

  /* We haven't yet found an argument that we must push and pretend the
     caller did.  */
  current_function_pretend_args_size = 0;
  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
      struct args_size stack_offset;
      struct args_size arg_size;
      int passed_pointer = 0;
      int did_conversion = 0;
      tree passed_type = DECL_ARG_TYPE (parm);
      tree nominal_type = TREE_TYPE (parm);

      /* Set LAST_NAMED if this is last named arg before some
	 anonymous args.  We treat it as if it were anonymous too.  */
      int last_named = ((TREE_CHAIN (parm) == 0
			 || DECL_NAME (TREE_CHAIN (parm)) == 0)
			&& (stdarg || current_function_varargs));

      if (TREE_TYPE (parm) == error_mark_node
	  /* This can happen after weird syntax errors
	     or if an enum type is defined among the parms.  */
	  || TREE_CODE (parm) != PARM_DECL
	  || passed_type == NULL)
	{
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
								const0_rtx);
	  TREE_USED (parm) = 1;
	  continue;
	}

      /* For varargs.h function, save info about regs and stack space
	 used by the individual args, not including the va_alist arg.  */
      if (hide_last_arg && last_named)
	current_function_args_info = args_so_far;

      /* Find mode of arg as it is passed, and mode of arg
	 as it should be during execution of this function.  */
      passed_mode = TYPE_MODE (passed_type);
      nominal_mode = TYPE_MODE (nominal_type);

      /* If the parm's mode is VOID, its value doesn't matter,
	 and avoid the usual things like emit_move_insn that could crash.  */
      if (nominal_mode == VOIDmode)
	{
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
	  continue;
	}

      /* If the parm is to be passed as a transparent union, use the
	 type of the first field for the tests below.  We have already
	 verified that the modes are the same.  */
      if (DECL_TRANSPARENT_UNION (parm)
	  || TYPE_TRANSPARENT_UNION (passed_type))
	passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));

      /* See if this arg was passed by invisible reference.  It is if
	 it is an object whose size depends on the contents of the
	 object itself or if the machine requires these objects be passed
	 that way.  */

      if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (passed_type)))
	  || TREE_ADDRESSABLE (passed_type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
					     passed_type, ! last_named)
#endif
	  )
	{
	  passed_type = nominal_type = build_pointer_type (passed_type);
	  passed_pointer = 1;
	  passed_mode = nominal_mode = Pmode;
	}

      promoted_mode = passed_mode;

#ifdef PROMOTE_FUNCTION_ARGS
      /* Compute the mode in which the arg is actually extended to.  */
      promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
#endif
3298 0 means it arrives on the stack. */
3299 #ifdef FUNCTION_INCOMING_ARG
3300 entry_parm
= FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
3301 passed_type
, ! last_named
);
3303 entry_parm
= FUNCTION_ARG (args_so_far
, promoted_mode
,
3304 passed_type
, ! last_named
);
3307 if (entry_parm
== 0)
3308 promoted_mode
= passed_mode
;
3310 #ifdef SETUP_INCOMING_VARARGS
3311 /* If this is the last named parameter, do any required setup for
3312 varargs or stdargs. We need to know about the case of this being an
3313 addressable type, in which case we skip the registers it
3314 would have arrived in.
3316 For stdargs, LAST_NAMED will be set for two parameters, the one that
3317 is actually the last named, and the dummy parameter. We only
3318 want to do this action once.
3320 Also, indicate when RTL generation is to be suppressed. */
3321 if (last_named
&& !varargs_setup
)
3323 SETUP_INCOMING_VARARGS (args_so_far
, promoted_mode
, passed_type
,
3324 current_function_pretend_args_size
,
3330 /* Determine parm's home in the stack,
3331 in case it arrives in the stack or we should pretend it did.
3333 Compute the stack position and rtx where the argument arrives
3336 There is one complexity here: If this was a parameter that would
3337 have been passed in registers, but wasn't only because it is
3338 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3339 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3340 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3341 0 as it was the previous time. */
3343 locate_and_pad_parm (promoted_mode
, passed_type
,
3344 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3347 #ifdef FUNCTION_INCOMING_ARG
3348 FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
3351 || varargs_setup
)) != 0,
3353 FUNCTION_ARG (args_so_far
, promoted_mode
,
3355 ! last_named
|| varargs_setup
) != 0,
3358 fndecl
, &stack_args_size
, &stack_offset
, &arg_size
);
      if (! second_time)
	{
	  rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);

	  if (offset_rtx == const0_rtx)
	    stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
	  else
	    stack_parm = gen_rtx (MEM, promoted_mode,
				  gen_rtx (PLUS, Pmode,
					   internal_arg_pointer, offset_rtx));

	  /* If this is a memory ref that contains aggregate components,
	     mark it as such for cse and loop optimize.  */
	  MEM_IN_STRUCT_P (stack_parm) = aggregate;
	}

      /* If this parameter was passed both in registers and in the stack,
	 use the copy on the stack.  */
      if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
	entry_parm = 0;

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      /* If this parm was passed part in regs and part in memory,
	 pretend it arrived entirely in memory
	 by pushing the register-part onto the stack.

	 In the special case of a DImode or DFmode that is split,
	 we could put it together in a pseudoreg directly,
	 but for now that's not worth bothering with.  */

      if (entry_parm)
	{
	  int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
						  passed_type, ! last_named);

	  if (nregs > 0)
	    {
	      current_function_pretend_args_size
		= (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
		   / (PARM_BOUNDARY / BITS_PER_UNIT)
		   * (PARM_BOUNDARY / BITS_PER_UNIT));
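	      /* Worked example (assuming UNITS_PER_WORD == 4 and a 64-bit
		 PARM_BOUNDARY, i.e. 8 bytes): with nregs == 3 the register
		 part occupies 12 bytes, and the expression above rounds it
		 up to ((12 + 8 - 1) / 8) * 8 == 16 bytes of pretended
		 incoming argument space.  */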
	      if (! second_time)
		move_block_from_reg (REGNO (entry_parm),
				     validize_mem (stack_parm), nregs,
				     int_size_in_bytes (TREE_TYPE (parm)));
	      entry_parm = stack_parm;
	    }
	}
#endif

      /* If we didn't decide this parm came in a register,
	 by default it came on the stack.  */
      if (entry_parm == 0)
	entry_parm = stack_parm;

      /* Record permanently how this parm was passed.  */
      if (! second_time)
	DECL_INCOMING_RTL (parm) = entry_parm;

      /* If there is actually space on the stack for this parm,
	 count it in stack_args_size; otherwise set stack_parm to 0
	 to indicate there is no preallocated stack slot for the parm.  */

      if (entry_parm == stack_parm
#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
	  /* On some machines, even if a parm value arrives in a register
	     there is still an (uninitialized) stack slot allocated for it.

	     ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
	     whether this parameter already has a stack slot allocated,
	     because an arg block exists only if current_function_args_size
	     is larger than some threshold, and we haven't calculated that
	     yet.  So, for now, we just assume that stack slots never exist
	     in this case.  */
	  || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
	  )
	{
	  stack_args_size.constant += arg_size.constant;
	  if (arg_size.var)
	    ADD_PARM_SIZE (stack_args_size, arg_size.var);
	}
      else
	/* No stack slot was pushed for this parm.  */
	stack_parm = 0;

      /* Update info on where next arg arrives in registers.  */

      FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
			    passed_type, ! last_named);

      /* If this is our second time through, we are done with this parm.  */
      if (second_time)
	continue;
      /* If we can't trust the parm stack slot to be aligned enough
	 for its ultimate type, don't use that slot after entry.
	 We'll make another stack slot, if we need one.  */
      {
	int thisparm_boundary
	  = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);

	if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
	  stack_parm = 0;
      }

      /* If parm was passed in memory, and we need to convert it on entry,
	 don't store it back in that same slot.  */
      if (stack_parm != 0
	  && nominal_mode != BLKmode && nominal_mode != passed_mode)
	stack_parm = 0;

      /* Now adjust STACK_PARM to the mode and precise location
	 where this parameter should live during execution,
	 if we discover that it must live in the stack during execution.
	 To make debuggers happier on big-endian machines, we store
	 the value in the last bytes of the space available.  */

      if (nominal_mode != BLKmode && nominal_mode != passed_mode
	  && stack_parm != 0)
	{
	  rtx offset_rtx;

	  if (BYTES_BIG_ENDIAN
	      && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
	    stack_offset.constant += (GET_MODE_SIZE (passed_mode)
				      - GET_MODE_SIZE (nominal_mode));

	  offset_rtx = ARGS_SIZE_RTX (stack_offset);
	  if (offset_rtx == const0_rtx)
	    stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
	  else
	    stack_parm = gen_rtx (MEM, nominal_mode,
				  gen_rtx (PLUS, Pmode,
					   internal_arg_pointer, offset_rtx));

	  /* If this is a memory ref that contains aggregate components,
	     mark it as such for cse and loop optimize.  */
	  MEM_IN_STRUCT_P (stack_parm) = aggregate;
	}

#ifdef STACK_REGS
      /* We need this "use" info, because the gcc-register->stack-register
	 converter in reg-stack.c needs to know which registers are active
	 at the start of the function call.  The actual parameter loading
	 instructions are not always available then anymore, since they might
	 have been optimised away.  */

      if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
	emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
#endif

      /* ENTRY_PARM is an RTX for the parameter as it arrives,
	 in the mode in which it arrives.
	 STACK_PARM is an RTX for a stack slot where the parameter can live
	 during the function (in case we want to put it there).
	 STACK_PARM is 0 if no stack slot was pushed for it.

	 Now output code if necessary to convert ENTRY_PARM to
	 the type in which this function declares it,
	 and store that result in an appropriate place,
	 which may be a pseudo reg, may be STACK_PARM,
	 or may be a local stack slot if STACK_PARM is 0.

	 Set DECL_RTL to that place.  */
      if (nominal_mode == BLKmode)
	{
	  /* If a BLKmode arrives in registers, copy it to a stack slot.  */
	  if (GET_CODE (entry_parm) == REG)
	    {
	      int size_stored
		= CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
			      UNITS_PER_WORD);

	      /* Note that we will be storing an integral number of words.
		 So we have to be careful to ensure that we allocate an
		 integral number of words.  We do this below in the
		 assign_stack_local if space was not allocated in the argument
		 list.  If it was, this will not work if PARM_BOUNDARY is not
		 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
		 if it becomes a problem.  */

	      if (stack_parm == 0)
		{
		  stack_parm
		    = assign_stack_local (GET_MODE (entry_parm),
					  size_stored, 0);

		  /* If this is a memory ref that contains aggregate
		     components, mark it as such for cse and loop optimize.  */
		  MEM_IN_STRUCT_P (stack_parm) = aggregate;
		}
	      else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
		abort ();

	      if (TREE_READONLY (parm))
		RTX_UNCHANGING_P (stack_parm) = 1;

	      move_block_from_reg (REGNO (entry_parm),
				   validize_mem (stack_parm),
				   size_stored / UNITS_PER_WORD,
				   int_size_in_bytes (TREE_TYPE (parm)));
	    }
	  DECL_RTL (parm) = stack_parm;
	}
      else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
		   && ! DECL_INLINE (fndecl))
		  /* layout_decl may set this.  */
		  || TREE_ADDRESSABLE (parm)
		  || TREE_SIDE_EFFECTS (parm)
		  /* If -ffloat-store specified, don't put explicit
		     float variables into registers.  */
		  || (flag_float_store
		      && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
	       /* Always assign pseudo to structure return or item passed
		  by invisible reference.  */
	       || passed_pointer || parm == function_result_decl)
	{
	  /* Store the parm in a pseudoregister during the function, but we
	     may need to do it in a wider mode.  */

	  register rtx parmreg;
	  int regno, regnoi, regnor;

	  unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));

	  promoted_nominal_mode
	    = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);

	  parmreg = gen_reg_rtx (promoted_nominal_mode);
	  REG_USERVAR_P (parmreg) = 1;

	  /* If this was an item that we received a pointer to, set DECL_RTL
	     appropriately.  */
	  if (passed_pointer)
	    {
	      DECL_RTL (parm)
		= gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
	      MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
	    }
	  else
	    DECL_RTL (parm) = parmreg;
	  /* Copy the value into the register.  */
	  if (nominal_mode != passed_mode
	      || promoted_nominal_mode != promoted_mode)
	    {
	      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
		 mode, by the caller.  We now have to convert it to
		 NOMINAL_MODE, if different.  However, PARMREG may be in
		 a different mode than NOMINAL_MODE if it is being stored
		 promoted.

		 If ENTRY_PARM is a hard register, it might be in a register
		 not valid for operating in its mode (e.g., an odd-numbered
		 register for a DFmode).  In that case, moves are the only
		 thing valid, so we can't do a convert from there.  This
		 occurs when the calling sequence allows such misaligned
		 usages.

		 In addition, the conversion may involve a call, which could
		 clobber parameters which haven't been copied to pseudo
		 registers yet.  Therefore, we must first copy the parm to
		 a pseudo reg here, and save the conversion until after all
		 parameters have been moved.  */

	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));

	      emit_move_insn (tempreg, validize_mem (entry_parm));
	      tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);

	      push_to_sequence (conversion_insns);
	      expand_assignment (parm,
				 make_tree (nominal_type, tempreg), 0, 0);
	      conversion_insns = get_insns ();
	      did_conversion = 1;
	      end_sequence ();
	    }
	  else
	    emit_move_insn (parmreg, validize_mem (entry_parm));
	  /* If we were passed a pointer but the actual value
	     can safely live in a register, put it in one.  */
	  if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
	      && ! ((obey_regdecls && ! DECL_REGISTER (parm)
		     && ! DECL_INLINE (fndecl))
		    /* layout_decl may set this.  */
		    || TREE_ADDRESSABLE (parm)
		    || TREE_SIDE_EFFECTS (parm)
		    /* If -ffloat-store specified, don't put explicit
		       float variables into registers.  */
		    || (flag_float_store
			&& TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
	    {
	      /* We can't use nominal_mode, because it will have been set to
		 Pmode above.  We must use the actual mode of the parm.  */
	      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
	      REG_USERVAR_P (parmreg) = 1;
	      emit_move_insn (parmreg, DECL_RTL (parm));
	      DECL_RTL (parm) = parmreg;
	      /* STACK_PARM is the pointer, not the parm, and PARMREG is
		 now the parm.  */
	      stack_parm = 0;
	    }
#ifdef FUNCTION_ARG_CALLEE_COPIES
	  /* If we are passed an arg by reference and it is our responsibility
	     to make a copy, do it now.
	     PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
	     original argument, so we must recreate them in the call to
	     FUNCTION_ARG_CALLEE_COPIES.  */
	  /* ??? Later add code to handle the case that if the argument isn't
	     modified, don't do the copy.  */

	  else if (passed_pointer
		   && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
						  TYPE_MODE (DECL_ARG_TYPE (parm)),
						  DECL_ARG_TYPE (parm),
						  ! last_named))
	    {
	      rtx copy;
	      tree type = DECL_ARG_TYPE (parm);

	      /* This sequence may involve a library call perhaps clobbering
		 registers that haven't been copied to pseudos yet.  */

	      push_to_sequence (conversion_insns);

	      if (TYPE_SIZE (type) == 0
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
		/* This is a variable sized object.  */
		copy = gen_rtx (MEM, BLKmode,
				allocate_dynamic_stack_space
				(expr_size (parm), NULL_RTX,
				 TYPE_ALIGN (type)));
	      else
		copy = assign_stack_temp (TYPE_MODE (type),
					  int_size_in_bytes (type), 1);
	      MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);

	      store_expr (parm, copy, 0);
	      emit_move_insn (parmreg, XEXP (copy, 0));
	      conversion_insns = get_insns ();
	      did_conversion = 1;
	      end_sequence ();
	    }
#endif /* FUNCTION_ARG_CALLEE_COPIES */
	  /* In any case, record the parm's desired stack location
	     in case we later discover it must live in the stack.

	     If it is a COMPLEX value, store the stack location for both
	     halves.  */

	  if (GET_CODE (parmreg) == CONCAT)
	    regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
	  else
	    regno = REGNO (parmreg);

	  if (regno >= nparmregs)
	    {
	      rtx *new;
	      int old_nparmregs = nparmregs;

	      nparmregs = regno + 5;
	      new = (rtx *) oballoc (nparmregs * sizeof (rtx));
	      bcopy ((char *) parm_reg_stack_loc, (char *) new,
		     old_nparmregs * sizeof (rtx));
	      bzero ((char *) (new + old_nparmregs),
		     (nparmregs - old_nparmregs) * sizeof (rtx));
	      parm_reg_stack_loc = new;
	    }

	  if (GET_CODE (parmreg) == CONCAT)
	    {
	      enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));

	      regnor = REGNO (gen_realpart (submode, parmreg));
	      regnoi = REGNO (gen_imagpart (submode, parmreg));

	      if (stack_parm != 0)
		{
		  parm_reg_stack_loc[regnor]
		    = gen_realpart (submode, stack_parm);
		  parm_reg_stack_loc[regnoi]
		    = gen_imagpart (submode, stack_parm);
		}
	      else
		{
		  parm_reg_stack_loc[regnor] = 0;
		  parm_reg_stack_loc[regnoi] = 0;
		}
	    }
	  else
	    parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
	  /* Mark the register as eliminable if we did no conversion
	     and it was copied from memory at a fixed offset,
	     and the arg pointer was not copied to a pseudo-reg.
	     If the arg pointer is a pseudo reg or the offset formed
	     an invalid address, such memory-equivalences
	     as we make here would screw up life analysis for it.  */
	  if (nominal_mode == passed_mode
	      && ! did_conversion
	      && GET_CODE (entry_parm) == MEM
	      && entry_parm == stack_parm
	      && stack_offset.var == 0
	      && reg_mentioned_p (virtual_incoming_args_rtx,
				  XEXP (entry_parm, 0)))
	    {
	      rtx linsn = get_last_insn ();

	      /* Mark complex types separately.  */
	      if (GET_CODE (parmreg) == CONCAT)
		{
		  REG_NOTES (linsn)
		    = gen_rtx (EXPR_LIST, REG_EQUIV,
			       parm_reg_stack_loc[regnoi], REG_NOTES (linsn));

		  /* Now search backward for where we set the real part.  */
		  for (; linsn != 0
		       && ! reg_referenced_p (parm_reg_stack_loc[regnor],
					      PATTERN (linsn));
		       linsn = prev_nonnote_insn (linsn))
		    ;

		  REG_NOTES (linsn)
		    = gen_rtx (EXPR_LIST, REG_EQUIV,
			       parm_reg_stack_loc[regnor], REG_NOTES (linsn));
		}
	      else
		REG_NOTES (linsn)
		  = gen_rtx (EXPR_LIST, REG_EQUIV,
			     entry_parm, REG_NOTES (linsn));
	    }

	  /* For pointer data type, suggest pointer register.  */
	  if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
	    mark_reg_pointer (parmreg);
	}
      else
	{
	  /* Value must be stored in the stack slot STACK_PARM
	     during function execution.  */

	  if (promoted_mode != nominal_mode)
	    {
	      /* Conversion is required.  */
	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));

	      emit_move_insn (tempreg, validize_mem (entry_parm));

	      push_to_sequence (conversion_insns);
	      entry_parm = convert_to_mode (nominal_mode, tempreg,
					    TREE_UNSIGNED (TREE_TYPE (parm)));
	      conversion_insns = get_insns ();
	      end_sequence ();
	    }

	  if (entry_parm != stack_parm)
	    {
	      if (stack_parm == 0)
		{
		  stack_parm
		    = assign_stack_local (GET_MODE (entry_parm),
					  GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
		  /* If this is a memory ref that contains aggregate
		     components, mark it as such for cse and loop optimize.  */
		  MEM_IN_STRUCT_P (stack_parm) = aggregate;
		}

	      if (promoted_mode != nominal_mode)
		{
		  push_to_sequence (conversion_insns);
		  emit_move_insn (validize_mem (stack_parm),
				  validize_mem (entry_parm));
		  conversion_insns = get_insns ();
		  end_sequence ();
		}
	      else
		emit_move_insn (validize_mem (stack_parm),
				validize_mem (entry_parm));
	    }

	  DECL_RTL (parm) = stack_parm;
	}
3852 /* If this "parameter" was the place where we are receiving the
3853 function's incoming structure pointer, set up the result. */
3854 if (parm
== function_result_decl
)
3856 tree result
= DECL_RESULT (fndecl
);
3857 tree restype
= TREE_TYPE (result
);
3860 = gen_rtx (MEM
, DECL_MODE (result
), DECL_RTL (parm
));
3862 MEM_IN_STRUCT_P (DECL_RTL (result
)) = AGGREGATE_TYPE_P (restype
);
3865 if (TREE_THIS_VOLATILE (parm
))
3866 MEM_VOLATILE_P (DECL_RTL (parm
)) = 1;
3867 if (TREE_READONLY (parm
))
3868 RTX_UNCHANGING_P (DECL_RTL (parm
)) = 1;
  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insns (conversion_insns);

  max_parm_reg = max_reg_num ();
  last_parm_insn = get_last_insn ();

  current_function_args_size = stack_args_size.constant;
  /* Adjust function incoming argument size for alignment and
     minimum size.  */

#ifdef REG_PARM_STACK_SPACE
#ifndef MAYBE_REG_PARM_STACK_SPACE
  current_function_args_size = MAX (current_function_args_size,
                                    REG_PARM_STACK_SPACE (fndecl));
#endif
#endif

#ifdef STACK_BOUNDARY
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

  current_function_args_size
    = ((current_function_args_size + STACK_BYTES - 1)
       / STACK_BYTES) * STACK_BYTES;
#endif
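
/* For example (illustrative only): with STACK_BOUNDARY == 64, STACK_BYTES
   is 8, so an argument block of 14 bytes is rounded up to
   ((14 + 8 - 1) / 8) * 8 == 16, while a block of 16 bytes is unchanged.  */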
#ifdef ARGS_GROW_DOWNWARD
  current_function_arg_offset_rtx
    = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
       : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
                                  size_int (-stack_args_size.constant)),
                      NULL_RTX, VOIDmode, 0));
#else
  current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
                                                 current_function_args_size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  current_function_args_info = args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
}
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

#ifdef PROMOTE_FUNCTION_ARGS

rtx
promoted_input_arg (regno, pmode, punsignedp)
     int regno;
     enum machine_mode *pmode;
     int *punsignedp;
{
  tree arg;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
        && REGNO (DECL_INCOMING_RTL (arg)) == regno
        && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
        int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));

        mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
        if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
            && mode != DECL_MODE (arg))
          {
            *pmode = DECL_MODE (arg);
            *punsignedp = unsignedp;
            return DECL_INCOMING_RTL (arg);
          }
      }

  return 0;
}

#endif
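
/* Illustrative sketch (not specific to any target): on a machine whose
   promotion rules widen HImode arguments to SImode, a `short' parameter
   arrives in an SImode register even though DECL_MODE of the parm is
   HImode.  For that register number, promoted_input_arg returns the
   SImode incoming register, sets *PMODE to HImode, and sets *PUNSIGNEDP
   according to the signedness of `short'.  */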
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in *OFFSET_PTR
   and *ARG_SIZE_PTR, respectively.

   IN_REGS is non-zero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
   list to be aligned to the specific boundary (in bits).  This rounding
   affects the initial and starting offsets, but not the argument size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
   initial_offset_ptr is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   initial_offset_ptr.  arg_size_ptr is always positive.  */

void
locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
                     initial_offset_ptr, offset_ptr, arg_size_ptr)
     enum machine_mode passed_mode;
     tree type;
     int in_regs;
     tree fndecl;
     struct args_size *initial_offset_ptr;
     struct args_size *offset_ptr;
     struct args_size *arg_size_ptr;
{
  tree sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  int reg_parm_stack_space = 0;
#ifdef REG_PARM_STACK_SPACE
  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
#ifdef MAYBE_REG_PARM_STACK_SPACE
      reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
      reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
      if (reg_parm_stack_space > 0)
        {
          if (initial_offset_ptr->var)
            {
              initial_offset_ptr->var
                = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
                              size_int (reg_parm_stack_space));
              initial_offset_ptr->constant = 0;
            }
          else if (initial_offset_ptr->constant < reg_parm_stack_space)
            initial_offset_ptr->constant = reg_parm_stack_space;
        }
    }
#endif /* REG_PARM_STACK_SPACE */

  arg_size_ptr->var = 0;
  arg_size_ptr->constant = 0;
#ifdef ARGS_GROW_DOWNWARD
  if (initial_offset_ptr->var)
    {
      offset_ptr->constant = 0;
      offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
                                    initial_offset_ptr->var);
    }
  else
    {
      offset_ptr->constant = - initial_offset_ptr->constant;
      offset_ptr->var = 0;
    }
  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
          || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
  SUB_PARM_SIZE (*offset_ptr, sizetree);
  if (where_pad != downward)
    pad_to_arg_alignment (offset_ptr, boundary);
  if (initial_offset_ptr->var)
    {
      arg_size_ptr->var = size_binop (MINUS_EXPR,
                                      size_binop (MINUS_EXPR,
                                                  integer_zero_node,
                                                  initial_offset_ptr->var),
                                      offset_ptr->var);
    }
  else
    {
      arg_size_ptr->constant = (- initial_offset_ptr->constant -
                                offset_ptr->constant);
    }
#else /* !ARGS_GROW_DOWNWARD */
  pad_to_arg_alignment (initial_offset_ptr, boundary);
  *offset_ptr = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  if (where_pad == downward
      /* However, BLKmode args passed in regs have their padding done
         elsewhere.  The stack slot must be able to hold the entire
         register.  */
      && !(in_regs && passed_mode == BLKmode))
    pad_below (offset_ptr, passed_mode, sizetree);

  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
          || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (*arg_size_ptr, sizetree);
#endif /* ARGS_GROW_DOWNWARD */
}
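
/* Worked example (illustrative, not from any particular target): with
   upward-growing args, PARM_BOUNDARY == 32 and FUNCTION_ARG_BOUNDARY
   returning 32, a 1-byte parm arriving when the args so far total 5 bytes
   first has the initial offset aligned from 5 up to 8, so *OFFSET_PTR
   becomes 8; if FUNCTION_ARG_PADDING is not `none', the 1-byte size is
   then rounded up to 4, so *ARG_SIZE_PTR becomes 4 and the next parm
   starts at offset 12.  */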
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (offset_ptr, boundary)
     struct args_size *offset_ptr;
     int boundary;
{
  int boundary_in_bytes = boundary / BITS_PER_UNIT;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
        {
          offset_ptr->var =
#ifdef ARGS_GROW_DOWNWARD
            round_down
#else
            round_up
#endif
              (ARGS_SIZE_TREE (*offset_ptr),
               boundary / BITS_PER_UNIT);
          offset_ptr->constant = 0; /*?*/
        }
      else
        offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
          FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
          CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
    }
}
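
/* For example (illustrative only): with a 32-bit boundary,
   boundary_in_bytes is 4, so CEIL_ROUND (9, 4) yields 12 for
   upward-growing args, while FLOOR_ROUND (-9, 4) yields -12 in the
   ARGS_GROW_DOWNWARD case; both move the offset to the next 4-byte
   multiple away from zero, using masking rather than division.  */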
/* Pad the parm described by PASSED_MODE (or by SIZETREE, for BLKmode)
   below: advance *OFFSET_PTR by the difference between the
   PARM_BOUNDARY-rounded size and the actual size, so that the value
   itself sits at the high end of its slot.  */

static void
pad_below (offset_ptr, passed_mode, sizetree)
     struct args_size *offset_ptr;
     enum machine_mode passed_mode;
     tree sizetree;
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
        offset_ptr->constant
          += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
               / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
              - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
          || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
        {
          /* Round the size up to multiple of PARM_BOUNDARY bits.  */
          tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
          /* Add it in.  */
          ADD_PARM_SIZE (*offset_ptr, s2);
          SUB_PARM_SIZE (*offset_ptr, sizetree);
        }
    }
}
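
/* For example (illustrative only): with PARM_BOUNDARY == 32, an HImode
   parm (16 bits, 2 bytes) gets ((16 + 31) / 32 * 32 / 8) - 2 == 2 bytes
   of padding added below it, so the offset advances 2 bytes before the
   value itself.  */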
/* Round VALUE down to the nearest multiple of DIVISOR.  */

static tree
round_down (value, divisor)
     tree value;
     int divisor;
{
  return size_binop (MULT_EXPR,
                     size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
                     size_int (divisor));
}
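
/* For example: round_down applied to a size tree for 19 with divisor 8
   computes FLOOR_DIV_EXPR (19, 8) == 2 and then 2 * 8 == 16.  */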
/* Walk the tree of blocks describing the binding levels within a function
   and warn about uninitialized variables.
   This is done after calling flow_analysis and before global_alloc
   clobbers the pseudo-regs to hard regs.  */

void
uninitialized_vars_warning (block)
     tree block;
{
  register tree decl, sub;
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
          /* These warnings are unreliable for aggregates
             because assigning the fields one by one can fail to convince
             flow.c that the entire aggregate was initialized.
             Unions are troublesome because members may be shorter.  */
          && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
          && DECL_RTL (decl) != 0
          && GET_CODE (DECL_RTL (decl)) == REG
          && regno_uninitialized (REGNO (DECL_RTL (decl))))
        warning_with_decl (decl,
                           "`%s' might be used uninitialized in this function");
      if (TREE_CODE (decl) == VAR_DECL
          && DECL_RTL (decl) != 0
          && GET_CODE (DECL_RTL (decl)) == REG
          && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
        warning_with_decl (decl,
                           "variable `%s' might be clobbered by `longjmp' or `vfork'");
    }
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    uninitialized_vars_warning (sub);
}

/* Do the appropriate part of uninitialized_vars_warning
   but for arguments instead of local variables.  */

void
setjmp_args_warning (block)
     tree block;
{
  register tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (DECL_RTL (decl) != 0
        && GET_CODE (DECL_RTL (decl)) == REG
        && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
      warning_with_decl (decl,
                         "argument `%s' might be clobbered by `longjmp' or `vfork'");
}
/* If this function calls setjmp, put all vars into the stack
   unless they were declared `register'.  */

void
setjmp_protect (block)
     tree block;
{
  register tree decl, sub;
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
         || TREE_CODE (decl) == PARM_DECL)
        && DECL_RTL (decl) != 0
        && GET_CODE (DECL_RTL (decl)) == REG
        /* If this variable came from an inline function, it must be
           that its life doesn't overlap the setjmp.  If there was a
           setjmp in the function, it would already be in memory.  We
           must exclude such variables because their DECL_RTL might be
           set to strange things such as virtual_stack_vars_rtx.  */
        && ! DECL_FROM_INLINE (decl)
        && (
#ifdef NON_SAVING_SETJMP
            /* If longjmp doesn't restore the registers,
               don't put anything in them.  */
            NON_SAVING_SETJMP
            ||
#endif
            ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}

/* Like the previous function, but for args instead of local variables.  */

void
setjmp_protect_args ()
{
  register tree decl, sub;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
         || TREE_CODE (decl) == PARM_DECL)
        && DECL_RTL (decl) != 0
        && GET_CODE (DECL_RTL (decl)) == REG
        && (
            /* If longjmp doesn't restore the registers,
               don't put anything in them.  */
#ifdef NON_SAVING_SETJMP
            NON_SAVING_SETJMP
            ||
#endif
            ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
}
/* Return the context-pointer register corresponding to DECL,
   or 0 if it does not need one.  */

rtx
lookup_static_chain (decl)
     tree decl;
{
  tree context = decl_function_context (decl);
  tree link;

  if (context == 0)
    return 0;

  /* We treat inline_function_decl as an alias for the current function
     because that is the inline function whose vars, types, etc.
     are being merged into the current function.
     See expand_inline_function.  */
  if (context == current_function_decl || context == inline_function_decl)
    return virtual_stack_vars_rtx;

  for (link = context_display; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == context)
      return RTL_EXPR_RTL (TREE_VALUE (link));

  abort ();
}
/* Convert a stack slot address ADDR for variable VAR
   (from a containing function)
   into an address valid in this function (using a static chain).  */

rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  int displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
         out of that function's stack frame.

         There are two cases:  If a separate ap is needed, allocate a
         slot in the outer function for it and dereference it that way.
         This is correct even if the real ap is actually a pseudo.
         Otherwise, just adjust the offset from the frame pointer to
         compensate.  */

#ifdef NEED_SEPARATE_AP
      rtx addr;

      if (fp->arg_pointer_save_area == 0)
        fp->arg_pointer_save_area
          = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
         avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
        if (TREE_PURPOSE (link) == context)
          {
            base = RTL_EXPR_RTL (TREE_VALUE (link));
            break;
          }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
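
/* Illustrative sketch: if VAR lives in its containing function's frame at
   an address of the form (plus (reg) (const_int 12)), the address decodes
   to that base reg and displacement 12; BASE then becomes the containing
   frame's address fetched through the static chain (or through the saved
   arg pointer), and the result is simply BASE plus the same 12-byte
   displacement.  */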
/* Return the address of the trampoline for entering nested fn FUNCTION.
   If necessary, allocate a trampoline (in the stack frame)
   and emit rtl to initialize its contents (at entry to this function).  */

rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return
        round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));

  for (fp = outer_function_chain; fp; fp = fp->next)
    for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
        {
          tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
                                    function);
          return round_trampoline_addr (tramp);
        }

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl)
    for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
        break;

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  if (fp != 0)
    tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
  else
    tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      push_obstacks (fp->function_maybepermanent_obstack,
                     fp->function_maybepermanent_obstack);
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
      pop_obstacks ();
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
         trampoline_list doesn't become garbage.  */
      int momentary = suspend_momentary ();

      rtlexp = make_node (RTL_EXPR);
      resume_momentary (momentary);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
/* Given a trampoline address,
   round it to multiple of TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
                       GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
                       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
                        GEN_INT (- TRAMPOLINE_ALIGNMENT),
                        temp, 0, OPTAB_LIB_WIDEN);
#endif
  return tramp;
}
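
/* For example (illustrative only): with TRAMPOLINE_ALIGNMENT == 16, an
   address of 0x1003 becomes 0x1003 + 15 == 0x1012, and 0x1012 & -16 ==
   0x1010, the next 16-byte boundary; an already-aligned address is left
   unchanged by the add-then-mask pair.  */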
/* The functions identify_blocks and reorder_blocks provide a way to
   reorder the tree of BLOCK nodes, for optimizers that reshuffle or
   duplicate portions of the RTL code.  Call identify_blocks before
   changing the RTL, and call reorder_blocks after.  */

/* Put all this function's BLOCK nodes into a vector, and return it.
   Also store in each NOTE for the beginning or end of a block
   the index of that block in the vector.
   The arguments are TOP_BLOCK, the top-level block of the function,
   and INSNS, the insn chain of the function.  */

tree *
identify_blocks (top_block, insns)
     tree top_block;
     rtx insns;
{
  int n_blocks;
  tree *block_vector;
  int *block_stack;
  int depth = 0;
  int next_block_number = 0;
  int current_block_number = 0;
  rtx insn;

  if (top_block == 0)
    return 0;

  n_blocks = all_blocks (top_block, 0);
  block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
  block_stack = (int *) alloca (n_blocks * sizeof (int));

  all_blocks (top_block, block_vector);

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
          {
            block_stack[depth++] = current_block_number;
            current_block_number = next_block_number;
            NOTE_BLOCK_NUMBER (insn) = next_block_number++;
          }
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
          {
            current_block_number = block_stack[--depth];
            NOTE_BLOCK_NUMBER (insn) = current_block_number;
          }
      }

  return block_vector;
}
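
/* To sketch the numbering (illustrative only): for a block structure
   `{ A { B } { C } }' the BLOCK_BEG notes are numbered 0 (A), 1 (B) and
   2 (C) in order of appearance, while each BLOCK_END note receives the
   number of the block being resumed -- so B's and C's end notes both
   carry 0, A's number.  */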
/* Given BLOCK_VECTOR which was returned by identify_blocks,
   and a revised instruction chain, rebuild the tree structure
   of BLOCK nodes to correspond to the new order of RTL.
   The new block tree is inserted below TOP_BLOCK.
   Returns the current top-level block.  */

tree
reorder_blocks (block_vector, top_block, insns)
     tree *block_vector;
     tree top_block;
     rtx insns;
{
  tree current_block = top_block;
  rtx insn;

  if (block_vector == 0)
    return top_block;

  /* Prune the old tree away, so that it doesn't get in the way.  */
  BLOCK_SUBBLOCKS (current_block) = 0;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
          {
            tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
            /* If we have seen this block before, copy it.  */
            if (TREE_ASM_WRITTEN (block))
              block = copy_node (block);
            BLOCK_SUBBLOCKS (block) = 0;
            TREE_ASM_WRITTEN (block) = 1;
            BLOCK_SUPERCONTEXT (block) = current_block;
            BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
            BLOCK_SUBBLOCKS (current_block) = block;
            current_block = block;
            NOTE_SOURCE_FILE (insn) = 0;
          }
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
          {
            BLOCK_SUBBLOCKS (current_block)
              = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
            current_block = BLOCK_SUPERCONTEXT (current_block);
            NOTE_SOURCE_FILE (insn) = 0;
          }
      }

  return current_block;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

static tree
blocks_nreverse (t)
     tree t;
{
  register tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
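
/* For example: a chain A -> B -> C (linked through BLOCK_CHAIN) comes
   back as C -> B -> A; the nodes themselves are reused, only the
   BLOCK_CHAIN links are flipped in place.  */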
/* Count the subblocks of BLOCK, and list them all into the vector VECTOR.
   Also clear TREE_ASM_WRITTEN in all blocks.  */

static int
all_blocks (block, vector)
     tree block;
     tree *vector;
{
  int n_blocks = 1;
  tree subblocks;

  TREE_ASM_WRITTEN (block) = 0;
  /* Record this block.  */
  if (vector)
    vector[0] = block;

  /* Record the subblocks, and their subblocks.  */
  for (subblocks = BLOCK_SUBBLOCKS (block);
       subblocks; subblocks = BLOCK_CHAIN (subblocks))
    n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);

  return n_blocks;
}
/* Build bytecode call descriptor for function SUBR.  */

rtx
bc_build_calldesc (subr)
     tree subr;
{
  tree calldesc = 0, arg;
  int nargs = 0;

  /* Build the argument description vector in reverse order.  */
  DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
  nargs = 0;

  for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
    {
      ++nargs;

      calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
      calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
    }

  DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));

  /* Prepend the function's return type.  */
  calldesc = tree_cons ((tree) 0,
                        size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
                        calldesc);

  calldesc = tree_cons ((tree) 0,
                        bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
                        calldesc);

  /* Prepend the arg count.  */
  calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);

  /* Output the call description vector and get its address.  */
  calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
  TREE_TYPE (calldesc) = build_array_type (integer_type_node,
                                           build_index_type (build_int_2 (nargs * 2, 0)));

  return output_constant_def (calldesc);
}
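
/* Reading the cons chain above back in order, the emitted vector is:
   argument count, return-type code, return size, then for each argument
   in declaration order its type code followed by its size.  (The
   arguments are described in reverse while building only because
   tree_cons prepends.)  */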
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  char *junk;

  if (output_bytecode)
    {
      this_function_decl = subr;
      this_function_calldesc = bc_build_calldesc (subr);
      local_vars_size = 0;
      stack_depth = 0;
      max_stack_depth = 0;
      stmt_expr_depth = 0;
      return;
    }

  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slot = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, &junk);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0);

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_has_nonlocal_goto = 0;
  current_function_contains_functions = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */
  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  target_temp_slot_level = 0;

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  /* We haven't made any trampolines for this function yet.  */
  trampoline_list = 0;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Initialize the insn lengths.  */
  init_insn_lengths ();

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#else
      current_function_returns_struct = 1;
#endif
    }
  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs.  */
  current_function_varargs = 0;
}

/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}

/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
  if (!output_bytecode)
    {
      /* The zero below avoids a possible parse error.  */
      0;
#if !defined (HAS_INIT_SECTION)
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
                         VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
    }
}
extern struct obstack permanent_obstack;

/* Expand start of bytecode function. See comment at
   expand_function_start below for details.  */

void
bc_expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  char label[20], *name;
  static int nlab;
  tree thisarg;
  int argsz;

  if (TREE_PUBLIC (subr))
    bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));

#ifdef DEBUG_PRINT_CODE
  fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
#endif

  for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg;
       thisarg = TREE_CHAIN (thisarg))
    {
      if (DECL_RTL (thisarg))
        abort ();               /* Should be NULL here I think.  */
      else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
        {
          DECL_RTL (thisarg)
            = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
          argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
        }
      else
        {
          /* Variable-sized objects are pointers to their storage.  */
          DECL_RTL (thisarg)
            = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
          argsz += POINTER_SIZE;
        }
    }

  bc_begin_function (bc_xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));

  ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
  ++nlab;

  name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
  this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
  this_function_bytecode =
    bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
}
/* Expand end of bytecode function.  See details in the comment of
   expand_function_end (), below.  */

void
bc_expand_function_end ()
{
  char *ptrconsts;

  expand_null_return ();

  /* Emit any fixup code.  This must be done before the call to
     BC_END_FUNCTION (), since that will cause the bytecode
     segment to be finished off and closed.  */

  expand_fixups (NULL_RTX);

  ptrconsts = bc_end_function ();

  bc_align_const (2 /* INT_ALIGN */);

  /* If this changes also make sure to change bc-interp.h!  */

  bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
  bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
  bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
  bc_emit_const_labelref (this_function_bytecode, 0);
  bc_emit_const_labelref (ptrconsts, 0);
  bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
}
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr;

  if (output_bytecode)
    {
      bc_expand_function_start (subr, parms_have_cleanups);
      return;
    }

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

#ifdef SMALL_REGISTER_CLASSES
      /* Delay copying static chain if it is not a register to avoid
         conflicts with regs used for parameters.  */
      if (GET_CODE (static_chain_incoming_rtx) == REG)
#endif
        emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (struct_value_incoming_rtx)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, struct_value_incoming_rtx);
            }
        }
      if (value_address)
        {
          DECL_RTL (DECL_RESULT (subr))
            = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
          MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
            = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups)
    {
      /* If function will end with cleanup code for parms,
         compute the return values into a pseudo reg,
         which we will copy into the true return register
         after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    {
      /* Scalar, returned in a register.  */
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
        {
          REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
          /* Needed because we may need to move this to memory
             in case it's a named return value whose address is taken.  */
          DECL_REGISTER (DECL_RESULT (subr)) = 1;
        }
    }
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

#ifdef SMALL_REGISTER_CLASSES
  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);
#endif

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }

  /* Fetch static chain values for containing functions.  */
  tem = decl_function_context (current_function_decl);
  /* If not doing stupid register allocation, copy the static chain
     pointer into a pseudo.  If we have small register classes, copy the
     value from memory if static_chain_incoming_rtx is a REG.  If we do
     stupid register allocation, we use the stack address generated above.  */
  if (tem && ! obey_regdecls)
    {
#ifdef SMALL_REGISTER_CLASSES
      /* If the static chain originally came in a register, put it back
         there, then move it out in the next insn.  The reason for
         this peculiar code is to satisfy function integration.  */
      if (GET_CODE (static_chain_incoming_rtx) == REG)
        emit_move_insn (static_chain_incoming_rtx, last_ptr);
#endif

      last_ptr = copy_to_reg (static_chain_incoming_rtx);
    }

  context_display = 0;
  while (tem)
    {
      tree rtlexp = make_node (RTL_EXPR);

      RTL_EXPR_RTL (rtlexp) = last_ptr;
      context_display = tree_cons (tem, rtlexp, context_display);
      tem = decl_function_context (tem);
      if (tem == 0)
        break;
      /* Chain thru stack frames, assuming pointer to next lexical frame
         is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
      last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
      last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
                                       memory_address (Pmode, last_ptr)));

      /* If we are not optimizing, ensure that we know that this
         piece of context is live over the entire function.  */
      if (! optimize)
        save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
                                  save_expr_regs);
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;
  rtx outgoing;
  static rtx initial_trampoline;

  if (output_bytecode)
    {
      bc_expand_function_end ();
      return;
    }

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
        setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx seq;

      /* First make sure this compilation has a template for
         initializing trampolines.  */
      if (initial_trampoline == 0)
        {
          end_temporary_allocation ();
          initial_trampoline
            = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
          resume_temporary_allocation ();
        }

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = change_address (initial_trampoline, BLKmode,
                              round_trampoline_addr (XEXP (tramp, 0)));
      emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
                       FUNCTION_BOUNDARY / BITS_PER_UNIT);
      INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
                             XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
            && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
          warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
        {
          use_variable (XEXP (tem, 0));
          use_variable_after (XEXP (tem, 0), parm_birth_insn);
        }

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
          >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                                   current_function_decl);
#else
      real_decl_result
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                          current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      emit_move_insn (real_decl_result,
                      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address
        = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));

#ifdef FUNCTION_OUTGOING_VALUE
      outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      outgoing
        = FUNCTION_VALUE (build_pointer_type (type),
                          current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
        vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
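
/* For example: recording a SEQUENCE of three insns with UIDs 10, 11 and
   12 yields the zero-terminated vector {10, 11, 12, 0}; a single insn
   with UID 7 yields {7, 0}.  The trailing zero is what contains (),
   below, scans for.  */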
/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = 0; vec[j]; j++)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
            count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
        if (INSN_UID (insn) == vec[j])
          return 1;
    }
  return 0;
}
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f;
{
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx head, seq, insn;

      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
         prologue insns and a NOTE_INSN_PROLOGUE_END.  */
      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
      seq = gen_prologue ();
      head = emit_insn_after (seq, f);

      /* Include the new prologue insns in the first block.  Ignore them
         if they form a basic block unto themselves.  */
      if (basic_block_head && n_basic_blocks
          && GET_CODE (basic_block_head[0]) != CODE_LABEL)
        basic_block_head[0] = NEXT_INSN (f);

      /* Retain a map of the prologue insns.  */
      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
    }
  else
#endif
    prologue = 0;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      rtx prev = prev_nonnote_insn (insn);

      /* If we end with a BARRIER, we don't need an epilogue.  */
      if (! (prev && GET_CODE (prev) == BARRIER))
        {
          rtx tail, seq, tem;
          rtx first_use = 0;
          rtx last_use = 0;

          /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
             epilogue insns, the USE insns at the end of a function,
             the jump insn that returns, and then a BARRIER.  */

          /* Move the USE insns at the end of a function onto a list.  */
          while (prev
                 && GET_CODE (prev) == INSN
                 && GET_CODE (PATTERN (prev)) == USE)
            {
              tem = prev;
              prev = prev_nonnote_insn (prev);

              NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
              PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
              if (first_use)
                {
                  NEXT_INSN (tem) = first_use;
                  PREV_INSN (first_use) = tem;
                }
              first_use = tem;
              if (!last_use)
                last_use = tem;
            }

          emit_barrier_after (insn);

          seq = gen_epilogue ();
          tail = emit_jump_insn_after (seq, insn);

          /* Insert the USE insns immediately before the return insn, which
             must be the first instruction before the final barrier.  */
          if (first_use)
            {
              tem = prev_nonnote_insn (get_last_insn ());
              NEXT_INSN (PREV_INSN (tem)) = first_use;
              PREV_INSN (first_use) = PREV_INSN (tem);
              PREV_INSN (tem) = last_use;
              NEXT_INSN (last_use) = tem;
            }

          emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);

          /* Include the new epilogue insns in the last block.  Ignore
             them if they form a basic block unto themselves.  */
          if (basic_block_end && n_basic_blocks
              && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
            basic_block_end[n_basic_blocks - 1] = tail;

          /* Retain a map of the epilogue insns.  */
          epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
          return;
        }
    }
#endif
  epilogue = 0;
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      rtx next, prev;
      int len;

      if (prologue)
        {
          register rtx insn, note = 0;

          /* Scan from the beginning until we reach the last prologue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; prologue[len]; len++)
            ;
          for (insn = f; len && insn; insn = NEXT_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                    note = insn;
                }
              else if ((len -= contains (insn, prologue)) == 0)
                {
                  /* Find the prologue-end note if we haven't already, and
                     move it to just after the last prologue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; note = NEXT_INSN (note);)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                          break;
                    }
                  next = NEXT_INSN (note);
                  prev = PREV_INSN (note);
                  if (prev)
                    NEXT_INSN (prev) = next;
                  if (next)
                    PREV_INSN (next) = prev;
                  add_insn_after (note, insn);
                }
            }
        }

      if (epilogue)
        {
          register rtx insn, note = 0;

          /* Scan from the end until we reach the first epilogue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; epilogue[len]; len++)
            ;
          for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                    note = insn;
                }
              else if ((len -= contains (insn, epilogue)) == 0)
                {
                  /* Find the epilogue-begin note if we haven't already, and
                     move it to just before the first epilogue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; note = PREV_INSN (note);)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                          break;
                    }
                  next = NEXT_INSN (note);
                  prev = PREV_INSN (note);
                  if (prev)
                    NEXT_INSN (prev) = next;
                  if (next)
                    PREV_INSN (next) = prev;
                  add_insn_after (note, PREV_INSN (insn));
                }
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}