1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-99, 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "insn-flags.h"
34 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
35 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
38 /* Decide whether a function's arguments should be processed
39 from first to last or from last to first.
41 They should if the stack and args grow in opposite directions, but
42 only if we have push insns. */
46 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
47 #define PUSH_ARGS_REVERSED /* If it's last to first */
52 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
53 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
55 /* Data structure and subroutines used within expand_call. */
59 /* Tree node for this argument. */
61 /* Mode for value; TYPE_MODE unless promoted. */
62 enum machine_mode mode
;
63 /* Current RTL value for argument, or 0 if it isn't precomputed. */
65 /* Initially-compute RTL value for argument; only for const functions. */
67 /* Register to pass this argument in, 0 if passed on stack, or an
68 PARALLEL if the arg is to be copied into multiple non-contiguous
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
74 /* Number of registers to use. 0 means put the whole arg in registers.
75 Also 0 if not passed in registers. */
77 /* Non-zero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
82 /* Offset of this argument from beginning of stack-args. */
83 struct args_size offset
;
84 /* Similar, but offset to the start of the stack slot. Different from
85 OFFSET if this arg pads downward. */
86 struct args_size slot_offset
;
87 /* Size of this argument on the stack, rounded up for any padding it gets,
88 parts of the argument passed in registers do not count.
89 If REG_PARM_STACK_SPACE is defined, then register parms
90 are counted here as well. */
91 struct args_size size
;
92 /* Location on the stack at which parameter should be stored. The store
93 has already been done if STACK == VALUE. */
95 /* Location on the stack of the start of this argument slot. This can
96 differ from STACK if this arg pads downward. This location is known
97 to be aligned to FUNCTION_ARG_BOUNDARY. */
99 #ifdef ACCUMULATE_OUTGOING_ARGS
100 /* Place that this stack area has been saved, if needed. */
103 /* If an argument's alignment does not permit direct copying into registers,
104 copy in smaller-sized pieces into pseudos. These are stored in a
105 block pointed to by this field. The next field says how many
106 word-sized pseudos we made. */
109 /* The amount that the stack pointer needs to be adjusted to
110 force alignment for the next argument. */
111 struct args_size alignment_pad
;
114 #ifdef ACCUMULATE_OUTGOING_ARGS
115 /* A vector of one char per byte of stack space. A byte if non-zero if
116 the corresponding stack location has been used.
117 This vector is used to prevent a function call within an argument from
118 clobbering any stack already set up. */
119 static char *stack_usage_map
;
121 /* Size of STACK_USAGE_MAP. */
122 static int highest_outgoing_arg_in_use
;
124 /* stack_arg_under_construction is nonzero when an argument may be
125 initialized with a constructor call (including a C function that
126 returns a BLKmode struct) and expand_call must take special action
127 to make sure the object being constructed does not overlap the
128 argument list for the constructor call. */
129 int stack_arg_under_construction
;
132 static int calls_function
PARAMS ((tree
, int));
133 static int calls_function_1
PARAMS ((tree
, int));
134 static void emit_call_1
PARAMS ((rtx
, tree
, tree
, HOST_WIDE_INT
,
135 HOST_WIDE_INT
, HOST_WIDE_INT
, rtx
,
136 rtx
, int, rtx
, int));
137 static void precompute_register_parameters
PARAMS ((int,
140 static void store_one_arg
PARAMS ((struct arg_data
*, rtx
, int, int,
142 static void store_unaligned_arguments_into_pseudos
PARAMS ((struct arg_data
*,
144 static int finalize_must_preallocate
PARAMS ((int, int,
146 struct args_size
*));
147 static void precompute_arguments
PARAMS ((int, int, int,
149 struct args_size
*));
150 static int compute_argument_block_size
PARAMS ((int,
153 static void initialize_argument_information
PARAMS ((int,
160 static void compute_argument_addresses
PARAMS ((struct arg_data
*,
162 static rtx rtx_for_function_call
PARAMS ((tree
, tree
));
163 static void load_register_parameters
PARAMS ((struct arg_data
*,
166 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
167 static rtx save_fixed_argument_area
PARAMS ((int, rtx
, int *, int *));
168 static void restore_fixed_argument_area
PARAMS ((rtx
, rtx
, int, int));
171 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
174 If WHICH is 0, return 1 if EXP contains a call to any function.
175 Actually, we only need return 1 if evaluating EXP would require pushing
176 arguments on the stack, but that is too difficult to compute, so we just
177 assume any function call might require the stack. */
/* Scratch list of SAVE_EXPRs already visited by calls_function_1; cleared
   around each top-level query so results do not leak between queries.  */
179 static tree calls_function_save_exprs
;
/* Top-level entry point: answers the WHICH query above by delegating to
   calls_function_1, clearing the SAVE_EXPR list both before and after
   the recursive walk.
   NOTE(review): the extraction has elided interior lines here (the
   embedded numbering jumps 182->187 and stops at 189) — the parameter
   declarations, braces, local `val' declaration and `return val;' are
   missing from view.  */
182 calls_function (exp
, which
)
187 calls_function_save_exprs
= 0;
188 val
= calls_function_1 (exp
, which
);
189 calls_function_save_exprs
= 0;
/* Recursive worker for calls_function: walks the tree EXP looking for
   function calls (or, when WHICH is 1, calls to alloca-like functions —
   see the comment above calls_function).  Uses calls_function_save_exprs
   to avoid scanning any SAVE_EXPR body more than once.
   NOTE(review): many interior lines are elided by the extraction (the
   embedded numbering jumps, e.g. 201->203, 208->217, 227->231); the
   switch head, several returns and closing braces are missing from view.  */
194 calls_function_1 (exp
, which
)
199 enum tree_code code
= TREE_CODE (exp
);
200 int type
= TREE_CODE_CLASS (code
);
201 int length
= tree_code_length
[(int) code
];
203 /* If this code is language-specific, we don't know what it will do. */
204 if ((int) code
>= NUM_TREE_CODES
)
207 /* Only expressions and references can contain calls. */
208 if (type
!= 'e' && type
!= '<' && type
!= '1' && type
!= '2' && type
!= 'r'
/* A call through an ADDR_EXPR of a FUNCTION_DECL: inspect the callee
   directly to see whether it is alloca-like.  */
217 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
218 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
221 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
/* Alloca-like if it is the alloca builtin itself, or a function whose
   saved insns record that it calls alloca.  */
223 if ((DECL_BUILT_IN (fndecl
)
224 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
225 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA
)
226 || (DECL_SAVED_INSNS (fndecl
)
227 && DECL_SAVED_INSNS (fndecl
)->calls_alloca
))
231 /* Third operand is RTL. */
/* SAVE_EXPR handling: if RTL was already expanded, or this SAVE_EXPR was
   already visited (it is on the save_exprs list), it cannot introduce a
   new call; otherwise record it and scan its body once.  */
236 if (SAVE_EXPR_RTL (exp
) != 0)
238 if (value_member (exp
, calls_function_save_exprs
))
240 calls_function_save_exprs
= tree_cons (NULL_TREE
, exp
,
241 calls_function_save_exprs
);
242 return (TREE_OPERAND (exp
, 0) != 0
243 && calls_function_1 (TREE_OPERAND (exp
, 0), which
))
;
/* BLOCK handling: scan the initializers of the block's variables...  */
249 for (local
= BLOCK_VARS (exp
); local
; local
= TREE_CHAIN (local
))
250 if (DECL_INITIAL (local
) != 0
251 && calls_function_1 (DECL_INITIAL (local
), which
))
255 register tree subblock
;
/* ...and then recurse into each sub-block.  */
257 for (subblock
= BLOCK_SUBBLOCKS (exp
);
259 subblock
= TREE_CHAIN (subblock
))
260 if (calls_function_1 (subblock
, which
))
265 case METHOD_CALL_EXPR
:
269 case WITH_CLEANUP_EXPR
:
/* Default: scan every operand of EXP recursively.  */
280 for (i
= 0; i
< length
; i
++)
281 if (TREE_OPERAND (exp
, i
) != 0
282 && calls_function_1 (TREE_OPERAND (exp
, i
), which
))
288 /* Force FUNEXP into a form suitable for the address of a CALL,
289 and return that as an rtx. Also load the static chain register
290 if FNDECL is a nested function.
292 CALL_FUSAGE points to a variable holding the prospective
293 CALL_INSN_FUNCTION_USAGE information. */
/* NOTE(review): interior lines are elided here (numbering jumps 296->302,
   317->320, 325->329) — the parameter declarations, opening brace, the
   `else' arm around the NO_FUNCTION_CSE code, the #endif lines and the
   return statement are missing from view.  */
296 prepare_call_address (funexp
, fndecl
, call_fusage
, reg_parm_seen
)
302 rtx static_chain_value
= 0;
/* Remove any pending queue entries so FUNEXP is a plain value.  */
304 funexp
= protect_from_queue (funexp
, 0);
307 /* Get possible static chain value for nested function in C. */
308 static_chain_value
= lookup_static_chain (fndecl
);
310 /* Make a valid memory address and copy constants thru pseudo-regs,
311 but not for a constant address if -fno-function-cse. */
312 if (GET_CODE (funexp
) != SYMBOL_REF
)
313 /* If we are using registers for parameters, force the
314 function address into a register now. */
315 funexp
= ((SMALL_REGISTER_CLASSES
&& reg_parm_seen
)
316 ? force_not_mem (memory_address (FUNCTION_MODE
, funexp
))
317 : memory_address (FUNCTION_MODE
, funexp
));
320 #ifndef NO_FUNCTION_CSE
321 if (optimize
&& ! flag_no_function_cse
)
322 #ifdef NO_RECURSIVE_FUNCTION_CSE
/* Don't CSE a recursive call's own address on targets that forbid it.  */
323 if (fndecl
!= current_function_decl
)
325 funexp
= force_reg (Pmode
, funexp
);
/* If this is a nested function, load the static chain register and
   record the use in CALL_FUSAGE so flow analysis keeps it live.  */
329 if (static_chain_value
!= 0)
331 emit_move_insn (static_chain_rtx
, static_chain_value
);
333 if (GET_CODE (static_chain_rtx
) == REG
)
334 use_reg (call_fusage
, static_chain_rtx
);
340 /* Generate instructions to call function FUNEXP,
341 and optionally pop the results.
342 The CALL_INSN is the first insn generated.
344 FNDECL is the declaration node of the function. This is given to the
345 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
347 FUNTYPE is the data type of the function. This is given to the macro
348 RETURN_POPS_ARGS to determine whether this function pops its own args.
349 We used to allow an identifier for library functions, but that doesn't
350 work when the return type is an aggregate type and the calling convention
351 says that the pointer to this aggregate is to be popped by the callee.
353 STACK_SIZE is the number of bytes of arguments on the stack,
354 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
355 This is both to put into the call insn and
356 to generate explicit popping code if necessary.
358 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
359 It is zero if this call doesn't want a structure value.
361 NEXT_ARG_REG is the rtx that results from executing
362 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
363 just after all the args have had their registers assigned.
364 This could be whatever you like, but normally it is the first
365 arg-register beyond those used for args in this call,
366 or 0 if all the arg-registers are used in this call.
367 It is passed on to `gen_call' so you can put this info in the call insn.
369 VALREG is a hard register in which a value is returned,
370 or 0 if the call does not return a value.
372 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
373 the args to this call were processed.
374 We restore `inhibit_defer_pop' to that value.
376 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
377 denote registers used by the called function.
379 IS_CONST is true if this is a `const' call. */
/* NOTE(review): this fragment is heavily elided by the extraction (the
   numbering jumps, e.g. 404->407, 440->447, 458->464, 520->end) — the
   remaining parameter declarations, braces, several #else/#endif lines
   and the final `abort'/pop bookkeeping are missing from view.  */
382 emit_call_1 (funexp
, fndecl
, funtype
, stack_size
, rounded_stack_size
,
383 struct_value_size
, next_arg_reg
, valreg
, old_inhibit_defer_pop
,
384 call_fusage
, is_const
)
386 tree fndecl ATTRIBUTE_UNUSED
;
387 tree funtype ATTRIBUTE_UNUSED
;
388 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED
;
389 HOST_WIDE_INT rounded_stack_size
;
390 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED
;
393 int old_inhibit_defer_pop
;
397 rtx rounded_stack_size_rtx
= GEN_INT (rounded_stack_size
);
398 #if defined (HAVE_call) && defined (HAVE_call_value)
399 rtx struct_value_size_rtx
= GEN_INT (struct_value_size
);
402 #ifndef ACCUMULATE_OUTGOING_ARGS
403 int already_popped
= 0;
/* Number of argument bytes the callee itself pops, per the target's
   RETURN_POPS_ARGS macro.  */
404 HOST_WIDE_INT n_popped
= RETURN_POPS_ARGS (fndecl
, funtype
, stack_size
);
407 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
408 and we don't want to load it into a register as an optimization,
409 because prepare_call_address already did it if it should be done. */
410 if (GET_CODE (funexp
) != SYMBOL_REF
)
411 funexp
= memory_address (FUNCTION_MODE
, funexp
);
413 #ifndef ACCUMULATE_OUTGOING_ARGS
414 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
415 /* If the target has "call" or "call_value" insns, then prefer them
416 if no arguments are actually popped. If the target does not have
417 "call" or "call_value" insns, then we must use the popping versions
418 even if the call has no arguments to pop. */
419 #if defined (HAVE_call) && defined (HAVE_call_value)
420 if (HAVE_call
&& HAVE_call_value
&& HAVE_call_pop
&& HAVE_call_value_pop
423 if (HAVE_call_pop
&& HAVE_call_value_pop
)
426 rtx n_pop
= GEN_INT (n_popped
);
429 /* If this subroutine pops its own args, record that in the call insn
430 if possible, for the sake of frame pointer elimination. */
/* Use the popping call patterns; value-returning calls get
   call_value_pop, void calls get call_pop.  */
433 pat
= gen_call_value_pop (valreg
,
434 gen_rtx_MEM (FUNCTION_MODE
, funexp
),
435 rounded_stack_size_rtx
, next_arg_reg
, n_pop
);
437 pat
= gen_call_pop (gen_rtx_MEM (FUNCTION_MODE
, funexp
),
438 rounded_stack_size_rtx
, next_arg_reg
, n_pop
);
440 emit_call_insn (pat
);
447 #if defined (HAVE_call) && defined (HAVE_call_value)
448 if (HAVE_call
&& HAVE_call_value
)
/* Non-popping call patterns.  */
451 emit_call_insn (gen_call_value (valreg
,
452 gen_rtx_MEM (FUNCTION_MODE
, funexp
),
453 rounded_stack_size_rtx
, next_arg_reg
,
456 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE
, funexp
),
457 rounded_stack_size_rtx
, next_arg_reg
,
458 struct_value_size_rtx
));
464 /* Find the CALL insn we just emitted. */
465 for (call_insn
= get_last_insn ();
466 call_insn
&& GET_CODE (call_insn
) != CALL_INSN
;
467 call_insn
= PREV_INSN (call_insn
))
473 /* Put the register usage information on the CALL. If there is already
474 some usage information, put ours at the end. */
475 if (CALL_INSN_FUNCTION_USAGE (call_insn
))
479 for (link
= CALL_INSN_FUNCTION_USAGE (call_insn
); XEXP (link
, 1) != 0;
480 link
= XEXP (link
, 1))
483 XEXP (link
, 1) = call_fusage
;
486 CALL_INSN_FUNCTION_USAGE (call_insn
) = call_fusage
;
488 /* If this is a const call, then set the insn's unchanging bit. */
490 CONST_CALL_P (call_insn
) = 1;
492 /* Restore this now, so that we do defer pops for this call's args
493 if the context of the call as a whole permits. */
494 inhibit_defer_pop
= old_inhibit_defer_pop
;
496 #ifndef ACCUMULATE_OUTGOING_ARGS
497 /* If returning from the subroutine does not automatically pop the args,
498 we need an instruction to pop them sooner or later.
499 Perhaps do it now; perhaps just record how much space to pop later.
501 If returning from the subroutine does pop the args, indicate that the
502 stack pointer will be changed. */
/* Record the callee's stack-pointer clobber on the call insn, and deduct
   the callee-popped bytes from the amount we still need to pop.  */
507 CALL_INSN_FUNCTION_USAGE (call_insn
)
508 = gen_rtx_EXPR_LIST (VOIDmode
,
509 gen_rtx_CLOBBER (VOIDmode
, stack_pointer_rtx
),
510 CALL_INSN_FUNCTION_USAGE (call_insn
));
511 rounded_stack_size
-= n_popped
;
512 rounded_stack_size_rtx
= GEN_INT (rounded_stack_size
);
/* Pop the remainder now, or defer it into pending_stack_adjust when
   -fdefer-pop allows (not for const calls, whose pops must not merge).  */
515 if (rounded_stack_size
!= 0)
517 if (flag_defer_pop
&& inhibit_defer_pop
== 0 && !is_const
)
518 pending_stack_adjust
+= rounded_stack_size
;
520 adjust_stack (rounded_stack_size_rtx
);
525 /* Determine if the function identified by NAME and FNDECL is one with
526 special properties we wish to know about.
528 For example, if the function might return more than one time (setjmp), then
529 set RETURNS_TWICE to a nonzero value.
531 Similarly set IS_LONGJMP for if the function is in the longjmp family.
533 Set IS_MALLOC for any of the standard memory allocation functions which
534 allocate from the heap.
536 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
537 space from the stack such as alloca. */
/* NOTE(review): interior lines are elided (numbering jumps 541->554,
   584->594, etc.) — the parameter declarations, the `tname' prefix
   stripping assignments, and the flag-setting statements attached to the
   string comparisons below are missing from view.  */
540 special_function_p (fndecl
, returns_twice
, is_longjmp
, fork_or_exec
,
541 is_malloc
, may_be_alloca
)
554 /* The function decl may have the `malloc' attribute. */
555 *is_malloc
= fndecl
&& DECL_IS_MALLOC (fndecl
);
/* Name-based recognition only applies to short, file-scope, extern
   declarations — anything else cannot be the reserved library function.  */
558 && fndecl
&& DECL_NAME (fndecl
)
559 && IDENTIFIER_LENGTH (DECL_NAME (fndecl
)) <= 17
560 /* Exclude functions not at the file scope, or not `extern',
561 since they are not the magic functions we would otherwise
563 && DECL_CONTEXT (fndecl
) == NULL_TREE
&& TREE_PUBLIC (fndecl
))
565 char *name
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
568 /* We assume that alloca will always be called by name. It
569 makes no sense to pass it as a pointer-to-function to
570 anything that does not understand its behavior. */
572 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl
)) == 6
574 && ! strcmp (name
, "alloca"))
575 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl
)) == 16
577 && ! strcmp (name
, "__builtin_alloca"))));
579 /* Disregard prefix _, __ or __x. */
582 if (name
[1] == '_' && name
[2] == 'x')
584 else if (name
[1] == '_')
/* setjmp family (returns twice).  */
594 && (! strcmp (tname
, "setjmp")
595 || ! strcmp (tname
, "setjmp_syscall")))
597 && ! strcmp (tname
, "sigsetjmp"))
599 && ! strcmp (tname
, "savectx")));
601 && ! strcmp (tname
, "siglongjmp"))
604 else if ((tname
[0] == 'q' && tname
[1] == 's'
605 && ! strcmp (tname
, "qsetjmp"))
606 || (tname
[0] == 'v' && tname
[1] == 'f'
607 && ! strcmp (tname
, "vfork")))
/* longjmp family.  */
610 else if (tname
[0] == 'l' && tname
[1] == 'o'
611 && ! strcmp (tname
, "longjmp"))
/* fork/exec family (FORK_OR_EXEC), including Linux __clone.  */
614 else if ((tname
[0] == 'f' && tname
[1] == 'o'
615 && ! strcmp (tname
, "fork"))
616 /* Linux specific: __clone. check NAME to insist on the
617 leading underscores, to avoid polluting the ISO / POSIX
619 || (name
[0] == '_' && name
[1] == '_'
620 && ! strcmp (tname
, "clone"))
621 || (tname
[0] == 'e' && tname
[1] == 'x' && tname
[2] == 'e'
622 && tname
[3] == 'c' && (tname
[4] == 'l' || tname
[4] == 'v')
624 || ((tname
[5] == 'p' || tname
[5] == 'e')
625 && tname
[6] == '\0'))))
628 /* Do not add any more malloc-like functions to this list,
629 instead mark them as malloc functions using the malloc attribute.
630 Note, realloc is not suitable for attribute malloc since
631 it may return the same address across multiple calls. */
632 else if (! strcmp (tname
, "malloc")
633 || ! strcmp (tname
, "calloc")
634 || ! strcmp (tname
, "strdup")
635 /* Note use of NAME rather than TNAME here. These functions
636 are only reserved when preceded with __. */
637 || ! strcmp (name
, "__vn") /* mangled __builtin_vec_new */
638 || ! strcmp (name
, "__nw") /* mangled __builtin_new */
639 || ! strcmp (name
, "__builtin_new")
640 || ! strcmp (name
, "__builtin_vec_new"))
645 /* Precompute all register parameters as described by ARGS, storing values
646 into fields within the ARGS array.
648 NUM_ACTUALS indicates the total number elements in the ARGS array.
650 Set REG_PARM_SEEN if we encounter a register parameter. */
/* NOTE(review): interior lines are elided (numbering jumps 655->662,
   672->675, 687->689) — the remaining parameter declarations, braces,
   the emit_queue/sequence-point statement and the *reg_parm_seen = 1
   assignment are missing from view.  */
653 precompute_register_parameters (num_actuals
, args
, reg_parm_seen
)
655 struct arg_data
*args
;
/* Only arguments passed (at least partly) in registers and not forced
   onto the stack are precomputed here.  */
662 for (i
= 0; i
< num_actuals
; i
++)
663 if (args
[i
].reg
!= 0 && ! args
[i
].pass_on_stack
)
/* Expand the argument expression now if it has not been computed yet.  */
667 if (args
[i
].value
== 0)
670 args
[i
].value
= expand_expr (args
[i
].tree_value
, NULL_RTX
,
672 preserve_temp_slots (args
[i
].value
);
675 /* ANSI doesn't require a sequence point here,
676 but PCC has one, so this will avoid some problems. */
680 /* If we are to promote the function arg to a wider mode,
683 if (args
[i
].mode
!= TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)))
685 = convert_modes (args
[i
].mode
,
686 TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)),
687 args
[i
].value
, args
[i
].unsignedp
);
689 /* If the value is expensive, and we are inside an appropriately
690 short loop, put the value into a pseudo and then put the pseudo
693 For small register classes, also do this if this call uses
694 register parameters. This is to avoid reload conflicts while
695 loading the parameters registers. */
697 if ((! (GET_CODE (args
[i
].value
) == REG
698 || (GET_CODE (args
[i
].value
) == SUBREG
699 && GET_CODE (SUBREG_REG (args
[i
].value
)) == REG
)))
700 && args
[i
].mode
!= BLKmode
701 && rtx_cost (args
[i
].value
, SET
) > 2
702 && ((SMALL_REGISTER_CLASSES
&& *reg_parm_seen
)
703 || preserve_subexpressions_p ()))
704 args
[i
].value
= copy_to_mode_reg (args
[i
].mode
, args
[i
].value
);
708 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
710 /* The argument list is the property of the called routine and it
711 may clobber it. If the fixed area has been used for previous
712 parameters, we must save and restore it. */
/* Scans stack_usage_map over the register-parm area to find the in-use
   byte range [*LOW_TO_SAVE, *HIGH_TO_SAVE], then copies that slice of
   the outgoing-argument block into SAVE_AREA (a pseudo, or a BLKmode
   stack temp when alignment forces it) and returns it.
   NOTE(review): interior lines are elided (numbering jumps 716->722,
   735->741, 756->759, 776->end) — the remaining parameter declarations,
   braces, the *low/high assignments, the plus_constant offsets and the
   return statement are missing from view.  */
714 save_fixed_argument_area (reg_parm_stack_space
, argblock
,
715 low_to_save
, high_to_save
)
716 int reg_parm_stack_space
;
722 rtx save_area
= NULL_RTX
;
724 /* Compute the boundary of the that needs to be saved, if any. */
725 #ifdef ARGS_GROW_DOWNWARD
726 for (i
= 0; i
< reg_parm_stack_space
+ 1; i
++)
728 for (i
= 0; i
< reg_parm_stack_space
; i
++)
/* Bytes never touched need not be saved.  */
731 if (i
>= highest_outgoing_arg_in_use
732 || stack_usage_map
[i
] == 0)
735 if (*low_to_save
== -1)
741 if (*low_to_save
>= 0)
743 int num_to_save
= *high_to_save
- *low_to_save
+ 1;
744 enum machine_mode save_mode
745 = mode_for_size (num_to_save
* BITS_PER_UNIT
, MODE_INT
, 1);
748 /* If we don't have the required alignment, must do this in BLKmode. */
749 if ((*low_to_save
& (MIN (GET_MODE_SIZE (save_mode
),
750 BIGGEST_ALIGNMENT
/ UNITS_PER_WORD
) - 1)))
753 #ifdef ARGS_GROW_DOWNWARD
754 stack_area
= gen_rtx_MEM (save_mode
,
755 memory_address (save_mode
,
756 plus_constant (argblock
,
759 stack_area
= gen_rtx_MEM (save_mode
,
760 memory_address (save_mode
,
761 plus_constant (argblock
,
764 if (save_mode
== BLKmode
)
766 save_area
= assign_stack_temp (BLKmode
, num_to_save
, 0);
767 /* Cannot use emit_block_move here because it can be done by a library
768 call which in turn gets into this place again and deadly infinite
769 recursion happens. */
770 move_by_pieces (validize_mem (save_area
), stack_area
, num_to_save
,
771 PARM_BOUNDARY
/ BITS_PER_UNIT
);
/* Aligned, register-sized range: a plain move into a pseudo suffices.  */
775 save_area
= gen_reg_rtx (save_mode
);
776 emit_move_insn (save_area
, stack_area
);
/* Inverse of save_fixed_argument_area: copy SAVE_AREA back into the
   [LOW_TO_SAVE, HIGH_TO_SAVE] slice of the outgoing-argument block at
   ARGBLOCK.  A register-mode save is restored with one move; a BLKmode
   save is restored with move_by_pieces (emit_block_move would recurse —
   see the comment below).
   NOTE(review): interior lines are elided (numbering jumps 783->789,
   794->798) — the parameter declarations, braces and the plus_constant
   offset expressions are missing from view.  */
783 restore_fixed_argument_area (save_area
, argblock
, high_to_save
, low_to_save
)
789 enum machine_mode save_mode
= GET_MODE (save_area
);
790 #ifdef ARGS_GROW_DOWNWARD
792 = gen_rtx_MEM (save_mode
,
793 memory_address (save_mode
,
794 plus_constant (argblock
,
798 = gen_rtx_MEM (save_mode
,
799 memory_address (save_mode
,
800 plus_constant (argblock
,
804 if (save_mode
!= BLKmode
)
805 emit_move_insn (stack_area
, save_area
);
807 /* Cannot use emit_block_move here because it can be done by a library
808 call which in turn gets into this place again and deadly infinite
809 recursion happens. */
810 move_by_pieces (stack_area
, validize_mem (save_area
),
811 high_to_save
- low_to_save
+ 1,
812 PARM_BOUNDARY
/ BITS_PER_UNIT
);
816 /* If any elements in ARGS refer to parameters that are to be passed in
817 registers, but not in memory, and whose alignment does not permit a
818 direct copy into registers. Copy the values into a group of pseudos
819 which we will later copy into the appropriate hard registers.
821 Pseudos for each unaligned argument will be stored into the array
822 args[argnum].aligned_regs. The caller is responsible for deallocating
823 the aligned_regs array if it is nonzero. */
/* NOTE(review): interior lines are elided (numbering jumps 827->832,
   879->882) — the remaining parameter declaration, braces, loop-variable
   declarations and part of the extract_bit_field argument list are
   missing from view.  */
826 store_unaligned_arguments_into_pseudos (args
, num_actuals
)
827 struct arg_data
*args
;
/* Only BLKmode register args whose type alignment is below the usable
   word alignment need the pseudo-copy treatment.  */
832 for (i
= 0; i
< num_actuals
; i
++)
833 if (args
[i
].reg
!= 0 && ! args
[i
].pass_on_stack
834 && args
[i
].mode
== BLKmode
835 && (TYPE_ALIGN (TREE_TYPE (args
[i
].tree_value
))
836 < (unsigned int) MIN (BIGGEST_ALIGNMENT
, BITS_PER_WORD
)))
838 int bytes
= int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
));
839 int big_endian_correction
= 0;
/* One pseudo per word actually passed in registers; `partial' (when
   nonzero) limits how many words go in registers.  */
841 args
[i
].n_aligned_regs
842 = args
[i
].partial
? args
[i
].partial
843 : (bytes
+ (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
845 args
[i
].aligned_regs
= (rtx
*) xmalloc (sizeof (rtx
)
846 * args
[i
].n_aligned_regs
);
848 /* Structures smaller than a word are aligned to the least
849 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
850 this means we must skip the empty high order bytes when
851 calculating the bit offset. */
852 if (BYTES_BIG_ENDIAN
&& bytes
< UNITS_PER_WORD
)
853 big_endian_correction
= (BITS_PER_WORD
- (bytes
* BITS_PER_UNIT
));
/* Copy each word of the argument into a fresh pseudo via bit-field
   extract/store, which tolerates the under-aligned source.  */
855 for (j
= 0; j
< args
[i
].n_aligned_regs
; j
++)
857 rtx reg
= gen_reg_rtx (word_mode
);
858 rtx word
= operand_subword_force (args
[i
].value
, j
, BLKmode
);
859 int bitsize
= MIN (bytes
* BITS_PER_UNIT
, BITS_PER_WORD
);
860 int bitalign
= TYPE_ALIGN (TREE_TYPE (args
[i
].tree_value
));
862 args
[i
].aligned_regs
[j
] = reg
;
864 /* There is no need to restrict this code to loading items
865 in TYPE_ALIGN sized hunks. The bitfield instructions can
866 load up entire word sized registers efficiently.
868 ??? This may not be needed anymore.
869 We use to emit a clobber here but that doesn't let later
870 passes optimize the instructions we emit. By storing 0 into
871 the register later passes know the first AND to zero out the
872 bitfield being set in the register is unnecessary. The store
873 of 0 will be deleted as will at least the first AND. */
875 emit_move_insn (reg
, const0_rtx
);
877 bytes
-= bitsize
/ BITS_PER_UNIT
;
878 store_bit_field (reg
, bitsize
, big_endian_correction
, word_mode
,
879 extract_bit_field (word
, bitsize
, 0, 1,
882 bitalign
/ BITS_PER_UNIT
,
884 bitalign
/ BITS_PER_UNIT
, BITS_PER_WORD
);
889 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
892 NUM_ACTUALS is the total number of parameters.
894 N_NAMED_ARGS is the total number of named arguments.
896 FNDECL is the tree code for the target of this call (if known)
898 ARGS_SO_FAR holds state needed by the target to know where to place
901 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
902 for arguments which are passed in registers.
904 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
905 and may be modified by this routine.
907 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
908 flags which may may be modified by this routine. */
911 initialize_argument_information (num_actuals
, args
, args_size
, n_named_args
,
912 actparms
, fndecl
, args_so_far
,
913 reg_parm_stack_space
, old_stack_level
,
914 old_pending_adj
, must_preallocate
, is_const
)
915 int num_actuals ATTRIBUTE_UNUSED
;
916 struct arg_data
*args
;
917 struct args_size
*args_size
;
918 int n_named_args ATTRIBUTE_UNUSED
;
921 CUMULATIVE_ARGS
*args_so_far
;
922 int reg_parm_stack_space
;
923 rtx
*old_stack_level
;
924 int *old_pending_adj
;
925 int *must_preallocate
;
928 /* 1 if scanning parms front to back, -1 if scanning back to front. */
931 /* Count arg position in order args appear. */
934 struct args_size alignment_pad
;
938 args_size
->constant
= 0;
941 /* In this loop, we consider args in the order they are written.
942 We fill up ARGS from the front or from the back if necessary
943 so that in any case the first arg to be pushed ends up at the front. */
945 #ifdef PUSH_ARGS_REVERSED
946 i
= num_actuals
- 1, inc
= -1;
947 /* In this case, must reverse order of args
948 so that we compute and push the last arg first. */
953 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
954 for (p
= actparms
, argpos
= 0; p
; p
= TREE_CHAIN (p
), i
+= inc
, argpos
++)
956 tree type
= TREE_TYPE (TREE_VALUE (p
));
958 enum machine_mode mode
;
960 args
[i
].tree_value
= TREE_VALUE (p
);
962 /* Replace erroneous argument with constant zero. */
963 if (type
== error_mark_node
|| TYPE_SIZE (type
) == 0)
964 args
[i
].tree_value
= integer_zero_node
, type
= integer_type_node
;
966 /* If TYPE is a transparent union, pass things the way we would
967 pass the first field of the union. We have already verified that
968 the modes are the same. */
969 if (TYPE_TRANSPARENT_UNION (type
))
970 type
= TREE_TYPE (TYPE_FIELDS (type
));
972 /* Decide where to pass this arg.
974 args[i].reg is nonzero if all or part is passed in registers.
976 args[i].partial is nonzero if part but not all is passed in registers,
977 and the exact value says how many words are passed in registers.
979 args[i].pass_on_stack is nonzero if the argument must at least be
980 computed on the stack. It may then be loaded back into registers
981 if args[i].reg is nonzero.
983 These decisions are driven by the FUNCTION_... macros and must agree
984 with those made by function.c. */
986 /* See if this argument should be passed by invisible reference. */
987 if ((TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
988 && contains_placeholder_p (TYPE_SIZE (type
)))
989 || TREE_ADDRESSABLE (type
)
990 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
991 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far
, TYPE_MODE (type
),
992 type
, argpos
< n_named_args
)
996 /* If we're compiling a thunk, pass through invisible
997 references instead of making a copy. */
998 if (current_function_is_thunk
999 #ifdef FUNCTION_ARG_CALLEE_COPIES
1000 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far
, TYPE_MODE (type
),
1001 type
, argpos
< n_named_args
)
1002 /* If it's in a register, we must make a copy of it too. */
1003 /* ??? Is this a sufficient test? Is there a better one? */
1004 && !(TREE_CODE (args
[i
].tree_value
) == VAR_DECL
1005 && REG_P (DECL_RTL (args
[i
].tree_value
)))
1006 && ! TREE_ADDRESSABLE (type
))
1010 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1011 new object from the argument. If we are passing by
1012 invisible reference, the callee will do that for us, so we
1013 can strip off the TARGET_EXPR. This is not always safe,
1014 but it is safe in the only case where this is a useful
1015 optimization; namely, when the argument is a plain object.
1016 In that case, the frontend is just asking the backend to
1017 make a bitwise copy of the argument. */
1019 if (TREE_CODE (args
[i
].tree_value
) == TARGET_EXPR
1020 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
1021 (args
[i
].tree_value
, 1)))
1023 && ! REG_P (DECL_RTL (TREE_OPERAND (args
[i
].tree_value
, 1))))
1024 args
[i
].tree_value
= TREE_OPERAND (args
[i
].tree_value
, 1);
1026 args
[i
].tree_value
= build1 (ADDR_EXPR
,
1027 build_pointer_type (type
),
1028 args
[i
].tree_value
);
1029 type
= build_pointer_type (type
);
1033 /* We make a copy of the object and pass the address to the
1034 function being called. */
1037 if (TYPE_SIZE (type
) == 0
1038 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
1039 || (flag_stack_check
&& ! STACK_CHECK_BUILTIN
1040 && (TREE_INT_CST_HIGH (TYPE_SIZE (type
)) != 0
1041 || (TREE_INT_CST_LOW (TYPE_SIZE (type
))
1042 > STACK_CHECK_MAX_VAR_SIZE
* BITS_PER_UNIT
))))
1044 /* This is a variable-sized object. Make space on the stack
1046 rtx size_rtx
= expr_size (TREE_VALUE (p
));
1048 if (*old_stack_level
== 0)
1050 emit_stack_save (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1051 *old_pending_adj
= pending_stack_adjust
;
1052 pending_stack_adjust
= 0;
1055 copy
= gen_rtx_MEM (BLKmode
,
1056 allocate_dynamic_stack_space (size_rtx
,
1058 TYPE_ALIGN (type
)));
1062 int size
= int_size_in_bytes (type
);
1063 copy
= assign_stack_temp (TYPE_MODE (type
), size
, 0);
1066 MEM_SET_IN_STRUCT_P (copy
, AGGREGATE_TYPE_P (type
));
1068 store_expr (args
[i
].tree_value
, copy
, 0);
1071 args
[i
].tree_value
= build1 (ADDR_EXPR
,
1072 build_pointer_type (type
),
1073 make_tree (type
, copy
));
1074 type
= build_pointer_type (type
);
1078 mode
= TYPE_MODE (type
);
1079 unsignedp
= TREE_UNSIGNED (type
);
1081 #ifdef PROMOTE_FUNCTION_ARGS
1082 mode
= promote_mode (type
, mode
, &unsignedp
, 1);
1085 args
[i
].unsignedp
= unsignedp
;
1086 args
[i
].mode
= mode
;
1087 args
[i
].reg
= FUNCTION_ARG (*args_so_far
, mode
, type
,
1088 argpos
< n_named_args
);
1089 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1092 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far
, mode
, type
,
1093 argpos
< n_named_args
);
1096 args
[i
].pass_on_stack
= MUST_PASS_IN_STACK (mode
, type
);
1098 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1099 it means that we are to pass this arg in the register(s) designated
1100 by the PARALLEL, but also to pass it in the stack. */
1101 if (args
[i
].reg
&& GET_CODE (args
[i
].reg
) == PARALLEL
1102 && XEXP (XVECEXP (args
[i
].reg
, 0, 0), 0) == 0)
1103 args
[i
].pass_on_stack
= 1;
1105 /* If this is an addressable type, we must preallocate the stack
1106 since we must evaluate the object into its final location.
1108 If this is to be passed in both registers and the stack, it is simpler
1110 if (TREE_ADDRESSABLE (type
)
1111 || (args
[i
].pass_on_stack
&& args
[i
].reg
!= 0))
1112 *must_preallocate
= 1;
1114 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1115 we cannot consider this function call constant. */
1116 if (TREE_ADDRESSABLE (type
))
1119 /* Compute the stack-size of this argument. */
1120 if (args
[i
].reg
== 0 || args
[i
].partial
!= 0
1121 || reg_parm_stack_space
> 0
1122 || args
[i
].pass_on_stack
)
1123 locate_and_pad_parm (mode
, type
,
1124 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1129 fndecl
, args_size
, &args
[i
].offset
,
1130 &args
[i
].size
, &alignment_pad
);
1132 #ifndef ARGS_GROW_DOWNWARD
1133 args
[i
].slot_offset
= *args_size
;
1136 args
[i
].alignment_pad
= alignment_pad
;
1138 /* If a part of the arg was put into registers,
1139 don't include that part in the amount pushed. */
1140 if (reg_parm_stack_space
== 0 && ! args
[i
].pass_on_stack
)
1141 args
[i
].size
.constant
-= ((args
[i
].partial
* UNITS_PER_WORD
)
1142 / (PARM_BOUNDARY
/ BITS_PER_UNIT
)
1143 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
1145 /* Update ARGS_SIZE, the total stack space for args so far. */
1147 args_size
->constant
+= args
[i
].size
.constant
;
1148 if (args
[i
].size
.var
)
1150 ADD_PARM_SIZE (*args_size
, args
[i
].size
.var
);
1153 /* Since the slot offset points to the bottom of the slot,
1154 we must record it after incrementing if the args grow down. */
1155 #ifdef ARGS_GROW_DOWNWARD
1156 args
[i
].slot_offset
= *args_size
;
1158 args
[i
].slot_offset
.constant
= -args_size
->constant
;
1161 SUB_PARM_SIZE (args
[i
].slot_offset
, args_size
->var
);
1165 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1166 have been used, etc. */
1168 FUNCTION_ARG_ADVANCE (*args_so_far
, TYPE_MODE (type
), type
,
1169 argpos
< n_named_args
);
1173 /* Update ARGS_SIZE to contain the total size for the argument block.
1174 Return the original constant component of the argument block's size.
1176 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1177 for arguments passed in registers. */
1180 compute_argument_block_size (reg_parm_stack_space
, args_size
,
1181 preferred_stack_boundary
)
1182 int reg_parm_stack_space
;
1183 struct args_size
*args_size
;
1184 int preferred_stack_boundary ATTRIBUTE_UNUSED
;
1186 int unadjusted_args_size
= args_size
->constant
;
1188 /* Compute the actual size of the argument block required. The variable
1189 and constant sizes must be combined, the size may have to be rounded,
1190 and there may be a minimum required size. */
1194 args_size
->var
= ARGS_SIZE_TREE (*args_size
);
1195 args_size
->constant
= 0;
1197 #ifdef PREFERRED_STACK_BOUNDARY
1198 preferred_stack_boundary
/= BITS_PER_UNIT
;
1199 if (preferred_stack_boundary
> 1)
1200 args_size
->var
= round_up (args_size
->var
, preferred_stack_boundary
);
1203 if (reg_parm_stack_space
> 0)
1206 = size_binop (MAX_EXPR
, args_size
->var
,
1207 size_int (reg_parm_stack_space
));
1209 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1210 /* The area corresponding to register parameters is not to count in
1211 the size of the block we need. So make the adjustment. */
1213 = size_binop (MINUS_EXPR
, args_size
->var
,
1214 size_int (reg_parm_stack_space
));
1220 #ifdef PREFERRED_STACK_BOUNDARY
1221 preferred_stack_boundary
/= BITS_PER_UNIT
;
1222 args_size
->constant
= (((args_size
->constant
1223 + pending_stack_adjust
1224 + preferred_stack_boundary
- 1)
1225 / preferred_stack_boundary
1226 * preferred_stack_boundary
)
1227 - pending_stack_adjust
);
1230 args_size
->constant
= MAX (args_size
->constant
,
1231 reg_parm_stack_space
);
1233 #ifdef MAYBE_REG_PARM_STACK_SPACE
1234 if (reg_parm_stack_space
== 0)
1235 args_size
->constant
= 0;
1238 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1239 args_size
->constant
-= reg_parm_stack_space
;
1242 return unadjusted_args_size
;
1245 /* Precompute parameters as needed for a function call.
1247 IS_CONST indicates the target function is a pure function.
1249 MUST_PREALLOCATE indicates that we must preallocate stack space for
1250 any stack arguments.
1252 NUM_ACTUALS is the number of arguments.
1254 ARGS is an array containing information for each argument; this routine
1255 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1257 ARGS_SIZE contains information about the size of the arg list. */
1260 precompute_arguments (is_const
, must_preallocate
, num_actuals
, args
, args_size
)
1262 int must_preallocate
;
1264 struct arg_data
*args
;
1265 struct args_size
*args_size
;
1269 /* If this function call is cse'able, precompute all the parameters.
1270 Note that if the parameter is constructed into a temporary, this will
1271 cause an additional copy because the parameter will be constructed
1272 into a temporary location and then copied into the outgoing arguments.
1273 If a parameter contains a call to alloca and this function uses the
1274 stack, precompute the parameter. */
1276 /* If we preallocated the stack space, and some arguments must be passed
1277 on the stack, then we must precompute any parameter which contains a
1278 function call which will store arguments on the stack.
1279 Otherwise, evaluating the parameter may clobber previous parameters
1280 which have already been stored into the stack. */
1282 for (i
= 0; i
< num_actuals
; i
++)
1284 || ((args_size
->var
!= 0 || args_size
->constant
!= 0)
1285 && calls_function (args
[i
].tree_value
, 1))
1286 || (must_preallocate
1287 && (args_size
->var
!= 0 || args_size
->constant
!= 0)
1288 && calls_function (args
[i
].tree_value
, 0)))
1290 /* If this is an addressable type, we cannot pre-evaluate it. */
1291 if (TREE_ADDRESSABLE (TREE_TYPE (args
[i
].tree_value
)))
1297 = expand_expr (args
[i
].tree_value
, NULL_RTX
, VOIDmode
, 0);
1299 preserve_temp_slots (args
[i
].value
);
1302 /* ANSI doesn't require a sequence point here,
1303 but PCC has one, so this will avoid some problems. */
1306 args
[i
].initial_value
= args
[i
].value
1307 = protect_from_queue (args
[i
].value
, 0);
1309 if (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) != args
[i
].mode
)
1312 = convert_modes (args
[i
].mode
,
1313 TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)),
1314 args
[i
].value
, args
[i
].unsignedp
);
1315 #ifdef PROMOTE_FOR_CALL_ONLY
1316 /* CSE will replace this only if it contains args[i].value
1317 pseudo, so convert it down to the declared mode using
1319 if (GET_CODE (args
[i
].value
) == REG
1320 && GET_MODE_CLASS (args
[i
].mode
) == MODE_INT
)
1322 args
[i
].initial_value
1323 = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)),
1325 SUBREG_PROMOTED_VAR_P (args
[i
].initial_value
) = 1;
1326 SUBREG_PROMOTED_UNSIGNED_P (args
[i
].initial_value
)
1327 = args
[i
].unsignedp
;
1334 /* Given the current state of MUST_PREALLOCATE and information about
1335 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1336 compute and return the final value for MUST_PREALLOCATE. */
1339 finalize_must_preallocate (must_preallocate
, num_actuals
, args
, args_size
)
1340 int must_preallocate
;
1342 struct arg_data
*args
;
1343 struct args_size
*args_size
;
1345 /* See if we have or want to preallocate stack space.
1347 If we would have to push a partially-in-regs parm
1348 before other stack parms, preallocate stack space instead.
1350 If the size of some parm is not a multiple of the required stack
1351 alignment, we must preallocate.
1353 If the total size of arguments that would otherwise create a copy in
1354 a temporary (such as a CALL) is more than half the total argument list
1355 size, preallocation is faster.
1357 Another reason to preallocate is if we have a machine (like the m88k)
1358 where stack alignment is required to be maintained between every
1359 pair of insns, not just when the call is made. However, we assume here
1360 that such machines either do not have push insns (and hence preallocation
1361 would occur anyway) or the problem is taken care of with
1364 if (! must_preallocate
)
1366 int partial_seen
= 0;
1367 int copy_to_evaluate_size
= 0;
1370 for (i
= 0; i
< num_actuals
&& ! must_preallocate
; i
++)
1372 if (args
[i
].partial
> 0 && ! args
[i
].pass_on_stack
)
1374 else if (partial_seen
&& args
[i
].reg
== 0)
1375 must_preallocate
= 1;
1377 if (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) == BLKmode
1378 && (TREE_CODE (args
[i
].tree_value
) == CALL_EXPR
1379 || TREE_CODE (args
[i
].tree_value
) == TARGET_EXPR
1380 || TREE_CODE (args
[i
].tree_value
) == COND_EXPR
1381 || TREE_ADDRESSABLE (TREE_TYPE (args
[i
].tree_value
))))
1382 copy_to_evaluate_size
1383 += int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
));
1386 if (copy_to_evaluate_size
* 2 >= args_size
->constant
1387 && args_size
->constant
> 0)
1388 must_preallocate
= 1;
1390 return must_preallocate
;
1393 /* If we preallocated stack space, compute the address of each argument
1394 and store it into the ARGS array.
1396 We need not ensure it is a valid memory address here; it will be
1397 validized when it is used.
1399 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1402 compute_argument_addresses (args
, argblock
, num_actuals
)
1403 struct arg_data
*args
;
1409 rtx arg_reg
= argblock
;
1410 int i
, arg_offset
= 0;
1412 if (GET_CODE (argblock
) == PLUS
)
1413 arg_reg
= XEXP (argblock
, 0), arg_offset
= INTVAL (XEXP (argblock
, 1));
1415 for (i
= 0; i
< num_actuals
; i
++)
1417 rtx offset
= ARGS_SIZE_RTX (args
[i
].offset
);
1418 rtx slot_offset
= ARGS_SIZE_RTX (args
[i
].slot_offset
);
1421 /* Skip this parm if it will not be passed on the stack. */
1422 if (! args
[i
].pass_on_stack
&& args
[i
].reg
!= 0)
1425 if (GET_CODE (offset
) == CONST_INT
)
1426 addr
= plus_constant (arg_reg
, INTVAL (offset
));
1428 addr
= gen_rtx_PLUS (Pmode
, arg_reg
, offset
);
1430 addr
= plus_constant (addr
, arg_offset
);
1431 args
[i
].stack
= gen_rtx_MEM (args
[i
].mode
, addr
);
1434 AGGREGATE_TYPE_P (TREE_TYPE (args
[i
].tree_value
)));
1436 if (GET_CODE (slot_offset
) == CONST_INT
)
1437 addr
= plus_constant (arg_reg
, INTVAL (slot_offset
));
1439 addr
= gen_rtx_PLUS (Pmode
, arg_reg
, slot_offset
);
1441 addr
= plus_constant (addr
, arg_offset
);
1442 args
[i
].stack_slot
= gen_rtx_MEM (args
[i
].mode
, addr
);
1447 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1448 in a call instruction.
1450 FNDECL is the tree node for the target function. For an indirect call
1451 FNDECL will be NULL_TREE.
1453 EXP is the CALL_EXPR for this call. */
1456 rtx_for_function_call (fndecl
, exp
)
1462 /* Get the function to call, in the form of RTL. */
1465 /* If this is the first use of the function, see if we need to
1466 make an external definition for it. */
1467 if (! TREE_USED (fndecl
))
1469 assemble_external (fndecl
);
1470 TREE_USED (fndecl
) = 1;
1473 /* Get a SYMBOL_REF rtx for the function address. */
1474 funexp
= XEXP (DECL_RTL (fndecl
), 0);
1477 /* Generate an rtx (probably a pseudo-register) for the address. */
1482 expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
1483 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1485 /* Check the function is executable. */
1486 if (current_function_check_memory_usage
)
1488 #ifdef POINTERS_EXTEND_UNSIGNED
1489 /* It might be OK to convert funexp in place, but there's
1490 a lot going on between here and when it happens naturally
1491 that this seems safer. */
1492 funaddr
= convert_memory_address (Pmode
, funexp
);
1494 emit_library_call (chkr_check_exec_libfunc
, 1,
1503 /* Do the register loads required for any wholly-register parms or any
1504 parms which are passed both on the stack and in a register. Their
1505 expressions were already evaluated.
1507 Mark all register-parms as living through the call, putting these USE
1508 insns in the CALL_INSN_FUNCTION_USAGE field. */
1511 load_register_parameters (args
, num_actuals
, call_fusage
)
1512 struct arg_data
*args
;
1518 #ifdef LOAD_ARGS_REVERSED
1519 for (i
= num_actuals
- 1; i
>= 0; i
--)
1521 for (i
= 0; i
< num_actuals
; i
++)
1524 rtx reg
= args
[i
].reg
;
1525 int partial
= args
[i
].partial
;
1530 /* Set to non-negative if must move a word at a time, even if just
1531 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1532 we just use a normal move insn. This value can be zero if the
1533 argument is a zero size structure with no fields. */
1534 nregs
= (partial
? partial
1535 : (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) == BLKmode
1536 ? ((int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
))
1537 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
1540 /* Handle calls that pass values in multiple non-contiguous
1541 locations. The Irix 6 ABI has examples of this. */
1543 if (GET_CODE (reg
) == PARALLEL
)
1545 emit_group_load (reg
, args
[i
].value
,
1546 int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
)),
1547 (TYPE_ALIGN (TREE_TYPE (args
[i
].tree_value
))
1551 /* If simple case, just do move. If normal partial, store_one_arg
1552 has already loaded the register for us. In all other cases,
1553 load the register(s) from memory. */
1555 else if (nregs
== -1)
1556 emit_move_insn (reg
, args
[i
].value
);
1558 /* If we have pre-computed the values to put in the registers in
1559 the case of non-aligned structures, copy them in now. */
1561 else if (args
[i
].n_aligned_regs
!= 0)
1562 for (j
= 0; j
< args
[i
].n_aligned_regs
; j
++)
1563 emit_move_insn (gen_rtx_REG (word_mode
, REGNO (reg
) + j
),
1564 args
[i
].aligned_regs
[j
]);
1566 else if (partial
== 0 || args
[i
].pass_on_stack
)
1567 move_block_to_reg (REGNO (reg
),
1568 validize_mem (args
[i
].value
), nregs
,
1571 /* Handle calls that pass values in multiple non-contiguous
1572 locations. The Irix 6 ABI has examples of this. */
1573 if (GET_CODE (reg
) == PARALLEL
)
1574 use_group_regs (call_fusage
, reg
);
1575 else if (nregs
== -1)
1576 use_reg (call_fusage
, reg
);
1578 use_regs (call_fusage
, REGNO (reg
), nregs
== 0 ? 1 : nregs
);
1583 /* Generate all the code for a function call
1584 and return an rtx for its value.
1585 Store the value in TARGET (specified as an rtx) if convenient.
1586 If the value is stored in TARGET then TARGET is returned.
1587 If IGNORE is nonzero, then we ignore the value of the function call. */
1590 expand_call (exp
, target
, ignore
)
1595 /* List of actual parameters. */
1596 tree actparms
= TREE_OPERAND (exp
, 1);
1597 /* RTX for the function to be called. */
1599 /* Data type of the function. */
1601 /* Declaration of the function being called,
1602 or 0 if the function is computed (not known by name). */
1607 /* Register in which non-BLKmode value will be returned,
1608 or 0 if no value or if value is BLKmode. */
1610 /* Address where we should return a BLKmode value;
1611 0 if value not BLKmode. */
1612 rtx structure_value_addr
= 0;
1613 /* Nonzero if that address is being passed by treating it as
1614 an extra, implicit first parameter. Otherwise,
1615 it is passed by being copied directly into struct_value_rtx. */
1616 int structure_value_addr_parm
= 0;
1617 /* Size of aggregate value wanted, or zero if none wanted
1618 or if we are using the non-reentrant PCC calling convention
1619 or expecting the value in registers. */
1620 HOST_WIDE_INT struct_value_size
= 0;
1621 /* Nonzero if called function returns an aggregate in memory PCC style,
1622 by returning the address of where to find it. */
1623 int pcc_struct_value
= 0;
1625 /* Number of actual parameters in this call, including struct value addr. */
1627 /* Number of named args. Args after this are anonymous ones
1628 and they must all go on the stack. */
1631 /* Vector of information about each argument.
1632 Arguments are numbered in the order they will be pushed,
1633 not the order they are written. */
1634 struct arg_data
*args
;
1636 /* Total size in bytes of all the stack-parms scanned so far. */
1637 struct args_size args_size
;
1638 /* Size of arguments before any adjustments (such as rounding). */
1639 int unadjusted_args_size
;
1640 /* Data on reg parms scanned so far. */
1641 CUMULATIVE_ARGS args_so_far
;
1642 /* Nonzero if a reg parm has been scanned. */
1644 /* Nonzero if this is an indirect function call. */
1646 /* Nonzero if we must avoid push-insns in the args for this call.
1647 If stack space is allocated for register parameters, but not by the
1648 caller, then it is preallocated in the fixed part of the stack frame.
1649 So the entire argument block must then be preallocated (i.e., we
1650 ignore PUSH_ROUNDING in that case). */
1652 #ifdef PUSH_ROUNDING
1653 int must_preallocate
= 0;
1655 int must_preallocate
= 1;
1658 /* Size of the stack reserved for parameter registers. */
1659 int reg_parm_stack_space
= 0;
1661 /* Address of space preallocated for stack parms
1662 (on machines that lack push insns), or 0 if space not preallocated. */
1665 /* Nonzero if it is plausible that this is a call to alloca. */
1667 /* Nonzero if this is a call to malloc or a related function. */
1669 /* Nonzero if this is a call to setjmp or a related function. */
1671 /* Nonzero if this is a call to `longjmp'. */
1673 /* Nonzero if this is a syscall that makes a new process in the image of
1676 /* Nonzero if this is a call to an inline function. */
1677 int is_integrable
= 0;
1678 /* Nonzero if this is a call to a `const' function.
1679 Note that only explicitly named functions are handled as `const' here. */
1681 /* Nonzero if this is a call to a `volatile' function. */
1682 int is_volatile
= 0;
1683 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1684 /* Define the boundary of the register parm stack space that needs to be
1686 int low_to_save
= -1, high_to_save
;
1687 rtx save_area
= 0; /* Place that it is saved */
1690 #ifdef ACCUMULATE_OUTGOING_ARGS
1691 int initial_highest_arg_in_use
= highest_outgoing_arg_in_use
;
1692 char *initial_stack_usage_map
= stack_usage_map
;
1693 int old_stack_arg_under_construction
= 0;
1696 rtx old_stack_level
= 0;
1697 int old_pending_adj
= 0;
1698 int old_inhibit_defer_pop
= inhibit_defer_pop
;
1699 rtx call_fusage
= 0;
1702 #ifdef PREFERRED_STACK_BOUNDARY
1703 int preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
1705 /* In this case preferred_stack_boundary variable is meaningless.
1706 It is used only in order to keep ifdef noise down when calling
1707 compute_argument_block_size. */
1708 int preferred_stack_boundary
= 0;
1711 /* The value of the function call can be put in a hard register. But
1712 if -fcheck-memory-usage, code which invokes functions (and thus
1713 damages some hard registers) can be inserted before using the value.
1714 So, target is always a pseudo-register in that case. */
1715 if (current_function_check_memory_usage
)
1718 /* See if we can find a DECL-node for the actual function.
1719 As a result, decide whether this is a call to an integrable function. */
1721 p
= TREE_OPERAND (exp
, 0);
1722 if (TREE_CODE (p
) == ADDR_EXPR
)
1724 fndecl
= TREE_OPERAND (p
, 0);
1725 if (TREE_CODE (fndecl
) != FUNCTION_DECL
)
1730 && fndecl
!= current_function_decl
1731 && DECL_INLINE (fndecl
)
1732 && DECL_SAVED_INSNS (fndecl
)
1733 && DECL_SAVED_INSNS (fndecl
)->inlinable
)
1735 else if (! TREE_ADDRESSABLE (fndecl
))
1737 /* In case this function later becomes inlinable,
1738 record that there was already a non-inline call to it.
1740 Use abstraction instead of setting TREE_ADDRESSABLE
1742 if (DECL_INLINE (fndecl
) && warn_inline
&& !flag_no_inline
1745 warning_with_decl (fndecl
, "can't inline call to `%s'");
1746 warning ("called from here");
1748 mark_addressable (fndecl
);
1751 if (TREE_READONLY (fndecl
) && ! TREE_THIS_VOLATILE (fndecl
)
1752 && TYPE_MODE (TREE_TYPE (exp
)) != VOIDmode
)
1755 if (TREE_THIS_VOLATILE (fndecl
))
1760 /* If we don't have specific function to call, see if we have a
1761 constant or `noreturn' function from the type. */
1764 is_const
= TREE_READONLY (TREE_TYPE (TREE_TYPE (p
)));
1765 is_volatile
= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p
)));
1768 #ifdef REG_PARM_STACK_SPACE
1769 #ifdef MAYBE_REG_PARM_STACK_SPACE
1770 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
1772 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
1776 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1777 if (reg_parm_stack_space
> 0)
1778 must_preallocate
= 1;
1781 /* Warn if this value is an aggregate type,
1782 regardless of which calling convention we are using for it. */
1783 if (warn_aggregate_return
&& AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
1784 warning ("function call has aggregate value");
1786 /* Set up a place to return a structure. */
1788 /* Cater to broken compilers. */
1789 if (aggregate_value_p (exp
))
1791 /* This call returns a big structure. */
1794 #ifdef PCC_STATIC_STRUCT_RETURN
1796 pcc_struct_value
= 1;
1797 /* Easier than making that case work right. */
1800 /* In case this is a static function, note that it has been
1802 if (! TREE_ADDRESSABLE (fndecl
))
1803 mark_addressable (fndecl
);
1807 #else /* not PCC_STATIC_STRUCT_RETURN */
1809 struct_value_size
= int_size_in_bytes (TREE_TYPE (exp
));
1811 if (target
&& GET_CODE (target
) == MEM
)
1812 structure_value_addr
= XEXP (target
, 0);
1815 /* Assign a temporary to hold the value. */
1818 /* For variable-sized objects, we must be called with a target
1819 specified. If we were to allocate space on the stack here,
1820 we would have no way of knowing when to free it. */
1822 if (struct_value_size
< 0)
1825 /* This DECL is just something to feed to mark_addressable;
1826 it doesn't get pushed. */
1827 d
= build_decl (VAR_DECL
, NULL_TREE
, TREE_TYPE (exp
));
1828 DECL_RTL (d
) = assign_temp (TREE_TYPE (exp
), 1, 0, 1);
1829 mark_addressable (d
);
1830 mark_temp_addr_taken (DECL_RTL (d
));
1831 structure_value_addr
= XEXP (DECL_RTL (d
), 0);
1836 #endif /* not PCC_STATIC_STRUCT_RETURN */
1839 /* If called function is inline, try to integrate it. */
1845 #ifdef ACCUMULATE_OUTGOING_ARGS
1846 before_call
= get_last_insn ();
1849 temp
= expand_inline_function (fndecl
, actparms
, target
,
1850 ignore
, TREE_TYPE (exp
),
1851 structure_value_addr
);
1853 /* If inlining succeeded, return. */
1854 if (temp
!= (rtx
) (HOST_WIDE_INT
) -1)
1856 #ifdef ACCUMULATE_OUTGOING_ARGS
1857 /* If the outgoing argument list must be preserved, push
1858 the stack before executing the inlined function if it
1861 for (i
= reg_parm_stack_space
- 1; i
>= 0; i
--)
1862 if (i
< highest_outgoing_arg_in_use
&& stack_usage_map
[i
] != 0)
1865 if (stack_arg_under_construction
|| i
>= 0)
1868 = before_call
? NEXT_INSN (before_call
) : get_insns ();
1869 rtx insn
= NULL_RTX
, seq
;
1871 /* Look for a call in the inline function code.
1872 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1873 nonzero then there is a call and it is not necessary
1874 to scan the insns. */
1876 if (DECL_SAVED_INSNS (fndecl
)->outgoing_args_size
== 0)
1877 for (insn
= first_insn
; insn
; insn
= NEXT_INSN (insn
))
1878 if (GET_CODE (insn
) == CALL_INSN
)
1883 /* Reserve enough stack space so that the largest
1884 argument list of any function call in the inline
1885 function does not overlap the argument list being
1886 evaluated. This is usually an overestimate because
1887 allocate_dynamic_stack_space reserves space for an
1888 outgoing argument list in addition to the requested
1889 space, but there is no way to ask for stack space such
1890 that an argument list of a certain length can be
1893 Add the stack space reserved for register arguments, if
1894 any, in the inline function. What is really needed is the
1895 largest value of reg_parm_stack_space in the inline
1896 function, but that is not available. Using the current
1897 value of reg_parm_stack_space is wrong, but gives
1898 correct results on all supported machines. */
1900 int adjust
= (DECL_SAVED_INSNS (fndecl
)->outgoing_args_size
1901 + reg_parm_stack_space
);
1904 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
1905 allocate_dynamic_stack_space (GEN_INT (adjust
),
1906 NULL_RTX
, BITS_PER_UNIT
);
1909 emit_insns_before (seq
, first_insn
);
1910 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1915 /* If the result is equivalent to TARGET, return TARGET to simplify
1916 checks in store_expr. They can be equivalent but not equal in the
1917 case of a function that returns BLKmode. */
1918 if (temp
!= target
&& rtx_equal_p (temp
, target
))
1923 /* If inlining failed, mark FNDECL as needing to be compiled
1924 separately after all. If function was declared inline,
1926 if (DECL_INLINE (fndecl
) && warn_inline
&& !flag_no_inline
1927 && optimize
> 0 && ! TREE_ADDRESSABLE (fndecl
))
1929 warning_with_decl (fndecl
, "inlining failed in call to `%s'");
1930 warning ("called from here");
1932 mark_addressable (fndecl
);
1935 function_call_count
++;
1937 if (fndecl
&& DECL_NAME (fndecl
))
1938 name
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
1940 /* Ensure current function's preferred stack boundary is at least
1941 what we need. We don't have to increase alignment for recursive
1943 if (cfun
->preferred_stack_boundary
< preferred_stack_boundary
1944 && fndecl
!= current_function_decl
)
1945 cfun
->preferred_stack_boundary
= preferred_stack_boundary
;
1947 /* See if this is a call to a function that can return more than once
1948 or a call to longjmp or malloc. */
1949 special_function_p (fndecl
, &returns_twice
, &is_longjmp
, &fork_or_exec
,
1950 &is_malloc
, &may_be_alloca
);
1953 current_function_calls_alloca
= 1;
1955 /* Operand 0 is a pointer-to-function; get the type of the function. */
1956 funtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
1957 if (! POINTER_TYPE_P (funtype
))
1959 funtype
= TREE_TYPE (funtype
);
1961 /* When calling a const function, we must pop the stack args right away,
1962 so that the pop is deleted or moved with the call. */
1966 /* Don't let pending stack adjusts add up to too much.
1967 Also, do all pending adjustments now
1968 if there is any chance this might be a call to alloca. */
1970 if (pending_stack_adjust
>= 32
1971 || (pending_stack_adjust
> 0 && may_be_alloca
))
1972 do_pending_stack_adjust ();
1974 if (profile_arc_flag
&& fork_or_exec
)
1976 /* A fork duplicates the profile information, and an exec discards
1977 it. We can't rely on fork/exec to be paired. So write out the
1978 profile information we have gathered so far, and clear it. */
1979 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__bb_fork_func"), 0,
1982 /* ??? When __clone is called with CLONE_VM set, profiling is
1983 subject to race conditions, just as with multithreaded programs. */
1986 /* Push the temporary stack slot level so that we can free any temporaries
1990 /* Start updating where the next arg would go.
1992 On some machines (such as the PA) indirect calls have a different
1993 calling convention than normal calls. The last argument in
1994 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1996 INIT_CUMULATIVE_ARGS (args_so_far
, funtype
, NULL_RTX
, (fndecl
== 0));
1998 /* If struct_value_rtx is 0, it means pass the address
1999 as if it were an extra parameter. */
2000 if (structure_value_addr
&& struct_value_rtx
== 0)
2002 /* If structure_value_addr is a REG other than
2003 virtual_outgoing_args_rtx, we can use always use it. If it
2004 is not a REG, we must always copy it into a register.
2005 If it is virtual_outgoing_args_rtx, we must copy it to another
2006 register in some cases. */
2007 rtx temp
= (GET_CODE (structure_value_addr
) != REG
2008 #ifdef ACCUMULATE_OUTGOING_ARGS
2009 || (stack_arg_under_construction
2010 && structure_value_addr
== virtual_outgoing_args_rtx
)
2012 ? copy_addr_to_reg (structure_value_addr
)
2013 : structure_value_addr
);
2016 = tree_cons (error_mark_node
,
2017 make_tree (build_pointer_type (TREE_TYPE (funtype
)),
2020 structure_value_addr_parm
= 1;
2023 /* Count the arguments and set NUM_ACTUALS. */
2024 for (p
= actparms
, i
= 0; p
; p
= TREE_CHAIN (p
)) i
++;
2027 /* Compute number of named args.
2028 Normally, don't include the last named arg if anonymous args follow.
2029 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2030 (If no anonymous args follow, the result of list_length is actually
2031 one too large. This is harmless.)
2033 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2034 zero, this machine will be able to place unnamed args that were passed in
2035 registers into the stack. So treat all args as named. This allows the
2036 insns emitting for a specific argument list to be independent of the
2037 function declaration.
2039 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
2040 way to pass unnamed args in registers, so we must force them into
2043 if ((STRICT_ARGUMENT_NAMING
2044 || ! PRETEND_OUTGOING_VARARGS_NAMED
)
2045 && TYPE_ARG_TYPES (funtype
) != 0)
2047 = (list_length (TYPE_ARG_TYPES (funtype
))
2048 /* Don't include the last named arg. */
2049 - (STRICT_ARGUMENT_NAMING
? 0 : 1)
2050 /* Count the struct value address, if it is passed as a parm. */
2051 + structure_value_addr_parm
);
2053 /* If we know nothing, treat all args as named. */
2054 n_named_args
= num_actuals
;
2056 /* Make a vector to hold all the information about each arg. */
2057 args
= (struct arg_data
*) alloca (num_actuals
* sizeof (struct arg_data
));
2058 bzero ((char *) args
, num_actuals
* sizeof (struct arg_data
));
2060 /* Build up entries inthe ARGS array, compute the size of the arguments
2061 into ARGS_SIZE, etc. */
2062 initialize_argument_information (num_actuals
, args
, &args_size
, n_named_args
,
2063 actparms
, fndecl
, &args_so_far
,
2064 reg_parm_stack_space
, &old_stack_level
,
2065 &old_pending_adj
, &must_preallocate
,
2068 #ifdef FINAL_REG_PARM_STACK_SPACE
2069 reg_parm_stack_space
= FINAL_REG_PARM_STACK_SPACE (args_size
.constant
,
2075 /* If this function requires a variable-sized argument list, don't try to
2076 make a cse'able block for this call. We may be able to do this
2077 eventually, but it is too complicated to keep track of what insns go
2078 in the cse'able block and which don't. */
2081 must_preallocate
= 1;
2084 /* Compute the actual size of the argument block required. The variable
2085 and constant sizes must be combined, the size may have to be rounded,
2086 and there may be a minimum required size. */
2087 unadjusted_args_size
2088 = compute_argument_block_size (reg_parm_stack_space
, &args_size
,
2089 preferred_stack_boundary
);
2091 /* Now make final decision about preallocating stack space. */
2092 must_preallocate
= finalize_must_preallocate (must_preallocate
,
2093 num_actuals
, args
, &args_size
);
2095 /* If the structure value address will reference the stack pointer, we must
2096 stabilize it. We don't need to do this if we know that we are not going
2097 to adjust the stack pointer in processing this call. */
2099 if (structure_value_addr
2100 && (reg_mentioned_p (virtual_stack_dynamic_rtx
, structure_value_addr
)
2101 || reg_mentioned_p (virtual_outgoing_args_rtx
, structure_value_addr
))
2103 #ifndef ACCUMULATE_OUTGOING_ARGS
2104 || args_size
.constant
2107 structure_value_addr
= copy_to_reg (structure_value_addr
);
2109 /* Precompute any arguments as needed. */
2110 precompute_arguments (is_const
, must_preallocate
, num_actuals
,
2113 /* Now we are about to start emitting insns that can be deleted
2114 if a libcall is deleted. */
2115 if (is_const
|| is_malloc
)
2118 /* If we have no actual push instructions, or shouldn't use them,
2119 make space for all args right now. */
2121 if (args_size
.var
!= 0)
2123 if (old_stack_level
== 0)
2125 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
2126 old_pending_adj
= pending_stack_adjust
;
2127 pending_stack_adjust
= 0;
2128 #ifdef ACCUMULATE_OUTGOING_ARGS
2129 /* stack_arg_under_construction says whether a stack arg is
2130 being constructed at the old stack level. Pushing the stack
2131 gets a clean outgoing argument block. */
2132 old_stack_arg_under_construction
= stack_arg_under_construction
;
2133 stack_arg_under_construction
= 0;
2136 argblock
= push_block (ARGS_SIZE_RTX (args_size
), 0, 0);
2140 /* Note that we must go through the motions of allocating an argument
2141 block even if the size is zero because we may be storing args
2142 in the area reserved for register arguments, which may be part of
2145 int needed
= args_size
.constant
;
2147 /* Store the maximum argument space used. It will be pushed by
2148 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2151 if (needed
> current_function_outgoing_args_size
)
2152 current_function_outgoing_args_size
= needed
;
2154 if (must_preallocate
)
2156 #ifdef ACCUMULATE_OUTGOING_ARGS
2157 /* Since the stack pointer will never be pushed, it is possible for
2158 the evaluation of a parm to clobber something we have already
2159 written to the stack. Since most function calls on RISC machines
2160 do not use the stack, this is uncommon, but must work correctly.
2162 Therefore, we save any area of the stack that was already written
2163 and that we are using. Here we set up to do this by making a new
2164 stack usage map from the old one. The actual save will be done
2167 Another approach might be to try to reorder the argument
2168 evaluations to avoid this conflicting stack usage. */
2170 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2171 /* Since we will be writing into the entire argument area, the
2172 map must be allocated for its entire size, not just the part that
2173 is the responsibility of the caller. */
2174 needed
+= reg_parm_stack_space
;
2177 #ifdef ARGS_GROW_DOWNWARD
2178 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
2181 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
2184 stack_usage_map
= (char *) alloca (highest_outgoing_arg_in_use
);
2186 if (initial_highest_arg_in_use
)
2187 bcopy (initial_stack_usage_map
, stack_usage_map
,
2188 initial_highest_arg_in_use
);
2190 if (initial_highest_arg_in_use
!= highest_outgoing_arg_in_use
)
2191 bzero (&stack_usage_map
[initial_highest_arg_in_use
],
2192 highest_outgoing_arg_in_use
- initial_highest_arg_in_use
);
2195 /* The address of the outgoing argument list must not be copied to a
2196 register here, because argblock would be left pointing to the
2197 wrong place after the call to allocate_dynamic_stack_space below.
2200 argblock
= virtual_outgoing_args_rtx
;
2202 #else /* not ACCUMULATE_OUTGOING_ARGS */
2203 if (inhibit_defer_pop
== 0)
2205 /* Try to reuse some or all of the pending_stack_adjust
2206 to get this space. Maybe we can avoid any pushing. */
2207 if (needed
> pending_stack_adjust
)
2209 needed
-= pending_stack_adjust
;
2210 pending_stack_adjust
= 0;
2214 pending_stack_adjust
-= needed
;
2218 /* Special case this because overhead of `push_block' in this
2219 case is non-trivial. */
2221 argblock
= virtual_outgoing_args_rtx
;
2223 argblock
= push_block (GEN_INT (needed
), 0, 0);
2225 /* We only really need to call `copy_to_reg' in the case where push
2226 insns are going to be used to pass ARGBLOCK to a function
2227 call in ARGS. In that case, the stack pointer changes value
2228 from the allocation point to the call point, and hence
2229 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2230 But might as well always do it. */
2231 argblock
= copy_to_reg (argblock
);
2232 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2236 #ifdef ACCUMULATE_OUTGOING_ARGS
2237 /* The save/restore code in store_one_arg handles all cases except one:
2238 a constructor call (including a C function returning a BLKmode struct)
2239 to initialize an argument. */
2240 if (stack_arg_under_construction
)
2242 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2243 rtx push_size
= GEN_INT (reg_parm_stack_space
+ args_size
.constant
);
2245 rtx push_size
= GEN_INT (args_size
.constant
);
2247 if (old_stack_level
== 0)
2249 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
2250 old_pending_adj
= pending_stack_adjust
;
2251 pending_stack_adjust
= 0;
2252 /* stack_arg_under_construction says whether a stack arg is
2253 being constructed at the old stack level. Pushing the stack
2254 gets a clean outgoing argument block. */
2255 old_stack_arg_under_construction
= stack_arg_under_construction
;
2256 stack_arg_under_construction
= 0;
2257 /* Make a new map for the new argument list. */
2258 stack_usage_map
= (char *)alloca (highest_outgoing_arg_in_use
);
2259 bzero (stack_usage_map
, highest_outgoing_arg_in_use
);
2260 highest_outgoing_arg_in_use
= 0;
2262 allocate_dynamic_stack_space (push_size
, NULL_RTX
, BITS_PER_UNIT
);
2264 /* If argument evaluation might modify the stack pointer, copy the
2265 address of the argument list to a register. */
2266 for (i
= 0; i
< num_actuals
; i
++)
2267 if (args
[i
].pass_on_stack
)
2269 argblock
= copy_addr_to_reg (argblock
);
2274 compute_argument_addresses (args
, argblock
, num_actuals
);
2276 #ifdef PUSH_ARGS_REVERSED
2277 #ifdef PREFERRED_STACK_BOUNDARY
2278 /* If we push args individually in reverse order, perform stack alignment
2279 before the first push (the last arg). */
2281 anti_adjust_stack (GEN_INT (args_size
.constant
- unadjusted_args_size
));
2285 /* Don't try to defer pops if preallocating, not even from the first arg,
2286 since ARGBLOCK probably refers to the SP. */
2290 funexp
= rtx_for_function_call (fndecl
, exp
);
2292 /* Figure out the register where the value, if any, will come back. */
2294 if (TYPE_MODE (TREE_TYPE (exp
)) != VOIDmode
2295 && ! structure_value_addr
)
2297 if (pcc_struct_value
)
2298 valreg
= hard_function_value (build_pointer_type (TREE_TYPE (exp
)),
2301 valreg
= hard_function_value (TREE_TYPE (exp
), fndecl
, 0);
2304 /* Precompute all register parameters. It isn't safe to compute anything
2305 once we have started filling any specific hard regs. */
2306 precompute_register_parameters (num_actuals
, args
, ®_parm_seen
);
2308 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2310 /* Save the fixed argument area if it's part of the caller's frame and
2311 is clobbered by argument setup for this call. */
2312 save_area
= save_fixed_argument_area (reg_parm_stack_space
, argblock
,
2313 &low_to_save
, &high_to_save
);
2317 /* Now store (and compute if necessary) all non-register parms.
2318 These come before register parms, since they can require block-moves,
2319 which could clobber the registers used for register parms.
2320 Parms which have partial registers are not stored here,
2321 but we do preallocate space here if they want that. */
2323 for (i
= 0; i
< num_actuals
; i
++)
2324 if (args
[i
].reg
== 0 || args
[i
].pass_on_stack
)
2325 store_one_arg (&args
[i
], argblock
, may_be_alloca
,
2326 args_size
.var
!= 0, reg_parm_stack_space
);
2328 /* If we have a parm that is passed in registers but not in memory
2329 and whose alignment does not permit a direct copy into registers,
2330 make a group of pseudos that correspond to each register that we
2332 if (STRICT_ALIGNMENT
)
2333 store_unaligned_arguments_into_pseudos (args
, num_actuals
);
2335 /* Now store any partially-in-registers parm.
2336 This is the last place a block-move can happen. */
2338 for (i
= 0; i
< num_actuals
; i
++)
2339 if (args
[i
].partial
!= 0 && ! args
[i
].pass_on_stack
)
2340 store_one_arg (&args
[i
], argblock
, may_be_alloca
,
2341 args_size
.var
!= 0, reg_parm_stack_space
);
2343 #ifndef PUSH_ARGS_REVERSED
2344 #ifdef PREFERRED_STACK_BOUNDARY
2345 /* If we pushed args in forward order, perform stack alignment
2346 after pushing the last arg. */
2348 anti_adjust_stack (GEN_INT (args_size
.constant
- unadjusted_args_size
));
2352 /* If register arguments require space on the stack and stack space
2353 was not preallocated, allocate stack space here for arguments
2354 passed in registers. */
2355 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2356 if (must_preallocate
== 0 && reg_parm_stack_space
> 0)
2357 anti_adjust_stack (GEN_INT (reg_parm_stack_space
));
2360 /* Pass the function the address in which to return a structure value. */
2361 if (structure_value_addr
&& ! structure_value_addr_parm
)
2363 emit_move_insn (struct_value_rtx
,
2365 force_operand (structure_value_addr
,
2368 /* Mark the memory for the aggregate as write-only. */
2369 if (current_function_check_memory_usage
)
2370 emit_library_call (chkr_set_right_libfunc
, 1,
2372 structure_value_addr
, Pmode
,
2373 GEN_INT (struct_value_size
), TYPE_MODE (sizetype
),
2374 GEN_INT (MEMORY_USE_WO
),
2375 TYPE_MODE (integer_type_node
));
2377 if (GET_CODE (struct_value_rtx
) == REG
)
2378 use_reg (&call_fusage
, struct_value_rtx
);
2381 funexp
= prepare_call_address (funexp
, fndecl
, &call_fusage
, reg_parm_seen
);
2383 load_register_parameters (args
, num_actuals
, &call_fusage
);
2385 /* Perform postincrements before actually calling the function. */
2388 /* Save a pointer to the last insn before the call, so that we can
2389 later safely search backwards to find the CALL_INSN. */
2390 before_call
= get_last_insn ();
2392 /* All arguments and registers used for the call must be set up by now! */
2394 /* Generate the actual call instruction. */
2395 emit_call_1 (funexp
, fndecl
, funtype
, unadjusted_args_size
,
2396 args_size
.constant
, struct_value_size
,
2397 FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1),
2398 valreg
, old_inhibit_defer_pop
, call_fusage
, is_const
);
2400 /* If call is cse'able, make appropriate pair of reg-notes around it.
2401 Test valreg so we don't crash; may safely ignore `const'
2402 if return type is void. Disable for PARALLEL return values, because
2403 we have no way to move such values into a pseudo register. */
2404 if (is_const
&& valreg
!= 0 && GET_CODE (valreg
) != PARALLEL
)
2407 rtx temp
= gen_reg_rtx (GET_MODE (valreg
));
2410 /* Mark the return value as a pointer if needed. */
2411 if (TREE_CODE (TREE_TYPE (exp
)) == POINTER_TYPE
)
2413 tree pointed_to
= TREE_TYPE (TREE_TYPE (exp
));
2414 mark_reg_pointer (temp
, TYPE_ALIGN (pointed_to
) / BITS_PER_UNIT
);
2417 /* Construct an "equal form" for the value which mentions all the
2418 arguments in order as well as the function name. */
2419 #ifdef PUSH_ARGS_REVERSED
2420 for (i
= 0; i
< num_actuals
; i
++)
2421 note
= gen_rtx_EXPR_LIST (VOIDmode
, args
[i
].initial_value
, note
);
2423 for (i
= num_actuals
- 1; i
>= 0; i
--)
2424 note
= gen_rtx_EXPR_LIST (VOIDmode
, args
[i
].initial_value
, note
);
2426 note
= gen_rtx_EXPR_LIST (VOIDmode
, funexp
, note
);
2428 insns
= get_insns ();
2431 emit_libcall_block (insns
, temp
, valreg
, note
);
2437 /* Otherwise, just write out the sequence without a note. */
2438 rtx insns
= get_insns ();
2445 rtx temp
= gen_reg_rtx (GET_MODE (valreg
));
2448 /* The return value from a malloc-like function is a pointer. */
2449 if (TREE_CODE (TREE_TYPE (exp
)) == POINTER_TYPE
)
2450 mark_reg_pointer (temp
, BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
);
2452 emit_move_insn (temp
, valreg
);
2454 /* The return value from a malloc-like function can not alias
2456 last
= get_last_insn ();
2458 gen_rtx_EXPR_LIST (REG_NOALIAS
, temp
, REG_NOTES (last
));
2460 /* Write out the sequence. */
2461 insns
= get_insns ();
2467 /* For calls to `setjmp', etc., inform flow.c it should complain
2468 if nonvolatile values are live. */
2472 /* The NOTE_INSN_SETJMP note must be emitted immediately after the
2473 CALL_INSN. Some ports emit more than just a CALL_INSN above, so
2474 we must search for it here. */
2475 rtx last
= get_last_insn ();
2476 while (GET_CODE (last
) != CALL_INSN
)
2478 last
= PREV_INSN (last
);
2479 /* There was no CALL_INSN? */
2480 if (last
== before_call
)
2483 emit_note_after (NOTE_INSN_SETJMP
, last
);
2484 current_function_calls_setjmp
= 1;
2488 current_function_calls_longjmp
= 1;
2490 /* Notice functions that cannot return.
2491 If optimizing, insns emitted below will be dead.
2492 If not optimizing, they will exist, which is useful
2493 if the user uses the `return' command in the debugger. */
2495 if (is_volatile
|| is_longjmp
)
2498 /* If value type not void, return an rtx for the value. */
2500 /* If there are cleanups to be called, don't use a hard reg as target.
2501 We need to double check this and see if it matters anymore. */
2502 if (any_pending_cleanups (1)
2503 && target
&& REG_P (target
)
2504 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
2507 if (TYPE_MODE (TREE_TYPE (exp
)) == VOIDmode
2510 target
= const0_rtx
;
2512 else if (structure_value_addr
)
2514 if (target
== 0 || GET_CODE (target
) != MEM
)
2516 target
= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp
)),
2517 memory_address (TYPE_MODE (TREE_TYPE (exp
)),
2518 structure_value_addr
));
2519 MEM_SET_IN_STRUCT_P (target
,
2520 AGGREGATE_TYPE_P (TREE_TYPE (exp
)));
2523 else if (pcc_struct_value
)
2525 /* This is the special C++ case where we need to
2526 know what the true target was. We take care to
2527 never use this value more than once in one expression. */
2528 target
= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp
)),
2529 copy_to_reg (valreg
));
2530 MEM_SET_IN_STRUCT_P (target
, AGGREGATE_TYPE_P (TREE_TYPE (exp
)));
2532 /* Handle calls that return values in multiple non-contiguous locations.
2533 The Irix 6 ABI has examples of this. */
2534 else if (GET_CODE (valreg
) == PARALLEL
)
2536 int bytes
= int_size_in_bytes (TREE_TYPE (exp
));
2540 target
= assign_stack_temp (TYPE_MODE (TREE_TYPE (exp
)), bytes
, 0);
2541 MEM_SET_IN_STRUCT_P (target
, AGGREGATE_TYPE_P (TREE_TYPE (exp
)));
2542 preserve_temp_slots (target
);
2545 if (! rtx_equal_p (target
, valreg
))
2546 emit_group_store (target
, valreg
, bytes
,
2547 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
2549 else if (target
&& GET_MODE (target
) == TYPE_MODE (TREE_TYPE (exp
))
2550 && GET_MODE (target
) == GET_MODE (valreg
))
2551 /* TARGET and VALREG cannot be equal at this point because the latter
2552 would not have REG_FUNCTION_VALUE_P true, while the former would if
2553 it were referring to the same register.
2555 If they refer to the same register, this move will be a no-op, except
2556 when function inlining is being done. */
2557 emit_move_insn (target
, valreg
);
2558 else if (TYPE_MODE (TREE_TYPE (exp
)) == BLKmode
)
2559 target
= copy_blkmode_from_reg (target
, valreg
, TREE_TYPE (exp
));
2561 target
= copy_to_reg (valreg
);
2563 #ifdef PROMOTE_FUNCTION_RETURN
2564 /* If we promoted this return value, make the proper SUBREG. TARGET
2565 might be const0_rtx here, so be careful. */
2566 if (GET_CODE (target
) == REG
2567 && TYPE_MODE (TREE_TYPE (exp
)) != BLKmode
2568 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
2570 tree type
= TREE_TYPE (exp
);
2571 int unsignedp
= TREE_UNSIGNED (type
);
2573 /* If we don't promote as expected, something is wrong. */
2574 if (GET_MODE (target
)
2575 != promote_mode (type
, TYPE_MODE (type
), &unsignedp
, 1))
2578 target
= gen_rtx_SUBREG (TYPE_MODE (type
), target
, 0);
2579 SUBREG_PROMOTED_VAR_P (target
) = 1;
2580 SUBREG_PROMOTED_UNSIGNED_P (target
) = unsignedp
;
2584 /* If size of args is variable or this was a constructor call for a stack
2585 argument, restore saved stack-pointer value. */
2587 if (old_stack_level
)
2589 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
2590 pending_stack_adjust
= old_pending_adj
;
2591 #ifdef ACCUMULATE_OUTGOING_ARGS
2592 stack_arg_under_construction
= old_stack_arg_under_construction
;
2593 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
2594 stack_usage_map
= initial_stack_usage_map
;
2597 #ifdef ACCUMULATE_OUTGOING_ARGS
2600 #ifdef REG_PARM_STACK_SPACE
2602 restore_fixed_argument_area (save_area
, argblock
,
2603 high_to_save
, low_to_save
);
2606 /* If we saved any argument areas, restore them. */
2607 for (i
= 0; i
< num_actuals
; i
++)
2608 if (args
[i
].save_area
)
2610 enum machine_mode save_mode
= GET_MODE (args
[i
].save_area
);
2612 = gen_rtx_MEM (save_mode
,
2613 memory_address (save_mode
,
2614 XEXP (args
[i
].stack_slot
, 0)));
2616 if (save_mode
!= BLKmode
)
2617 emit_move_insn (stack_area
, args
[i
].save_area
);
2619 emit_block_move (stack_area
, validize_mem (args
[i
].save_area
),
2620 GEN_INT (args
[i
].size
.constant
),
2621 PARM_BOUNDARY
/ BITS_PER_UNIT
);
2624 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
2625 stack_usage_map
= initial_stack_usage_map
;
2629 /* If this was alloca, record the new stack level for nonlocal gotos.
2630 Check for the handler slots since we might not have a save area
2631 for non-local gotos. */
2633 if (may_be_alloca
&& nonlocal_goto_handler_slots
!= 0)
2634 emit_stack_save (SAVE_NONLOCAL
, &nonlocal_goto_stack_level
, NULL_RTX
);
2638 /* Free up storage we no longer need. */
2639 for (i
= 0; i
< num_actuals
; ++i
)
2640 if (args
[i
].aligned_regs
)
2641 free (args
[i
].aligned_regs
);
2646 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2647 (emitting the queue unless NO_QUEUE is nonzero),
2648 for a value of mode OUTMODE,
2649 with NARGS different arguments, passed as alternating rtx values
2650 and machine_modes to convert them to.
2651 The rtx values should have been passed through protect_from_queue already.
2653 NO_QUEUE will be true if and only if the library call is a `const' call
2654 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2655 to the variable is_const in expand_call.
2657 NO_QUEUE must be true for const calls, because if it isn't, then
2658 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2659 and will be lost if the libcall sequence is optimized away.
2661 NO_QUEUE must be false for non-const calls, because if it isn't, the
2662 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2663 optimized. For instance, the instruction scheduler may incorrectly
2664 move memory references across the non-const call. */
2667 emit_library_call
VPARAMS((rtx orgfun
, int no_queue
, enum machine_mode outmode
,
2670 #ifndef ANSI_PROTOTYPES
2673 enum machine_mode outmode
;
2677 /* Total size in bytes of all the stack-parms scanned so far. */
2678 struct args_size args_size
;
2679 /* Size of arguments before any adjustments (such as rounding). */
2680 struct args_size original_args_size
;
2681 register int argnum
;
2685 struct args_size alignment_pad
;
2687 CUMULATIVE_ARGS args_so_far
;
2688 struct arg
{ rtx value
; enum machine_mode mode
; rtx reg
; int partial
;
2689 struct args_size offset
; struct args_size size
; rtx save_area
; };
2691 int old_inhibit_defer_pop
= inhibit_defer_pop
;
2692 rtx call_fusage
= 0;
2693 int reg_parm_stack_space
= 0;
2694 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2695 /* Define the boundary of the register parm stack space that needs to be
2697 int low_to_save
= -1, high_to_save
= 0;
2698 rtx save_area
= 0; /* Place that it is saved */
2701 #ifdef ACCUMULATE_OUTGOING_ARGS
2702 int initial_highest_arg_in_use
= highest_outgoing_arg_in_use
;
2703 char *initial_stack_usage_map
= stack_usage_map
;
2707 #ifdef REG_PARM_STACK_SPACE
2708 /* Size of the stack reserved for parameter registers. */
2709 #ifdef MAYBE_REG_PARM_STACK_SPACE
2710 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
2712 reg_parm_stack_space
= REG_PARM_STACK_SPACE ((tree
) 0);
2716 VA_START (p
, nargs
);
2718 #ifndef ANSI_PROTOTYPES
2719 orgfun
= va_arg (p
, rtx
);
2720 no_queue
= va_arg (p
, int);
2721 outmode
= va_arg (p
, enum machine_mode
);
2722 nargs
= va_arg (p
, int);
2727 /* Copy all the libcall-arguments out of the varargs data
2728 and into a vector ARGVEC.
2730 Compute how to pass each argument. We only support a very small subset
2731 of the full argument passing conventions to limit complexity here since
2732 library functions shouldn't have many args. */
2734 argvec
= (struct arg
*) alloca (nargs
* sizeof (struct arg
));
2735 bzero ((char *) argvec
, nargs
* sizeof (struct arg
));
2738 INIT_CUMULATIVE_ARGS (args_so_far
, NULL_TREE
, fun
, 0);
2740 args_size
.constant
= 0;
2745 #ifdef PREFERRED_STACK_BOUNDARY
2746 /* Ensure current function's preferred stack boundary is at least
2748 if (cfun
->preferred_stack_boundary
< PREFERRED_STACK_BOUNDARY
)
2749 cfun
->preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
2752 for (count
= 0; count
< nargs
; count
++)
2754 rtx val
= va_arg (p
, rtx
);
2755 enum machine_mode mode
= va_arg (p
, enum machine_mode
);
2757 /* We cannot convert the arg value to the mode the library wants here;
2758 must do it earlier where we know the signedness of the arg. */
2760 || (GET_MODE (val
) != mode
&& GET_MODE (val
) != VOIDmode
))
2763 /* On some machines, there's no way to pass a float to a library fcn.
2764 Pass it as a double instead. */
2765 #ifdef LIBGCC_NEEDS_DOUBLE
2766 if (LIBGCC_NEEDS_DOUBLE
&& mode
== SFmode
)
2767 val
= convert_modes (DFmode
, SFmode
, val
, 0), mode
= DFmode
;
2770 /* There's no need to call protect_from_queue, because
2771 either emit_move_insn or emit_push_insn will do that. */
2773 /* Make sure it is a reasonable operand for a move or push insn. */
2774 if (GET_CODE (val
) != REG
&& GET_CODE (val
) != MEM
2775 && ! (CONSTANT_P (val
) && LEGITIMATE_CONSTANT_P (val
)))
2776 val
= force_operand (val
, NULL_RTX
);
2778 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2779 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, mode
, NULL_TREE
, 1))
2781 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2782 be viewed as just an efficiency improvement. */
2783 rtx slot
= assign_stack_temp (mode
, GET_MODE_SIZE (mode
), 0);
2784 emit_move_insn (slot
, val
);
2785 val
= force_operand (XEXP (slot
, 0), NULL_RTX
);
2790 argvec
[count
].value
= val
;
2791 argvec
[count
].mode
= mode
;
2793 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
2794 if (argvec
[count
].reg
&& GET_CODE (argvec
[count
].reg
) == PARALLEL
)
2796 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2797 argvec
[count
].partial
2798 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
, NULL_TREE
, 1);
2800 argvec
[count
].partial
= 0;
2803 locate_and_pad_parm (mode
, NULL_TREE
,
2804 argvec
[count
].reg
&& argvec
[count
].partial
== 0,
2805 NULL_TREE
, &args_size
, &argvec
[count
].offset
,
2806 &argvec
[count
].size
, &alignment_pad
);
2808 if (argvec
[count
].size
.var
)
2811 if (reg_parm_stack_space
== 0 && argvec
[count
].partial
)
2812 argvec
[count
].size
.constant
-= argvec
[count
].partial
* UNITS_PER_WORD
;
2814 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
2815 || reg_parm_stack_space
> 0)
2816 args_size
.constant
+= argvec
[count
].size
.constant
;
2818 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, (tree
) 0, 1);
2822 #ifdef FINAL_REG_PARM_STACK_SPACE
2823 reg_parm_stack_space
= FINAL_REG_PARM_STACK_SPACE (args_size
.constant
,
2827 /* If this machine requires an external definition for library
2828 functions, write one out. */
2829 assemble_external_libcall (fun
);
2831 original_args_size
= args_size
;
2832 #ifdef PREFERRED_STACK_BOUNDARY
2833 args_size
.constant
= (((args_size
.constant
+ (STACK_BYTES
- 1))
2834 / STACK_BYTES
) * STACK_BYTES
);
2837 args_size
.constant
= MAX (args_size
.constant
,
2838 reg_parm_stack_space
);
2840 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2841 args_size
.constant
-= reg_parm_stack_space
;
2844 if (args_size
.constant
> current_function_outgoing_args_size
)
2845 current_function_outgoing_args_size
= args_size
.constant
;
2847 #ifdef ACCUMULATE_OUTGOING_ARGS
2848 /* Since the stack pointer will never be pushed, it is possible for
2849 the evaluation of a parm to clobber something we have already
2850 written to the stack. Since most function calls on RISC machines
2851 do not use the stack, this is uncommon, but must work correctly.
2853 Therefore, we save any area of the stack that was already written
2854 and that we are using. Here we set up to do this by making a new
2855 stack usage map from the old one.
2857 Another approach might be to try to reorder the argument
2858 evaluations to avoid this conflicting stack usage. */
2860 needed
= args_size
.constant
;
2862 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2863 /* Since we will be writing into the entire argument area, the
2864 map must be allocated for its entire size, not just the part that
2865 is the responsibility of the caller. */
2866 needed
+= reg_parm_stack_space
;
2869 #ifdef ARGS_GROW_DOWNWARD
2870 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
2873 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
2876 stack_usage_map
= (char *) alloca (highest_outgoing_arg_in_use
);
2878 if (initial_highest_arg_in_use
)
2879 bcopy (initial_stack_usage_map
, stack_usage_map
,
2880 initial_highest_arg_in_use
);
2882 if (initial_highest_arg_in_use
!= highest_outgoing_arg_in_use
)
2883 bzero (&stack_usage_map
[initial_highest_arg_in_use
],
2884 highest_outgoing_arg_in_use
- initial_highest_arg_in_use
);
2887 /* The address of the outgoing argument list must not be copied to a
2888 register here, because argblock would be left pointing to the
2889 wrong place after the call to allocate_dynamic_stack_space below.
2892 argblock
= virtual_outgoing_args_rtx
;
2893 #else /* not ACCUMULATE_OUTGOING_ARGS */
2894 #ifndef PUSH_ROUNDING
2895 argblock
= push_block (GEN_INT (args_size
.constant
), 0, 0);
2899 #ifdef PUSH_ARGS_REVERSED
2900 #ifdef PREFERRED_STACK_BOUNDARY
2901 /* If we push args individually in reverse order, perform stack alignment
2902 before the first push (the last arg). */
2904 anti_adjust_stack (GEN_INT (args_size
.constant
2905 - original_args_size
.constant
));
2909 #ifdef PUSH_ARGS_REVERSED
2917 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2918 /* The argument list is the property of the called routine and it
2919 may clobber it. If the fixed area has been used for previous
2920 parameters, we must save and restore it.
2922 Here we compute the boundary of the area that needs to be saved, if any. */
2924 #ifdef ARGS_GROW_DOWNWARD
2925 for (count
= 0; count
< reg_parm_stack_space
+ 1; count
++)
2927 for (count
= 0; count
< reg_parm_stack_space
; count
++)
2930 if (count
>= highest_outgoing_arg_in_use
2931 || stack_usage_map
[count
] == 0)
2934 if (low_to_save
== -1)
2935 low_to_save
= count
;
2937 high_to_save
= count
;
2940 if (low_to_save
>= 0)
2942 int num_to_save
= high_to_save
- low_to_save
+ 1;
2943 enum machine_mode save_mode
2944 = mode_for_size (num_to_save
* BITS_PER_UNIT
, MODE_INT
, 1);
2947 /* If we don't have the required alignment, must do this in BLKmode. */
2948 if ((low_to_save
& (MIN (GET_MODE_SIZE (save_mode
),
2949 BIGGEST_ALIGNMENT
/ UNITS_PER_WORD
) - 1)))
2950 save_mode
= BLKmode
;
2952 #ifdef ARGS_GROW_DOWNWARD
2953 stack_area
= gen_rtx_MEM (save_mode
,
2954 memory_address (save_mode
,
2955 plus_constant (argblock
,
2958 stack_area
= gen_rtx_MEM (save_mode
,
2959 memory_address (save_mode
,
2960 plus_constant (argblock
,
2963 if (save_mode
== BLKmode
)
2965 save_area
= assign_stack_temp (BLKmode
, num_to_save
, 0);
2966 emit_block_move (validize_mem (save_area
), stack_area
,
2967 GEN_INT (num_to_save
),
2968 PARM_BOUNDARY
/ BITS_PER_UNIT
);
2972 save_area
= gen_reg_rtx (save_mode
);
2973 emit_move_insn (save_area
, stack_area
);
2978 /* Push the args that need to be pushed. */
2980 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2981 are to be pushed. */
2982 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
2984 register enum machine_mode mode
= argvec
[argnum
].mode
;
2985 register rtx val
= argvec
[argnum
].value
;
2986 rtx reg
= argvec
[argnum
].reg
;
2987 int partial
= argvec
[argnum
].partial
;
2988 #ifdef ACCUMULATE_OUTGOING_ARGS
2989 int lower_bound
, upper_bound
, i
;
2992 if (! (reg
!= 0 && partial
== 0))
2994 #ifdef ACCUMULATE_OUTGOING_ARGS
2995 /* If this is being stored into a pre-allocated, fixed-size, stack
2996 area, save any previous data at that location. */
2998 #ifdef ARGS_GROW_DOWNWARD
2999 /* stack_slot is negative, but we want to index stack_usage_map
3000 with positive values. */
3001 upper_bound
= -argvec
[argnum
].offset
.constant
+ 1;
3002 lower_bound
= upper_bound
- argvec
[argnum
].size
.constant
;
3004 lower_bound
= argvec
[argnum
].offset
.constant
;
3005 upper_bound
= lower_bound
+ argvec
[argnum
].size
.constant
;
3008 for (i
= lower_bound
; i
< upper_bound
; i
++)
3009 if (stack_usage_map
[i
]
3010 /* Don't store things in the fixed argument area at this point;
3011 it has already been saved. */
3012 && i
> reg_parm_stack_space
)
3015 if (i
!= upper_bound
)
3017 /* We need to make a save area. See what mode we can make it. */
3018 enum machine_mode save_mode
3019 = mode_for_size (argvec
[argnum
].size
.constant
* BITS_PER_UNIT
,
3026 plus_constant (argblock
,
3027 argvec
[argnum
].offset
.constant
)));
3029 argvec
[argnum
].save_area
= gen_reg_rtx (save_mode
);
3030 emit_move_insn (argvec
[argnum
].save_area
, stack_area
);
3033 emit_push_insn (val
, mode
, NULL_TREE
, NULL_RTX
, 0, partial
, reg
, 0,
3034 argblock
, GEN_INT (argvec
[argnum
].offset
.constant
),
3035 reg_parm_stack_space
, ARGS_SIZE_RTX (alignment_pad
));
3037 #ifdef ACCUMULATE_OUTGOING_ARGS
3038 /* Now mark the segment we just used. */
3039 for (i
= lower_bound
; i
< upper_bound
; i
++)
3040 stack_usage_map
[i
] = 1;
3047 #ifndef PUSH_ARGS_REVERSED
3048 #ifdef PREFERRED_STACK_BOUNDARY
3049 /* If we pushed args in forward order, perform stack alignment
3050 after pushing the last arg. */
3052 anti_adjust_stack (GEN_INT (args_size
.constant
3053 - original_args_size
.constant
));
3057 #ifdef PUSH_ARGS_REVERSED
3063 fun
= prepare_call_address (fun
, NULL_TREE
, &call_fusage
, 0);
3065 /* Now load any reg parms into their regs. */
3067 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3068 are to be pushed. */
3069 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
3071 register rtx val
= argvec
[argnum
].value
;
3072 rtx reg
= argvec
[argnum
].reg
;
3073 int partial
= argvec
[argnum
].partial
;
3075 if (reg
!= 0 && partial
== 0)
3076 emit_move_insn (reg
, val
);
3080 /* For version 1.37, try deleting this entirely. */
3084 /* Any regs containing parms remain in use through the call. */
3085 for (count
= 0; count
< nargs
; count
++)
3086 if (argvec
[count
].reg
!= 0)
3087 use_reg (&call_fusage
, argvec
[count
].reg
);
3089 /* Don't allow popping to be deferred, since then
3090 cse'ing of library calls could delete a call and leave the pop. */
3093 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3094 will set inhibit_defer_pop to that value. */
3096 /* The return type is needed to decide how many bytes the function pops.
3097 Signedness plays no role in that, so for simplicity, we pretend it's
3098 always signed. We also assume that the list of arguments passed has
3099 no impact, so we pretend it is unknown. */
3102 get_identifier (XSTR (orgfun
, 0)),
3103 build_function_type (outmode
== VOIDmode
? void_type_node
3104 : type_for_mode (outmode
, 0), NULL_TREE
),
3105 original_args_size
.constant
, args_size
.constant
, 0,
3106 FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1),
3107 outmode
!= VOIDmode
? hard_libcall_value (outmode
) : NULL_RTX
,
3108 old_inhibit_defer_pop
+ 1, call_fusage
, no_queue
);
3112 /* Now restore inhibit_defer_pop to its actual original value. */
3115 #ifdef ACCUMULATE_OUTGOING_ARGS
3116 #ifdef REG_PARM_STACK_SPACE
3119 enum machine_mode save_mode
= GET_MODE (save_area
);
3120 #ifdef ARGS_GROW_DOWNWARD
3122 = gen_rtx_MEM (save_mode
,
3123 memory_address (save_mode
,
3124 plus_constant (argblock
,
3128 = gen_rtx_MEM (save_mode
,
3129 memory_address (save_mode
,
3130 plus_constant (argblock
, low_to_save
)));
3133 if (save_mode
!= BLKmode
)
3134 emit_move_insn (stack_area
, save_area
);
3136 emit_block_move (stack_area
, validize_mem (save_area
),
3137 GEN_INT (high_to_save
- low_to_save
+ 1),
3138 PARM_BOUNDARY
/ BITS_PER_UNIT
);
3142 /* If we saved any argument areas, restore them. */
3143 for (count
= 0; count
< nargs
; count
++)
3144 if (argvec
[count
].save_area
)
3146 enum machine_mode save_mode
= GET_MODE (argvec
[count
].save_area
);
3148 = gen_rtx_MEM (save_mode
,
3151 plus_constant (argblock
,
3152 argvec
[count
].offset
.constant
)));
3154 emit_move_insn (stack_area
, argvec
[count
].save_area
);
3157 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
3158 stack_usage_map
= initial_stack_usage_map
;
3162 /* Like emit_library_call except that an extra argument, VALUE,
3163 comes second and says where to store the result.
3164 (If VALUE is zero, this function chooses a convenient way
3165 to return the value.
3167 This function returns an rtx for where the value is to be found.
3168 If VALUE is nonzero, VALUE is returned. */
3171 emit_library_call_value
VPARAMS((rtx orgfun
, rtx value
, int no_queue
,
3172 enum machine_mode outmode
, int nargs
, ...))
3174 #ifndef ANSI_PROTOTYPES
3178 enum machine_mode outmode
;
3182 /* Total size in bytes of all the stack-parms scanned so far. */
3183 struct args_size args_size
;
3184 /* Size of arguments before any adjustments (such as rounding). */
3185 struct args_size original_args_size
;
3186 register int argnum
;
3190 struct args_size alignment_pad
;
3192 CUMULATIVE_ARGS args_so_far
;
3193 struct arg
{ rtx value
; enum machine_mode mode
; rtx reg
; int partial
;
3194 struct args_size offset
; struct args_size size
; rtx save_area
; };
3196 int old_inhibit_defer_pop
= inhibit_defer_pop
;
3197 rtx call_fusage
= 0;
3199 int pcc_struct_value
= 0;
3200 int struct_value_size
= 0;
3202 int reg_parm_stack_space
= 0;
3203 #ifdef ACCUMULATE_OUTGOING_ARGS
3207 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3208 /* Define the boundary of the register parm stack space that needs to be
3210 int low_to_save
= -1, high_to_save
= 0;
3211 rtx save_area
= 0; /* Place that it is saved */
3214 #ifdef ACCUMULATE_OUTGOING_ARGS
3215 /* Size of the stack reserved for parameter registers. */
3216 int initial_highest_arg_in_use
= highest_outgoing_arg_in_use
;
3217 char *initial_stack_usage_map
= stack_usage_map
;
3220 #ifdef REG_PARM_STACK_SPACE
3221 #ifdef MAYBE_REG_PARM_STACK_SPACE
3222 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
3224 reg_parm_stack_space
= REG_PARM_STACK_SPACE ((tree
) 0);
3228 VA_START (p
, nargs
);
3230 #ifndef ANSI_PROTOTYPES
3231 orgfun
= va_arg (p
, rtx
);
3232 value
= va_arg (p
, rtx
);
3233 no_queue
= va_arg (p
, int);
3234 outmode
= va_arg (p
, enum machine_mode
);
3235 nargs
= va_arg (p
, int);
3238 is_const
= no_queue
;
3241 #ifdef PREFERRED_STACK_BOUNDARY
3242 /* Ensure current function's preferred stack boundary is at least
3244 if (cfun
->preferred_stack_boundary
< PREFERRED_STACK_BOUNDARY
)
3245 cfun
->preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
3248 /* If this kind of value comes back in memory,
3249 decide where in memory it should come back. */
3250 if (aggregate_value_p (type_for_mode (outmode
, 0)))
3252 #ifdef PCC_STATIC_STRUCT_RETURN
3254 = hard_function_value (build_pointer_type (type_for_mode (outmode
, 0)),
3256 mem_value
= gen_rtx_MEM (outmode
, pointer_reg
);
3257 pcc_struct_value
= 1;
3259 value
= gen_reg_rtx (outmode
);
3260 #else /* not PCC_STATIC_STRUCT_RETURN */
3261 struct_value_size
= GET_MODE_SIZE (outmode
);
3262 if (value
!= 0 && GET_CODE (value
) == MEM
)
3265 mem_value
= assign_stack_temp (outmode
, GET_MODE_SIZE (outmode
), 0);
3268 /* This call returns a big structure. */
3272 /* ??? Unfinished: must pass the memory address as an argument. */
3274 /* Copy all the libcall-arguments out of the varargs data
3275 and into a vector ARGVEC.
3277 Compute how to pass each argument. We only support a very small subset
3278 of the full argument passing conventions to limit complexity here since
3279 library functions shouldn't have many args. */
3281 argvec
= (struct arg
*) alloca ((nargs
+ 1) * sizeof (struct arg
));
3282 bzero ((char *) argvec
, (nargs
+ 1) * sizeof (struct arg
));
3284 INIT_CUMULATIVE_ARGS (args_so_far
, NULL_TREE
, fun
, 0);
3286 args_size
.constant
= 0;
3293 /* If there's a structure value address to be passed,
3294 either pass it in the special place, or pass it as an extra argument. */
3295 if (mem_value
&& struct_value_rtx
== 0 && ! pcc_struct_value
)
3297 rtx addr
= XEXP (mem_value
, 0);
3300 /* Make sure it is a reasonable operand for a move or push insn. */
3301 if (GET_CODE (addr
) != REG
&& GET_CODE (addr
) != MEM
3302 && ! (CONSTANT_P (addr
) && LEGITIMATE_CONSTANT_P (addr
)))
3303 addr
= force_operand (addr
, NULL_RTX
);
3305 argvec
[count
].value
= addr
;
3306 argvec
[count
].mode
= Pmode
;
3307 argvec
[count
].partial
= 0;
3309 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, Pmode
, NULL_TREE
, 1);
3310 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3311 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, Pmode
, NULL_TREE
, 1))
3315 locate_and_pad_parm (Pmode
, NULL_TREE
,
3316 argvec
[count
].reg
&& argvec
[count
].partial
== 0,
3317 NULL_TREE
, &args_size
, &argvec
[count
].offset
,
3318 &argvec
[count
].size
, &alignment_pad
);
3321 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
3322 || reg_parm_stack_space
> 0)
3323 args_size
.constant
+= argvec
[count
].size
.constant
;
3325 FUNCTION_ARG_ADVANCE (args_so_far
, Pmode
, (tree
) 0, 1);
3330 for (; count
< nargs
; count
++)
3332 rtx val
= va_arg (p
, rtx
);
3333 enum machine_mode mode
= va_arg (p
, enum machine_mode
);
3335 /* We cannot convert the arg value to the mode the library wants here;
3336 must do it earlier where we know the signedness of the arg. */
3338 || (GET_MODE (val
) != mode
&& GET_MODE (val
) != VOIDmode
))
3341 /* On some machines, there's no way to pass a float to a library fcn.
3342 Pass it as a double instead. */
3343 #ifdef LIBGCC_NEEDS_DOUBLE
3344 if (LIBGCC_NEEDS_DOUBLE
&& mode
== SFmode
)
3345 val
= convert_modes (DFmode
, SFmode
, val
, 0), mode
= DFmode
;
3348 /* There's no need to call protect_from_queue, because
3349 either emit_move_insn or emit_push_insn will do that. */
3351 /* Make sure it is a reasonable operand for a move or push insn. */
3352 if (GET_CODE (val
) != REG
&& GET_CODE (val
) != MEM
3353 && ! (CONSTANT_P (val
) && LEGITIMATE_CONSTANT_P (val
)))
3354 val
= force_operand (val
, NULL_RTX
);
3356 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3357 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, mode
, NULL_TREE
, 1))
3359 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3360 be viewed as just an efficiency improvement. */
3361 rtx slot
= assign_stack_temp (mode
, GET_MODE_SIZE (mode
), 0);
3362 emit_move_insn (slot
, val
);
3363 val
= XEXP (slot
, 0);
3368 argvec
[count
].value
= val
;
3369 argvec
[count
].mode
= mode
;
3371 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
3372 if (argvec
[count
].reg
&& GET_CODE (argvec
[count
].reg
) == PARALLEL
)
3374 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3375 argvec
[count
].partial
3376 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
, NULL_TREE
, 1);
3378 argvec
[count
].partial
= 0;
3381 locate_and_pad_parm (mode
, NULL_TREE
,
3382 argvec
[count
].reg
&& argvec
[count
].partial
== 0,
3383 NULL_TREE
, &args_size
, &argvec
[count
].offset
,
3384 &argvec
[count
].size
, &alignment_pad
);
3386 if (argvec
[count
].size
.var
)
3389 if (reg_parm_stack_space
== 0 && argvec
[count
].partial
)
3390 argvec
[count
].size
.constant
-= argvec
[count
].partial
* UNITS_PER_WORD
;
3392 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
3393 || reg_parm_stack_space
> 0)
3394 args_size
.constant
+= argvec
[count
].size
.constant
;
3396 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, (tree
) 0, 1);
3400 #ifdef FINAL_REG_PARM_STACK_SPACE
3401 reg_parm_stack_space
= FINAL_REG_PARM_STACK_SPACE (args_size
.constant
,
3404 /* If this machine requires an external definition for library
3405 functions, write one out. */
3406 assemble_external_libcall (fun
);
3408 original_args_size
= args_size
;
3409 #ifdef PREFERRED_STACK_BOUNDARY
3410 args_size
.constant
= (((args_size
.constant
+ (STACK_BYTES
- 1))
3411 / STACK_BYTES
) * STACK_BYTES
);
3414 args_size
.constant
= MAX (args_size
.constant
,
3415 reg_parm_stack_space
);
3417 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3418 args_size
.constant
-= reg_parm_stack_space
;
3421 if (args_size
.constant
> current_function_outgoing_args_size
)
3422 current_function_outgoing_args_size
= args_size
.constant
;
3424 #ifdef ACCUMULATE_OUTGOING_ARGS
3425 /* Since the stack pointer will never be pushed, it is possible for
3426 the evaluation of a parm to clobber something we have already
3427 written to the stack. Since most function calls on RISC machines
3428 do not use the stack, this is uncommon, but must work correctly.
3430 Therefore, we save any area of the stack that was already written
3431 and that we are using. Here we set up to do this by making a new
3432 stack usage map from the old one.
3434 Another approach might be to try to reorder the argument
3435 evaluations to avoid this conflicting stack usage. */
3437 needed
= args_size
.constant
;
3439 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3440 /* Since we will be writing into the entire argument area, the
3441 map must be allocated for its entire size, not just the part that
3442 is the responsibility of the caller. */
3443 needed
+= reg_parm_stack_space
;
3446 #ifdef ARGS_GROW_DOWNWARD
3447 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
3450 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
3453 stack_usage_map
= (char *) alloca (highest_outgoing_arg_in_use
);
3455 if (initial_highest_arg_in_use
)
3456 bcopy (initial_stack_usage_map
, stack_usage_map
,
3457 initial_highest_arg_in_use
);
3459 if (initial_highest_arg_in_use
!= highest_outgoing_arg_in_use
)
3460 bzero (&stack_usage_map
[initial_highest_arg_in_use
],
3461 highest_outgoing_arg_in_use
- initial_highest_arg_in_use
);
3464 /* The address of the outgoing argument list must not be copied to a
3465 register here, because argblock would be left pointing to the
3466 wrong place after the call to allocate_dynamic_stack_space below.
3469 argblock
= virtual_outgoing_args_rtx
;
3470 #else /* not ACCUMULATE_OUTGOING_ARGS */
3471 #ifndef PUSH_ROUNDING
3472 argblock
= push_block (GEN_INT (args_size
.constant
), 0, 0);
3476 #ifdef PUSH_ARGS_REVERSED
3477 #ifdef PREFERRED_STACK_BOUNDARY
3478 /* If we push args individually in reverse order, perform stack alignment
3479 before the first push (the last arg). */
3481 anti_adjust_stack (GEN_INT (args_size
.constant
3482 - original_args_size
.constant
));
3486 #ifdef PUSH_ARGS_REVERSED
3494 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3495 /* The argument list is the property of the called routine and it
3496 may clobber it. If the fixed area has been used for previous
3497 parameters, we must save and restore it.
3499 Here we compute the boundary of the that needs to be saved, if any. */
3501 #ifdef ARGS_GROW_DOWNWARD
3502 for (count
= 0; count
< reg_parm_stack_space
+ 1; count
++)
3504 for (count
= 0; count
< reg_parm_stack_space
; count
++)
3507 if (count
>= highest_outgoing_arg_in_use
3508 || stack_usage_map
[count
] == 0)
3511 if (low_to_save
== -1)
3512 low_to_save
= count
;
3514 high_to_save
= count
;
3517 if (low_to_save
>= 0)
3519 int num_to_save
= high_to_save
- low_to_save
+ 1;
3520 enum machine_mode save_mode
3521 = mode_for_size (num_to_save
* BITS_PER_UNIT
, MODE_INT
, 1);
3524 /* If we don't have the required alignment, must do this in BLKmode. */
3525 if ((low_to_save
& (MIN (GET_MODE_SIZE (save_mode
),
3526 BIGGEST_ALIGNMENT
/ UNITS_PER_WORD
) - 1)))
3527 save_mode
= BLKmode
;
3529 #ifdef ARGS_GROW_DOWNWARD
3530 stack_area
= gen_rtx_MEM (save_mode
,
3531 memory_address (save_mode
,
3532 plus_constant (argblock
,
3535 stack_area
= gen_rtx_MEM (save_mode
,
3536 memory_address (save_mode
,
3537 plus_constant (argblock
,
3540 if (save_mode
== BLKmode
)
3542 save_area
= assign_stack_temp (BLKmode
, num_to_save
, 0);
3543 emit_block_move (validize_mem (save_area
), stack_area
,
3544 GEN_INT (num_to_save
),
3545 PARM_BOUNDARY
/ BITS_PER_UNIT
);
3549 save_area
= gen_reg_rtx (save_mode
);
3550 emit_move_insn (save_area
, stack_area
);
3555 /* Push the args that need to be pushed. */
3557 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3558 are to be pushed. */
3559 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
3561 register enum machine_mode mode
= argvec
[argnum
].mode
;
3562 register rtx val
= argvec
[argnum
].value
;
3563 rtx reg
= argvec
[argnum
].reg
;
3564 int partial
= argvec
[argnum
].partial
;
3565 #ifdef ACCUMULATE_OUTGOING_ARGS
3566 int lower_bound
, upper_bound
, i
;
3569 if (! (reg
!= 0 && partial
== 0))
3571 #ifdef ACCUMULATE_OUTGOING_ARGS
3572 /* If this is being stored into a pre-allocated, fixed-size, stack
3573 area, save any previous data at that location. */
3575 #ifdef ARGS_GROW_DOWNWARD
3576 /* stack_slot is negative, but we want to index stack_usage_map
3577 with positive values. */
3578 upper_bound
= -argvec
[argnum
].offset
.constant
+ 1;
3579 lower_bound
= upper_bound
- argvec
[argnum
].size
.constant
;
3581 lower_bound
= argvec
[argnum
].offset
.constant
;
3582 upper_bound
= lower_bound
+ argvec
[argnum
].size
.constant
;
3585 for (i
= lower_bound
; i
< upper_bound
; i
++)
3586 if (stack_usage_map
[i
]
3587 /* Don't store things in the fixed argument area at this point;
3588 it has already been saved. */
3589 && i
> reg_parm_stack_space
)
3592 if (i
!= upper_bound
)
3594 /* We need to make a save area. See what mode we can make it. */
3595 enum machine_mode save_mode
3596 = mode_for_size (argvec
[argnum
].size
.constant
* BITS_PER_UNIT
,
3603 plus_constant (argblock
,
3604 argvec
[argnum
].offset
.constant
)));
3605 argvec
[argnum
].save_area
= gen_reg_rtx (save_mode
);
3607 emit_move_insn (argvec
[argnum
].save_area
, stack_area
);
3610 emit_push_insn (val
, mode
, NULL_TREE
, NULL_RTX
, 0, partial
, reg
, 0,
3611 argblock
, GEN_INT (argvec
[argnum
].offset
.constant
),
3612 reg_parm_stack_space
, ARGS_SIZE_RTX (alignment_pad
));
3614 #ifdef ACCUMULATE_OUTGOING_ARGS
3615 /* Now mark the segment we just used. */
3616 for (i
= lower_bound
; i
< upper_bound
; i
++)
3617 stack_usage_map
[i
] = 1;
3624 #ifndef PUSH_ARGS_REVERSED
3625 #ifdef PREFERRED_STACK_BOUNDARY
3626 /* If we pushed args in forward order, perform stack alignment
3627 after pushing the last arg. */
3629 anti_adjust_stack (GEN_INT (args_size
.constant
3630 - original_args_size
.constant
));
3634 #ifdef PUSH_ARGS_REVERSED
3640 fun
= prepare_call_address (fun
, NULL_TREE
, &call_fusage
, 0);
3642 /* Now load any reg parms into their regs. */
3644 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3645 are to be pushed. */
3646 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
3648 register rtx val
= argvec
[argnum
].value
;
3649 rtx reg
= argvec
[argnum
].reg
;
3650 int partial
= argvec
[argnum
].partial
;
3652 if (reg
!= 0 && partial
== 0)
3653 emit_move_insn (reg
, val
);
3658 /* For version 1.37, try deleting this entirely. */
3663 /* Any regs containing parms remain in use through the call. */
3664 for (count
= 0; count
< nargs
; count
++)
3665 if (argvec
[count
].reg
!= 0)
3666 use_reg (&call_fusage
, argvec
[count
].reg
);
3668 /* Pass the function the address in which to return a structure value. */
3669 if (mem_value
!= 0 && struct_value_rtx
!= 0 && ! pcc_struct_value
)
3671 emit_move_insn (struct_value_rtx
,
3673 force_operand (XEXP (mem_value
, 0),
3675 if (GET_CODE (struct_value_rtx
) == REG
)
3676 use_reg (&call_fusage
, struct_value_rtx
);
3679 /* Don't allow popping to be deferred, since then
3680 cse'ing of library calls could delete a call and leave the pop. */
3683 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3684 will set inhibit_defer_pop to that value. */
3685 /* See the comment in emit_library_call about the function type we build
3689 get_identifier (XSTR (orgfun
, 0)),
3690 build_function_type (type_for_mode (outmode
, 0), NULL_TREE
),
3691 original_args_size
.constant
, args_size
.constant
,
3693 FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1),
3694 mem_value
== 0 ? hard_libcall_value (outmode
) : NULL_RTX
,
3695 old_inhibit_defer_pop
+ 1, call_fusage
, is_const
);
3697 /* Now restore inhibit_defer_pop to its actual original value. */
3702 /* Copy the value to the right place. */
3703 if (outmode
!= VOIDmode
)
3709 if (value
!= mem_value
)
3710 emit_move_insn (value
, mem_value
);
3712 else if (value
!= 0)
3713 emit_move_insn (value
, hard_libcall_value (outmode
));
3715 value
= hard_libcall_value (outmode
);
3718 #ifdef ACCUMULATE_OUTGOING_ARGS
3719 #ifdef REG_PARM_STACK_SPACE
3722 enum machine_mode save_mode
= GET_MODE (save_area
);
3723 #ifdef ARGS_GROW_DOWNWARD
3725 = gen_rtx_MEM (save_mode
,
3726 memory_address (save_mode
,
3727 plus_constant (argblock
,
3731 = gen_rtx_MEM (save_mode
,
3732 memory_address (save_mode
,
3733 plus_constant (argblock
, low_to_save
)));
3735 if (save_mode
!= BLKmode
)
3736 emit_move_insn (stack_area
, save_area
);
3738 emit_block_move (stack_area
, validize_mem (save_area
),
3739 GEN_INT (high_to_save
- low_to_save
+ 1),
3740 PARM_BOUNDARY
/ BITS_PER_UNIT
);
3744 /* If we saved any argument areas, restore them. */
3745 for (count
= 0; count
< nargs
; count
++)
3746 if (argvec
[count
].save_area
)
3748 enum machine_mode save_mode
= GET_MODE (argvec
[count
].save_area
);
3750 = gen_rtx_MEM (save_mode
,
3753 plus_constant (argblock
,
3754 argvec
[count
].offset
.constant
)));
3756 emit_move_insn (stack_area
, argvec
[count
].save_area
);
3759 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
3760 stack_usage_map
= initial_stack_usage_map
;
3767 /* Return an rtx which represents a suitable home on the stack
3768 given TYPE, the type of the argument looking for a home.
3769 This is called only for BLKmode arguments.
3771 SIZE is the size needed for this target.
3772 ARGS_ADDR is the address of the bottom of the argument block for this call.
3773 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3774 if this machine uses push insns. */
3777 target_for_arg (type
, size
, args_addr
, offset
)
3781 struct args_size offset
;
3784 rtx offset_rtx
= ARGS_SIZE_RTX (offset
);
3786 /* We do not call memory_address if possible,
3787 because we want to address as close to the stack
3788 as possible. For non-variable sized arguments,
3789 this will be stack-pointer relative addressing. */
3790 if (GET_CODE (offset_rtx
) == CONST_INT
)
3791 target
= plus_constant (args_addr
, INTVAL (offset_rtx
));
3794 /* I have no idea how to guarantee that this
3795 will work in the presence of register parameters. */
3796 target
= gen_rtx_PLUS (Pmode
, args_addr
, offset_rtx
);
3797 target
= memory_address (QImode
, target
);
3800 return gen_rtx_MEM (BLKmode
, target
);
3804 /* Store a single argument for a function call
3805 into the register or memory area where it must be passed.
3806 *ARG describes the argument value and where to pass it.
3808 ARGBLOCK is the address of the stack-block for all the arguments,
3809 or 0 on a machine where arguments are pushed individually.
3811 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3812 so must be careful about how the stack is used.
3814 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3815 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3816 that we need not worry about saving and restoring the stack.
3818 FNDECL is the declaration of the function we are calling. */
3821 store_one_arg (arg
, argblock
, may_be_alloca
, variable_size
,
3822 reg_parm_stack_space
)
3823 struct arg_data
*arg
;
3826 int variable_size ATTRIBUTE_UNUSED
;
3827 int reg_parm_stack_space
;
3829 register tree pval
= arg
->tree_value
;
3833 #ifdef ACCUMULATE_OUTGOING_ARGS
3834 int i
, lower_bound
= 0, upper_bound
= 0;
3837 if (TREE_CODE (pval
) == ERROR_MARK
)
3840 /* Push a new temporary level for any temporaries we make for
3844 #ifdef ACCUMULATE_OUTGOING_ARGS
3845 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3846 save any previous data at that location. */
3847 if (argblock
&& ! variable_size
&& arg
->stack
)
3849 #ifdef ARGS_GROW_DOWNWARD
3850 /* stack_slot is negative, but we want to index stack_usage_map
3851 with positive values. */
3852 if (GET_CODE (XEXP (arg
->stack_slot
, 0)) == PLUS
)
3853 upper_bound
= -INTVAL (XEXP (XEXP (arg
->stack_slot
, 0), 1)) + 1;
3857 lower_bound
= upper_bound
- arg
->size
.constant
;
3859 if (GET_CODE (XEXP (arg
->stack_slot
, 0)) == PLUS
)
3860 lower_bound
= INTVAL (XEXP (XEXP (arg
->stack_slot
, 0), 1));
3864 upper_bound
= lower_bound
+ arg
->size
.constant
;
3867 for (i
= lower_bound
; i
< upper_bound
; i
++)
3868 if (stack_usage_map
[i
]
3869 /* Don't store things in the fixed argument area at this point;
3870 it has already been saved. */
3871 && i
> reg_parm_stack_space
)
3874 if (i
!= upper_bound
)
3876 /* We need to make a save area. See what mode we can make it. */
3877 enum machine_mode save_mode
3878 = mode_for_size (arg
->size
.constant
* BITS_PER_UNIT
, MODE_INT
, 1);
3880 = gen_rtx_MEM (save_mode
,
3881 memory_address (save_mode
,
3882 XEXP (arg
->stack_slot
, 0)));
3884 if (save_mode
== BLKmode
)
3886 arg
->save_area
= assign_stack_temp (BLKmode
,
3887 arg
->size
.constant
, 0);
3888 MEM_SET_IN_STRUCT_P (arg
->save_area
,
3889 AGGREGATE_TYPE_P (TREE_TYPE
3890 (arg
->tree_value
)));
3891 preserve_temp_slots (arg
->save_area
);
3892 emit_block_move (validize_mem (arg
->save_area
), stack_area
,
3893 GEN_INT (arg
->size
.constant
),
3894 PARM_BOUNDARY
/ BITS_PER_UNIT
);
3898 arg
->save_area
= gen_reg_rtx (save_mode
);
3899 emit_move_insn (arg
->save_area
, stack_area
);
3904 /* Now that we have saved any slots that will be overwritten by this
3905 store, mark all slots this store will use. We must do this before
3906 we actually expand the argument since the expansion itself may
3907 trigger library calls which might need to use the same stack slot. */
3908 if (argblock
&& ! variable_size
&& arg
->stack
)
3909 for (i
= lower_bound
; i
< upper_bound
; i
++)
3910 stack_usage_map
[i
] = 1;
3913 /* If this isn't going to be placed on both the stack and in registers,
3914 set up the register and number of words. */
3915 if (! arg
->pass_on_stack
)
3916 reg
= arg
->reg
, partial
= arg
->partial
;
3918 if (reg
!= 0 && partial
== 0)
3919 /* Being passed entirely in a register. We shouldn't be called in
3923 /* If this arg needs special alignment, don't load the registers
3925 if (arg
->n_aligned_regs
!= 0)
3928 /* If this is being passed partially in a register, we can't evaluate
3929 it directly into its stack slot. Otherwise, we can. */
3930 if (arg
->value
== 0)
3932 #ifdef ACCUMULATE_OUTGOING_ARGS
3933 /* stack_arg_under_construction is nonzero if a function argument is
3934 being evaluated directly into the outgoing argument list and
3935 expand_call must take special action to preserve the argument list
3936 if it is called recursively.
3938 For scalar function arguments stack_usage_map is sufficient to
3939 determine which stack slots must be saved and restored. Scalar
3940 arguments in general have pass_on_stack == 0.
3942 If this argument is initialized by a function which takes the
3943 address of the argument (a C++ constructor or a C function
3944 returning a BLKmode structure), then stack_usage_map is
3945 insufficient and expand_call must push the stack around the
3946 function call. Such arguments have pass_on_stack == 1.
3948 Note that it is always safe to set stack_arg_under_construction,
3949 but this generates suboptimal code if set when not needed. */
3951 if (arg
->pass_on_stack
)
3952 stack_arg_under_construction
++;
3954 arg
->value
= expand_expr (pval
,
3956 || TYPE_MODE (TREE_TYPE (pval
)) != arg
->mode
)
3957 ? NULL_RTX
: arg
->stack
,
3960 /* If we are promoting object (or for any other reason) the mode
3961 doesn't agree, convert the mode. */
3963 if (arg
->mode
!= TYPE_MODE (TREE_TYPE (pval
)))
3964 arg
->value
= convert_modes (arg
->mode
, TYPE_MODE (TREE_TYPE (pval
)),
3965 arg
->value
, arg
->unsignedp
);
3967 #ifdef ACCUMULATE_OUTGOING_ARGS
3968 if (arg
->pass_on_stack
)
3969 stack_arg_under_construction
--;
3973 /* Don't allow anything left on stack from computation
3974 of argument to alloca. */
3976 do_pending_stack_adjust ();
3978 if (arg
->value
== arg
->stack
)
3980 /* If the value is already in the stack slot, we are done. */
3981 if (current_function_check_memory_usage
&& GET_CODE (arg
->stack
) == MEM
)
3983 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
3984 XEXP (arg
->stack
, 0), Pmode
,
3985 ARGS_SIZE_RTX (arg
->size
),
3986 TYPE_MODE (sizetype
),
3987 GEN_INT (MEMORY_USE_RW
),
3988 TYPE_MODE (integer_type_node
));
3991 else if (arg
->mode
!= BLKmode
)
3995 /* Argument is a scalar, not entirely passed in registers.
3996 (If part is passed in registers, arg->partial says how much
3997 and emit_push_insn will take care of putting it there.)
3999 Push it, and if its size is less than the
4000 amount of space allocated to it,
4001 also bump stack pointer by the additional space.
4002 Note that in C the default argument promotions
4003 will prevent such mismatches. */
4005 size
= GET_MODE_SIZE (arg
->mode
);
4006 /* Compute how much space the push instruction will push.
4007 On many machines, pushing a byte will advance the stack
4008 pointer by a halfword. */
4009 #ifdef PUSH_ROUNDING
4010 size
= PUSH_ROUNDING (size
);
4014 /* Compute how much space the argument should get:
4015 round up to a multiple of the alignment for arguments. */
4016 if (none
!= FUNCTION_ARG_PADDING (arg
->mode
, TREE_TYPE (pval
)))
4017 used
= (((size
+ PARM_BOUNDARY
/ BITS_PER_UNIT
- 1)
4018 / (PARM_BOUNDARY
/ BITS_PER_UNIT
))
4019 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
4021 /* This isn't already where we want it on the stack, so put it there.
4022 This can either be done with push or copy insns. */
4023 emit_push_insn (arg
->value
, arg
->mode
, TREE_TYPE (pval
), NULL_RTX
, 0,
4024 partial
, reg
, used
- size
, argblock
,
4025 ARGS_SIZE_RTX (arg
->offset
), reg_parm_stack_space
,
4026 ARGS_SIZE_RTX (arg
->alignment_pad
));
4031 /* BLKmode, at least partly to be pushed. */
4033 register int excess
;
4036 /* Pushing a nonscalar.
4037 If part is passed in registers, PARTIAL says how much
4038 and emit_push_insn will take care of putting it there. */
4040 /* Round its size up to a multiple
4041 of the allocation unit for arguments. */
4043 if (arg
->size
.var
!= 0)
4046 size_rtx
= ARGS_SIZE_RTX (arg
->size
);
4050 /* PUSH_ROUNDING has no effect on us, because
4051 emit_push_insn for BLKmode is careful to avoid it. */
4052 excess
= (arg
->size
.constant
- int_size_in_bytes (TREE_TYPE (pval
))
4053 + partial
* UNITS_PER_WORD
);
4054 size_rtx
= expr_size (pval
);
4057 emit_push_insn (arg
->value
, arg
->mode
, TREE_TYPE (pval
), size_rtx
,
4058 TYPE_ALIGN (TREE_TYPE (pval
)) / BITS_PER_UNIT
, partial
,
4059 reg
, excess
, argblock
, ARGS_SIZE_RTX (arg
->offset
),
4060 reg_parm_stack_space
,
4061 ARGS_SIZE_RTX (arg
->alignment_pad
));
4065 /* Unless this is a partially-in-register argument, the argument is now
4068 ??? Note that this can change arg->value from arg->stack to
4069 arg->stack_slot and it matters when they are not the same.
4070 It isn't totally clear that this is correct in all cases. */
4072 arg
->value
= arg
->stack_slot
;
4074 /* Once we have pushed something, pops can't safely
4075 be deferred during the rest of the arguments. */
4078 /* ANSI doesn't require a sequence point here,
4079 but PCC has one, so this will avoid some problems. */
4082 /* Free any temporary slots made in processing this argument. Show
4083 that we might have taken the address of something and pushed that
4085 preserve_temp_slots (NULL_RTX
);