/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "function.h"
#include "regs.h"
#include "insn-flags.h"
#include "toplev.h"
#include "output.h"

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

51bbfa0c 37/* Decide whether a function's arguments should be processed
bbc8a071
RK
38 from first to last or from last to first.
39
40 They should if the stack and args grow in opposite directions, but
41 only if we have push insns. */
51bbfa0c 42
51bbfa0c 43#ifdef PUSH_ROUNDING
bbc8a071 44
40083ddf 45#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
51bbfa0c
RS
46#define PUSH_ARGS_REVERSED /* If it's last to first */
47#endif
bbc8a071 48
51bbfa0c
RS
49#endif
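
/* For example, a target that defines STACK_GROWS_DOWNWARD but not
   ARGS_GROW_DOWNWARD (a common configuration) satisfies the test above,
   so PUSH_ARGS_REVERSED is defined and the arguments are computed and
   pushed from last to first; if both or neither are defined, they are
   processed from first to last.  */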

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)

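/* Illustrative numbers only (the real values are target-dependent): with
   a PREFERRED_STACK_BOUNDARY of 64 bits, STACK_BYTES is 8, and
   compute_argument_block_size below rounds a 20-byte argument block up
   to 24 bytes.  */
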
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Non-zero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets;
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
#endif
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
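
/* As an illustration of OFFSET versus SLOT_OFFSET (hypothetical layout;
   the real choice is made by locate_and_pad_parm): a 4-byte value padded
   downward in an 8-byte slot has SLOT_OFFSET at the start of the slot and
   OFFSET 4 bytes further along, where the value itself begins.  */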

#ifdef ACCUMULATE_OUTGOING_ARGS
/* A vector of one char per byte of stack space.  A byte is non-zero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
#endif

static int calls_function	PROTO ((tree, int));
static int calls_function_1	PROTO ((tree, int));
static void emit_call_1		PROTO ((rtx, tree, tree, HOST_WIDE_INT,
					HOST_WIDE_INT, HOST_WIDE_INT, rtx,
					rtx, int, rtx, int));
static void special_function_p	PROTO ((char *, tree, int *, int *,
					int *, int *));
static void precompute_register_parameters	PROTO ((int, struct arg_data *,
							int *));
static void store_one_arg	PROTO ((struct arg_data *, rtx, int, int,
					int));
static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
							    int));
static int finalize_must_preallocate	PROTO ((int, int,
						struct arg_data *,
						struct args_size *));
static void precompute_arguments	PROTO ((int, int, int,
						struct arg_data *,
						struct args_size *));
static int compute_argument_block_size	PROTO ((int,
						struct args_size *));
static void initialize_argument_information	PROTO ((int,
							struct arg_data *,
							struct args_size *,
							int, tree, tree,
							CUMULATIVE_ARGS *,
							int, rtx *, int *,
							int *, int *));
static void compute_argument_addresses	PROTO ((struct arg_data *,
						rtx, int));
static rtx rtx_for_function_call	PROTO ((tree, tree));
static void load_register_parameters	PROTO ((struct arg_data *,
						int, rtx *));

#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
static rtx save_fixed_argument_area	PROTO ((int, rtx, int *, int *));
static void restore_fixed_argument_area	PROTO ((rtx, rtx, int, int));
#endif
\f
167/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
168 `alloca'.
169
170 If WHICH is 0, return 1 if EXP contains a call to any function.
171 Actually, we only need return 1 if evaluating EXP would require pushing
172 arguments on the stack, but that is too difficult to compute, so we just
173 assume any function call might require the stack. */
51bbfa0c 174
175static tree calls_function_save_exprs;
176
51bbfa0c 177static int
1ce0cb53 178calls_function (exp, which)
51bbfa0c 179 tree exp;
1ce0cb53 180 int which;
181{
182 int val;
183 calls_function_save_exprs = 0;
184 val = calls_function_1 (exp, which);
185 calls_function_save_exprs = 0;
186 return val;
187}
188
189static int
190calls_function_1 (exp, which)
191 tree exp;
192 int which;
193{
194 register int i;
195 enum tree_code code = TREE_CODE (exp);
196 int type = TREE_CODE_CLASS (code);
197 int length = tree_code_length[(int) code];
51bbfa0c 198
ddd5a7c1 199 /* If this code is language-specific, we don't know what it will do. */
200 if ((int) code >= NUM_TREE_CODES)
201 return 1;
51bbfa0c 202
0207efa2 203 /* Only expressions and references can contain calls. */
204 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
205 && type != 'b')
206 return 0;
207
0207efa2 208 switch (code)
209 {
210 case CALL_EXPR:
211 if (which == 0)
212 return 1;
213 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
214 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
215 == FUNCTION_DECL))
216 {
217 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
218
219 if ((DECL_BUILT_IN (fndecl)
220 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
221 || (DECL_SAVED_INSNS (fndecl)
49ad7cfa 222 && DECL_SAVED_INSNS (fndecl)->calls_alloca))
223 return 1;
224 }
225
226 /* Third operand is RTL. */
227 length = 2;
228 break;
229
230 case SAVE_EXPR:
231 if (SAVE_EXPR_RTL (exp) != 0)
232 return 0;
233 if (value_member (exp, calls_function_save_exprs))
234 return 0;
235 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
236 calls_function_save_exprs);
237 return (TREE_OPERAND (exp, 0) != 0
238 && calls_function_1 (TREE_OPERAND (exp, 0), which));
239
240 case BLOCK:
241 {
242 register tree local;
243
244 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
1ce0cb53 245 if (DECL_INITIAL (local) != 0
1c8d7aef 246 && calls_function_1 (DECL_INITIAL (local), which))
247 return 1;
248 }
249 {
250 register tree subblock;
251
252 for (subblock = BLOCK_SUBBLOCKS (exp);
253 subblock;
254 subblock = TREE_CHAIN (subblock))
1c8d7aef 255 if (calls_function_1 (subblock, which))
256 return 1;
257 }
258 return 0;
259
260 case METHOD_CALL_EXPR:
261 length = 3;
262 break;
263
264 case WITH_CLEANUP_EXPR:
265 length = 1;
266 break;
267
268 case RTL_EXPR:
269 return 0;
270
271 default:
272 break;
273 }
274
275 for (i = 0; i < length; i++)
276 if (TREE_OPERAND (exp, i) != 0
1c8d7aef 277 && calls_function_1 (TREE_OPERAND (exp, i), which))
278 return 1;
279
280 return 0;
281}
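/* Example of the difference between the two entry points: for an actual
   argument EXP, calls_function (exp, 0) is nonzero if evaluating EXP might
   call any function at all, while calls_function (exp, 1) is nonzero only
   if it might call alloca; precompute_arguments below relies on both
   forms.  */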
282\f
283/* Force FUNEXP into a form suitable for the address of a CALL,
284 and return that as an rtx. Also load the static chain register
285 if FNDECL is a nested function.
286
287 CALL_FUSAGE points to a variable holding the prospective
288 CALL_INSN_FUNCTION_USAGE information. */
51bbfa0c 289
03dacb02 290rtx
77cac2f2 291prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
292 rtx funexp;
293 tree fndecl;
77cac2f2 294 rtx *call_fusage;
01368078 295 int reg_parm_seen;
296{
297 rtx static_chain_value = 0;
298
299 funexp = protect_from_queue (funexp, 0);
300
301 if (fndecl != 0)
0f41302f 302 /* Get possible static chain value for nested function in C. */
303 static_chain_value = lookup_static_chain (fndecl);
304
305 /* Make a valid memory address and copy constants thru pseudo-regs,
306 but not for a constant address if -fno-function-cse. */
307 if (GET_CODE (funexp) != SYMBOL_REF)
01368078 308 /* If we are using registers for parameters, force the
309 function address into a register now. */
310 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
311 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
312 : memory_address (FUNCTION_MODE, funexp));
313 else
314 {
315#ifndef NO_FUNCTION_CSE
316 if (optimize && ! flag_no_function_cse)
317#ifdef NO_RECURSIVE_FUNCTION_CSE
318 if (fndecl != current_function_decl)
319#endif
320 funexp = force_reg (Pmode, funexp);
321#endif
322 }
323
324 if (static_chain_value != 0)
325 {
326 emit_move_insn (static_chain_rtx, static_chain_value);
327
328 if (GET_CODE (static_chain_rtx) == REG)
329 use_reg (call_fusage, static_chain_rtx);
330 }
331
332 return funexp;
333}
334
335/* Generate instructions to call function FUNEXP,
336 and optionally pop the results.
337 The CALL_INSN is the first insn generated.
338
607ea900 339 FNDECL is the declaration node of the function. This is given to the
340 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
341
342 FUNTYPE is the data type of the function. This is given to the macro
343 RETURN_POPS_ARGS to determine whether this function pops its own args.
344 We used to allow an identifier for library functions, but that doesn't
345 work when the return type is an aggregate type and the calling convention
346 says that the pointer to this aggregate is to be popped by the callee.
347
348 STACK_SIZE is the number of bytes of arguments on the stack,
c795bca9 349 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
350 This is both to put into the call insn and
351 to generate explicit popping code if necessary.
352
353 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
354 It is zero if this call doesn't want a structure value.
355
356 NEXT_ARG_REG is the rtx that results from executing
357 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
358 just after all the args have had their registers assigned.
359 This could be whatever you like, but normally it is the first
360 arg-register beyond those used for args in this call,
361 or 0 if all the arg-registers are used in this call.
362 It is passed on to `gen_call' so you can put this info in the call insn.
363
364 VALREG is a hard register in which a value is returned,
365 or 0 if the call does not return a value.
366
367 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
368 the args to this call were processed.
369 We restore `inhibit_defer_pop' to that value.
370
371 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
372 denote registers used by the called function.
373
374 IS_CONST is true if this is a `const' call. */
375
322e3e34 376static void
377emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
378 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
379 call_fusage, is_const)
51bbfa0c 380 rtx funexp;
381 tree fndecl ATTRIBUTE_UNUSED;
382 tree funtype ATTRIBUTE_UNUSED;
6a651371 383 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
fb5eebb9 384 HOST_WIDE_INT rounded_stack_size;
e5e809f4 385 HOST_WIDE_INT struct_value_size;
386 rtx next_arg_reg;
387 rtx valreg;
388 int old_inhibit_defer_pop;
77cac2f2 389 rtx call_fusage;
390 int is_const;
391{
062e7fd8 392 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
e5d70561 393 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
51bbfa0c 394 rtx call_insn;
081f5e7e 395#ifndef ACCUMULATE_OUTGOING_ARGS
51bbfa0c 396 int already_popped = 0;
fb5eebb9 397 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
081f5e7e 398#endif
399
400 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
401 and we don't want to load it into a register as an optimization,
402 because prepare_call_address already did it if it should be done. */
403 if (GET_CODE (funexp) != SYMBOL_REF)
404 funexp = memory_address (FUNCTION_MODE, funexp);
405
406#ifndef ACCUMULATE_OUTGOING_ARGS
407#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
408/* If the target has "call" or "call_value" insns, then prefer them
409 if no arguments are actually popped. If the target does not have
410 "call" or "call_value" insns, then we must use the popping versions
411 even if the call has no arguments to pop. */
412#if defined (HAVE_call) && defined (HAVE_call_value)
413 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
414 && n_popped > 0)
415#else
416 if (HAVE_call_pop && HAVE_call_value_pop)
417#endif
51bbfa0c 418 {
fb5eebb9 419 rtx n_pop = GEN_INT (n_popped);
420 rtx pat;
421
422 /* If this subroutine pops its own args, record that in the call insn
423 if possible, for the sake of frame pointer elimination. */
2c8da025 424
425 if (valreg)
426 pat = gen_call_value_pop (valreg,
38a448ca 427 gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 428 rounded_stack_size_rtx, next_arg_reg, n_pop);
51bbfa0c 429 else
38a448ca 430 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 431 rounded_stack_size_rtx, next_arg_reg, n_pop);
432
433 emit_call_insn (pat);
434 already_popped = 1;
435 }
436 else
437#endif
438#endif
439
440#if defined (HAVE_call) && defined (HAVE_call_value)
441 if (HAVE_call && HAVE_call_value)
442 {
443 if (valreg)
444 emit_call_insn (gen_call_value (valreg,
38a448ca 445 gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 446 rounded_stack_size_rtx, next_arg_reg,
e992302c 447 NULL_RTX));
51bbfa0c 448 else
38a448ca 449 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 450 rounded_stack_size_rtx, next_arg_reg,
451 struct_value_size_rtx));
452 }
453 else
454#endif
455 abort ();
456
77cac2f2 457 /* Find the CALL insn we just emitted. */
458 for (call_insn = get_last_insn ();
459 call_insn && GET_CODE (call_insn) != CALL_INSN;
460 call_insn = PREV_INSN (call_insn))
461 ;
462
463 if (! call_insn)
464 abort ();
465
466 /* Put the register usage information on the CALL. If there is already
467 some usage information, put ours at the end. */
468 if (CALL_INSN_FUNCTION_USAGE (call_insn))
469 {
470 rtx link;
471
472 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
473 link = XEXP (link, 1))
474 ;
475
476 XEXP (link, 1) = call_fusage;
477 }
478 else
479 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
480
481 /* If this is a const call, then set the insn's unchanging bit. */
482 if (is_const)
483 CONST_CALL_P (call_insn) = 1;
484
485 /* Restore this now, so that we do defer pops for this call's args
486 if the context of the call as a whole permits. */
487 inhibit_defer_pop = old_inhibit_defer_pop;
488
489#ifndef ACCUMULATE_OUTGOING_ARGS
490 /* If returning from the subroutine does not automatically pop the args,
491 we need an instruction to pop them sooner or later.
492 Perhaps do it now; perhaps just record how much space to pop later.
493
494 If returning from the subroutine does pop the args, indicate that the
495 stack pointer will be changed. */
496
fb5eebb9 497 if (n_popped > 0)
498 {
499 if (!already_popped)
e3da301d 500 CALL_INSN_FUNCTION_USAGE (call_insn)
501 = gen_rtx_EXPR_LIST (VOIDmode,
502 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
503 CALL_INSN_FUNCTION_USAGE (call_insn));
fb5eebb9 504 rounded_stack_size -= n_popped;
062e7fd8 505 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
506 }
507
fb5eebb9 508 if (rounded_stack_size != 0)
51bbfa0c 509 {
70a73141 510 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
fb5eebb9 511 pending_stack_adjust += rounded_stack_size;
51bbfa0c 512 else
062e7fd8 513 adjust_stack (rounded_stack_size_rtx);
514 }
515#endif
516}
517
518/* Determine if the function identified by NAME and FNDECL is one with
519 special properties we wish to know about.
520
521 For example, if the function might return more than one time (setjmp), then
522 set RETURNS_TWICE to a nonzero value.
523
524 Similarly set IS_LONGJMP if the function is in the longjmp family.
525
526 Set IS_MALLOC for any of the standard memory allocation functions which
527 allocate from the heap.
528
529 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
530 space from the stack such as alloca. */
531
532static void
533special_function_p (name, fndecl, returns_twice, is_longjmp,
534 is_malloc, may_be_alloca)
535 char *name;
536 tree fndecl;
537 int *returns_twice;
538 int *is_longjmp;
539 int *is_malloc;
540 int *may_be_alloca;
541{
542 *returns_twice = 0;
543 *is_longjmp = 0;
544 *is_malloc = 0;
545 *may_be_alloca = 0;
546
547 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
548 /* Exclude functions not at the file scope, or not `extern',
549 since they are not the magic functions we would otherwise
550 think they are. */
551 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
552 {
553 char *tname = name;
554
555 /* We assume that alloca will always be called by name. It
556 makes no sense to pass it as a pointer-to-function to
557 anything that does not understand its behavior. */
558 *may_be_alloca
559 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
560 && name[0] == 'a'
561 && ! strcmp (name, "alloca"))
562 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
563 && name[0] == '_'
564 && ! strcmp (name, "__builtin_alloca"))));
565
566 /* Disregard prefix _, __ or __x. */
567 if (name[0] == '_')
568 {
569 if (name[1] == '_' && name[2] == 'x')
570 tname += 3;
571 else if (name[1] == '_')
572 tname += 2;
573 else
574 tname += 1;
575 }
576
577 if (tname[0] == 's')
578 {
579 *returns_twice
580 = ((tname[1] == 'e'
581 && (! strcmp (tname, "setjmp")
582 || ! strcmp (tname, "setjmp_syscall")))
583 || (tname[1] == 'i'
584 && ! strcmp (tname, "sigsetjmp"))
585 || (tname[1] == 'a'
586 && ! strcmp (tname, "savectx")));
587 if (tname[1] == 'i'
588 && ! strcmp (tname, "siglongjmp"))
589 *is_longjmp = 1;
590 }
591 else if ((tname[0] == 'q' && tname[1] == 's'
592 && ! strcmp (tname, "qsetjmp"))
593 || (tname[0] == 'v' && tname[1] == 'f'
594 && ! strcmp (tname, "vfork")))
595 *returns_twice = 1;
596
597 else if (tname[0] == 'l' && tname[1] == 'o'
598 && ! strcmp (tname, "longjmp"))
599 *is_longjmp = 1;
600 /* XXX should have "malloc" attribute on functions instead
601 of recognizing them by name. */
602 else if (! strcmp (tname, "malloc")
603 || ! strcmp (tname, "calloc")
604 || ! strcmp (tname, "realloc")
605 /* Note use of NAME rather than TNAME here. These functions
606 are only reserved when preceded with __. */
607 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
608 || ! strcmp (name, "__nw") /* mangled __builtin_new */
609 || ! strcmp (name, "__builtin_new")
610 || ! strcmp (name, "__builtin_vec_new"))
611 *is_malloc = 1;
612 }
613}
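
/* A minimal sketch of how a caller might consume special_function_p; the
   surrounding variables here are hypothetical and not part of this file.  */
#if 0
  int returns_twice, is_longjmp, is_malloc, may_be_alloca;

  special_function_p (name, fndecl, &returns_twice, &is_longjmp,
		      &is_malloc, &may_be_alloca);
  if (returns_twice)
    ;		/* e.g. flush pending stack adjusts, as for setjmp.  */
#endif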
614
615/* Precompute all register parameters as described by ARGS, storing values
616 into fields within the ARGS array.
617
618 NUM_ACTUALS indicates the total number of elements in the ARGS array.
619
620 Set REG_PARM_SEEN if we encounter a register parameter. */
621
622static void
623precompute_register_parameters (num_actuals, args, reg_parm_seen)
624 int num_actuals;
625 struct arg_data *args;
626 int *reg_parm_seen;
627{
628 int i;
629
630 *reg_parm_seen = 0;
631
632 for (i = 0; i < num_actuals; i++)
633 if (args[i].reg != 0 && ! args[i].pass_on_stack)
634 {
635 *reg_parm_seen = 1;
636
637 if (args[i].value == 0)
638 {
639 push_temp_slots ();
640 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
641 VOIDmode, 0);
642 preserve_temp_slots (args[i].value);
643 pop_temp_slots ();
644
645 /* ANSI doesn't require a sequence point here,
646 but PCC has one, so this will avoid some problems. */
647 emit_queue ();
648 }
649
650 /* If we are to promote the function arg to a wider mode,
651 do it now. */
652
653 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
654 args[i].value
655 = convert_modes (args[i].mode,
656 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
657 args[i].value, args[i].unsignedp);
658
659 /* If the value is expensive, and we are inside an appropriately
660 short loop, put the value into a pseudo and then put the pseudo
661 into the hard reg.
662
663 For small register classes, also do this if this call uses
664 register parameters. This is to avoid reload conflicts while
665 loading the parameters registers. */
666
667 if ((! (GET_CODE (args[i].value) == REG
668 || (GET_CODE (args[i].value) == SUBREG
669 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
670 && args[i].mode != BLKmode
671 && rtx_cost (args[i].value, SET) > 2
672 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
673 || preserve_subexpressions_p ()))
674 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
675 }
676}
677
678#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
679
680 /* The argument list is the property of the called routine and it
681 may clobber it. If the fixed area has been used for previous
682 parameters, we must save and restore it. */
683static rtx
684save_fixed_argument_area (reg_parm_stack_space, argblock,
685 low_to_save, high_to_save)
686 int reg_parm_stack_space;
687 rtx argblock;
688 int *low_to_save;
689 int *high_to_save;
690{
691 int i;
692 rtx save_area = NULL_RTX;
693
694 /* Compute the boundary of the area that needs to be saved, if any. */
695#ifdef ARGS_GROW_DOWNWARD
696 for (i = 0; i < reg_parm_stack_space + 1; i++)
697#else
698 for (i = 0; i < reg_parm_stack_space; i++)
699#endif
700 {
701 if (i >= highest_outgoing_arg_in_use
702 || stack_usage_map[i] == 0)
703 continue;
704
705 if (*low_to_save == -1)
706 *low_to_save = i;
707
708 *high_to_save = i;
709 }
710
711 if (*low_to_save >= 0)
712 {
713 int num_to_save = *high_to_save - *low_to_save + 1;
714 enum machine_mode save_mode
715 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
716 rtx stack_area;
717
718 /* If we don't have the required alignment, must do this in BLKmode. */
719 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
720 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
721 save_mode = BLKmode;
722
723#ifdef ARGS_GROW_DOWNWARD
724 stack_area = gen_rtx_MEM (save_mode,
725 memory_address (save_mode,
726 plus_constant (argblock,
727 - *high_to_save)));
728#else
729 stack_area = gen_rtx_MEM (save_mode,
730 memory_address (save_mode,
731 plus_constant (argblock,
732 *low_to_save)));
733#endif
734 if (save_mode == BLKmode)
735 {
736 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
737 emit_block_move (validize_mem (save_area), stack_area,
738 GEN_INT (num_to_save),
739 PARM_BOUNDARY / BITS_PER_UNIT);
740 }
741 else
742 {
743 save_area = gen_reg_rtx (save_mode);
744 emit_move_insn (save_area, stack_area);
745 }
746 }
747 return save_area;
748}
749
750static void
751restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
752 rtx save_area;
753 rtx argblock;
754 int high_to_save;
755 int low_to_save;
756{
757 enum machine_mode save_mode = GET_MODE (save_area);
758#ifdef ARGS_GROW_DOWNWARD
759 rtx stack_area
760 = gen_rtx_MEM (save_mode,
761 memory_address (save_mode,
762 plus_constant (argblock,
763 - high_to_save)));
764#else
765 rtx stack_area
766 = gen_rtx_MEM (save_mode,
767 memory_address (save_mode,
768 plus_constant (argblock,
769 low_to_save)));
770#endif
771
772 if (save_mode != BLKmode)
773 emit_move_insn (stack_area, save_area);
774 else
775 emit_block_move (stack_area, validize_mem (save_area),
776 GEN_INT (high_to_save - low_to_save + 1),
777 PARM_BOUNDARY / BITS_PER_UNIT);
778}
779#endif
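
/* A rough sketch of how the two helpers above pair up around a call that
   may clobber the fixed register-parameter area; the surrounding
   declarations are assumed, not shown.  */
#if 0
  save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					&low_to_save, &high_to_save);
  /* ... emit the call that may reuse that part of the stack ... */
  if (save_area)
    restore_fixed_argument_area (save_area, argblock,
				 high_to_save, low_to_save);
#endif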
780
781/* If any elements in ARGS refer to parameters that are to be passed in
782 registers, but not in memory, and whose alignment does not permit a
783 direct copy into registers, copy the values into a group of pseudos
784 which we will later copy into the appropriate hard registers.
785
786 Pseudos for each unaligned argument will be stored into the array
787 args[argnum].aligned_regs. The caller is responsible for deallocating
788 the aligned_regs array if it is nonzero. */
789
790static void
791store_unaligned_arguments_into_pseudos (args, num_actuals)
792 struct arg_data *args;
793 int num_actuals;
794{
795 int i, j;
796
797 for (i = 0; i < num_actuals; i++)
798 if (args[i].reg != 0 && ! args[i].pass_on_stack
799 && args[i].mode == BLKmode
800 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
801 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
802 {
803 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
804 int big_endian_correction = 0;
805
806 args[i].n_aligned_regs
807 = args[i].partial ? args[i].partial
808 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
809
810 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
811 * args[i].n_aligned_regs);
812
813 /* Structures smaller than a word are aligned to the least
814 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
815 this means we must skip the empty high order bytes when
816 calculating the bit offset. */
817 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
818 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
819
820 for (j = 0; j < args[i].n_aligned_regs; j++)
821 {
822 rtx reg = gen_reg_rtx (word_mode);
823 rtx word = operand_subword_force (args[i].value, j, BLKmode);
824 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
825 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
826
827 args[i].aligned_regs[j] = reg;
828
829 /* There is no need to restrict this code to loading items
830 in TYPE_ALIGN sized hunks. The bitfield instructions can
831 load up entire word sized registers efficiently.
832
833 ??? This may not be needed anymore.
834 We used to emit a clobber here but that doesn't let later
835 passes optimize the instructions we emit. By storing 0 into
836 the register, later passes know that the first AND to zero out the
837 bitfield being set in the register is unnecessary. The store
838 of 0 will be deleted, as will at least the first AND. */
839
840 emit_move_insn (reg, const0_rtx);
841
842 bytes -= bitsize / BITS_PER_UNIT;
843 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
844 extract_bit_field (word, bitsize, 0, 1,
845 NULL_RTX, word_mode,
846 word_mode,
847 bitalign / BITS_PER_UNIT,
848 BITS_PER_WORD),
849 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
850 }
851 }
852}
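
/* Worked example of the big-endian correction above, with illustrative
   numbers: a 3-byte structure on a BYTES_BIG_ENDIAN target with 32-bit
   words gives bytes * BITS_PER_UNIT == 24, so big_endian_correction is
   32 - 24 == 8 and the 24 significant bits are stored at bit offset 8,
   leaving the skipped high-order byte holding the zero stored earlier.  */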
853
854/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
855 ACTPARMS.
856
857 NUM_ACTUALS is the total number of parameters.
858
859 N_NAMED_ARGS is the total number of named arguments.
860
861 FNDECL is the tree node for the target of this call (if known).
862
863 ARGS_SO_FAR holds state needed by the target to know where to place
864 the next argument.
865
866 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
867 for arguments which are passed in registers.
868
869 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
870 and may be modified by this routine.
871
872 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
873 flags which may be modified by this routine. */
874
875static void
876initialize_argument_information (num_actuals, args, args_size, n_named_args,
877 actparms, fndecl, args_so_far,
878 reg_parm_stack_space, old_stack_level,
879 old_pending_adj, must_preallocate, is_const)
91813b28 880 int num_actuals ATTRIBUTE_UNUSED;
881 struct arg_data *args;
882 struct args_size *args_size;
91813b28 883 int n_named_args ATTRIBUTE_UNUSED;
884 tree actparms;
885 tree fndecl;
959f3a06 886 CUMULATIVE_ARGS *args_so_far;
887 int reg_parm_stack_space;
888 rtx *old_stack_level;
889 int *old_pending_adj;
890 int *must_preallocate;
891 int *is_const;
892{
893 /* 1 if scanning parms front to back, -1 if scanning back to front. */
894 int inc;
895
896 /* Count arg position in order args appear. */
897 int argpos;
898
899 int i;
900 tree p;
901
902 args_size->constant = 0;
903 args_size->var = 0;
904
905 /* In this loop, we consider args in the order they are written.
906 We fill up ARGS from the front or from the back if necessary
907 so that in any case the first arg to be pushed ends up at the front. */
908
909#ifdef PUSH_ARGS_REVERSED
910 i = num_actuals - 1, inc = -1;
911 /* In this case, must reverse order of args
912 so that we compute and push the last arg first. */
913#else
914 i = 0, inc = 1;
915#endif
916
917 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
918 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
919 {
920 tree type = TREE_TYPE (TREE_VALUE (p));
921 int unsignedp;
922 enum machine_mode mode;
923
924 args[i].tree_value = TREE_VALUE (p);
925
926 /* Replace erroneous argument with constant zero. */
927 if (type == error_mark_node || TYPE_SIZE (type) == 0)
928 args[i].tree_value = integer_zero_node, type = integer_type_node;
929
930 /* If TYPE is a transparent union, pass things the way we would
931 pass the first field of the union. We have already verified that
932 the modes are the same. */
933 if (TYPE_TRANSPARENT_UNION (type))
934 type = TREE_TYPE (TYPE_FIELDS (type));
935
936 /* Decide where to pass this arg.
937
938 args[i].reg is nonzero if all or part is passed in registers.
939
940 args[i].partial is nonzero if part but not all is passed in registers,
941 and the exact value says how many words are passed in registers.
942
943 args[i].pass_on_stack is nonzero if the argument must at least be
944 computed on the stack. It may then be loaded back into registers
945 if args[i].reg is nonzero.
946
947 These decisions are driven by the FUNCTION_... macros and must agree
948 with those made by function.c. */
949
950 /* See if this argument should be passed by invisible reference. */
951 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
952 && contains_placeholder_p (TYPE_SIZE (type)))
953 || TREE_ADDRESSABLE (type)
954#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
959f3a06 955 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
956 type, argpos < n_named_args)
957#endif
958 )
959 {
960 /* If we're compiling a thunk, pass through invisible
961 references instead of making a copy. */
962 if (current_function_is_thunk
963#ifdef FUNCTION_ARG_CALLEE_COPIES
959f3a06 964 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
965 type, argpos < n_named_args)
966 /* If it's in a register, we must make a copy of it too. */
967 /* ??? Is this a sufficient test? Is there a better one? */
968 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
969 && REG_P (DECL_RTL (args[i].tree_value)))
970 && ! TREE_ADDRESSABLE (type))
971#endif
972 )
973 {
974 /* C++ uses a TARGET_EXPR to indicate that we want to make a
975 new object from the argument. If we are passing by
976 invisible reference, the callee will do that for us, so we
977 can strip off the TARGET_EXPR. This is not always safe,
978 but it is safe in the only case where this is a useful
979 optimization; namely, when the argument is a plain object.
980 In that case, the frontend is just asking the backend to
981 make a bitwise copy of the argument. */
982
983 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
984 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
985 (args[i].tree_value, 1)))
986 == 'd')
987 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
988 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
989
990 args[i].tree_value = build1 (ADDR_EXPR,
991 build_pointer_type (type),
992 args[i].tree_value);
993 type = build_pointer_type (type);
994 }
995 else
996 {
997 /* We make a copy of the object and pass the address to the
998 function being called. */
999 rtx copy;
1000
1001 if (TYPE_SIZE (type) == 0
1002 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1003 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1004 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1005 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1006 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1007 {
1008 /* This is a variable-sized object. Make space on the stack
1009 for it. */
1010 rtx size_rtx = expr_size (TREE_VALUE (p));
1011
1012 if (*old_stack_level == 0)
1013 {
1014 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1015 *old_pending_adj = pending_stack_adjust;
1016 pending_stack_adjust = 0;
1017 }
1018
1019 copy = gen_rtx_MEM (BLKmode,
1020 allocate_dynamic_stack_space (size_rtx,
1021 NULL_RTX,
1022 TYPE_ALIGN (type)));
1023 }
1024 else
1025 {
1026 int size = int_size_in_bytes (type);
1027 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1028 }
1029
1030 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1031
1032 store_expr (args[i].tree_value, copy, 0);
1033 *is_const = 0;
1034
1035 args[i].tree_value = build1 (ADDR_EXPR,
1036 build_pointer_type (type),
1037 make_tree (type, copy));
1038 type = build_pointer_type (type);
1039 }
1040 }
1041
1042 mode = TYPE_MODE (type);
1043 unsignedp = TREE_UNSIGNED (type);
1044
1045#ifdef PROMOTE_FUNCTION_ARGS
1046 mode = promote_mode (type, mode, &unsignedp, 1);
1047#endif
1048
1049 args[i].unsignedp = unsignedp;
1050 args[i].mode = mode;
959f3a06 1051 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1052 argpos < n_named_args);
1053#ifdef FUNCTION_ARG_PARTIAL_NREGS
1054 if (args[i].reg)
1055 args[i].partial
959f3a06 1056 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1057 argpos < n_named_args);
1058#endif
1059
1060 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1061
1062 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1063 it means that we are to pass this arg in the register(s) designated
1064 by the PARALLEL, but also to pass it in the stack. */
1065 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1066 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1067 args[i].pass_on_stack = 1;
1068
1069 /* If this is an addressable type, we must preallocate the stack
1070 since we must evaluate the object into its final location.
1071
1072 If this is to be passed in both registers and the stack, it is simpler
1073 to preallocate. */
1074 if (TREE_ADDRESSABLE (type)
1075 || (args[i].pass_on_stack && args[i].reg != 0))
1076 *must_preallocate = 1;
1077
1078 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1079 we cannot consider this function call constant. */
1080 if (TREE_ADDRESSABLE (type))
1081 *is_const = 0;
1082
1083 /* Compute the stack-size of this argument. */
1084 if (args[i].reg == 0 || args[i].partial != 0
1085 || reg_parm_stack_space > 0
1086 || args[i].pass_on_stack)
1087 locate_and_pad_parm (mode, type,
1088#ifdef STACK_PARMS_IN_REG_PARM_AREA
1089 1,
1090#else
1091 args[i].reg != 0,
1092#endif
1093 fndecl, args_size, &args[i].offset,
1094 &args[i].size);
1095
1096#ifndef ARGS_GROW_DOWNWARD
1097 args[i].slot_offset = *args_size;
1098#endif
1099
1100 /* If a part of the arg was put into registers,
1101 don't include that part in the amount pushed. */
1102 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1103 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1104 / (PARM_BOUNDARY / BITS_PER_UNIT)
1105 * (PARM_BOUNDARY / BITS_PER_UNIT));
1106
1107 /* Update ARGS_SIZE, the total stack space for args so far. */
1108
1109 args_size->constant += args[i].size.constant;
1110 if (args[i].size.var)
1111 {
1112 ADD_PARM_SIZE (*args_size, args[i].size.var);
1113 }
1114
1115 /* Since the slot offset points to the bottom of the slot,
1116 we must record it after incrementing if the args grow down. */
1117#ifdef ARGS_GROW_DOWNWARD
1118 args[i].slot_offset = *args_size;
1119
1120 args[i].slot_offset.constant = -args_size->constant;
1121 if (args_size->var)
1122 {
1123 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1124 }
1125#endif
1126
1127 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1128 have been used, etc. */
1129
959f3a06 1130 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1131 argpos < n_named_args);
1132 }
1133}
1134
1135/* Update ARGS_SIZE to contain the total size for the argument block.
1136 Return the original constant component of the argument block's size.
1137
1138 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1139 for arguments passed in registers. */
1140
1141static int
1142compute_argument_block_size (reg_parm_stack_space, args_size)
1143 int reg_parm_stack_space;
1144 struct args_size *args_size;
1145{
1146 int unadjusted_args_size = args_size->constant;
1147
1148 /* Compute the actual size of the argument block required. The variable
1149 and constant sizes must be combined, the size may have to be rounded,
1150 and there may be a minimum required size. */
1151
1152 if (args_size->var)
1153 {
1154 args_size->var = ARGS_SIZE_TREE (*args_size);
1155 args_size->constant = 0;
1156
1157#ifdef PREFERRED_STACK_BOUNDARY
1158 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1159 args_size->var = round_up (args_size->var, STACK_BYTES);
1160#endif
1161
1162 if (reg_parm_stack_space > 0)
1163 {
1164 args_size->var
1165 = size_binop (MAX_EXPR, args_size->var,
1166 size_int (reg_parm_stack_space));
1167
1168#ifndef OUTGOING_REG_PARM_STACK_SPACE
1169 /* The area corresponding to register parameters is not to count in
1170 the size of the block we need. So make the adjustment. */
1171 args_size->var
1172 = size_binop (MINUS_EXPR, args_size->var,
1173 size_int (reg_parm_stack_space));
1174#endif
1175 }
1176 }
1177 else
1178 {
1179#ifdef PREFERRED_STACK_BOUNDARY
1180 args_size->constant = (((args_size->constant
1181 + pending_stack_adjust
1182 + STACK_BYTES - 1)
1183 / STACK_BYTES * STACK_BYTES)
1184 - pending_stack_adjust);
1185#endif
1186
1187 args_size->constant = MAX (args_size->constant,
1188 reg_parm_stack_space);
1189
1190#ifdef MAYBE_REG_PARM_STACK_SPACE
1191 if (reg_parm_stack_space == 0)
1192 args_size->constant = 0;
1193#endif
1194
1195#ifndef OUTGOING_REG_PARM_STACK_SPACE
1196 args_size->constant -= reg_parm_stack_space;
1197#endif
1198 }
1199 return unadjusted_args_size;
1200}
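
/* Illustrative numbers for the rounding above: with STACK_BYTES == 8,
   args_size->constant == 18 and pending_stack_adjust == 4, the expression
   yields ((18 + 4 + 7) / 8) * 8 - 4 == 20, so the pending adjustment plus
   the new argument block together remain a multiple of the preferred
   stack boundary.  */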
1201
1202/* Precompute parameters as needed for a function call.
1203
1204 IS_CONST indicates the target function is a pure function.
1205
1206 MUST_PREALLOCATE indicates that we must preallocate stack space for
1207 any stack arguments.
1208
1209 NUM_ACTUALS is the number of arguments.
1210
1211 ARGS is an array containing information for each argument; this routine
1212 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1213
1214 ARGS_SIZE contains information about the size of the arg list. */
1215
1216static void
1217precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1218 int is_const;
1219 int must_preallocate;
1220 int num_actuals;
1221 struct arg_data *args;
1222 struct args_size *args_size;
1223{
1224 int i;
1225
1226 /* If this function call is cse'able, precompute all the parameters.
1227 Note that if the parameter is constructed into a temporary, this will
1228 cause an additional copy because the parameter will be constructed
1229 into a temporary location and then copied into the outgoing arguments.
1230 If a parameter contains a call to alloca and this function uses the
1231 stack, precompute the parameter. */
1232
1233 /* If we preallocated the stack space, and some arguments must be passed
1234 on the stack, then we must precompute any parameter which contains a
1235 function call which will store arguments on the stack.
1236 Otherwise, evaluating the parameter may clobber previous parameters
1237 which have already been stored into the stack. */
1238
1239 for (i = 0; i < num_actuals; i++)
1240 if (is_const
1241 || ((args_size->var != 0 || args_size->constant != 0)
1242 && calls_function (args[i].tree_value, 1))
1243 || (must_preallocate
1244 && (args_size->var != 0 || args_size->constant != 0)
1245 && calls_function (args[i].tree_value, 0)))
1246 {
1247 /* If this is an addressable type, we cannot pre-evaluate it. */
1248 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1249 abort ();
1250
1251 push_temp_slots ();
1252
1253 args[i].initial_value = args[i].value
1254 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1255
1256 preserve_temp_slots (args[i].value);
1257 pop_temp_slots ();
1258
1259 /* ANSI doesn't require a sequence point here,
1260 but PCC has one, so this will avoid some problems. */
1261 emit_queue ();
1262
1263 args[i].initial_value = args[i].value
1264 = protect_from_queue (args[i].initial_value, 0);
1265
1266 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1267 args[i].value
1268 = convert_modes (args[i].mode,
1269 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1270 args[i].value, args[i].unsignedp);
1271 }
1272}
1273
1274/* Given the current state of MUST_PREALLOCATE and information about
1275 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1276 compute and return the final value for MUST_PREALLOCATE. */
1277
1278static int
1279finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1280 int must_preallocate;
1281 int num_actuals;
1282 struct arg_data *args;
1283 struct args_size *args_size;
1284{
1285 /* See if we have or want to preallocate stack space.
1286
1287 If we would have to push a partially-in-regs parm
1288 before other stack parms, preallocate stack space instead.
1289
1290 If the size of some parm is not a multiple of the required stack
1291 alignment, we must preallocate.
1292
1293 If the total size of arguments that would otherwise create a copy in
1294 a temporary (such as a CALL) is more than half the total argument list
1295 size, preallocation is faster.
1296
1297 Another reason to preallocate is if we have a machine (like the m88k)
1298 where stack alignment is required to be maintained between every
1299 pair of insns, not just when the call is made. However, we assume here
1300 that such machines either do not have push insns (and hence preallocation
1301 would occur anyway) or the problem is taken care of with
1302 PUSH_ROUNDING. */
1303
1304 if (! must_preallocate)
1305 {
1306 int partial_seen = 0;
1307 int copy_to_evaluate_size = 0;
1308 int i;
1309
1310 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1311 {
1312 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1313 partial_seen = 1;
1314 else if (partial_seen && args[i].reg == 0)
1315 must_preallocate = 1;
1316
1317 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1318 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1319 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1320 || TREE_CODE (args[i].tree_value) == COND_EXPR
1321 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1322 copy_to_evaluate_size
1323 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1324 }
1325
1326 if (copy_to_evaluate_size * 2 >= args_size->constant
1327 && args_size->constant > 0)
1328 must_preallocate = 1;
1329 }
1330 return must_preallocate;
1331}
599f37b6 1332
1333/* If we preallocated stack space, compute the address of each argument
1334 and store it into the ARGS array.
1335
1336 We need not ensure it is a valid memory address here; it will be
1337 validized when it is used.
1338
1339 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1340
1341static void
1342compute_argument_addresses (args, argblock, num_actuals)
1343 struct arg_data *args;
1344 rtx argblock;
1345 int num_actuals;
1346{
1347 if (argblock)
1348 {
1349 rtx arg_reg = argblock;
1350 int i, arg_offset = 0;
1351
1352 if (GET_CODE (argblock) == PLUS)
1353 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1354
1355 for (i = 0; i < num_actuals; i++)
1356 {
1357 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1358 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1359 rtx addr;
1360
1361 /* Skip this parm if it will not be passed on the stack. */
1362 if (! args[i].pass_on_stack && args[i].reg != 0)
1363 continue;
1364
1365 if (GET_CODE (offset) == CONST_INT)
1366 addr = plus_constant (arg_reg, INTVAL (offset));
1367 else
1368 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1369
1370 addr = plus_constant (addr, arg_offset);
1371 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1372 MEM_SET_IN_STRUCT_P
1373 (args[i].stack,
1374 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1375
1376 if (GET_CODE (slot_offset) == CONST_INT)
1377 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1378 else
1379 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1380
1381 addr = plus_constant (addr, arg_offset);
1382 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1383 }
1384 }
1385}
1386
1387/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1388 in a call instruction.
1389
1390 FNDECL is the tree node for the target function. For an indirect call
1391 FNDECL will be NULL_TREE.
1392
1393 EXP is the CALL_EXPR for this call. */
1394
1395static rtx
1396rtx_for_function_call (fndecl, exp)
1397 tree fndecl;
1398 tree exp;
1399{
1400 rtx funexp;
1401
1402 /* Get the function to call, in the form of RTL. */
1403 if (fndecl)
1404 {
1405 /* If this is the first use of the function, see if we need to
1406 make an external definition for it. */
1407 if (! TREE_USED (fndecl))
1408 {
1409 assemble_external (fndecl);
1410 TREE_USED (fndecl) = 1;
1411 }
1412
1413 /* Get a SYMBOL_REF rtx for the function address. */
1414 funexp = XEXP (DECL_RTL (fndecl), 0);
1415 }
1416 else
1417 /* Generate an rtx (probably a pseudo-register) for the address. */
1418 {
91ab1046 1419 rtx funaddr;
a45bdd02 1420 push_temp_slots ();
1421 funaddr = funexp =
1422 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1423 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1424
1425 /* Check the function is executable. */
1426 if (current_function_check_memory_usage)
1427 {
1428#ifdef POINTERS_EXTEND_UNSIGNED
1429 /* It might be OK to convert funexp in place, but there's
1430 a lot going on between here and when it happens naturally
1431 that this seems safer. */
1432 funaddr = convert_memory_address (Pmode, funexp);
1433#endif
1434 emit_library_call (chkr_check_exec_libfunc, 1,
1435 VOIDmode, 1,
1436 funaddr, Pmode);
1437 }
1438 emit_queue ();
1439 }
1440 return funexp;
1441}
1442
1443/* Do the register loads required for any wholly-register parms or any
1444 parms which are passed both on the stack and in a register. Their
1445 expressions were already evaluated.
1446
1447 Mark all register-parms as living through the call, putting these USE
1448 insns in the CALL_INSN_FUNCTION_USAGE field. */
1449
1450static void
1451load_register_parameters (args, num_actuals, call_fusage)
1452 struct arg_data *args;
1453 int num_actuals;
1454 rtx *call_fusage;
1455{
1456 int i, j;
1457
1458#ifdef LOAD_ARGS_REVERSED
1459 for (i = num_actuals - 1; i >= 0; i--)
1460#else
1461 for (i = 0; i < num_actuals; i++)
1462#endif
1463 {
1464 rtx reg = args[i].reg;
1465 int partial = args[i].partial;
1466 int nregs;
1467
1468 if (reg)
1469 {
1470 /* Set to non-negative if must move a word at a time, even if just
1471 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1472 we just use a normal move insn. This value can be zero if the
1473 argument is a zero size structure with no fields. */
1474 nregs = (partial ? partial
1475 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1476 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1477 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1478 : -1));
1479
1480 /* Handle calls that pass values in multiple non-contiguous
1481 locations. The Irix 6 ABI has examples of this. */
1482
1483 if (GET_CODE (reg) == PARALLEL)
1484 {
1485 emit_group_load (reg, args[i].value,
1486 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1487 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1488 / BITS_PER_UNIT));
1489 }
1490
1491 /* If simple case, just do move. If normal partial, store_one_arg
1492 has already loaded the register for us. In all other cases,
1493 load the register(s) from memory. */
1494
1495 else if (nregs == -1)
1496 emit_move_insn (reg, args[i].value);
1497
1498 /* If we have pre-computed the values to put in the registers in
1499 the case of non-aligned structures, copy them in now. */
1500
1501 else if (args[i].n_aligned_regs != 0)
1502 for (j = 0; j < args[i].n_aligned_regs; j++)
1503 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1504 args[i].aligned_regs[j]);
1505
1506 else if (partial == 0 || args[i].pass_on_stack)
1507 move_block_to_reg (REGNO (reg),
1508 validize_mem (args[i].value), nregs,
1509 args[i].mode);
1510
1511 /* Handle calls that pass values in multiple non-contiguous
1512 locations. The Irix 6 ABI has examples of this. */
1513 if (GET_CODE (reg) == PARALLEL)
1514 use_group_regs (call_fusage, reg);
1515 else if (nregs == -1)
1516 use_reg (call_fusage, reg);
1517 else
1518 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1519 }
1520 }
1521}
1522
1523/* Generate all the code for a function call
1524 and return an rtx for its value.
1525 Store the value in TARGET (specified as an rtx) if convenient.
1526 If the value is stored in TARGET then TARGET is returned.
1527 If IGNORE is nonzero, then we ignore the value of the function call. */
1528
1529rtx
8129842c 1530expand_call (exp, target, ignore)
1531 tree exp;
1532 rtx target;
1533 int ignore;
1534{
1535 /* List of actual parameters. */
1536 tree actparms = TREE_OPERAND (exp, 1);
1537 /* RTX for the function to be called. */
1538 rtx funexp;
1539 /* Data type of the function. */
1540 tree funtype;
1541 /* Declaration of the function being called,
1542 or 0 if the function is computed (not known by name). */
1543 tree fndecl = 0;
1544 char *name = 0;
1545
1546 /* Register in which non-BLKmode value will be returned,
1547 or 0 if no value or if value is BLKmode. */
1548 rtx valreg;
1549 /* Address where we should return a BLKmode value;
1550 0 if value not BLKmode. */
1551 rtx structure_value_addr = 0;
1552 /* Nonzero if that address is being passed by treating it as
1553 an extra, implicit first parameter. Otherwise,
1554 it is passed by being copied directly into struct_value_rtx. */
1555 int structure_value_addr_parm = 0;
1556 /* Size of aggregate value wanted, or zero if none wanted
1557 or if we are using the non-reentrant PCC calling convention
1558 or expecting the value in registers. */
e5e809f4 1559 HOST_WIDE_INT struct_value_size = 0;
1560 /* Nonzero if called function returns an aggregate in memory PCC style,
1561 by returning the address of where to find it. */
1562 int pcc_struct_value = 0;
1563
1564 /* Number of actual parameters in this call, including struct value addr. */
1565 int num_actuals;
1566 /* Number of named args. Args after this are anonymous ones
1567 and they must all go on the stack. */
1568 int n_named_args;
1569
1570 /* Vector of information about each argument.
1571 Arguments are numbered in the order they will be pushed,
1572 not the order they are written. */
1573 struct arg_data *args;
1574
1575 /* Total size in bytes of all the stack-parms scanned so far. */
1576 struct args_size args_size;
1577 /* Size of arguments before any adjustments (such as rounding). */
599f37b6 1578 int unadjusted_args_size;
1579 /* Data on reg parms scanned so far. */
1580 CUMULATIVE_ARGS args_so_far;
1581 /* Nonzero if a reg parm has been scanned. */
1582 int reg_parm_seen;
efd65a8b 1583 /* Nonzero if this is an indirect function call. */
1584
1585 /* Nonzero if we must avoid push-insns in the args for this call.
1586 If stack space is allocated for register parameters, but not by the
1587 caller, then it is preallocated in the fixed part of the stack frame.
1588 So the entire argument block must then be preallocated (i.e., we
1589 ignore PUSH_ROUNDING in that case). */
1590
1591#ifdef PUSH_ROUNDING
1592 int must_preallocate = 0;
1593#else
1594 int must_preallocate = 1;
1595#endif
1596
f72aed24 1597 /* Size of the stack reserved for parameter registers. */
1598 int reg_parm_stack_space = 0;
1599
1600 /* Address of space preallocated for stack parms
1601 (on machines that lack push insns), or 0 if space not preallocated. */
1602 rtx argblock = 0;
1603
1604 /* Nonzero if it is plausible that this is a call to alloca. */
1605 int may_be_alloca;
1606 /* Nonzero if this is a call to malloc or a related function. */
1607 int is_malloc;
1608 /* Nonzero if this is a call to setjmp or a related function. */
1609 int returns_twice;
1610 /* Nonzero if this is a call to `longjmp'. */
1611 int is_longjmp;
1612 /* Nonzero if this is a call to an inline function. */
1613 int is_integrable = 0;
1614 /* Nonzero if this is a call to a `const' function.
1615 Note that only explicitly named functions are handled as `const' here. */
1616 int is_const = 0;
1617 /* Nonzero if this is a call to a `volatile' function. */
1618 int is_volatile = 0;
1619#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1620 /* Define the boundary of the register parm stack space that needs to be
 1621 saved, if any. */
1622 int low_to_save = -1, high_to_save;
1623 rtx save_area = 0; /* Place that it is saved */
1624#endif
1625
1626#ifdef ACCUMULATE_OUTGOING_ARGS
1627 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1628 char *initial_stack_usage_map = stack_usage_map;
69d4ca36 1629 int old_stack_arg_under_construction;
51bbfa0c
RS
1630#endif
1631
1632 rtx old_stack_level = 0;
79be3418 1633 int old_pending_adj = 0;
51bbfa0c 1634 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 1635 rtx call_fusage = 0;
51bbfa0c 1636 register tree p;
21a3b983 1637 register int i;
51bbfa0c 1638
7815214e
RK
1639 /* The value of the function call can be put in a hard register. But
1640 if -fcheck-memory-usage, code which invokes functions (and thus
1641 damages some hard registers) can be inserted before using the value.
1642 So, target is always a pseudo-register in that case. */
7d384cc0 1643 if (current_function_check_memory_usage)
7815214e
RK
1644 target = 0;
1645
51bbfa0c
RS
1646 /* See if we can find a DECL-node for the actual function.
1647 As a result, decide whether this is a call to an integrable function. */
1648
1649 p = TREE_OPERAND (exp, 0);
1650 if (TREE_CODE (p) == ADDR_EXPR)
1651 {
1652 fndecl = TREE_OPERAND (p, 0);
1653 if (TREE_CODE (fndecl) != FUNCTION_DECL)
fdff8c6d 1654 fndecl = 0;
51bbfa0c
RS
1655 else
1656 {
1657 if (!flag_no_inline
1658 && fndecl != current_function_decl
aa10adff 1659 && DECL_INLINE (fndecl)
1cf4f698 1660 && DECL_SAVED_INSNS (fndecl)
49ad7cfa 1661 && DECL_SAVED_INSNS (fndecl)->inlinable)
51bbfa0c
RS
1662 is_integrable = 1;
1663 else if (! TREE_ADDRESSABLE (fndecl))
1664 {
13d39dbc 1665 /* In case this function later becomes inlinable,
51bbfa0c
RS
1666 record that there was already a non-inline call to it.
1667
1668 Use abstraction instead of setting TREE_ADDRESSABLE
1669 directly. */
da8c1713
RK
1670 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1671 && optimize > 0)
1907795e
JM
1672 {
1673 warning_with_decl (fndecl, "can't inline call to `%s'");
1674 warning ("called from here");
1675 }
51bbfa0c
RS
1676 mark_addressable (fndecl);
1677 }
1678
d45cf215
RS
1679 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1680 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
51bbfa0c 1681 is_const = 1;
5e24110e
RS
1682
1683 if (TREE_THIS_VOLATILE (fndecl))
1684 is_volatile = 1;
51bbfa0c
RS
1685 }
1686 }
1687
fdff8c6d
RK
1688 /* If we don't have specific function to call, see if we have a
1689 constant or `noreturn' function from the type. */
1690 if (fndecl == 0)
1691 {
1692 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1693 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1694 }
1695
6f90e075
JW
1696#ifdef REG_PARM_STACK_SPACE
1697#ifdef MAYBE_REG_PARM_STACK_SPACE
1698 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1699#else
1700 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1701#endif
1702#endif
1703
e5e809f4
JL
1704#if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1705 if (reg_parm_stack_space > 0)
1706 must_preallocate = 1;
1707#endif
1708
51bbfa0c
RS
1709 /* Warn if this value is an aggregate type,
1710 regardless of which calling convention we are using for it. */
05e3bdb9 1711 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
51bbfa0c
RS
1712 warning ("function call has aggregate value");
1713
1714 /* Set up a place to return a structure. */
1715
1716 /* Cater to broken compilers. */
1717 if (aggregate_value_p (exp))
1718 {
1719 /* This call returns a big structure. */
1720 is_const = 0;
1721
1722#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a
RS
1723 {
1724 pcc_struct_value = 1;
0dd532dc
JW
1725 /* Easier than making that case work right. */
1726 if (is_integrable)
1727 {
1728 /* In case this is a static function, note that it has been
1729 used. */
1730 if (! TREE_ADDRESSABLE (fndecl))
1731 mark_addressable (fndecl);
1732 is_integrable = 0;
1733 }
9e7b1d0a
RS
1734 }
1735#else /* not PCC_STATIC_STRUCT_RETURN */
1736 {
1737 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
51bbfa0c 1738
9e7b1d0a
RS
1739 if (target && GET_CODE (target) == MEM)
1740 structure_value_addr = XEXP (target, 0);
1741 else
1742 {
e9a25f70
JL
1743 /* Assign a temporary to hold the value. */
1744 tree d;
51bbfa0c 1745
9e7b1d0a
RS
1746 /* For variable-sized objects, we must be called with a target
1747 specified. If we were to allocate space on the stack here,
1748 we would have no way of knowing when to free it. */
51bbfa0c 1749
002bdd6c
RK
1750 if (struct_value_size < 0)
1751 abort ();
1752
e9a25f70
JL
1753 /* This DECL is just something to feed to mark_addressable;
1754 it doesn't get pushed. */
1755 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1756 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1757 mark_addressable (d);
1758 structure_value_addr = XEXP (DECL_RTL (d), 0);
e5e809f4 1759 TREE_USED (d) = 1;
9e7b1d0a
RS
1760 target = 0;
1761 }
1762 }
1763#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
1764 }
1765
1766 /* If called function is inline, try to integrate it. */
1767
1768 if (is_integrable)
1769 {
1770 rtx temp;
69d4ca36 1771#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534 1772 rtx before_call = get_last_insn ();
69d4ca36 1773#endif
51bbfa0c
RS
1774
1775 temp = expand_inline_function (fndecl, actparms, target,
1776 ignore, TREE_TYPE (exp),
1777 structure_value_addr);
1778
1779 /* If inlining succeeded, return. */
2e0dd623 1780 if (temp != (rtx) (HOST_WIDE_INT) -1)
51bbfa0c 1781 {
d64f5a78 1782#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
1783 /* If the outgoing argument list must be preserved, push
1784 the stack before executing the inlined function if it
1785 makes any calls. */
1786
1787 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1788 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1789 break;
1790
1791 if (stack_arg_under_construction || i >= 0)
1792 {
a1917650
RK
1793 rtx first_insn
1794 = before_call ? NEXT_INSN (before_call) : get_insns ();
6a651371 1795 rtx insn = NULL_RTX, seq;
2f4aa534 1796
d64f5a78 1797 /* Look for a call in the inline function code.
49ad7cfa 1798 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
d64f5a78
RS
1799 nonzero then there is a call and it is not necessary
1800 to scan the insns. */
1801
49ad7cfa 1802 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
a1917650 1803 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
d64f5a78
RS
1804 if (GET_CODE (insn) == CALL_INSN)
1805 break;
2f4aa534
RS
1806
1807 if (insn)
1808 {
d64f5a78
RS
1809 /* Reserve enough stack space so that the largest
1810 argument list of any function call in the inline
1811 function does not overlap the argument list being
1812 evaluated. This is usually an overestimate because
1813 allocate_dynamic_stack_space reserves space for an
1814 outgoing argument list in addition to the requested
1815 space, but there is no way to ask for stack space such
1816 that an argument list of a certain length can be
e5e809f4 1817 safely constructed.
d64f5a78 1818
e5e809f4
JL
1819 Add the stack space reserved for register arguments, if
1820 any, in the inline function. What is really needed is the
d64f5a78
RS
1821 largest value of reg_parm_stack_space in the inline
1822 function, but that is not available. Using the current
1823 value of reg_parm_stack_space is wrong, but gives
1824 correct results on all supported machines. */
e5e809f4 1825
49ad7cfa 1826 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
e5e809f4
JL
1827 + reg_parm_stack_space);
1828
2f4aa534 1829 start_sequence ();
ccf5d244 1830 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
e5d70561
RK
1831 allocate_dynamic_stack_space (GEN_INT (adjust),
1832 NULL_RTX, BITS_PER_UNIT);
2f4aa534
RS
1833 seq = get_insns ();
1834 end_sequence ();
a1917650 1835 emit_insns_before (seq, first_insn);
e5d70561 1836 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2f4aa534
RS
1837 }
1838 }
d64f5a78 1839#endif
51bbfa0c
RS
1840
1841 /* If the result is equivalent to TARGET, return TARGET to simplify
1842 checks in store_expr. They can be equivalent but not equal in the
1843 case of a function that returns BLKmode. */
1844 if (temp != target && rtx_equal_p (temp, target))
1845 return target;
1846 return temp;
1847 }
1848
1849 /* If inlining failed, mark FNDECL as needing to be compiled
0481a55e
RK
1850 separately after all. If function was declared inline,
1851 give a warning. */
1852 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
da8c1713 1853 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1907795e
JM
1854 {
1855 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1856 warning ("called from here");
1857 }
51bbfa0c
RS
1858 mark_addressable (fndecl);
1859 }
1860
51bbfa0c
RS
1861 function_call_count++;
1862
1863 if (fndecl && DECL_NAME (fndecl))
1864 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1865
51bbfa0c 1866 /* See if this is a call to a function that can return more than once
20efdf74
JL
1867 or a call to longjmp or malloc. */
1868 special_function_p (name, fndecl, &returns_twice, &is_longjmp,
1869 &is_malloc, &may_be_alloca);
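 /* Illustrative note: for a call spelled `setjmp (buf)', special_function_p
    sets returns_twice, which later emits a NOTE_INSN_SETJMP and sets
    current_function_calls_setjmp; a call spelled `alloca (n)' sets
    may_be_alloca instead and is recorded in current_function_calls_alloca
    just below. */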
51bbfa0c 1870
51bbfa0c
RS
1871 if (may_be_alloca)
1872 current_function_calls_alloca = 1;
1873
39842893
JL
1874 /* Operand 0 is a pointer-to-function; get the type of the function. */
1875 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1876 if (! POINTER_TYPE_P (funtype))
1877 abort ();
1878 funtype = TREE_TYPE (funtype);
1879
1880 /* When calling a const function, we must pop the stack args right away,
1881 so that the pop is deleted or moved with the call. */
1882 if (is_const)
1883 NO_DEFER_POP;
1884
51bbfa0c
RS
1885 /* Don't let pending stack adjusts add up to too much.
1886 Also, do all pending adjustments now
1887 if there is any chance this might be a call to alloca. */
1888
1889 if (pending_stack_adjust >= 32
1890 || (pending_stack_adjust > 0 && may_be_alloca))
1891 do_pending_stack_adjust ();
1892
cc79451b
RK
1893 /* Push the temporary stack slot level so that we can free any temporaries
1894 we make. */
51bbfa0c
RS
1895 push_temp_slots ();
1896
eecb6f50
JL
1897 /* Start updating where the next arg would go.
1898
1899 On some machines (such as the PA) indirect calls have a different
1900 calling convention than normal calls. The last argument in
1901 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1902 or not. */
1903 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
51bbfa0c
RS
1904
1905 /* If struct_value_rtx is 0, it means pass the address
1906 as if it were an extra parameter. */
1907 if (structure_value_addr && struct_value_rtx == 0)
1908 {
5582b006
RK
1909 /* If structure_value_addr is a REG other than
 1910 virtual_outgoing_args_rtx, we can always use it. If it
1911 is not a REG, we must always copy it into a register.
1912 If it is virtual_outgoing_args_rtx, we must copy it to another
1913 register in some cases. */
1914 rtx temp = (GET_CODE (structure_value_addr) != REG
d64f5a78 1915#ifdef ACCUMULATE_OUTGOING_ARGS
5582b006
RK
1916 || (stack_arg_under_construction
1917 && structure_value_addr == virtual_outgoing_args_rtx)
d64f5a78 1918#endif
5582b006
RK
1919 ? copy_addr_to_reg (structure_value_addr)
1920 : structure_value_addr);
d64f5a78 1921
51bbfa0c
RS
1922 actparms
1923 = tree_cons (error_mark_node,
1924 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2f4aa534 1925 temp),
51bbfa0c
RS
1926 actparms);
1927 structure_value_addr_parm = 1;
1928 }
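 /* For illustration: for a callee like `struct big f (void)' on a target where
    struct_value_rtx is 0, the address of the return slot has just been added
    to ACTPARMS, so the call is expanded as if it were `f (&slot)' with one
    extra, implicit first argument. */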
1929
1930 /* Count the arguments and set NUM_ACTUALS. */
1931 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1932 num_actuals = i;
1933
1934 /* Compute number of named args.
1935 Normally, don't include the last named arg if anonymous args follow.
e5e809f4 1936 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
469225d8
JW
1937 (If no anonymous args follow, the result of list_length is actually
1938 one too large. This is harmless.)
51bbfa0c 1939
9ab70a9b
R
1940 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1941 zero, this machine will be able to place unnamed args that were passed in
469225d8
JW
1942 registers into the stack. So treat all args as named. This allows the
 1943 insns emitted for a specific argument list to be independent of the
1944 function declaration.
51bbfa0c 1945
9ab70a9b 1946 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
51bbfa0c
RS
1947 way to pass unnamed args in registers, so we must force them into
1948 memory. */
e5e809f4
JL
1949
1950 if ((STRICT_ARGUMENT_NAMING
9ab70a9b 1951 || ! PRETEND_OUTGOING_VARARGS_NAMED)
e5e809f4 1952 && TYPE_ARG_TYPES (funtype) != 0)
51bbfa0c 1953 n_named_args
0ee902cb 1954 = (list_length (TYPE_ARG_TYPES (funtype))
0ee902cb 1955 /* Don't include the last named arg. */
d0f9021a 1956 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
0ee902cb
RM
1957 /* Count the struct value address, if it is passed as a parm. */
1958 + structure_value_addr_parm);
51bbfa0c 1959 else
51bbfa0c
RS
1960 /* If we know nothing, treat all args as named. */
1961 n_named_args = num_actuals;
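 /* Worked example (illustrative): for a prototyped varargs callee such as
    `int f (int a, ...)', TYPE_ARG_TYPES lists only the named parameter, so
    list_length returns 1; with STRICT_ARGUMENT_NAMING zero the last named arg
    is then dropped, giving n_named_args == 0, while num_actuals still counts
    every actual argument of the call. */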
1962
1963 /* Make a vector to hold all the information about each arg. */
1964 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
4c9a05bc 1965 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
51bbfa0c 1966
d7cdf113
JL
 1967 /* Build up entries in the ARGS array, compute the size of the arguments
1968 into ARGS_SIZE, etc. */
1969 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
959f3a06 1970 actparms, fndecl, &args_so_far,
d7cdf113
JL
1971 reg_parm_stack_space, &old_stack_level,
1972 &old_pending_adj, &must_preallocate,
1973 &is_const);
51bbfa0c 1974
6f90e075
JW
1975#ifdef FINAL_REG_PARM_STACK_SPACE
1976 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1977 args_size.var);
1978#endif
1979
51bbfa0c
RS
1980 if (args_size.var)
1981 {
1982 /* If this function requires a variable-sized argument list, don't try to
1983 make a cse'able block for this call. We may be able to do this
1984 eventually, but it is too complicated to keep track of what insns go
1985 in the cse'able block and which don't. */
1986
1987 is_const = 0;
1988 must_preallocate = 1;
51bbfa0c 1989 }
e5e809f4 1990
599f37b6
JL
1991 /* Compute the actual size of the argument block required. The variable
1992 and constant sizes must be combined, the size may have to be rounded,
1993 and there may be a minimum required size. */
1994 unadjusted_args_size
1995 = compute_argument_block_size (reg_parm_stack_space, &args_size);
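 /* Illustrative numbers: with PREFERRED_STACK_BOUNDARY == 64, STACK_BYTES is 8,
    so 20 bytes of constant-sized arguments leave unadjusted_args_size at 20
    while args_size.constant is rounded up to 24; the 4-byte difference is the
    alignment padding applied by anti_adjust_stack further down. */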
51bbfa0c 1996
0f9b3ea6
JL
1997 /* Now make final decision about preallocating stack space. */
1998 must_preallocate = finalize_must_preallocate (must_preallocate,
1999 num_actuals, args, &args_size);
51bbfa0c
RS
2000
2001 /* If the structure value address will reference the stack pointer, we must
2002 stabilize it. We don't need to do this if we know that we are not going
2003 to adjust the stack pointer in processing this call. */
2004
2005 if (structure_value_addr
2006 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2007 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2008 && (args_size.var
2009#ifndef ACCUMULATE_OUTGOING_ARGS
2010 || args_size.constant
2011#endif
2012 ))
2013 structure_value_addr = copy_to_reg (structure_value_addr);
2014
cc0b1adc
JL
2015 /* Precompute any arguments as needed. */
2016 precompute_arguments (is_const, must_preallocate, num_actuals,
2017 args, &args_size);
51bbfa0c
RS
2018
2019 /* Now we are about to start emitting insns that can be deleted
2020 if a libcall is deleted. */
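 /* (For const calls the sequence opened just below is eventually handed to
    emit_libcall_block, which brackets it with REG_LIBCALL/REG_RETVAL notes so
    the whole call can be deleted as a unit when its value is unused.) */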
9ae8ffe7 2021 if (is_const || is_malloc)
51bbfa0c
RS
2022 start_sequence ();
2023
2024 /* If we have no actual push instructions, or shouldn't use them,
2025 make space for all args right now. */
2026
2027 if (args_size.var != 0)
2028 {
2029 if (old_stack_level == 0)
2030 {
e5d70561 2031 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
51bbfa0c
RS
2032 old_pending_adj = pending_stack_adjust;
2033 pending_stack_adjust = 0;
d64f5a78 2034#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2035 /* stack_arg_under_construction says whether a stack arg is
2036 being constructed at the old stack level. Pushing the stack
2037 gets a clean outgoing argument block. */
2038 old_stack_arg_under_construction = stack_arg_under_construction;
2039 stack_arg_under_construction = 0;
d64f5a78 2040#endif
51bbfa0c
RS
2041 }
2042 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2043 }
26a258fe 2044 else
51bbfa0c
RS
2045 {
2046 /* Note that we must go through the motions of allocating an argument
2047 block even if the size is zero because we may be storing args
2048 in the area reserved for register arguments, which may be part of
2049 the stack frame. */
26a258fe 2050
51bbfa0c
RS
2051 int needed = args_size.constant;
2052
0f41302f
MS
2053 /* Store the maximum argument space used. It will be pushed by
2054 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2055 checking). */
51bbfa0c
RS
2056
2057 if (needed > current_function_outgoing_args_size)
2058 current_function_outgoing_args_size = needed;
2059
26a258fe
PB
2060 if (must_preallocate)
2061 {
2062#ifdef ACCUMULATE_OUTGOING_ARGS
2063 /* Since the stack pointer will never be pushed, it is possible for
2064 the evaluation of a parm to clobber something we have already
2065 written to the stack. Since most function calls on RISC machines
2066 do not use the stack, this is uncommon, but must work correctly.
2067
2068 Therefore, we save any area of the stack that was already written
2069 and that we are using. Here we set up to do this by making a new
2070 stack usage map from the old one. The actual save will be done
2071 by store_one_arg.
2072
2073 Another approach might be to try to reorder the argument
2074 evaluations to avoid this conflicting stack usage. */
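 /* Concrete case (illustrative): in f (x, g (y)), the slot for X may already
    be written when g (y) is expanded; the nested call reuses the same
    preallocated area, so the bytes marked in stack_usage_map are saved by
    store_one_arg before being overwritten and restored afterwards. */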
2075
e5e809f4 2076#ifndef OUTGOING_REG_PARM_STACK_SPACE
26a258fe
PB
2077 /* Since we will be writing into the entire argument area, the
2078 map must be allocated for its entire size, not just the part that
2079 is the responsibility of the caller. */
2080 needed += reg_parm_stack_space;
51bbfa0c
RS
2081#endif
2082
2083#ifdef ARGS_GROW_DOWNWARD
26a258fe
PB
2084 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2085 needed + 1);
51bbfa0c 2086#else
26a258fe
PB
2087 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2088 needed);
51bbfa0c 2089#endif
26a258fe 2090 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
51bbfa0c 2091
26a258fe
PB
2092 if (initial_highest_arg_in_use)
2093 bcopy (initial_stack_usage_map, stack_usage_map,
2094 initial_highest_arg_in_use);
51bbfa0c 2095
26a258fe
PB
2096 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2097 bzero (&stack_usage_map[initial_highest_arg_in_use],
2098 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2099 needed = 0;
2f4aa534 2100
26a258fe
PB
2101 /* The address of the outgoing argument list must not be copied to a
2102 register here, because argblock would be left pointing to the
2103 wrong place after the call to allocate_dynamic_stack_space below.
2104 */
2f4aa534 2105
26a258fe 2106 argblock = virtual_outgoing_args_rtx;
2f4aa534 2107
51bbfa0c 2108#else /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 2109 if (inhibit_defer_pop == 0)
51bbfa0c 2110 {
26a258fe
PB
2111 /* Try to reuse some or all of the pending_stack_adjust
2112 to get this space. Maybe we can avoid any pushing. */
2113 if (needed > pending_stack_adjust)
2114 {
2115 needed -= pending_stack_adjust;
2116 pending_stack_adjust = 0;
2117 }
2118 else
2119 {
2120 pending_stack_adjust -= needed;
2121 needed = 0;
2122 }
51bbfa0c 2123 }
26a258fe
PB
2124 /* Special case this because overhead of `push_block' in this
2125 case is non-trivial. */
2126 if (needed == 0)
2127 argblock = virtual_outgoing_args_rtx;
51bbfa0c 2128 else
26a258fe
PB
2129 argblock = push_block (GEN_INT (needed), 0, 0);
2130
2131 /* We only really need to call `copy_to_reg' in the case where push
2132 insns are going to be used to pass ARGBLOCK to a function
2133 call in ARGS. In that case, the stack pointer changes value
2134 from the allocation point to the call point, and hence
2135 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2136 But might as well always do it. */
2137 argblock = copy_to_reg (argblock);
51bbfa0c 2138#endif /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 2139 }
51bbfa0c
RS
2140 }
2141
bfbf933a
RS
2142#ifdef ACCUMULATE_OUTGOING_ARGS
2143 /* The save/restore code in store_one_arg handles all cases except one:
2144 a constructor call (including a C function returning a BLKmode struct)
2145 to initialize an argument. */
2146 if (stack_arg_under_construction)
2147 {
e5e809f4 2148#ifndef OUTGOING_REG_PARM_STACK_SPACE
e5d70561 2149 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
bfbf933a 2150#else
e5d70561 2151 rtx push_size = GEN_INT (args_size.constant);
bfbf933a
RS
2152#endif
2153 if (old_stack_level == 0)
2154 {
e5d70561 2155 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bfbf933a
RS
2156 old_pending_adj = pending_stack_adjust;
2157 pending_stack_adjust = 0;
2158 /* stack_arg_under_construction says whether a stack arg is
2159 being constructed at the old stack level. Pushing the stack
2160 gets a clean outgoing argument block. */
2161 old_stack_arg_under_construction = stack_arg_under_construction;
2162 stack_arg_under_construction = 0;
2163 /* Make a new map for the new argument list. */
2164 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2165 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2166 highest_outgoing_arg_in_use = 0;
2167 }
e5d70561 2168 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
bfbf933a
RS
2169 }
2170 /* If argument evaluation might modify the stack pointer, copy the
2171 address of the argument list to a register. */
2172 for (i = 0; i < num_actuals; i++)
2173 if (args[i].pass_on_stack)
2174 {
2175 argblock = copy_addr_to_reg (argblock);
2176 break;
2177 }
2178#endif
2179
a45bdd02 2180 compute_argument_addresses (args, argblock, num_actuals);
bfbf933a 2181
51bbfa0c 2182#ifdef PUSH_ARGS_REVERSED
c795bca9 2183#ifdef PREFERRED_STACK_BOUNDARY
51bbfa0c
RS
2184 /* If we push args individually in reverse order, perform stack alignment
2185 before the first push (the last arg). */
2186 if (argblock == 0)
599f37b6 2187 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
51bbfa0c
RS
2188#endif
2189#endif
2190
2191 /* Don't try to defer pops if preallocating, not even from the first arg,
2192 since ARGBLOCK probably refers to the SP. */
2193 if (argblock)
2194 NO_DEFER_POP;
2195
a45bdd02 2196 funexp = rtx_for_function_call (fndecl, exp);
51bbfa0c
RS
2197
2198 /* Figure out the register where the value, if any, will come back. */
2199 valreg = 0;
2200 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2201 && ! structure_value_addr)
2202 {
2203 if (pcc_struct_value)
2204 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2205 fndecl);
2206 else
2207 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
2208 }
2209
2210 /* Precompute all register parameters. It isn't safe to compute anything
0f41302f 2211 once we have started filling any specific hard regs. */
20efdf74 2212 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
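 /* Example of the hazard (illustrative): in f (x, g (y)) where X goes in a
    register, loading that hard register before expanding g (y) would let the
    nested call clobber it; precomputing such register parameters into pseudos
    first avoids the problem. */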
51bbfa0c
RS
2213
2214#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
e5e809f4 2215
20efdf74
JL
2216 /* Save the fixed argument area if it's part of the caller's frame and
2217 is clobbered by argument setup for this call. */
2218 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2219 &low_to_save, &high_to_save);
b94301c2 2220#endif
20efdf74 2221
51bbfa0c
RS
2222
2223 /* Now store (and compute if necessary) all non-register parms.
2224 These come before register parms, since they can require block-moves,
2225 which could clobber the registers used for register parms.
2226 Parms which have partial registers are not stored here,
2227 but we do preallocate space here if they want that. */
2228
2229 for (i = 0; i < num_actuals; i++)
2230 if (args[i].reg == 0 || args[i].pass_on_stack)
2231 store_one_arg (&args[i], argblock, may_be_alloca,
c84e2712 2232 args_size.var != 0, reg_parm_stack_space);
51bbfa0c 2233
4ab56118
RK
2234 /* If we have a parm that is passed in registers but not in memory
2235 and whose alignment does not permit a direct copy into registers,
2236 make a group of pseudos that correspond to each register that we
2237 will later fill. */
45d44c98 2238 if (STRICT_ALIGNMENT)
20efdf74 2239 store_unaligned_arguments_into_pseudos (args, num_actuals);
4ab56118 2240
51bbfa0c
RS
2241 /* Now store any partially-in-registers parm.
2242 This is the last place a block-move can happen. */
2243 if (reg_parm_seen)
2244 for (i = 0; i < num_actuals; i++)
2245 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2246 store_one_arg (&args[i], argblock, may_be_alloca,
c84e2712 2247 args_size.var != 0, reg_parm_stack_space);
51bbfa0c
RS
2248
2249#ifndef PUSH_ARGS_REVERSED
c795bca9 2250#ifdef PREFERRED_STACK_BOUNDARY
51bbfa0c
RS
2251 /* If we pushed args in forward order, perform stack alignment
2252 after pushing the last arg. */
2253 if (argblock == 0)
599f37b6 2254 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
51bbfa0c
RS
2255#endif
2256#endif
2257
756e0e12
RS
2258 /* If register arguments require space on the stack and stack space
2259 was not preallocated, allocate stack space here for arguments
2260 passed in registers. */
6e716e89 2261#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
756e0e12 2262 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 2263 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
2264#endif
2265
51bbfa0c
RS
2266 /* Pass the function the address in which to return a structure value. */
2267 if (structure_value_addr && ! structure_value_addr_parm)
2268 {
2269 emit_move_insn (struct_value_rtx,
2270 force_reg (Pmode,
e5d70561
RK
2271 force_operand (structure_value_addr,
2272 NULL_RTX)));
7815214e
RK
2273
2274 /* Mark the memory for the aggregate as write-only. */
7d384cc0 2275 if (current_function_check_memory_usage)
7815214e
RK
2276 emit_library_call (chkr_set_right_libfunc, 1,
2277 VOIDmode, 3,
6a9c4aed 2278 structure_value_addr, Pmode,
7815214e 2279 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
956d6950
JL
2280 GEN_INT (MEMORY_USE_WO),
2281 TYPE_MODE (integer_type_node));
7815214e 2282
51bbfa0c 2283 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 2284 use_reg (&call_fusage, struct_value_rtx);
51bbfa0c
RS
2285 }
2286
77cac2f2 2287 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
8b0f9101 2288
21a3b983 2289 load_register_parameters (args, num_actuals, &call_fusage);
51bbfa0c
RS
2290
2291 /* Perform postincrements before actually calling the function. */
2292 emit_queue ();
2293
2294 /* All arguments and registers used for the call must be set up by now! */
2295
51bbfa0c 2296 /* Generate the actual call instruction. */
fb5eebb9
RH
2297 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2298 args_size.constant, struct_value_size,
51bbfa0c 2299 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
77cac2f2 2300 valreg, old_inhibit_defer_pop, call_fusage, is_const);
51bbfa0c
RS
2301
2302 /* If call is cse'able, make appropriate pair of reg-notes around it.
2303 Test valreg so we don't crash; may safely ignore `const'
80a3ad45
JW
2304 if return type is void. Disable for PARALLEL return values, because
2305 we have no way to move such values into a pseudo register. */
2306 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
51bbfa0c
RS
2307 {
2308 rtx note = 0;
2309 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2310 rtx insns;
2311
9ae8ffe7
JL
2312 /* Mark the return value as a pointer if needed. */
2313 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2314 {
2315 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2316 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2317 }
2318
51bbfa0c
RS
2319 /* Construct an "equal form" for the value which mentions all the
2320 arguments in order as well as the function name. */
2321#ifdef PUSH_ARGS_REVERSED
2322 for (i = 0; i < num_actuals; i++)
38a448ca 2323 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c
RS
2324#else
2325 for (i = num_actuals - 1; i >= 0; i--)
38a448ca 2326 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c 2327#endif
38a448ca 2328 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
51bbfa0c
RS
2329
2330 insns = get_insns ();
2331 end_sequence ();
2332
2333 emit_libcall_block (insns, temp, valreg, note);
2334
2335 valreg = temp;
2336 }
4f48d56a
RK
2337 else if (is_const)
2338 {
2339 /* Otherwise, just write out the sequence without a note. */
2340 rtx insns = get_insns ();
2341
2342 end_sequence ();
2343 emit_insns (insns);
2344 }
9ae8ffe7
JL
2345 else if (is_malloc)
2346 {
2347 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2348 rtx last, insns;
2349
2350 /* The return value from a malloc-like function is a pointer. */
2351 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2352 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2353
2354 emit_move_insn (temp, valreg);
2355
2356 /* The return value from a malloc-like function can not alias
2357 anything else. */
2358 last = get_last_insn ();
2359 REG_NOTES (last) =
38a448ca 2360 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
9ae8ffe7
JL
2361
2362 /* Write out the sequence. */
2363 insns = get_insns ();
2364 end_sequence ();
2365 emit_insns (insns);
2366 valreg = temp;
2367 }
51bbfa0c
RS
2368
2369 /* For calls to `setjmp', etc., inform flow.c it should complain
2370 if nonvolatile values are live. */
2371
2372 if (returns_twice)
2373 {
2374 emit_note (name, NOTE_INSN_SETJMP);
2375 current_function_calls_setjmp = 1;
2376 }
2377
2378 if (is_longjmp)
2379 current_function_calls_longjmp = 1;
2380
2381 /* Notice functions that cannot return.
2382 If optimizing, insns emitted below will be dead.
2383 If not optimizing, they will exist, which is useful
2384 if the user uses the `return' command in the debugger. */
2385
2386 if (is_volatile || is_longjmp)
2387 emit_barrier ();
2388
51bbfa0c
RS
2389 /* If value type not void, return an rtx for the value. */
2390
e976b8b2
MS
2391 /* If there are cleanups to be called, don't use a hard reg as target.
2392 We need to double check this and see if it matters anymore. */
e9a25f70 2393 if (any_pending_cleanups (1)
51bbfa0c
RS
2394 && target && REG_P (target)
2395 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2396 target = 0;
2397
2398 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2399 || ignore)
2400 {
2401 target = const0_rtx;
2402 }
2403 else if (structure_value_addr)
2404 {
2405 if (target == 0 || GET_CODE (target) != MEM)
29008b51 2406 {
38a448ca
RH
2407 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2408 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2409 structure_value_addr));
c6df88cb
MM
2410 MEM_SET_IN_STRUCT_P (target,
2411 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
29008b51 2412 }
51bbfa0c
RS
2413 }
2414 else if (pcc_struct_value)
2415 {
f78b5ca1
JL
2416 /* This is the special C++ case where we need to
2417 know what the true target was. We take care to
2418 never use this value more than once in one expression. */
38a448ca
RH
2419 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2420 copy_to_reg (valreg));
c6df88cb 2421 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
51bbfa0c 2422 }
cacbd532
JW
2423 /* Handle calls that return values in multiple non-contiguous locations.
2424 The Irix 6 ABI has examples of this. */
2425 else if (GET_CODE (valreg) == PARALLEL)
2426 {
aac5cc16
RH
2427 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2428
cacbd532
JW
2429 if (target == 0)
2430 {
2b4092f2 2431 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
c6df88cb 2432 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
cacbd532
JW
2433 preserve_temp_slots (target);
2434 }
2435
aac5cc16
RH
2436 emit_group_store (target, valreg, bytes,
2437 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
cacbd532 2438 }
059c3d84
JW
2439 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2440 && GET_MODE (target) == GET_MODE (valreg))
2441 /* TARGET and VALREG cannot be equal at this point because the latter
2442 would not have REG_FUNCTION_VALUE_P true, while the former would if
2443 it were referring to the same register.
2444
2445 If they refer to the same register, this move will be a no-op, except
2446 when function inlining is being done. */
2447 emit_move_insn (target, valreg);
766b19fb 2448 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
c36fce9a 2449 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
51bbfa0c
RS
2450 else
2451 target = copy_to_reg (valreg);
2452
84b55618 2453#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
2454 /* If we promoted this return value, make the proper SUBREG. TARGET
2455 might be const0_rtx here, so be careful. */
2456 if (GET_CODE (target) == REG
766b19fb 2457 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
5d2ac65e 2458 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 2459 {
321e0bba
RK
2460 tree type = TREE_TYPE (exp);
2461 int unsignedp = TREE_UNSIGNED (type);
84b55618 2462
321e0bba
RK
2463 /* If we don't promote as expected, something is wrong. */
2464 if (GET_MODE (target)
2465 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
5d2ac65e
RK
2466 abort ();
2467
38a448ca 2468 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
84b55618
RK
2469 SUBREG_PROMOTED_VAR_P (target) = 1;
2470 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2471 }
2472#endif
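 /* For illustration: a `short' value returned on a PROMOTE_FUNCTION_RETURN
    target comes back widened to SImode; the code above rewraps the SImode copy
    as (subreg:HI (reg:SI ...) 0) and records whether the widening was sign- or
    zero-extended. */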
2473
2f4aa534
RS
2474 /* If size of args is variable or this was a constructor call for a stack
2475 argument, restore saved stack-pointer value. */
51bbfa0c
RS
2476
2477 if (old_stack_level)
2478 {
e5d70561 2479 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 2480 pending_stack_adjust = old_pending_adj;
d64f5a78 2481#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2482 stack_arg_under_construction = old_stack_arg_under_construction;
2483 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2484 stack_usage_map = initial_stack_usage_map;
d64f5a78 2485#endif
51bbfa0c 2486 }
51bbfa0c
RS
2487#ifdef ACCUMULATE_OUTGOING_ARGS
2488 else
2489 {
2490#ifdef REG_PARM_STACK_SPACE
2491 if (save_area)
20efdf74
JL
2492 restore_fixed_argument_area (save_area, argblock,
2493 high_to_save, low_to_save);
b94301c2 2494#endif
51bbfa0c 2495
51bbfa0c
RS
2496 /* If we saved any argument areas, restore them. */
2497 for (i = 0; i < num_actuals; i++)
2498 if (args[i].save_area)
2499 {
2500 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2501 rtx stack_area
38a448ca
RH
2502 = gen_rtx_MEM (save_mode,
2503 memory_address (save_mode,
2504 XEXP (args[i].stack_slot, 0)));
51bbfa0c
RS
2505
2506 if (save_mode != BLKmode)
2507 emit_move_insn (stack_area, args[i].save_area);
2508 else
2509 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 2510 GEN_INT (args[i].size.constant),
51bbfa0c
RS
2511 PARM_BOUNDARY / BITS_PER_UNIT);
2512 }
2513
2514 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2515 stack_usage_map = initial_stack_usage_map;
2516 }
2517#endif
2518
59257ff7
RK
2519 /* If this was alloca, record the new stack level for nonlocal gotos.
2520 Check for the handler slots since we might not have a save area
0f41302f 2521 for non-local gotos. */
59257ff7 2522
ba716ac9 2523 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
e5d70561 2524 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
2525
2526 pop_temp_slots ();
2527
8e6a59fe
MM
2528 /* Free up storage we no longer need. */
2529 for (i = 0; i < num_actuals; ++i)
2530 if (args[i].aligned_regs)
2531 free (args[i].aligned_regs);
2532
51bbfa0c
RS
2533 return target;
2534}
2535\f
322e3e34
RK
2536/* Output a library call to function FUN (a SYMBOL_REF rtx)
2537 (emitting the queue unless NO_QUEUE is nonzero),
2538 for a value of mode OUTMODE,
2539 with NARGS different arguments, passed as alternating rtx values
2540 and machine_modes to convert them to.
2541 The rtx values should have been passed through protect_from_queue already.
2542
2543 NO_QUEUE will be true if and only if the library call is a `const' call
2544 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2545 to the variable is_const in expand_call.
2546
2547 NO_QUEUE must be true for const calls, because if it isn't, then
2548 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2549 and will be lost if the libcall sequence is optimized away.
2550
2551 NO_QUEUE must be false for non-const calls, because if it isn't, the
2552 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2553 optimized. For instance, the instruction scheduler may incorrectly
2554 move memory references across the non-const call. */
2555
2556void
4f90e4a0
RK
2557emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2558 int nargs, ...))
322e3e34 2559{
5148a72b 2560#ifndef ANSI_PROTOTYPES
4f90e4a0
RK
2561 rtx orgfun;
2562 int no_queue;
2563 enum machine_mode outmode;
2564 int nargs;
2565#endif
322e3e34
RK
2566 va_list p;
2567 /* Total size in bytes of all the stack-parms scanned so far. */
2568 struct args_size args_size;
2569 /* Size of arguments before any adjustments (such as rounding). */
2570 struct args_size original_args_size;
2571 register int argnum;
322e3e34 2572 rtx fun;
322e3e34
RK
2573 int inc;
2574 int count;
2575 rtx argblock = 0;
2576 CUMULATIVE_ARGS args_so_far;
2577 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 2578 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
2579 struct arg *argvec;
2580 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2581 rtx call_fusage = 0;
e5e809f4 2582 int reg_parm_stack_space = 0;
f046b3cc
JL
2583#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2584 /* Define the boundary of the register parm stack space that needs to be
 2585 saved, if any. */
6a651371 2586 int low_to_save = -1, high_to_save = 0;
f046b3cc
JL
2587 rtx save_area = 0; /* Place that it is saved */
2588#endif
2589
2590#ifdef ACCUMULATE_OUTGOING_ARGS
2591 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2592 char *initial_stack_usage_map = stack_usage_map;
2593 int needed;
2594#endif
2595
2596#ifdef REG_PARM_STACK_SPACE
69d4ca36 2597 /* Size of the stack reserved for parameter registers. */
f046b3cc
JL
2598#ifdef MAYBE_REG_PARM_STACK_SPACE
2599 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2600#else
ab87f8c8 2601 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
f046b3cc
JL
2602#endif
2603#endif
322e3e34 2604
4f90e4a0
RK
2605 VA_START (p, nargs);
2606
5148a72b 2607#ifndef ANSI_PROTOTYPES
4f90e4a0 2608 orgfun = va_arg (p, rtx);
322e3e34
RK
2609 no_queue = va_arg (p, int);
2610 outmode = va_arg (p, enum machine_mode);
2611 nargs = va_arg (p, int);
4f90e4a0
RK
2612#endif
2613
2614 fun = orgfun;
322e3e34
RK
2615
2616 /* Copy all the libcall-arguments out of the varargs data
2617 and into a vector ARGVEC.
2618
2619 Compute how to pass each argument. We only support a very small subset
2620 of the full argument passing conventions to limit complexity here since
2621 library functions shouldn't have many args. */
2622
2623 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
f046b3cc
JL
2624 bzero ((char *) argvec, nargs * sizeof (struct arg));
2625
322e3e34 2626
eecb6f50 2627 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2628
2629 args_size.constant = 0;
2630 args_size.var = 0;
2631
888aa7a9
RS
2632 push_temp_slots ();
2633
322e3e34
RK
2634 for (count = 0; count < nargs; count++)
2635 {
2636 rtx val = va_arg (p, rtx);
2637 enum machine_mode mode = va_arg (p, enum machine_mode);
2638
2639 /* We cannot convert the arg value to the mode the library wants here;
2640 must do it earlier where we know the signedness of the arg. */
2641 if (mode == BLKmode
2642 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2643 abort ();
2644
2645 /* On some machines, there's no way to pass a float to a library fcn.
2646 Pass it as a double instead. */
2647#ifdef LIBGCC_NEEDS_DOUBLE
2648 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2649 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2650#endif
2651
2652 /* There's no need to call protect_from_queue, because
2653 either emit_move_insn or emit_push_insn will do that. */
2654
2655 /* Make sure it is a reasonable operand for a move or push insn. */
2656 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2657 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2658 val = force_operand (val, NULL_RTX);
2659
322e3e34
RK
2660#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2661 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2662 {
a44492f0
RK
2663 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2664 be viewed as just an efficiency improvement. */
888aa7a9
RS
2665 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2666 emit_move_insn (slot, val);
8301b6e2 2667 val = force_operand (XEXP (slot, 0), NULL_RTX);
a44492f0 2668 mode = Pmode;
888aa7a9 2669 }
322e3e34
RK
2670#endif
2671
888aa7a9
RS
2672 argvec[count].value = val;
2673 argvec[count].mode = mode;
2674
322e3e34 2675 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 2676 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
2677 abort ();
2678#ifdef FUNCTION_ARG_PARTIAL_NREGS
2679 argvec[count].partial
2680 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2681#else
2682 argvec[count].partial = 0;
2683#endif
2684
2685 locate_and_pad_parm (mode, NULL_TREE,
2686 argvec[count].reg && argvec[count].partial == 0,
2687 NULL_TREE, &args_size, &argvec[count].offset,
2688 &argvec[count].size);
2689
2690 if (argvec[count].size.var)
2691 abort ();
2692
e5e809f4 2693 if (reg_parm_stack_space == 0 && argvec[count].partial)
322e3e34 2694 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
322e3e34
RK
2695
2696 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 2697 || reg_parm_stack_space > 0)
322e3e34
RK
2698 args_size.constant += argvec[count].size.constant;
2699
0f41302f 2700 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
2701 }
2702 va_end (p);
2703
f046b3cc
JL
2704#ifdef FINAL_REG_PARM_STACK_SPACE
2705 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2706 args_size.var);
2707#endif
2708
322e3e34
RK
2709 /* If this machine requires an external definition for library
2710 functions, write one out. */
2711 assemble_external_libcall (fun);
2712
2713 original_args_size = args_size;
c795bca9 2714#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
2715 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2716 / STACK_BYTES) * STACK_BYTES);
2717#endif
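 /* Illustrative arithmetic: with STACK_BYTES == 4 (a 32-bit boundary), 10 bytes
    of arguments round up to 12; with STACK_BYTES == 8 they round up to 16. */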
2718
322e3e34 2719 args_size.constant = MAX (args_size.constant,
f046b3cc 2720 reg_parm_stack_space);
e5e809f4 2721
322e3e34 2722#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc 2723 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
2724#endif
2725
322e3e34
RK
2726 if (args_size.constant > current_function_outgoing_args_size)
2727 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2728
2729#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
2730 /* Since the stack pointer will never be pushed, it is possible for
2731 the evaluation of a parm to clobber something we have already
2732 written to the stack. Since most function calls on RISC machines
2733 do not use the stack, this is uncommon, but must work correctly.
2734
2735 Therefore, we save any area of the stack that was already written
2736 and that we are using. Here we set up to do this by making a new
2737 stack usage map from the old one.
2738
2739 Another approach might be to try to reorder the argument
2740 evaluations to avoid this conflicting stack usage. */
2741
2742 needed = args_size.constant;
e5e809f4
JL
2743
2744#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc
JL
2745 /* Since we will be writing into the entire argument area, the
2746 map must be allocated for its entire size, not just the part that
2747 is the responsibility of the caller. */
2748 needed += reg_parm_stack_space;
2749#endif
2750
2751#ifdef ARGS_GROW_DOWNWARD
2752 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2753 needed + 1);
2754#else
2755 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2756 needed);
322e3e34 2757#endif
f046b3cc
JL
2758 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2759
2760 if (initial_highest_arg_in_use)
2761 bcopy (initial_stack_usage_map, stack_usage_map,
2762 initial_highest_arg_in_use);
2763
2764 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2765 bzero (&stack_usage_map[initial_highest_arg_in_use],
2766 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2767 needed = 0;
322e3e34 2768
f046b3cc
JL
2769 /* The address of the outgoing argument list must not be copied to a
2770 register here, because argblock would be left pointing to the
2771 wrong place after the call to allocate_dynamic_stack_space below.
2772 */
2773
2774 argblock = virtual_outgoing_args_rtx;
2775#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
2776#ifndef PUSH_ROUNDING
2777 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2778#endif
f046b3cc 2779#endif
322e3e34
RK
2780
2781#ifdef PUSH_ARGS_REVERSED
c795bca9 2782#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
2783 /* If we push args individually in reverse order, perform stack alignment
2784 before the first push (the last arg). */
2785 if (argblock == 0)
2786 anti_adjust_stack (GEN_INT (args_size.constant
2787 - original_args_size.constant));
2788#endif
2789#endif
2790
2791#ifdef PUSH_ARGS_REVERSED
2792 inc = -1;
2793 argnum = nargs - 1;
2794#else
2795 inc = 1;
2796 argnum = 0;
2797#endif
2798
f046b3cc
JL
2799#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2800 /* The argument list is the property of the called routine and it
2801 may clobber it. If the fixed area has been used for previous
2802 parameters, we must save and restore it.
2803
 2804 Here we compute the boundary of the area that needs to be saved, if any. */
2805
2806#ifdef ARGS_GROW_DOWNWARD
2807 for (count = 0; count < reg_parm_stack_space + 1; count++)
2808#else
2809 for (count = 0; count < reg_parm_stack_space; count++)
2810#endif
2811 {
2812 if (count >= highest_outgoing_arg_in_use
2813 || stack_usage_map[count] == 0)
2814 continue;
2815
2816 if (low_to_save == -1)
2817 low_to_save = count;
2818
2819 high_to_save = count;
2820 }
2821
2822 if (low_to_save >= 0)
2823 {
2824 int num_to_save = high_to_save - low_to_save + 1;
2825 enum machine_mode save_mode
2826 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2827 rtx stack_area;
2828
2829 /* If we don't have the required alignment, must do this in BLKmode. */
2830 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2831 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2832 save_mode = BLKmode;
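 /* Illustrative: a 4-byte region starting at a suitably aligned offset is saved
    with a single SImode move; a size with no matching integer mode (say 3
    bytes) or a misaligned start falls back to BLKmode and the block move
    below. */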
2833
ceb83206 2834#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
2835 stack_area = gen_rtx_MEM (save_mode,
2836 memory_address (save_mode,
38a448ca 2837 plus_constant (argblock,
ceb83206 2838 - high_to_save)));
f046b3cc 2839#else
ceb83206
JL
2840 stack_area = gen_rtx_MEM (save_mode,
2841 memory_address (save_mode,
38a448ca 2842 plus_constant (argblock,
ceb83206 2843 low_to_save)));
f046b3cc 2844#endif
f046b3cc
JL
2845 if (save_mode == BLKmode)
2846 {
2847 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
f046b3cc
JL
2848 emit_block_move (validize_mem (save_area), stack_area,
2849 GEN_INT (num_to_save),
2850 PARM_BOUNDARY / BITS_PER_UNIT);
2851 }
2852 else
2853 {
2854 save_area = gen_reg_rtx (save_mode);
2855 emit_move_insn (save_area, stack_area);
2856 }
2857 }
2858#endif
2859
322e3e34
RK
2860 /* Push the args that need to be pushed. */
2861
5e26979c
JL
2862 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2863 are to be pushed. */
322e3e34
RK
2864 for (count = 0; count < nargs; count++, argnum += inc)
2865 {
2866 register enum machine_mode mode = argvec[argnum].mode;
2867 register rtx val = argvec[argnum].value;
2868 rtx reg = argvec[argnum].reg;
2869 int partial = argvec[argnum].partial;
69d4ca36 2870#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 2871 int lower_bound, upper_bound, i;
69d4ca36 2872#endif
322e3e34
RK
2873
2874 if (! (reg != 0 && partial == 0))
f046b3cc
JL
2875 {
2876#ifdef ACCUMULATE_OUTGOING_ARGS
2877 /* If this is being stored into a pre-allocated, fixed-size, stack
2878 area, save any previous data at that location. */
2879
2880#ifdef ARGS_GROW_DOWNWARD
2881 /* stack_slot is negative, but we want to index stack_usage_map
2882 with positive values. */
5e26979c
JL
2883 upper_bound = -argvec[argnum].offset.constant + 1;
2884 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 2885#else
5e26979c
JL
2886 lower_bound = argvec[argnum].offset.constant;
2887 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
2888#endif
2889
2890 for (i = lower_bound; i < upper_bound; i++)
2891 if (stack_usage_map[i]
f046b3cc
JL
2892 /* Don't store things in the fixed argument area at this point;
2893 it has already been saved. */
e5e809f4 2894 && i > reg_parm_stack_space)
f046b3cc
JL
2895 break;
2896
2897 if (i != upper_bound)
2898 {
e5e809f4 2899 /* We need to make a save area. See what mode we can make it. */
f046b3cc 2900 enum machine_mode save_mode
5e26979c 2901 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
2902 MODE_INT, 1);
2903 rtx stack_area
38a448ca
RH
2904 = gen_rtx_MEM (save_mode,
2905 memory_address (save_mode,
2906 plus_constant (argblock, argvec[argnum].offset.constant)));
5e26979c
JL
2907 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2908 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
2909 }
2910#endif
2911 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
e5e809f4
JL
2912 argblock, GEN_INT (argvec[argnum].offset.constant),
2913 reg_parm_stack_space);
f046b3cc
JL
2914
2915#ifdef ACCUMULATE_OUTGOING_ARGS
2916 /* Now mark the segment we just used. */
2917 for (i = lower_bound; i < upper_bound; i++)
2918 stack_usage_map[i] = 1;
2919#endif
2920
2921 NO_DEFER_POP;
2922 }
322e3e34
RK
2923 }
2924
2925#ifndef PUSH_ARGS_REVERSED
c795bca9 2926#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
2927 /* If we pushed args in forward order, perform stack alignment
2928 after pushing the last arg. */
2929 if (argblock == 0)
2930 anti_adjust_stack (GEN_INT (args_size.constant
2931 - original_args_size.constant));
2932#endif
2933#endif
2934
2935#ifdef PUSH_ARGS_REVERSED
2936 argnum = nargs - 1;
2937#else
2938 argnum = 0;
2939#endif
2940
77cac2f2 2941 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 2942
322e3e34
RK
2943 /* Now load any reg parms into their regs. */
2944
5e26979c
JL
2945 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2946 are to be pushed. */
322e3e34
RK
2947 for (count = 0; count < nargs; count++, argnum += inc)
2948 {
322e3e34
RK
2949 register rtx val = argvec[argnum].value;
2950 rtx reg = argvec[argnum].reg;
2951 int partial = argvec[argnum].partial;
2952
2953 if (reg != 0 && partial == 0)
2954 emit_move_insn (reg, val);
2955 NO_DEFER_POP;
2956 }
2957
2958 /* For version 1.37, try deleting this entirely. */
2959 if (! no_queue)
2960 emit_queue ();
2961
2962 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
2963 for (count = 0; count < nargs; count++)
2964 if (argvec[count].reg != 0)
77cac2f2 2965 use_reg (&call_fusage, argvec[count].reg);
322e3e34 2966
322e3e34
RK
2967 /* Don't allow popping to be deferred, since then
2968 cse'ing of library calls could delete a call and leave the pop. */
2969 NO_DEFER_POP;
2970
2971 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2972 will set inhibit_defer_pop to that value. */
2973
334c4f0f
RK
2974 /* The return type is needed to decide how many bytes the function pops.
2975 Signedness plays no role in that, so for simplicity, we pretend it's
2976 always signed. We also assume that the list of arguments passed has
2977 no impact, so we pretend it is unknown. */
2978
2c8da025
RK
2979 emit_call_1 (fun,
2980 get_identifier (XSTR (orgfun, 0)),
b3776927
RK
2981 build_function_type (outmode == VOIDmode ? void_type_node
2982 : type_for_mode (outmode, 0), NULL_TREE),
fb5eebb9 2983 original_args_size.constant, args_size.constant, 0,
322e3e34
RK
2984 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2985 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 2986 old_inhibit_defer_pop + 1, call_fusage, no_queue);
322e3e34 2987
888aa7a9
RS
2988 pop_temp_slots ();
2989
322e3e34
RK
2990 /* Now restore inhibit_defer_pop to its actual original value. */
2991 OK_DEFER_POP;
f046b3cc
JL
2992
2993#ifdef ACCUMULATE_OUTGOING_ARGS
2994#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
2995 if (save_area)
2996 {
2997 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 2998#ifdef ARGS_GROW_DOWNWARD
e9a25f70 2999 rtx stack_area
38a448ca
RH
3000 = gen_rtx_MEM (save_mode,
3001 memory_address (save_mode,
ceb83206
JL
3002 plus_constant (argblock,
3003 - high_to_save)));
f046b3cc 3004#else
ceb83206
JL
3005 rtx stack_area
3006 = gen_rtx_MEM (save_mode,
3007 memory_address (save_mode,
3008 plus_constant (argblock, low_to_save)));
f046b3cc 3009#endif
f046b3cc 3010
e9a25f70
JL
3011 if (save_mode != BLKmode)
3012 emit_move_insn (stack_area, save_area);
3013 else
3014 emit_block_move (stack_area, validize_mem (save_area),
3015 GEN_INT (high_to_save - low_to_save + 1),
3016 PARM_BOUNDARY / BITS_PER_UNIT);
3017 }
f046b3cc
JL
3018#endif
3019
3020 /* If we saved any argument areas, restore them. */
3021 for (count = 0; count < nargs; count++)
3022 if (argvec[count].save_area)
3023 {
3024 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3025 rtx stack_area
38a448ca
RH
3026 = gen_rtx_MEM (save_mode,
3027 memory_address (save_mode,
3028 plus_constant (argblock, argvec[count].offset.constant)));
f046b3cc
JL
3029
3030 emit_move_insn (stack_area, argvec[count].save_area);
3031 }
3032
3033 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3034 stack_usage_map = initial_stack_usage_map;
3035#endif
322e3e34
RK
3036}
3037\f
3038/* Like emit_library_call except that an extra argument, VALUE,
3039 comes second and says where to store the result.
fac0ad80
RS
3040 (If VALUE is zero, this function chooses a convenient way
 3041 to return the value.)
322e3e34 3042
fac0ad80
RS
3043 This function returns an rtx for where the value is to be found.
3044 If VALUE is nonzero, VALUE is returned. */
3045
3046rtx
4f90e4a0
RK
3047emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
3048 enum machine_mode outmode, int nargs, ...))
322e3e34 3049{
5148a72b 3050#ifndef ANSI_PROTOTYPES
4f90e4a0
RK
3051 rtx orgfun;
3052 rtx value;
3053 int no_queue;
3054 enum machine_mode outmode;
3055 int nargs;
3056#endif
322e3e34
RK
3057 va_list p;
3058 /* Total size in bytes of all the stack-parms scanned so far. */
3059 struct args_size args_size;
3060 /* Size of arguments before any adjustments (such as rounding). */
3061 struct args_size original_args_size;
3062 register int argnum;
322e3e34 3063 rtx fun;
322e3e34
RK
3064 int inc;
3065 int count;
3066 rtx argblock = 0;
3067 CUMULATIVE_ARGS args_so_far;
3068 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 3069 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
3070 struct arg *argvec;
3071 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 3072 rtx call_fusage = 0;
322e3e34 3073 rtx mem_value = 0;
fac0ad80 3074 int pcc_struct_value = 0;
4f389214 3075 int struct_value_size = 0;
d61bee95 3076 int is_const;
e5e809f4 3077 int reg_parm_stack_space = 0;
69d4ca36 3078#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 3079 int needed;
69d4ca36 3080#endif
f046b3cc
JL
3081
3082#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3083 /* Define the boundary of the register parm stack space that needs to be
 3084 saved, if any. */
6a651371 3085 int low_to_save = -1, high_to_save = 0;
f046b3cc
JL
3086 rtx save_area = 0; /* Place that it is saved */
3087#endif
3088
3089#ifdef ACCUMULATE_OUTGOING_ARGS
69d4ca36 3090 /* Size of the stack reserved for parameter registers. */
f046b3cc
JL
3091 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3092 char *initial_stack_usage_map = stack_usage_map;
3093#endif
3094
3095#ifdef REG_PARM_STACK_SPACE
3096#ifdef MAYBE_REG_PARM_STACK_SPACE
3097 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3098#else
ab87f8c8 3099 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
f046b3cc
JL
3100#endif
3101#endif
322e3e34 3102
4f90e4a0
RK
3103 VA_START (p, nargs);
3104
5148a72b 3105#ifndef ANSI_PROTOTYPES
4f90e4a0 3106 orgfun = va_arg (p, rtx);
322e3e34
RK
3107 value = va_arg (p, rtx);
3108 no_queue = va_arg (p, int);
3109 outmode = va_arg (p, enum machine_mode);
3110 nargs = va_arg (p, int);
4f90e4a0
RK
3111#endif
3112
d61bee95 3113 is_const = no_queue;
4f90e4a0 3114 fun = orgfun;
322e3e34
RK
3115
3116 /* If this kind of value comes back in memory,
3117 decide where in memory it should come back. */
fac0ad80 3118 if (aggregate_value_p (type_for_mode (outmode, 0)))
322e3e34 3119 {
fac0ad80
RS
3120#ifdef PCC_STATIC_STRUCT_RETURN
3121 rtx pointer_reg
3122 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3123 0);
38a448ca 3124 mem_value = gen_rtx_MEM (outmode, pointer_reg);
fac0ad80
RS
3125 pcc_struct_value = 1;
3126 if (value == 0)
3127 value = gen_reg_rtx (outmode);
3128#else /* not PCC_STATIC_STRUCT_RETURN */
4f389214 3129 struct_value_size = GET_MODE_SIZE (outmode);
fac0ad80 3130 if (value != 0 && GET_CODE (value) == MEM)
322e3e34
RK
3131 mem_value = value;
3132 else
3133 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
fac0ad80 3134#endif
779c643a
JW
3135
3136 /* This call returns a big structure. */
3137 is_const = 0;
322e3e34
RK
3138 }
3139
3140 /* ??? Unfinished: must pass the memory address as an argument. */
3141
3142 /* Copy all the libcall-arguments out of the varargs data
3143 and into a vector ARGVEC.
3144
3145 Compute how to pass each argument. We only support a very small subset
3146 of the full argument passing conventions to limit complexity here since
3147 library functions shouldn't have many args. */
3148
3149 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
d3c4e2ab 3150 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
322e3e34 3151
eecb6f50 3152 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
3153
3154 args_size.constant = 0;
3155 args_size.var = 0;
3156
3157 count = 0;
3158
888aa7a9
RS
3159 push_temp_slots ();
3160
322e3e34
RK
3161 /* If there's a structure value address to be passed,
3162 either pass it in the special place, or pass it as an extra argument. */
fac0ad80 3163 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
322e3e34
RK
3164 {
3165 rtx addr = XEXP (mem_value, 0);
fac0ad80 3166 nargs++;
322e3e34 3167
fac0ad80
RS
3168 /* Make sure it is a reasonable operand for a move or push insn. */
3169 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3170 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3171 addr = force_operand (addr, NULL_RTX);
322e3e34 3172
fac0ad80 3173 argvec[count].value = addr;
4fc3dcd5 3174 argvec[count].mode = Pmode;
fac0ad80 3175 argvec[count].partial = 0;
322e3e34 3176
4fc3dcd5 3177 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
322e3e34 3178#ifdef FUNCTION_ARG_PARTIAL_NREGS
4fc3dcd5 3179 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
fac0ad80 3180 abort ();
322e3e34
RK
3181#endif
3182
4fc3dcd5 3183 locate_and_pad_parm (Pmode, NULL_TREE,
fac0ad80
RS
3184 argvec[count].reg && argvec[count].partial == 0,
3185 NULL_TREE, &args_size, &argvec[count].offset,
3186 &argvec[count].size);
322e3e34
RK
3187
3188
fac0ad80 3189 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 3190 || reg_parm_stack_space > 0)
fac0ad80 3191 args_size.constant += argvec[count].size.constant;
322e3e34 3192
0f41302f 3193 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
fac0ad80
RS
3194
3195 count++;
322e3e34
RK
3196 }
3197
3198 for (; count < nargs; count++)
3199 {
3200 rtx val = va_arg (p, rtx);
3201 enum machine_mode mode = va_arg (p, enum machine_mode);
3202
3203 /* We cannot convert the arg value to the mode the library wants here;
3204 must do it earlier where we know the signedness of the arg. */
3205 if (mode == BLKmode
3206 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3207 abort ();
3208
3209 /* On some machines, there's no way to pass a float to a library fcn.
3210 Pass it as a double instead. */
3211#ifdef LIBGCC_NEEDS_DOUBLE
3212 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 3213 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
3214#endif
3215
3216 /* There's no need to call protect_from_queue, because
3217 either emit_move_insn or emit_push_insn will do that. */
3218
3219 /* Make sure it is a reasonable operand for a move or push insn. */
3220 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3221 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3222 val = force_operand (val, NULL_RTX);
3223
322e3e34
RK
3224#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3225 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 3226 {
a44492f0
RK
3227 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3228 be viewed as just an efficiency improvement. */
888aa7a9
RS
3229 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3230 emit_move_insn (slot, val);
3231 val = XEXP (slot, 0);
3232 mode = Pmode;
3233 }
322e3e34
RK
3234#endif
3235
888aa7a9
RS
3236 argvec[count].value = val;
3237 argvec[count].mode = mode;
3238
322e3e34 3239 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 3240 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
3241 abort ();
3242#ifdef FUNCTION_ARG_PARTIAL_NREGS
3243 argvec[count].partial
3244 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3245#else
3246 argvec[count].partial = 0;
3247#endif
3248
3249 locate_and_pad_parm (mode, NULL_TREE,
3250 argvec[count].reg && argvec[count].partial == 0,
3251 NULL_TREE, &args_size, &argvec[count].offset,
3252 &argvec[count].size);
3253
3254 if (argvec[count].size.var)
3255 abort ();
3256
e5e809f4 3257 if (reg_parm_stack_space == 0 && argvec[count].partial)
322e3e34 3258 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
322e3e34
RK
3259
3260 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 3261 || reg_parm_stack_space > 0)
322e3e34
RK
3262 args_size.constant += argvec[count].size.constant;
3263
0f41302f 3264 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
3265 }
3266 va_end (p);
3267
f046b3cc
JL
3268#ifdef FINAL_REG_PARM_STACK_SPACE
3269 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3270 args_size.var);
3271#endif
322e3e34
RK
3272 /* If this machine requires an external definition for library
3273 functions, write one out. */
3274 assemble_external_libcall (fun);
3275
3276 original_args_size = args_size;
c795bca9 3277#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
3278 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3279 / STACK_BYTES) * STACK_BYTES);
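  /* For example, on a target whose PREFERRED_STACK_BOUNDARY is 64 bits
     (STACK_BYTES == 8), 20 bytes of arguments round up to 24.  */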
3280#endif
3281
322e3e34 3282 args_size.constant = MAX (args_size.constant,
f046b3cc 3283 reg_parm_stack_space);
e5e809f4 3284
322e3e34 3285#ifndef OUTGOING_REG_PARM_STACK_SPACE
fc990856 3286 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
3287#endif
3288
322e3e34
RK
3289 if (args_size.constant > current_function_outgoing_args_size)
3290 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
3291
3292#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
3293 /* Since the stack pointer will never be pushed, it is possible for
3294 the evaluation of a parm to clobber something we have already
3295 written to the stack. Since most function calls on RISC machines
3296 do not use the stack, this is uncommon, but must work correctly.
3297
3298 Therefore, we save any area of the stack that was already written
3299 and that we are using. Here we set up to do this by making a new
3300 stack usage map from the old one.
3301
3302 Another approach might be to try to reorder the argument
3303 evaluations to avoid this conflicting stack usage. */
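  /* stack_usage_map is indexed by byte offset into the outgoing argument
     area; a nonzero entry means that byte already holds data for this
     call, so a later store landing there must save the old contents
     first.  */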
3304
3305 needed = args_size.constant;
e5e809f4
JL
3306
3307#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc
JL
3308 /* Since we will be writing into the entire argument area, the
3309 map must be allocated for its entire size, not just the part that
3310 is the responsibility of the caller. */
3311 needed += reg_parm_stack_space;
3312#endif
3313
3314#ifdef ARGS_GROW_DOWNWARD
3315 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3316 needed + 1);
3317#else
3318 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3319 needed);
322e3e34 3320#endif
f046b3cc
JL
3321 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3322
3323 if (initial_highest_arg_in_use)
3324 bcopy (initial_stack_usage_map, stack_usage_map,
3325 initial_highest_arg_in_use);
3326
3327 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3328 bzero (&stack_usage_map[initial_highest_arg_in_use],
3329 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3330 needed = 0;
322e3e34 3331
f046b3cc
JL
3332 /* The address of the outgoing argument list must not be copied to a
3333 register here, because argblock would be left pointing to the
3334 wrong place after the call to allocate_dynamic_stack_space below.
3335 */
3336
3337 argblock = virtual_outgoing_args_rtx;
3338#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
3339#ifndef PUSH_ROUNDING
3340 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3341#endif
f046b3cc 3342#endif
322e3e34
RK
3343
3344#ifdef PUSH_ARGS_REVERSED
c795bca9 3345#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
3346 /* If we push args individually in reverse order, perform stack alignment
3347 before the first push (the last arg). */
3348 if (argblock == 0)
3349 anti_adjust_stack (GEN_INT (args_size.constant
3350 - original_args_size.constant));
3351#endif
3352#endif
3353
3354#ifdef PUSH_ARGS_REVERSED
3355 inc = -1;
3356 argnum = nargs - 1;
3357#else
3358 inc = 1;
3359 argnum = 0;
3360#endif
3361
f046b3cc
JL
3362#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3363 /* The argument list is the property of the called routine and it
3364 may clobber it. If the fixed area has been used for previous
3365 parameters, we must save and restore it.
3366
3367     Here we compute the boundary of the area that needs to be saved, if any.  */
3368
3369#ifdef ARGS_GROW_DOWNWARD
3370 for (count = 0; count < reg_parm_stack_space + 1; count++)
3371#else
3372 for (count = 0; count < reg_parm_stack_space; count++)
3373#endif
3374 {
3375 if (count >= highest_outgoing_arg_in_use
3376 || stack_usage_map[count] == 0)
3377 continue;
3378
3379 if (low_to_save == -1)
3380 low_to_save = count;
3381
3382 high_to_save = count;
3383 }
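  /* At this point LOW_TO_SAVE and HIGH_TO_SAVE bracket the first and
     last bytes of the register parm area already in use, or LOW_TO_SAVE
     is -1 if none are.  */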
3384
3385 if (low_to_save >= 0)
3386 {
3387 int num_to_save = high_to_save - low_to_save + 1;
3388 enum machine_mode save_mode
3389 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3390 rtx stack_area;
3391
3392 /* If we don't have the required alignment, must do this in BLKmode. */
3393 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3394 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3395 save_mode = BLKmode;
3396
ceb83206 3397#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
3398 stack_area = gen_rtx_MEM (save_mode,
3399 memory_address (save_mode,
38a448ca 3400 plus_constant (argblock,
ceb83206 3401 - high_to_save)));
f046b3cc 3402#else
ceb83206
JL
3403 stack_area = gen_rtx_MEM (save_mode,
3404 memory_address (save_mode,
38a448ca 3405 plus_constant (argblock,
ceb83206 3406 low_to_save)));
f046b3cc 3407#endif
f046b3cc
JL
3408 if (save_mode == BLKmode)
3409 {
3410 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
f046b3cc
JL
3411 emit_block_move (validize_mem (save_area), stack_area,
3412 GEN_INT (num_to_save),
3413 PARM_BOUNDARY / BITS_PER_UNIT);
3414 }
3415 else
3416 {
3417 save_area = gen_reg_rtx (save_mode);
3418 emit_move_insn (save_area, stack_area);
3419 }
3420 }
3421#endif
3422
322e3e34
RK
3423 /* Push the args that need to be pushed. */
3424
5e26979c
JL
3425 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3426 are to be pushed. */
322e3e34
RK
3427 for (count = 0; count < nargs; count++, argnum += inc)
3428 {
3429 register enum machine_mode mode = argvec[argnum].mode;
3430 register rtx val = argvec[argnum].value;
3431 rtx reg = argvec[argnum].reg;
3432 int partial = argvec[argnum].partial;
69d4ca36 3433#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 3434 int lower_bound, upper_bound, i;
69d4ca36 3435#endif
322e3e34
RK
3436
3437 if (! (reg != 0 && partial == 0))
f046b3cc
JL
3438 {
3439#ifdef ACCUMULATE_OUTGOING_ARGS
3440 /* If this is being stored into a pre-allocated, fixed-size, stack
3441 area, save any previous data at that location. */
3442
3443#ifdef ARGS_GROW_DOWNWARD
3444 /* stack_slot is negative, but we want to index stack_usage_map
3445 with positive values. */
5e26979c
JL
3446 upper_bound = -argvec[argnum].offset.constant + 1;
3447 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 3448#else
5e26979c
JL
3449 lower_bound = argvec[argnum].offset.constant;
3450 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
3451#endif
3452
3453 for (i = lower_bound; i < upper_bound; i++)
3454 if (stack_usage_map[i]
f046b3cc
JL
3455 /* Don't store things in the fixed argument area at this point;
3456 it has already been saved. */
e5e809f4 3457 && i > reg_parm_stack_space)
f046b3cc
JL
3458 break;
3459
3460 if (i != upper_bound)
3461 {
e5e809f4 3462 /* We need to make a save area. See what mode we can make it. */
f046b3cc 3463 enum machine_mode save_mode
5e26979c 3464 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
3465 MODE_INT, 1);
3466 rtx stack_area
38a448ca
RH
3467 = gen_rtx_MEM (save_mode,
3468 memory_address (save_mode,
3469 plus_constant (argblock,
3470 argvec[argnum].offset.constant)));
5e26979c
JL
3471 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3472 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
3473 }
3474#endif
3475 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
e5e809f4
JL
3476 argblock, GEN_INT (argvec[argnum].offset.constant),
3477 reg_parm_stack_space);
f046b3cc
JL
3478
3479#ifdef ACCUMULATE_OUTGOING_ARGS
3480 /* Now mark the segment we just used. */
3481 for (i = lower_bound; i < upper_bound; i++)
3482 stack_usage_map[i] = 1;
3483#endif
3484
3485 NO_DEFER_POP;
3486 }
322e3e34
RK
3487 }
3488
3489#ifndef PUSH_ARGS_REVERSED
c795bca9 3490#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
3491 /* If we pushed args in forward order, perform stack alignment
3492 after pushing the last arg. */
3493 if (argblock == 0)
3494 anti_adjust_stack (GEN_INT (args_size.constant
3495 - original_args_size.constant));
3496#endif
3497#endif
3498
3499#ifdef PUSH_ARGS_REVERSED
3500 argnum = nargs - 1;
3501#else
3502 argnum = 0;
3503#endif
3504
77cac2f2 3505 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 3506
322e3e34
RK
3507 /* Now load any reg parms into their regs. */
3508
5e26979c
JL
3509 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3510 are to be pushed. */
322e3e34
RK
3511 for (count = 0; count < nargs; count++, argnum += inc)
3512 {
322e3e34
RK
3513 register rtx val = argvec[argnum].value;
3514 rtx reg = argvec[argnum].reg;
3515 int partial = argvec[argnum].partial;
3516
3517 if (reg != 0 && partial == 0)
3518 emit_move_insn (reg, val);
3519 NO_DEFER_POP;
3520 }
3521
3522#if 0
3523 /* For version 1.37, try deleting this entirely. */
3524 if (! no_queue)
3525 emit_queue ();
3526#endif
3527
3528 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
3529 for (count = 0; count < nargs; count++)
3530 if (argvec[count].reg != 0)
77cac2f2 3531 use_reg (&call_fusage, argvec[count].reg);
322e3e34 3532
fac0ad80
RS
3533 /* Pass the function the address in which to return a structure value. */
3534 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3535 {
3536 emit_move_insn (struct_value_rtx,
3537 force_reg (Pmode,
3538 force_operand (XEXP (mem_value, 0),
3539 NULL_RTX)));
3540 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 3541 use_reg (&call_fusage, struct_value_rtx);
fac0ad80
RS
3542 }
3543
322e3e34
RK
3544 /* Don't allow popping to be deferred, since then
3545 cse'ing of library calls could delete a call and leave the pop. */
3546 NO_DEFER_POP;
3547
3548 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3549 will set inhibit_defer_pop to that value. */
334c4f0f
RK
3550 /* See the comment in emit_library_call about the function type we build
3551 and pass here. */
322e3e34 3552
2c8da025
RK
3553 emit_call_1 (fun,
3554 get_identifier (XSTR (orgfun, 0)),
334c4f0f 3555 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
fb5eebb9
RH
3556 original_args_size.constant, args_size.constant,
3557 struct_value_size,
322e3e34 3558 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4d6a19ff 3559 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 3560 old_inhibit_defer_pop + 1, call_fusage, is_const);
322e3e34
RK
3561
3562 /* Now restore inhibit_defer_pop to its actual original value. */
3563 OK_DEFER_POP;
3564
888aa7a9
RS
3565 pop_temp_slots ();
3566
322e3e34
RK
3567 /* Copy the value to the right place. */
3568 if (outmode != VOIDmode)
3569 {
3570 if (mem_value)
3571 {
3572 if (value == 0)
fac0ad80 3573 value = mem_value;
322e3e34
RK
3574 if (value != mem_value)
3575 emit_move_insn (value, mem_value);
3576 }
3577 else if (value != 0)
3578 emit_move_insn (value, hard_libcall_value (outmode));
fac0ad80
RS
3579 else
3580 value = hard_libcall_value (outmode);
322e3e34 3581 }
fac0ad80 3582
f046b3cc
JL
3583#ifdef ACCUMULATE_OUTGOING_ARGS
3584#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
3585 if (save_area)
3586 {
3587 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 3588#ifdef ARGS_GROW_DOWNWARD
e9a25f70 3589 rtx stack_area
38a448ca
RH
3590 = gen_rtx_MEM (save_mode,
3591 memory_address (save_mode,
ceb83206
JL
3592 plus_constant (argblock,
3593 - high_to_save)));
f046b3cc 3594#else
ceb83206
JL
3595 rtx stack_area
3596 = gen_rtx_MEM (save_mode,
3597 memory_address (save_mode,
3598 plus_constant (argblock, low_to_save)));
f046b3cc 3599#endif
e9a25f70
JL
3600 if (save_mode != BLKmode)
3601 emit_move_insn (stack_area, save_area);
3602 else
3603 emit_block_move (stack_area, validize_mem (save_area),
3604 GEN_INT (high_to_save - low_to_save + 1),
f046b3cc 3605 PARM_BOUNDARY / BITS_PER_UNIT);
e9a25f70 3606 }
f046b3cc
JL
3607#endif
3608
3609 /* If we saved any argument areas, restore them. */
3610 for (count = 0; count < nargs; count++)
3611 if (argvec[count].save_area)
3612 {
3613 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3614 rtx stack_area
38a448ca 3615 = gen_rtx_MEM (save_mode,
f046b3cc
JL
3616 memory_address (save_mode, plus_constant (argblock,
3617 argvec[count].offset.constant)));
3618
3619 emit_move_insn (stack_area, argvec[count].save_area);
3620 }
3621
3622 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3623 stack_usage_map = initial_stack_usage_map;
3624#endif
3625
fac0ad80 3626 return value;
322e3e34
RK
3627}
3628\f
51bbfa0c
RS
3629#if 0
3630/* Return an rtx which represents a suitable home on the stack
3631 given TYPE, the type of the argument looking for a home.
3632 This is called only for BLKmode arguments.
3633
3634 SIZE is the size needed for this target.
3635 ARGS_ADDR is the address of the bottom of the argument block for this call.
3636 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3637 if this machine uses push insns. */
3638
3639static rtx
3640target_for_arg (type, size, args_addr, offset)
3641 tree type;
3642 rtx size;
3643 rtx args_addr;
3644 struct args_size offset;
3645{
3646 rtx target;
3647 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3648
3649 /* We do not call memory_address if possible,
3650 because we want to address as close to the stack
3651 as possible. For non-variable sized arguments,
3652 this will be stack-pointer relative addressing. */
3653 if (GET_CODE (offset_rtx) == CONST_INT)
3654 target = plus_constant (args_addr, INTVAL (offset_rtx));
3655 else
3656 {
3657 /* I have no idea how to guarantee that this
3658 will work in the presence of register parameters. */
38a448ca 3659 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
51bbfa0c
RS
3660 target = memory_address (QImode, target);
3661 }
3662
38a448ca 3663 return gen_rtx_MEM (BLKmode, target);
51bbfa0c
RS
3664}
3665#endif
3666\f
3667/* Store a single argument for a function call
3668 into the register or memory area where it must be passed.
3669 *ARG describes the argument value and where to pass it.
3670
3671 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 3672 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
3673
3674 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3675 so must be careful about how the stack is used.
3676
3677 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3678 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3679 that we need not worry about saving and restoring the stack.
3680
3681 FNDECL is the declaration of the function we are calling. */
3682
3683static void
c84e2712 3684store_one_arg (arg, argblock, may_be_alloca, variable_size,
6f90e075 3685 reg_parm_stack_space)
51bbfa0c
RS
3686 struct arg_data *arg;
3687 rtx argblock;
3688 int may_be_alloca;
0f9b3ea6 3689 int variable_size ATTRIBUTE_UNUSED;
6f90e075 3690 int reg_parm_stack_space;
51bbfa0c
RS
3691{
3692 register tree pval = arg->tree_value;
3693 rtx reg = 0;
3694 int partial = 0;
3695 int used = 0;
69d4ca36 3696#ifdef ACCUMULATE_OUTGOING_ARGS
6a651371 3697 int i, lower_bound = 0, upper_bound = 0;
69d4ca36 3698#endif
51bbfa0c
RS
3699
3700 if (TREE_CODE (pval) == ERROR_MARK)
3701 return;
3702
cc79451b
RK
3703 /* Push a new temporary level for any temporaries we make for
3704 this argument. */
3705 push_temp_slots ();
3706
51bbfa0c
RS
3707#ifdef ACCUMULATE_OUTGOING_ARGS
3708 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3709 save any previous data at that location. */
3710 if (argblock && ! variable_size && arg->stack)
3711 {
3712#ifdef ARGS_GROW_DOWNWARD
0f41302f
MS
3713 /* stack_slot is negative, but we want to index stack_usage_map
3714 with positive values. */
51bbfa0c
RS
3715 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3716 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3717 else
50eb43ca 3718 upper_bound = 0;
51bbfa0c
RS
3719
3720 lower_bound = upper_bound - arg->size.constant;
3721#else
3722 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3723 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3724 else
3725 lower_bound = 0;
3726
3727 upper_bound = lower_bound + arg->size.constant;
3728#endif
3729
3730 for (i = lower_bound; i < upper_bound; i++)
3731 if (stack_usage_map[i]
51bbfa0c
RS
3732 /* Don't store things in the fixed argument area at this point;
3733 it has already been saved. */
e5e809f4 3734 && i > reg_parm_stack_space)
51bbfa0c
RS
3735 break;
3736
3737 if (i != upper_bound)
3738 {
3739 /* We need to make a save area. See what mode we can make it. */
3740 enum machine_mode save_mode
3741 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3742 rtx stack_area
38a448ca
RH
3743 = gen_rtx_MEM (save_mode,
3744 memory_address (save_mode,
3745 XEXP (arg->stack_slot, 0)));
51bbfa0c
RS
3746
3747 if (save_mode == BLKmode)
3748 {
3749 arg->save_area = assign_stack_temp (BLKmode,
6fa51029 3750 arg->size.constant, 0);
c6df88cb
MM
3751 MEM_SET_IN_STRUCT_P (arg->save_area,
3752 AGGREGATE_TYPE_P (TREE_TYPE
3753 (arg->tree_value)));
cc79451b 3754 preserve_temp_slots (arg->save_area);
51bbfa0c 3755 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 3756 GEN_INT (arg->size.constant),
51bbfa0c
RS
3757 PARM_BOUNDARY / BITS_PER_UNIT);
3758 }
3759 else
3760 {
3761 arg->save_area = gen_reg_rtx (save_mode);
3762 emit_move_insn (arg->save_area, stack_area);
3763 }
3764 }
3765 }
b564df06
JL
3766
3767 /* Now that we have saved any slots that will be overwritten by this
3768 store, mark all slots this store will use. We must do this before
3769 we actually expand the argument since the expansion itself may
3770 trigger library calls which might need to use the same stack slot. */
3771 if (argblock && ! variable_size && arg->stack)
3772 for (i = lower_bound; i < upper_bound; i++)
3773 stack_usage_map[i] = 1;
51bbfa0c
RS
3774#endif
3775
3776 /* If this isn't going to be placed on both the stack and in registers,
3777 set up the register and number of words. */
3778 if (! arg->pass_on_stack)
3779 reg = arg->reg, partial = arg->partial;
3780
3781 if (reg != 0 && partial == 0)
3782 /* Being passed entirely in a register. We shouldn't be called in
3783 this case. */
3784 abort ();
3785
4ab56118
RK
3786 /* If this arg needs special alignment, don't load the registers
3787 here. */
3788 if (arg->n_aligned_regs != 0)
3789 reg = 0;
4ab56118 3790
4ab56118 3791 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
3792 it directly into its stack slot. Otherwise, we can. */
3793 if (arg->value == 0)
d64f5a78
RS
3794 {
3795#ifdef ACCUMULATE_OUTGOING_ARGS
3796 /* stack_arg_under_construction is nonzero if a function argument is
3797 being evaluated directly into the outgoing argument list and
3798 expand_call must take special action to preserve the argument list
3799 if it is called recursively.
3800
3801 For scalar function arguments stack_usage_map is sufficient to
3802 determine which stack slots must be saved and restored. Scalar
3803 arguments in general have pass_on_stack == 0.
3804
3805 If this argument is initialized by a function which takes the
3806 address of the argument (a C++ constructor or a C function
3807 returning a BLKmode structure), then stack_usage_map is
3808 insufficient and expand_call must push the stack around the
3809 function call. Such arguments have pass_on_stack == 1.
3810
3811 Note that it is always safe to set stack_arg_under_construction,
3812 but this generates suboptimal code if set when not needed. */
3813
3814 if (arg->pass_on_stack)
3815 stack_arg_under_construction++;
3816#endif
3a08477a
RK
3817 arg->value = expand_expr (pval,
3818 (partial
3819 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3820 ? NULL_RTX : arg->stack,
e5d70561 3821 VOIDmode, 0);
1efe6448
RK
3822
3823 /* If we are promoting object (or for any other reason) the mode
3824 doesn't agree, convert the mode. */
3825
7373d92d
RK
3826 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3827 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3828 arg->value, arg->unsignedp);
1efe6448 3829
d64f5a78
RS
3830#ifdef ACCUMULATE_OUTGOING_ARGS
3831 if (arg->pass_on_stack)
3832 stack_arg_under_construction--;
3833#endif
3834 }
51bbfa0c
RS
3835
3836 /* Don't allow anything left on stack from computation
3837 of argument to alloca. */
3838 if (may_be_alloca)
3839 do_pending_stack_adjust ();
3840
3841 if (arg->value == arg->stack)
7815214e 3842 {
7d384cc0
KR
3843 /* If the value is already in the stack slot, we are done moving
3844 data. */
3845 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
7815214e 3846 {
7815214e 3847 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3848 XEXP (arg->stack, 0), Pmode,
7d384cc0 3849 ARGS_SIZE_RTX (arg->size),
7815214e 3850 TYPE_MODE (sizetype),
956d6950
JL
3851 GEN_INT (MEMORY_USE_RW),
3852 TYPE_MODE (integer_type_node));
7815214e
RK
3853 }
3854 }
1efe6448 3855 else if (arg->mode != BLKmode)
51bbfa0c
RS
3856 {
3857 register int size;
3858
3859 /* Argument is a scalar, not entirely passed in registers.
3860 (If part is passed in registers, arg->partial says how much
3861 and emit_push_insn will take care of putting it there.)
3862
3863 Push it, and if its size is less than the
3864 amount of space allocated to it,
3865 also bump stack pointer by the additional space.
3866 Note that in C the default argument promotions
3867 will prevent such mismatches. */
3868
1efe6448 3869 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
3870 /* Compute how much space the push instruction will push.
3871 On many machines, pushing a byte will advance the stack
3872 pointer by a halfword. */
3873#ifdef PUSH_ROUNDING
3874 size = PUSH_ROUNDING (size);
3875#endif
3876 used = size;
3877
3878 /* Compute how much space the argument should get:
3879 round up to a multiple of the alignment for arguments. */
1efe6448 3880 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
3881 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3882 / (PARM_BOUNDARY / BITS_PER_UNIT))
3883 * (PARM_BOUNDARY / BITS_PER_UNIT));
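      /* For example, with a 32-bit PARM_BOUNDARY a 1-byte argument
	 still occupies 4 bytes of argument space.  */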
3884
3885 /* This isn't already where we want it on the stack, so put it there.
3886 This can either be done with push or copy insns. */
e5e809f4
JL
3887 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3888 partial, reg, used - size, argblock,
3889 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
51bbfa0c
RS
3890 }
3891 else
3892 {
3893 /* BLKmode, at least partly to be pushed. */
3894
3895 register int excess;
3896 rtx size_rtx;
3897
3898 /* Pushing a nonscalar.
3899 If part is passed in registers, PARTIAL says how much
3900 and emit_push_insn will take care of putting it there. */
3901
3902 /* Round its size up to a multiple
3903 of the allocation unit for arguments. */
3904
3905 if (arg->size.var != 0)
3906 {
3907 excess = 0;
3908 size_rtx = ARGS_SIZE_RTX (arg->size);
3909 }
3910 else
3911 {
51bbfa0c
RS
3912 /* PUSH_ROUNDING has no effect on us, because
3913 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 3914 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 3915 + partial * UNITS_PER_WORD);
e4f93898 3916 size_rtx = expr_size (pval);
51bbfa0c
RS
3917 }
3918
1efe6448 3919 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c 3920 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
e5e809f4
JL
3921 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3922 reg_parm_stack_space);
51bbfa0c
RS
3923 }
3924
3925
3926 /* Unless this is a partially-in-register argument, the argument is now
3927 in the stack.
3928
3929 ??? Note that this can change arg->value from arg->stack to
3930 arg->stack_slot and it matters when they are not the same.
3931 It isn't totally clear that this is correct in all cases. */
3932 if (partial == 0)
3b917a55 3933 arg->value = arg->stack_slot;
51bbfa0c
RS
3934
3935 /* Once we have pushed something, pops can't safely
3936 be deferred during the rest of the arguments. */
3937 NO_DEFER_POP;
3938
3939 /* ANSI doesn't require a sequence point here,
3940 but PCC has one, so this will avoid some problems. */
3941 emit_queue ();
3942
db907e7b
RK
3943 /* Free any temporary slots made in processing this argument. Show
3944 that we might have taken the address of something and pushed that
3945 as an operand. */
3946 preserve_temp_slots (NULL_RTX);
51bbfa0c 3947 free_temp_slots ();
cc79451b 3948 pop_temp_slots ();
51bbfa0c 3949}