gcc/calls.c
51bbfa0c 1/* Convert function calls to rtl insns, for GNU C compiler.
3c71940f
JL
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
51bbfa0c
RS
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA. */
51bbfa0c
RS
21
22#include "config.h"
670ee920
KG
23#include "system.h"
24#include "rtl.h"
25#include "tree.h"
26#include "flags.h"
27#include "expr.h"
49ad7cfa 28#include "function.h"
670ee920 29#include "regs.h"
51bbfa0c 30#include "insn-flags.h"
5f6da302 31#include "toplev.h"
d6f4ec51 32#include "output.h"
b1474bb7 33#include "tm_p.h"
51bbfa0c 34
0a1c58a2
JL
35#if !defined FUNCTION_OK_FOR_SIBCALL
36#define FUNCTION_OK_FOR_SIBCALL(DECL) 1
37#endif
38
c795bca9
BS
39#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
40#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
41#endif
42
51bbfa0c 43/* Decide whether a function's arguments should be processed
bbc8a071
RK
44 from first to last or from last to first.
45
46 They should be processed from last to first if the stack and args
47 grow in opposite directions, but only if we have push insns. */
51bbfa0c 48
51bbfa0c 49#ifdef PUSH_ROUNDING
bbc8a071 50
40083ddf 51#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
51bbfa0c
RS
52#define PUSH_ARGS_REVERSED /* If it's last to first */
53#endif
bbc8a071 54
51bbfa0c
RS
55#endif
56
c795bca9
BS
57/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
58#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
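/* A worked example, assuming the common case of BITS_PER_UNIT == 8:
   a PREFERRED_STACK_BOUNDARY of 64 bits makes STACK_BYTES equal 8,
   so constant argument-block sizes get rounded to multiples of 8.  */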
51bbfa0c
RS
59
60/* Data structure and subroutines used within expand_call. */
61
62struct arg_data
63{
64 /* Tree node for this argument. */
65 tree tree_value;
1efe6448
RK
66 /* Mode for value; TYPE_MODE unless promoted. */
67 enum machine_mode mode;
51bbfa0c
RS
68 /* Current RTL value for argument, or 0 if it isn't precomputed. */
69 rtx value;
70 /* Initially-computed RTL value for argument; only for const functions. */
71 rtx initial_value;
72 /* Register to pass this argument in, 0 if passed on stack, or a
cacbd532 73 PARALLEL if the arg is to be copied into multiple non-contiguous
51bbfa0c
RS
74 registers. */
75 rtx reg;
84b55618
RK
76 /* If REG was promoted from the actual mode of the argument expression,
77 indicates whether the promotion is sign- or zero-extended. */
78 int unsignedp;
51bbfa0c
RS
79 /* Number of registers to use. 0 means put the whole arg in registers.
80 Also 0 if not passed in registers. */
81 int partial;
d64f5a78
RS
82 /* Non-zero if argument must be passed on stack.
83 Note that some arguments may be passed on the stack
84 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
85 pass_on_stack identifies arguments that *cannot* go in registers. */
51bbfa0c
RS
86 int pass_on_stack;
87 /* Offset of this argument from beginning of stack-args. */
88 struct args_size offset;
89 /* Similar, but offset to the start of the stack slot. Different from
90 OFFSET if this arg pads downward. */
91 struct args_size slot_offset;
92 /* Size of this argument on the stack, rounded up for any padding it gets;
93 parts of the argument passed in registers do not count.
94 If REG_PARM_STACK_SPACE is defined, then register parms
95 are counted here as well. */
96 struct args_size size;
97 /* Location on the stack at which parameter should be stored. The store
98 has already been done if STACK == VALUE. */
99 rtx stack;
100 /* Location on the stack of the start of this argument slot. This can
101 differ from STACK if this arg pads downward. This location is known
102 to be aligned to FUNCTION_ARG_BOUNDARY. */
103 rtx stack_slot;
104#ifdef ACCUMULATE_OUTGOING_ARGS
105 /* Place that this stack area has been saved, if needed. */
106 rtx save_area;
107#endif
4ab56118
RK
108 /* If an argument's alignment does not permit direct copying into registers,
109 copy in smaller-sized pieces into pseudos. These are stored in a
110 block pointed to by this field. The next field says how many
111 word-sized pseudos we made. */
112 rtx *aligned_regs;
113 int n_aligned_regs;
4fc026cd
CM
114 /* The amount that the stack pointer needs to be adjusted to
115 force alignment for the next argument. */
116 struct args_size alignment_pad;
51bbfa0c
RS
117};
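/* As an illustrative sketch of how these fields combine: an argument
   passed partly in registers and partly on the stack would typically
   have REG set to its first register, PARTIAL equal to the number of
   registers used, PASS_ON_STACK zero, and the stack part described by
   OFFSET, SLOT_OFFSET and SIZE.  */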
118
119#ifdef ACCUMULATE_OUTGOING_ARGS
b94301c2 120/* A vector of one char per byte of stack space. A byte is non-zero if
51bbfa0c
RS
121 the corresponding stack location has been used.
122 This vector is used to prevent a function call within an argument from
123 clobbering any stack already set up. */
124static char *stack_usage_map;
125
126/* Size of STACK_USAGE_MAP. */
127static int highest_outgoing_arg_in_use;
2f4aa534
RS
128
129/* stack_arg_under_construction is nonzero when an argument may be
130 initialized with a constructor call (including a C function that
131 returns a BLKmode struct) and expand_call must take special action
132 to make sure the object being constructed does not overlap the
133 argument list for the constructor call. */
134int stack_arg_under_construction;
51bbfa0c
RS
135#endif
136
3d994c6b
KG
137static int calls_function PARAMS ((tree, int));
138static int calls_function_1 PARAMS ((tree, int));
0a1c58a2
JL
139
140#define ECF_IS_CONST 1
141#define ECF_NOTHROW 2
142#define ECF_SIBCALL 4
3d994c6b
KG
143static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
144 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
0a1c58a2 145 rtx, int, rtx, int));
3d994c6b
KG
146static void precompute_register_parameters PARAMS ((int,
147 struct arg_data *,
148 int *));
149static void store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
150 int));
151static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
152 int));
153static int finalize_must_preallocate PARAMS ((int, int,
154 struct arg_data *,
155 struct args_size *));
156static void precompute_arguments PARAMS ((int, int, int,
157 struct arg_data *,
158 struct args_size *));
159static int compute_argument_block_size PARAMS ((int,
c2f8b491
JH
160 struct args_size *,
161 int));
3d994c6b
KG
162static void initialize_argument_information PARAMS ((int,
163 struct arg_data *,
164 struct args_size *,
165 int, tree, tree,
166 CUMULATIVE_ARGS *,
167 int, rtx *, int *,
7d167afd 168 int *, int *, int));
3d994c6b
KG
169static void compute_argument_addresses PARAMS ((struct arg_data *,
170 rtx, int));
171static rtx rtx_for_function_call PARAMS ((tree, tree));
172static void load_register_parameters PARAMS ((struct arg_data *,
173 int, rtx *));
12a22e76 174static int libfunc_nothrow PARAMS ((rtx));
de76b467
JH
175static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx, int,
176 enum machine_mode,
177 int, va_list));
21a3b983 178
20efdf74 179#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3d994c6b
KG
180static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
181static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
20efdf74 182#endif
51bbfa0c 183\f
1ce0cb53
JW
184/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
185 `alloca'.
186
187 If WHICH is 0, return 1 if EXP contains a call to any function.
188 Actually, we need only return 1 if evaluating EXP would require pushing
189 arguments on the stack, but that is too difficult to compute, so we just
190 assume any function call might require the stack. */
51bbfa0c 191
1c8d7aef
RS
192static tree calls_function_save_exprs;
193
51bbfa0c 194static int
1ce0cb53 195calls_function (exp, which)
51bbfa0c 196 tree exp;
1ce0cb53 197 int which;
1c8d7aef
RS
198{
199 int val;
200 calls_function_save_exprs = 0;
201 val = calls_function_1 (exp, which);
202 calls_function_save_exprs = 0;
203 return val;
204}
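/* A minimal usage sketch of the WHICH argument; EXP here stands for some
   hypothetical argument expression:

     calls_function (exp, 0)   nonzero if evaluating EXP may call any
			       function (and so may push stack args).
     calls_function (exp, 1)   nonzero if EXP may call the built-in
			       `alloca'.  */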
205
206static int
207calls_function_1 (exp, which)
208 tree exp;
209 int which;
51bbfa0c
RS
210{
211 register int i;
0207efa2
RK
212 enum tree_code code = TREE_CODE (exp);
213 int type = TREE_CODE_CLASS (code);
214 int length = tree_code_length[(int) code];
51bbfa0c 215
ddd5a7c1 216 /* If this code is language-specific, we don't know what it will do. */
0207efa2
RK
217 if ((int) code >= NUM_TREE_CODES)
218 return 1;
51bbfa0c 219
0207efa2 220 /* Only expressions and references can contain calls. */
3b59a331
RS
221 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
222 && type != 'b')
51bbfa0c
RS
223 return 0;
224
0207efa2 225 switch (code)
51bbfa0c
RS
226 {
227 case CALL_EXPR:
1ce0cb53
JW
228 if (which == 0)
229 return 1;
230 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
231 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
0207efa2
RK
232 == FUNCTION_DECL))
233 {
234 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
235
236 if ((DECL_BUILT_IN (fndecl)
95815af9 237 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
0207efa2
RK
238 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
239 || (DECL_SAVED_INSNS (fndecl)
49ad7cfa 240 && DECL_SAVED_INSNS (fndecl)->calls_alloca))
0207efa2
RK
241 return 1;
242 }
51bbfa0c
RS
243
244 /* Third operand is RTL. */
245 length = 2;
246 break;
247
248 case SAVE_EXPR:
249 if (SAVE_EXPR_RTL (exp) != 0)
250 return 0;
1c8d7aef
RS
251 if (value_member (exp, calls_function_save_exprs))
252 return 0;
253 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
254 calls_function_save_exprs);
255 return (TREE_OPERAND (exp, 0) != 0
256 && calls_function_1 (TREE_OPERAND (exp, 0), which));
51bbfa0c
RS
257
258 case BLOCK:
ef03bc85
CH
259 {
260 register tree local;
261
262 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
1ce0cb53 263 if (DECL_INITIAL (local) != 0
1c8d7aef 264 && calls_function_1 (DECL_INITIAL (local), which))
ef03bc85
CH
265 return 1;
266 }
267 {
268 register tree subblock;
269
270 for (subblock = BLOCK_SUBBLOCKS (exp);
271 subblock;
272 subblock = TREE_CHAIN (subblock))
1c8d7aef 273 if (calls_function_1 (subblock, which))
ef03bc85
CH
274 return 1;
275 }
276 return 0;
51bbfa0c
RS
277
278 case METHOD_CALL_EXPR:
279 length = 3;
280 break;
281
282 case WITH_CLEANUP_EXPR:
283 length = 1;
284 break;
285
286 case RTL_EXPR:
287 return 0;
e9a25f70
JL
288
289 default:
290 break;
51bbfa0c
RS
291 }
292
293 for (i = 0; i < length; i++)
294 if (TREE_OPERAND (exp, i) != 0
1c8d7aef 295 && calls_function_1 (TREE_OPERAND (exp, i), which))
51bbfa0c
RS
296 return 1;
297
298 return 0;
299}
300\f
301/* Force FUNEXP into a form suitable for the address of a CALL,
302 and return that as an rtx. Also load the static chain register
303 if FNDECL is a nested function.
304
77cac2f2
RK
305 CALL_FUSAGE points to a variable holding the prospective
306 CALL_INSN_FUNCTION_USAGE information. */
51bbfa0c 307
03dacb02 308rtx
77cac2f2 309prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
51bbfa0c
RS
310 rtx funexp;
311 tree fndecl;
77cac2f2 312 rtx *call_fusage;
01368078 313 int reg_parm_seen;
51bbfa0c
RS
314{
315 rtx static_chain_value = 0;
316
317 funexp = protect_from_queue (funexp, 0);
318
319 if (fndecl != 0)
0f41302f 320 /* Get possible static chain value for nested function in C. */
51bbfa0c
RS
321 static_chain_value = lookup_static_chain (fndecl);
322
323 /* Make a valid memory address and copy constants thru pseudo-regs,
324 but not for a constant address if -fno-function-cse. */
325 if (GET_CODE (funexp) != SYMBOL_REF)
01368078 326 /* If we are using registers for parameters, force the
e9a25f70
JL
327 function address into a register now. */
328 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
329 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
330 : memory_address (FUNCTION_MODE, funexp));
51bbfa0c
RS
331 else
332 {
333#ifndef NO_FUNCTION_CSE
334 if (optimize && ! flag_no_function_cse)
335#ifdef NO_RECURSIVE_FUNCTION_CSE
336 if (fndecl != current_function_decl)
337#endif
338 funexp = force_reg (Pmode, funexp);
339#endif
340 }
341
342 if (static_chain_value != 0)
343 {
344 emit_move_insn (static_chain_rtx, static_chain_value);
345
f991a240
RK
346 if (GET_CODE (static_chain_rtx) == REG)
347 use_reg (call_fusage, static_chain_rtx);
51bbfa0c
RS
348 }
349
350 return funexp;
351}
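/* A minimal caller-side sketch (hypothetical local names): the address is
   canonicalized and any static chain load is recorded in CALL_FUSAGE
   before the call insn itself is emitted.

     rtx call_fusage = 0;
     funexp = prepare_call_address (funexp, fndecl, &call_fusage,
				    reg_parm_seen);

   CALL_FUSAGE is later attached to the call insn by emit_call_1.  */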
352
353/* Generate instructions to call function FUNEXP,
354 and optionally pop the results.
355 The CALL_INSN is the first insn generated.
356
607ea900 357 FNDECL is the declaration node of the function. This is given to the
2c8da025
RK
358 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
359
334c4f0f
RK
360 FUNTYPE is the data type of the function. This is given to the macro
361 RETURN_POPS_ARGS to determine whether this function pops its own args.
362 We used to allow an identifier for library functions, but that doesn't
363 work when the return type is an aggregate type and the calling convention
364 says that the pointer to this aggregate is to be popped by the callee.
51bbfa0c
RS
365
366 STACK_SIZE is the number of bytes of arguments on the stack,
c2732da3
JM
367 ROUNDED_STACK_SIZE is that number rounded up to
368 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
369 both to put into the call insn and to generate explicit popping
370 code if necessary.
51bbfa0c
RS
371
372 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
373 It is zero if this call doesn't want a structure value.
374
375 NEXT_ARG_REG is the rtx that results from executing
376 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
377 just after all the args have had their registers assigned.
378 This could be whatever you like, but normally it is the first
379 arg-register beyond those used for args in this call,
380 or 0 if all the arg-registers are used in this call.
381 It is passed on to `gen_call' so you can put this info in the call insn.
382
383 VALREG is a hard register in which a value is returned,
384 or 0 if the call does not return a value.
385
386 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
387 the args to this call were processed.
388 We restore `inhibit_defer_pop' to that value.
389
94b25f81
RK
390 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
391 denote registers used by the called function.
51bbfa0c
RS
392
393 IS_CONST is true if this is a `const' call. */
394
322e3e34 395static void
fb5eebb9
RH
396emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
397 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
0a1c58a2 398 call_fusage, ecf_flags)
51bbfa0c 399 rtx funexp;
c84e2712
KG
400 tree fndecl ATTRIBUTE_UNUSED;
401 tree funtype ATTRIBUTE_UNUSED;
6a651371 402 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
fb5eebb9 403 HOST_WIDE_INT rounded_stack_size;
962f1324 404 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
51bbfa0c
RS
405 rtx next_arg_reg;
406 rtx valreg;
407 int old_inhibit_defer_pop;
77cac2f2 408 rtx call_fusage;
0a1c58a2 409 int ecf_flags;
51bbfa0c 410{
062e7fd8 411 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
57bed152 412#if defined (HAVE_call) && defined (HAVE_call_value)
e5d70561 413 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
57bed152 414#endif
51bbfa0c 415 rtx call_insn;
081f5e7e 416#ifndef ACCUMULATE_OUTGOING_ARGS
51bbfa0c 417 int already_popped = 0;
fb5eebb9 418 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
081f5e7e 419#endif
51bbfa0c
RS
420
421 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
422 and we don't want to load it into a register as an optimization,
423 because prepare_call_address already did it if it should be done. */
424 if (GET_CODE (funexp) != SYMBOL_REF)
425 funexp = memory_address (FUNCTION_MODE, funexp);
426
427#ifndef ACCUMULATE_OUTGOING_ARGS
0a1c58a2
JL
428#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
429 if ((ecf_flags & ECF_SIBCALL)
430 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
431 && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
432 || stack_size == 0))
433 {
434 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
435 rtx pat;
436
437 /* If this subroutine pops its own args, record that in the call insn
438 if possible, for the sake of frame pointer elimination. */
439
440 if (valreg)
441 pat = gen_sibcall_value_pop (valreg,
442 gen_rtx_MEM (FUNCTION_MODE, funexp),
443 rounded_stack_size_rtx, next_arg_reg,
444 n_pop);
445 else
446 pat = gen_sibcall_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
447 rounded_stack_size_rtx, next_arg_reg, n_pop);
448
449 emit_call_insn (pat);
450 already_popped = 1;
451 }
452 else
453#endif
454
51bbfa0c 455#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
8bcafee3
JDA
456/* If the target has "call" or "call_value" insns, then prefer them
457 if no arguments are actually popped. If the target does not have
458 "call" or "call_value" insns, then we must use the popping versions
459 even if the call has no arguments to pop. */
460#if defined (HAVE_call) && defined (HAVE_call_value)
461 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
462 && n_popped > 0)
463#else
464 if (HAVE_call_pop && HAVE_call_value_pop)
465#endif
51bbfa0c 466 {
fb5eebb9 467 rtx n_pop = GEN_INT (n_popped);
51bbfa0c
RS
468 rtx pat;
469
470 /* If this subroutine pops its own args, record that in the call insn
471 if possible, for the sake of frame pointer elimination. */
2c8da025 472
51bbfa0c
RS
473 if (valreg)
474 pat = gen_call_value_pop (valreg,
38a448ca 475 gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 476 rounded_stack_size_rtx, next_arg_reg, n_pop);
51bbfa0c 477 else
38a448ca 478 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 479 rounded_stack_size_rtx, next_arg_reg, n_pop);
51bbfa0c
RS
480
481 emit_call_insn (pat);
482 already_popped = 1;
483 }
484 else
485#endif
486#endif
487
0a1c58a2
JL
488#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
489 if ((ecf_flags & ECF_SIBCALL)
490 && HAVE_sibcall && HAVE_sibcall_value)
491 {
492 if (valreg)
493 emit_call_insn (gen_sibcall_value (valreg,
494 gen_rtx_MEM (FUNCTION_MODE, funexp),
495 rounded_stack_size_rtx,
496 next_arg_reg, NULL_RTX));
497 else
498 emit_call_insn (gen_sibcall (gen_rtx_MEM (FUNCTION_MODE, funexp),
499 rounded_stack_size_rtx, next_arg_reg,
500 struct_value_size_rtx));
501 }
502 else
503#endif
504
51bbfa0c
RS
505#if defined (HAVE_call) && defined (HAVE_call_value)
506 if (HAVE_call && HAVE_call_value)
507 {
508 if (valreg)
509 emit_call_insn (gen_call_value (valreg,
38a448ca 510 gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 511 rounded_stack_size_rtx, next_arg_reg,
e992302c 512 NULL_RTX));
51bbfa0c 513 else
38a448ca 514 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 515 rounded_stack_size_rtx, next_arg_reg,
51bbfa0c
RS
516 struct_value_size_rtx));
517 }
518 else
519#endif
520 abort ();
521
77cac2f2 522 /* Find the CALL insn we just emitted. */
51bbfa0c
RS
523 for (call_insn = get_last_insn ();
524 call_insn && GET_CODE (call_insn) != CALL_INSN;
525 call_insn = PREV_INSN (call_insn))
526 ;
527
528 if (! call_insn)
529 abort ();
530
e59e60a7
RK
531 /* Put the register usage information on the CALL. If there is already
532 some usage information, put ours at the end. */
533 if (CALL_INSN_FUNCTION_USAGE (call_insn))
534 {
535 rtx link;
536
537 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
538 link = XEXP (link, 1))
539 ;
540
541 XEXP (link, 1) = call_fusage;
542 }
543 else
544 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
51bbfa0c
RS
545
546 /* If this is a const call, then set the insn's unchanging bit. */
0a1c58a2 547 if (ecf_flags & ECF_IS_CONST)
51bbfa0c
RS
548 CONST_CALL_P (call_insn) = 1;
549
12a22e76
JM
550 /* If this call can't throw, attach a REG_EH_REGION reg note to that
551 effect. */
0a1c58a2 552 if (ecf_flags & ECF_NOTHROW)
54cea123 553 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
12a22e76
JM
554 REG_NOTES (call_insn));
555
0a1c58a2
JL
556 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
557
b1e64e0d
RS
558 /* Restore this now, so that we do defer pops for this call's args
559 if the context of the call as a whole permits. */
560 inhibit_defer_pop = old_inhibit_defer_pop;
561
51bbfa0c
RS
562#ifndef ACCUMULATE_OUTGOING_ARGS
563 /* If returning from the subroutine does not automatically pop the args,
564 we need an instruction to pop them sooner or later.
565 Perhaps do it now; perhaps just record how much space to pop later.
566
567 If returning from the subroutine does pop the args, indicate that the
568 stack pointer will be changed. */
569
c2732da3
JM
570 /* The space for the args is no longer waiting for the call; either it
571 was popped by the call, or it'll be popped below. */
572 arg_space_so_far -= rounded_stack_size;
573
fb5eebb9 574 if (n_popped > 0)
51bbfa0c
RS
575 {
576 if (!already_popped)
e3da301d 577 CALL_INSN_FUNCTION_USAGE (call_insn)
38a448ca
RH
578 = gen_rtx_EXPR_LIST (VOIDmode,
579 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
580 CALL_INSN_FUNCTION_USAGE (call_insn));
fb5eebb9 581 rounded_stack_size -= n_popped;
062e7fd8 582 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
51bbfa0c
RS
583 }
584
fb5eebb9 585 if (rounded_stack_size != 0)
51bbfa0c 586 {
0a1c58a2
JL
587 if (flag_defer_pop && inhibit_defer_pop == 0
588 && !(ecf_flags & ECF_IS_CONST))
fb5eebb9 589 pending_stack_adjust += rounded_stack_size;
51bbfa0c 590 else
062e7fd8 591 adjust_stack (rounded_stack_size_rtx);
51bbfa0c
RS
592 }
593#endif
594}
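/* A worked example of the popping logic above, assuming a target whose
   RETURN_POPS_ARGS says the callee pops its own 12 bytes of arguments
   (n_popped == 12) and a rounded_stack_size of 12: the stack-pointer
   clobber is recorded on the call insn (unless a *_pop pattern already
   popped), rounded_stack_size drops to 0, and neither adjust_stack nor
   pending_stack_adjust is needed.  */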
595
20efdf74
JL
596/* Determine if the function identified by NAME and FNDECL is one with
597 special properties we wish to know about.
598
599 For example, if the function might return more than one time (setjmp), then
600 set RETURNS_TWICE to a nonzero value.
601
602 Similarly, set IS_LONGJMP if the function is in the longjmp family.
603
604 Set IS_MALLOC for any of the standard memory allocation functions which
605 allocate from the heap.
606
607 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
608 space from the stack such as alloca. */
609
3a8c995b 610void
fa76d9e0 611special_function_p (fndecl, returns_twice, is_longjmp, fork_or_exec,
20efdf74 612 is_malloc, may_be_alloca)
20efdf74
JL
613 tree fndecl;
614 int *returns_twice;
615 int *is_longjmp;
fa76d9e0 616 int *fork_or_exec;
20efdf74
JL
617 int *is_malloc;
618 int *may_be_alloca;
619{
620 *returns_twice = 0;
621 *is_longjmp = 0;
fa76d9e0 622 *fork_or_exec = 0;
20efdf74
JL
623 *may_be_alloca = 0;
624
140592a0
AG
625 /* The function decl may have the `malloc' attribute. */
626 *is_malloc = fndecl && DECL_IS_MALLOC (fndecl);
627
3a8c995b
MM
628 if (! *is_malloc
629 && fndecl && DECL_NAME (fndecl)
140592a0 630 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
20efdf74
JL
631 /* Exclude functions not at the file scope, or not `extern',
632 since they are not the magic functions we would otherwise
633 think they are. */
634 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
635 {
3a8c995b 636 char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
20efdf74
JL
637 char *tname = name;
638
ca54603f
JL
639 /* We assume that alloca will always be called by name. It
640 makes no sense to pass it as a pointer-to-function to
641 anything that does not understand its behavior. */
642 *may_be_alloca
643 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
644 && name[0] == 'a'
645 && ! strcmp (name, "alloca"))
646 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
647 && name[0] == '_'
648 && ! strcmp (name, "__builtin_alloca"))));
649
20efdf74
JL
650 /* Disregard prefix _, __ or __x. */
651 if (name[0] == '_')
652 {
653 if (name[1] == '_' && name[2] == 'x')
654 tname += 3;
655 else if (name[1] == '_')
656 tname += 2;
657 else
658 tname += 1;
659 }
660
661 if (tname[0] == 's')
662 {
663 *returns_twice
664 = ((tname[1] == 'e'
665 && (! strcmp (tname, "setjmp")
666 || ! strcmp (tname, "setjmp_syscall")))
667 || (tname[1] == 'i'
668 && ! strcmp (tname, "sigsetjmp"))
669 || (tname[1] == 'a'
670 && ! strcmp (tname, "savectx")));
671 if (tname[1] == 'i'
672 && ! strcmp (tname, "siglongjmp"))
673 *is_longjmp = 1;
674 }
675 else if ((tname[0] == 'q' && tname[1] == 's'
676 && ! strcmp (tname, "qsetjmp"))
677 || (tname[0] == 'v' && tname[1] == 'f'
678 && ! strcmp (tname, "vfork")))
679 *returns_twice = 1;
680
681 else if (tname[0] == 'l' && tname[1] == 'o'
682 && ! strcmp (tname, "longjmp"))
683 *is_longjmp = 1;
fa76d9e0
JR
684
685 else if ((tname[0] == 'f' && tname[1] == 'o'
686 && ! strcmp (tname, "fork"))
687 /* Linux specific: __clone. Check NAME to insist on the
688 leading underscores, to avoid polluting the ISO / POSIX
689 namespace. */
690 || (name[0] == '_' && name[1] == '_'
691 && ! strcmp (tname, "clone"))
692 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
693 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
694 && (tname[5] == '\0'
695 || ((tname[5] == 'p' || tname[5] == 'e')
696 && tname[6] == '\0'))))
697 *fork_or_exec = 1;
698
140592a0 699 /* Do not add any more malloc-like functions to this list,
82514696
KG
700 instead mark them as malloc functions using the malloc attribute.
701 Note, realloc is not suitable for attribute malloc since
1e5a1107
JM
702 it may return the same address across multiple calls.
703 C++ operator new is not suitable because it is not required
704 to return a unique pointer; indeed, the standard placement new
705 just returns its argument. */
91d024d5
ML
706 else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
707 && (! strcmp (tname, "malloc")
708 || ! strcmp (tname, "calloc")
709 || ! strcmp (tname, "strdup")))
20efdf74
JL
710 *is_malloc = 1;
711 }
712}
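/* A minimal caller-side sketch (hypothetical local names):

     int returns_twice, is_longjmp, fork_or_exec, is_malloc, may_be_alloca;

     special_function_p (fndecl, &returns_twice, &is_longjmp, &fork_or_exec,
			 &is_malloc, &may_be_alloca);

   so that, for example, a call to `setjmp' sets RETURNS_TWICE and a call
   to `alloca' sets MAY_BE_ALLOCA.  */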
713
714/* Precompute all register parameters as described by ARGS, storing values
715 into fields within the ARGS array.
716
717 NUM_ACTUALS indicates the total number of elements in the ARGS array.
718
719 Set REG_PARM_SEEN if we encounter a register parameter. */
720
721static void
722precompute_register_parameters (num_actuals, args, reg_parm_seen)
723 int num_actuals;
724 struct arg_data *args;
725 int *reg_parm_seen;
726{
727 int i;
728
729 *reg_parm_seen = 0;
730
731 for (i = 0; i < num_actuals; i++)
732 if (args[i].reg != 0 && ! args[i].pass_on_stack)
733 {
734 *reg_parm_seen = 1;
735
736 if (args[i].value == 0)
737 {
738 push_temp_slots ();
739 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
740 VOIDmode, 0);
741 preserve_temp_slots (args[i].value);
742 pop_temp_slots ();
743
744 /* ANSI doesn't require a sequence point here,
745 but PCC has one, so this will avoid some problems. */
746 emit_queue ();
747 }
748
749 /* If we are to promote the function arg to a wider mode,
750 do it now. */
751
752 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
753 args[i].value
754 = convert_modes (args[i].mode,
755 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
756 args[i].value, args[i].unsignedp);
757
758 /* If the value is expensive, and we are inside an appropriately
759 short loop, put the value into a pseudo and then put the pseudo
760 into the hard reg.
761
762 For small register classes, also do this if this call uses
763 register parameters. This is to avoid reload conflicts while
764 loading the parameter registers. */
765
766 if ((! (GET_CODE (args[i].value) == REG
767 || (GET_CODE (args[i].value) == SUBREG
768 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
769 && args[i].mode != BLKmode
770 && rtx_cost (args[i].value, SET) > 2
771 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
772 || preserve_subexpressions_p ()))
773 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
774 }
775}
776
777#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
778
779 /* The argument list is the property of the called routine and it
780 may clobber it. If the fixed area has been used for previous
781 parameters, we must save and restore it. */
782static rtx
783save_fixed_argument_area (reg_parm_stack_space, argblock,
784 low_to_save, high_to_save)
785 int reg_parm_stack_space;
786 rtx argblock;
787 int *low_to_save;
788 int *high_to_save;
789{
790 int i;
791 rtx save_area = NULL_RTX;
792
793 /* Compute the boundary of the area that needs to be saved, if any. */
794#ifdef ARGS_GROW_DOWNWARD
795 for (i = 0; i < reg_parm_stack_space + 1; i++)
796#else
797 for (i = 0; i < reg_parm_stack_space; i++)
798#endif
799 {
800 if (i >= highest_outgoing_arg_in_use
801 || stack_usage_map[i] == 0)
802 continue;
803
804 if (*low_to_save == -1)
805 *low_to_save = i;
806
807 *high_to_save = i;
808 }
809
810 if (*low_to_save >= 0)
811 {
812 int num_to_save = *high_to_save - *low_to_save + 1;
813 enum machine_mode save_mode
814 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
815 rtx stack_area;
816
817 /* If we don't have the required alignment, we must do this in BLKmode. */
818 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
819 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
820 save_mode = BLKmode;
821
822#ifdef ARGS_GROW_DOWNWARD
823 stack_area = gen_rtx_MEM (save_mode,
824 memory_address (save_mode,
825 plus_constant (argblock,
826 - *high_to_save)));
827#else
828 stack_area = gen_rtx_MEM (save_mode,
829 memory_address (save_mode,
830 plus_constant (argblock,
831 *low_to_save)));
832#endif
833 if (save_mode == BLKmode)
834 {
835 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
04572513
JJ
836 /* We cannot use emit_block_move here because it might be implemented
837 as a library call, which would reenter this code and recurse
838 infinitely. */
839 move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
840 PARM_BOUNDARY / BITS_PER_UNIT);
20efdf74
JL
841 }
842 else
843 {
844 save_area = gen_reg_rtx (save_mode);
845 emit_move_insn (save_area, stack_area);
846 }
847 }
848 return save_area;
849}
850
851static void
852restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
853 rtx save_area;
854 rtx argblock;
855 int high_to_save;
856 int low_to_save;
857{
858 enum machine_mode save_mode = GET_MODE (save_area);
859#ifdef ARGS_GROW_DOWNWARD
860 rtx stack_area
861 = gen_rtx_MEM (save_mode,
862 memory_address (save_mode,
863 plus_constant (argblock,
864 - high_to_save)));
865#else
866 rtx stack_area
867 = gen_rtx_MEM (save_mode,
868 memory_address (save_mode,
869 plus_constant (argblock,
870 low_to_save)));
871#endif
872
873 if (save_mode != BLKmode)
874 emit_move_insn (stack_area, save_area);
875 else
04572513
JJ
876 /* We cannot use emit_block_move here because it might be implemented
877 as a library call, which would reenter this code and recurse
878 infinitely. */
879 move_by_pieces (stack_area, validize_mem (save_area),
880 high_to_save - low_to_save + 1,
881 PARM_BOUNDARY / BITS_PER_UNIT);
20efdf74
JL
882}
883#endif
884
885/* If any elements in ARGS refer to parameters that are to be passed in
886 registers, but not in memory, and whose alignment does not permit a
887 direct copy into registers, copy the values into a group of pseudos
8e6a59fe
MM
888 which we will later copy into the appropriate hard registers.
889
890 Pseudos for each unaligned argument will be stored into the array
891 args[argnum].aligned_regs. The caller is responsible for deallocating
892 the aligned_regs array if it is nonzero. */
893
20efdf74
JL
894static void
895store_unaligned_arguments_into_pseudos (args, num_actuals)
896 struct arg_data *args;
897 int num_actuals;
898{
899 int i, j;
900
901 for (i = 0; i < num_actuals; i++)
902 if (args[i].reg != 0 && ! args[i].pass_on_stack
903 && args[i].mode == BLKmode
904 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
905 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
906 {
907 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
908 int big_endian_correction = 0;
909
910 args[i].n_aligned_regs
911 = args[i].partial ? args[i].partial
912 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
913
8e6a59fe
MM
914 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
915 * args[i].n_aligned_regs);
20efdf74
JL
916
917 /* Structures smaller than a word are aligned to the least
918 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
919 this means we must skip the empty high order bytes when
920 calculating the bit offset. */
921 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
922 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
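	  /* A worked example: a 3-byte structure on a 32-bit big-endian
	     target gives big_endian_correction = 32 - 3 * 8 = 8, which is
	     the bit offset passed to store_bit_field below.  */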
923
924 for (j = 0; j < args[i].n_aligned_regs; j++)
925 {
926 rtx reg = gen_reg_rtx (word_mode);
927 rtx word = operand_subword_force (args[i].value, j, BLKmode);
928 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
929 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
930
931 args[i].aligned_regs[j] = reg;
932
933 /* There is no need to restrict this code to loading items
934 in TYPE_ALIGN sized hunks. The bitfield instructions can
935 load up entire word sized registers efficiently.
936
937 ??? This may not be needed anymore.
938 We used to emit a clobber here but that doesn't let later
939 passes optimize the instructions we emit. By storing 0 into
940 the register, later passes know that the first AND to zero out the
941 bitfield being set in the register is unnecessary. The store
942 of 0 will be deleted as will at least the first AND. */
943
944 emit_move_insn (reg, const0_rtx);
945
946 bytes -= bitsize / BITS_PER_UNIT;
947 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
948 extract_bit_field (word, bitsize, 0, 1,
949 NULL_RTX, word_mode,
950 word_mode,
951 bitalign / BITS_PER_UNIT,
952 BITS_PER_WORD),
953 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
954 }
955 }
956}
957
d7cdf113
JL
958/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
959 ACTPARMS.
960
961 NUM_ACTUALS is the total number of parameters.
962
963 N_NAMED_ARGS is the total number of named arguments.
964
965 FNDECL is the tree code for the target of this call (if known)
966
967 ARGS_SO_FAR holds state needed by the target to know where to place
968 the next argument.
969
970 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
971 for arguments which are passed in registers.
972
973 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
974 and may be modified by this routine.
975
976 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
977 flags which may be modified by this routine. */
978
979static void
980initialize_argument_information (num_actuals, args, args_size, n_named_args,
981 actparms, fndecl, args_so_far,
982 reg_parm_stack_space, old_stack_level,
7d167afd
JJ
983 old_pending_adj, must_preallocate, is_const,
984 ecf_flags)
91813b28 985 int num_actuals ATTRIBUTE_UNUSED;
d7cdf113
JL
986 struct arg_data *args;
987 struct args_size *args_size;
91813b28 988 int n_named_args ATTRIBUTE_UNUSED;
d7cdf113
JL
989 tree actparms;
990 tree fndecl;
959f3a06 991 CUMULATIVE_ARGS *args_so_far;
d7cdf113
JL
992 int reg_parm_stack_space;
993 rtx *old_stack_level;
994 int *old_pending_adj;
995 int *must_preallocate;
996 int *is_const;
7d167afd 997 int ecf_flags;
d7cdf113
JL
998{
999 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1000 int inc;
1001
1002 /* Count arg position in order args appear. */
1003 int argpos;
1004
4fc026cd 1005 struct args_size alignment_pad;
d7cdf113
JL
1006 int i;
1007 tree p;
1008
1009 args_size->constant = 0;
1010 args_size->var = 0;
1011
1012 /* In this loop, we consider args in the order they are written.
1013 We fill up ARGS from the front or from the back if necessary
1014 so that in any case the first arg to be pushed ends up at the front. */
1015
1016#ifdef PUSH_ARGS_REVERSED
1017 i = num_actuals - 1, inc = -1;
1018 /* In this case, must reverse order of args
1019 so that we compute and push the last arg first. */
1020#else
1021 i = 0, inc = 1;
1022#endif
1023
1024 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1025 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1026 {
1027 tree type = TREE_TYPE (TREE_VALUE (p));
1028 int unsignedp;
1029 enum machine_mode mode;
1030
1031 args[i].tree_value = TREE_VALUE (p);
1032
1033 /* Replace erroneous argument with constant zero. */
d0f062fb 1034 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
d7cdf113
JL
1035 args[i].tree_value = integer_zero_node, type = integer_type_node;
1036
1037 /* If TYPE is a transparent union, pass things the way we would
1038 pass the first field of the union. We have already verified that
1039 the modes are the same. */
1040 if (TYPE_TRANSPARENT_UNION (type))
1041 type = TREE_TYPE (TYPE_FIELDS (type));
1042
1043 /* Decide where to pass this arg.
1044
1045 args[i].reg is nonzero if all or part is passed in registers.
1046
1047 args[i].partial is nonzero if part but not all is passed in registers,
1048 and the exact value says how many words are passed in registers.
1049
1050 args[i].pass_on_stack is nonzero if the argument must at least be
1051 computed on the stack. It may then be loaded back into registers
1052 if args[i].reg is nonzero.
1053
1054 These decisions are driven by the FUNCTION_... macros and must agree
1055 with those made by function.c. */
1056
1057 /* See if this argument should be passed by invisible reference. */
1058 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1059 && contains_placeholder_p (TYPE_SIZE (type)))
1060 || TREE_ADDRESSABLE (type)
1061#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
959f3a06 1062 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
d7cdf113
JL
1063 type, argpos < n_named_args)
1064#endif
1065 )
1066 {
1067 /* If we're compiling a thunk, pass through invisible
1068 references instead of making a copy. */
1069 if (current_function_is_thunk
1070#ifdef FUNCTION_ARG_CALLEE_COPIES
959f3a06 1071 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
d7cdf113
JL
1072 type, argpos < n_named_args)
1073 /* If it's in a register, we must make a copy of it too. */
1074 /* ??? Is this a sufficient test? Is there a better one? */
1075 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1076 && REG_P (DECL_RTL (args[i].tree_value)))
1077 && ! TREE_ADDRESSABLE (type))
1078#endif
1079 )
1080 {
1081 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1082 new object from the argument. If we are passing by
1083 invisible reference, the callee will do that for us, so we
1084 can strip off the TARGET_EXPR. This is not always safe,
1085 but it is safe in the only case where this is a useful
1086 optimization; namely, when the argument is a plain object.
1087 In that case, the frontend is just asking the backend to
1088 make a bitwise copy of the argument. */
1089
1090 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
2f939d94 1091 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
d7cdf113
JL
1092 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1093 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1094
1095 args[i].tree_value = build1 (ADDR_EXPR,
1096 build_pointer_type (type),
1097 args[i].tree_value);
1098 type = build_pointer_type (type);
1099 }
1100 else
1101 {
1102 /* We make a copy of the object and pass the address to the
1103 function being called. */
1104 rtx copy;
1105
d0f062fb 1106 if (!COMPLETE_TYPE_P (type)
d7cdf113
JL
1107 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1108 || (flag_stack_check && ! STACK_CHECK_BUILTIN
05bccae2
RK
1109 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1110 STACK_CHECK_MAX_VAR_SIZE))))
d7cdf113
JL
1111 {
1112 /* This is a variable-sized object. Make space on the stack
1113 for it. */
1114 rtx size_rtx = expr_size (TREE_VALUE (p));
1115
1116 if (*old_stack_level == 0)
1117 {
1118 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1119 *old_pending_adj = pending_stack_adjust;
1120 pending_stack_adjust = 0;
1121 }
1122
1123 copy = gen_rtx_MEM (BLKmode,
1124 allocate_dynamic_stack_space (size_rtx,
1125 NULL_RTX,
1126 TYPE_ALIGN (type)));
1127 }
1128 else
1129 {
1130 int size = int_size_in_bytes (type);
1131 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1132 }
1133
1134 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1135
1136 store_expr (args[i].tree_value, copy, 0);
1137 *is_const = 0;
1138
1139 args[i].tree_value = build1 (ADDR_EXPR,
1140 build_pointer_type (type),
1141 make_tree (type, copy));
1142 type = build_pointer_type (type);
1143 }
1144 }
1145
1146 mode = TYPE_MODE (type);
1147 unsignedp = TREE_UNSIGNED (type);
1148
1149#ifdef PROMOTE_FUNCTION_ARGS
1150 mode = promote_mode (type, mode, &unsignedp, 1);
1151#endif
1152
1153 args[i].unsignedp = unsignedp;
1154 args[i].mode = mode;
7d167afd
JJ
1155
1156#ifdef FUNCTION_INCOMING_ARG
1157 /* If this is a sibling call and the machine has register windows, the
1158 register window has to be unwound before calling the routine, so
1159 arguments have to go into the incoming registers. */
1160 if (ecf_flags & ECF_SIBCALL)
1161 args[i].reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1162 argpos < n_named_args);
1163 else
1164#endif
1165 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1166 argpos < n_named_args);
1167
d7cdf113
JL
1168#ifdef FUNCTION_ARG_PARTIAL_NREGS
1169 if (args[i].reg)
1170 args[i].partial
959f3a06 1171 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
d7cdf113
JL
1172 argpos < n_named_args);
1173#endif
1174
1175 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1176
1177 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1178 it means that we are to pass this arg in the register(s) designated
1179 by the PARALLEL, but also to pass it in the stack. */
1180 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1181 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1182 args[i].pass_on_stack = 1;
1183
1184 /* If this is an addressable type, we must preallocate the stack
1185 since we must evaluate the object into its final location.
1186
1187 If this is to be passed in both registers and the stack, it is simpler
1188 to preallocate. */
1189 if (TREE_ADDRESSABLE (type)
1190 || (args[i].pass_on_stack && args[i].reg != 0))
1191 *must_preallocate = 1;
1192
1193 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1194 we cannot consider this function call constant. */
1195 if (TREE_ADDRESSABLE (type))
1196 *is_const = 0;
1197
1198 /* Compute the stack-size of this argument. */
1199 if (args[i].reg == 0 || args[i].partial != 0
1200 || reg_parm_stack_space > 0
1201 || args[i].pass_on_stack)
1202 locate_and_pad_parm (mode, type,
1203#ifdef STACK_PARMS_IN_REG_PARM_AREA
1204 1,
1205#else
1206 args[i].reg != 0,
1207#endif
1208 fndecl, args_size, &args[i].offset,
4fc026cd 1209 &args[i].size, &alignment_pad);
d7cdf113
JL
1210
1211#ifndef ARGS_GROW_DOWNWARD
1212 args[i].slot_offset = *args_size;
1213#endif
1214
4fc026cd
CM
1215 args[i].alignment_pad = alignment_pad;
1216
d7cdf113
JL
1217 /* If a part of the arg was put into registers,
1218 don't include that part in the amount pushed. */
1219 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1220 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1221 / (PARM_BOUNDARY / BITS_PER_UNIT)
1222 * (PARM_BOUNDARY / BITS_PER_UNIT));
1223
1224 /* Update ARGS_SIZE, the total stack space for args so far. */
1225
1226 args_size->constant += args[i].size.constant;
1227 if (args[i].size.var)
1228 {
1229 ADD_PARM_SIZE (*args_size, args[i].size.var);
1230 }
1231
1232 /* Since the slot offset points to the bottom of the slot,
1233 we must record it after incrementing if the args grow down. */
1234#ifdef ARGS_GROW_DOWNWARD
1235 args[i].slot_offset = *args_size;
1236
1237 args[i].slot_offset.constant = -args_size->constant;
1238 if (args_size->var)
fed3cef0 1239 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
d7cdf113
JL
1240#endif
1241
1242 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1243 have been used, etc. */
1244
959f3a06 1245 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
d7cdf113
JL
1246 argpos < n_named_args);
1247 }
1248}
1249
599f37b6
JL
1250/* Update ARGS_SIZE to contain the total size for the argument block.
1251 Return the original constant component of the argument block's size.
1252
1253 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1254 for arguments passed in registers. */
1255
1256static int
c2f8b491
JH
1257compute_argument_block_size (reg_parm_stack_space, args_size,
1258 preferred_stack_boundary)
599f37b6
JL
1259 int reg_parm_stack_space;
1260 struct args_size *args_size;
c2f8b491 1261 int preferred_stack_boundary ATTRIBUTE_UNUSED;
599f37b6
JL
1262{
1263 int unadjusted_args_size = args_size->constant;
1264
1265 /* Compute the actual size of the argument block required. The variable
1266 and constant sizes must be combined, the size may have to be rounded,
1267 and there may be a minimum required size. */
1268
1269 if (args_size->var)
1270 {
1271 args_size->var = ARGS_SIZE_TREE (*args_size);
1272 args_size->constant = 0;
1273
1274#ifdef PREFERRED_STACK_BOUNDARY
c2f8b491
JH
1275 preferred_stack_boundary /= BITS_PER_UNIT;
1276 if (preferred_stack_boundary > 1)
1277 args_size->var = round_up (args_size->var, preferred_stack_boundary);
599f37b6
JL
1278#endif
1279
1280 if (reg_parm_stack_space > 0)
1281 {
1282 args_size->var
1283 = size_binop (MAX_EXPR, args_size->var,
fed3cef0 1284 ssize_int (reg_parm_stack_space));
599f37b6
JL
1285
1286#ifndef OUTGOING_REG_PARM_STACK_SPACE
1287 /* The area corresponding to register parameters is not to count in
1288 the size of the block we need. So make the adjustment. */
1289 args_size->var
1290 = size_binop (MINUS_EXPR, args_size->var,
fed3cef0 1291 ssize_int (reg_parm_stack_space));
599f37b6
JL
1292#endif
1293 }
1294 }
1295 else
1296 {
1297#ifdef PREFERRED_STACK_BOUNDARY
c2f8b491 1298 preferred_stack_boundary /= BITS_PER_UNIT;
0a1c58a2
JL
1299 if (preferred_stack_boundary < 1)
1300 preferred_stack_boundary = 1;
fb5eebb9 1301 args_size->constant = (((args_size->constant
c2732da3 1302 + arg_space_so_far
fb5eebb9 1303 + pending_stack_adjust
c2f8b491
JH
1304 + preferred_stack_boundary - 1)
1305 / preferred_stack_boundary
1306 * preferred_stack_boundary)
c2732da3 1307 - arg_space_so_far
fb5eebb9 1308 - pending_stack_adjust);
599f37b6
JL
1309#endif
1310
1311 args_size->constant = MAX (args_size->constant,
1312 reg_parm_stack_space);
1313
1314#ifdef MAYBE_REG_PARM_STACK_SPACE
1315 if (reg_parm_stack_space == 0)
1316 args_size->constant = 0;
1317#endif
1318
1319#ifndef OUTGOING_REG_PARM_STACK_SPACE
1320 args_size->constant -= reg_parm_stack_space;
1321#endif
1322 }
1323 return unadjusted_args_size;
1324}
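/* A worked example of the constant-size rounding above, assuming a
   128-bit (16-byte) preferred stack boundary, args_size->constant == 20,
   no arg_space_so_far or pending_stack_adjust, and no reserved register
   parameter area: the block is rounded up to (20 + 15) / 16 * 16 == 32
   bytes, while the original 20 is returned as the unadjusted size.  */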
1325
19832c77 1326/* Precompute parameters as needed for a function call.
cc0b1adc
JL
1327
1328 IS_CONST indicates the target function is a pure function.
1329
1330 MUST_PREALLOCATE indicates that we must preallocate stack space for
1331 any stack arguments.
1332
1333 NUM_ACTUALS is the number of arguments.
1334
1335 ARGS is an array containing information for each argument; this routine
1336 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1337
1338 ARGS_SIZE contains information about the size of the arg list. */
1339
1340static void
1341precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1342 int is_const;
1343 int must_preallocate;
1344 int num_actuals;
1345 struct arg_data *args;
1346 struct args_size *args_size;
1347{
1348 int i;
1349
1350 /* If this function call is cse'able, precompute all the parameters.
1351 Note that if the parameter is constructed into a temporary, this will
1352 cause an additional copy because the parameter will be constructed
1353 into a temporary location and then copied into the outgoing arguments.
1354 If a parameter contains a call to alloca and this function uses the
1355 stack, precompute the parameter. */
1356
1357 /* If we preallocated the stack space, and some arguments must be passed
1358 on the stack, then we must precompute any parameter which contains a
1359 function call which will store arguments on the stack.
1360 Otherwise, evaluating the parameter may clobber previous parameters
1361 which have already been stored into the stack. */
1362
1363 for (i = 0; i < num_actuals; i++)
1364 if (is_const
1365 || ((args_size->var != 0 || args_size->constant != 0)
1366 && calls_function (args[i].tree_value, 1))
1367 || (must_preallocate
1368 && (args_size->var != 0 || args_size->constant != 0)
1369 && calls_function (args[i].tree_value, 0)))
1370 {
1371 /* If this is an addressable type, we cannot pre-evaluate it. */
1372 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1373 abort ();
1374
1375 push_temp_slots ();
1376
47841d1b 1377 args[i].value
cc0b1adc
JL
1378 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1379
1380 preserve_temp_slots (args[i].value);
1381 pop_temp_slots ();
1382
1383 /* ANSI doesn't require a sequence point here,
1384 but PCC has one, so this will avoid some problems. */
1385 emit_queue ();
1386
1387 args[i].initial_value = args[i].value
47841d1b 1388 = protect_from_queue (args[i].value, 0);
cc0b1adc
JL
1389
1390 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
47841d1b
JJ
1391 {
1392 args[i].value
1393 = convert_modes (args[i].mode,
1394 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1395 args[i].value, args[i].unsignedp);
1396#ifdef PROMOTE_FOR_CALL_ONLY
1397 /* CSE will replace this only if it contains args[i].value
1398 pseudo, so convert it down to the declared mode using
1399 a SUBREG. */
1400 if (GET_CODE (args[i].value) == REG
1401 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1402 {
1403 args[i].initial_value
1404 = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1405 args[i].value, 0);
1406 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1407 SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
1408 = args[i].unsignedp;
1409 }
1410#endif
1411 }
cc0b1adc
JL
1412 }
1413}
1414
0f9b3ea6
JL
1415/* Given the current state of MUST_PREALLOCATE and information about
1416 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1417 compute and return the final value for MUST_PREALLOCATE. */
1418
1419static int
1420finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1421 int must_preallocate;
1422 int num_actuals;
1423 struct arg_data *args;
1424 struct args_size *args_size;
1425{
1426 /* See if we have or want to preallocate stack space.
1427
1428 If we would have to push a partially-in-regs parm
1429 before other stack parms, preallocate stack space instead.
1430
1431 If the size of some parm is not a multiple of the required stack
1432 alignment, we must preallocate.
1433
1434 If the total size of arguments that would otherwise create a copy in
1435 a temporary (such as a CALL) is more than half the total argument list
1436 size, preallocation is faster.
1437
1438 Another reason to preallocate is if we have a machine (like the m88k)
1439 where stack alignment is required to be maintained between every
1440 pair of insns, not just when the call is made. However, we assume here
1441 that such machines either do not have push insns (and hence preallocation
1442 would occur anyway) or the problem is taken care of with
1443 PUSH_ROUNDING. */
1444
1445 if (! must_preallocate)
1446 {
1447 int partial_seen = 0;
1448 int copy_to_evaluate_size = 0;
1449 int i;
1450
1451 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1452 {
1453 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1454 partial_seen = 1;
1455 else if (partial_seen && args[i].reg == 0)
1456 must_preallocate = 1;
1457
1458 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1459 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1460 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1461 || TREE_CODE (args[i].tree_value) == COND_EXPR
1462 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1463 copy_to_evaluate_size
1464 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1465 }
1466
1467 if (copy_to_evaluate_size * 2 >= args_size->constant
1468 && args_size->constant > 0)
1469 must_preallocate = 1;
1470 }
1471 return must_preallocate;
1472}
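/* A worked example of the copy-size heuristic above: with
   args_size->constant == 40 bytes, of which 24 bytes come from BLKmode
   arguments needing a temporary copy (e.g. a CALL_EXPR operand),
   copy_to_evaluate_size * 2 == 48 >= 40, so the whole argument block is
   preallocated rather than pushed piecemeal.  */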
599f37b6 1473
a45bdd02
JL
1474/* If we preallocated stack space, compute the address of each argument
1475 and store it into the ARGS array.
1476
1477 We need not ensure it is a valid memory address here; it will be
1478 validized when it is used.
1479
1480 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1481
1482static void
1483compute_argument_addresses (args, argblock, num_actuals)
1484 struct arg_data *args;
1485 rtx argblock;
1486 int num_actuals;
1487{
1488 if (argblock)
1489 {
1490 rtx arg_reg = argblock;
1491 int i, arg_offset = 0;
1492
1493 if (GET_CODE (argblock) == PLUS)
1494 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1495
1496 for (i = 0; i < num_actuals; i++)
1497 {
1498 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1499 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1500 rtx addr;
1501
1502 /* Skip this parm if it will not be passed on the stack. */
1503 if (! args[i].pass_on_stack && args[i].reg != 0)
1504 continue;
1505
1506 if (GET_CODE (offset) == CONST_INT)
1507 addr = plus_constant (arg_reg, INTVAL (offset));
1508 else
1509 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1510
1511 addr = plus_constant (addr, arg_offset);
1512 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1513 MEM_SET_IN_STRUCT_P
1514 (args[i].stack,
1515 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1516
1517 if (GET_CODE (slot_offset) == CONST_INT)
1518 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1519 else
1520 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1521
1522 addr = plus_constant (addr, arg_offset);
1523 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1524 }
1525 }
1526}
1527
1528/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1529 in a call instruction.
1530
1531 FNDECL is the tree node for the target function. For an indirect call
1532 FNDECL will be NULL_TREE.
1533
1534 EXP is the CALL_EXPR for this call. */
1535
1536static rtx
1537rtx_for_function_call (fndecl, exp)
1538 tree fndecl;
1539 tree exp;
1540{
1541 rtx funexp;
1542
1543 /* Get the function to call, in the form of RTL. */
1544 if (fndecl)
1545 {
1546 /* If this is the first use of the function, see if we need to
1547 make an external definition for it. */
1548 if (! TREE_USED (fndecl))
1549 {
1550 assemble_external (fndecl);
1551 TREE_USED (fndecl) = 1;
1552 }
1553
1554 /* Get a SYMBOL_REF rtx for the function address. */
1555 funexp = XEXP (DECL_RTL (fndecl), 0);
1556 }
1557 else
1558 /* Generate an rtx (probably a pseudo-register) for the address. */
1559 {
91ab1046 1560 rtx funaddr;
a45bdd02 1561 push_temp_slots ();
91ab1046
DT
1562 funaddr = funexp =
1563 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
a45bdd02
JL
1564 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1565
1566 /* Check the function is executable. */
1567 if (current_function_check_memory_usage)
91ab1046
DT
1568 {
1569#ifdef POINTERS_EXTEND_UNSIGNED
1570 /* It might be OK to convert funexp in place, but there's
1571 a lot going on between here and when it happens naturally
1572 that this seems safer. */
1573 funaddr = convert_memory_address (Pmode, funexp);
1574#endif
1575 emit_library_call (chkr_check_exec_libfunc, 1,
1576 VOIDmode, 1,
1577 funaddr, Pmode);
1578 }
a45bdd02
JL
1579 emit_queue ();
1580 }
1581 return funexp;
1582}
1583
21a3b983
JL
1584/* Do the register loads required for any wholly-register parms or any
1585 parms which are passed both on the stack and in a register. Their
1586 expressions were already evaluated.
1587
1588 Mark all register-parms as living through the call, putting these USE
1589 insns in the CALL_INSN_FUNCTION_USAGE field. */
1590
1591static void
1592load_register_parameters (args, num_actuals, call_fusage)
1593 struct arg_data *args;
1594 int num_actuals;
1595 rtx *call_fusage;
1596{
1597 int i, j;
1598
1599#ifdef LOAD_ARGS_REVERSED
1600 for (i = num_actuals - 1; i >= 0; i--)
1601#else
1602 for (i = 0; i < num_actuals; i++)
1603#endif
1604 {
1605 rtx reg = args[i].reg;
1606 int partial = args[i].partial;
1607 int nregs;
1608
1609 if (reg)
1610 {
1611 /* Set to non-negative if we must move a word at a time, even if just
1612 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1613 we just use a normal move insn. This value can be zero if the
1614 argument is a zero size structure with no fields. */
1615 nregs = (partial ? partial
1616 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1617 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1618 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1619 : -1));
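	  /* Worked example (illustrative only): with UNITS_PER_WORD == 4, a
	     10-byte BLKmode argument passed wholly in registers gives
	     nregs = (10 + 3) / 4 == 3, i.e. three word-sized moves;
	     partial == 1 with mode == DFmode gives nregs == 1; a
	     non-BLKmode, non-partial argument gives nregs == -1 and is
	     handled by a single move insn below.  */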
1620
1621 /* Handle calls that pass values in multiple non-contiguous
1622 locations. The Irix 6 ABI has examples of this. */
1623
1624 if (GET_CODE (reg) == PARALLEL)
1625 {
1626 emit_group_load (reg, args[i].value,
1627 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1628 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1629 / BITS_PER_UNIT));
1630 }
1631
1632 /* If simple case, just do move. If normal partial, store_one_arg
1633 has already loaded the register for us. In all other cases,
1634 load the register(s) from memory. */
1635
1636 else if (nregs == -1)
1637 emit_move_insn (reg, args[i].value);
1638
1639 /* If we have pre-computed the values to put in the registers in
1640 the case of non-aligned structures, copy them in now. */
1641
1642 else if (args[i].n_aligned_regs != 0)
1643 for (j = 0; j < args[i].n_aligned_regs; j++)
1644 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1645 args[i].aligned_regs[j]);
1646
1647 else if (partial == 0 || args[i].pass_on_stack)
1648 move_block_to_reg (REGNO (reg),
1649 validize_mem (args[i].value), nregs,
1650 args[i].mode);
1651
1652 /* Handle calls that pass values in multiple non-contiguous
1653 locations. The Irix 6 ABI has examples of this. */
1654 if (GET_CODE (reg) == PARALLEL)
1655 use_group_regs (call_fusage, reg);
1656 else if (nregs == -1)
1657 use_reg (call_fusage, reg);
1658 else
1659 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1660 }
1661 }
1662}
1663
51bbfa0c
RS
1664/* Generate all the code for a function call
1665 and return an rtx for its value.
1666 Store the value in TARGET (specified as an rtx) if convenient.
1667 If the value is stored in TARGET then TARGET is returned.
1668 If IGNORE is nonzero, then we ignore the value of the function call. */
1669
1670rtx
8129842c 1671expand_call (exp, target, ignore)
51bbfa0c
RS
1672 tree exp;
1673 rtx target;
1674 int ignore;
51bbfa0c 1675{
0a1c58a2
JL
1676 /* Nonzero if we are currently expanding a call. */
1677 static int currently_expanding_call = 0;
1678
51bbfa0c
RS
1679 /* List of actual parameters. */
1680 tree actparms = TREE_OPERAND (exp, 1);
1681 /* RTX for the function to be called. */
1682 rtx funexp;
0a1c58a2
JL
1683 /* Sequence of insns to perform a tail recursive "call". */
1684 rtx tail_recursion_insns = NULL_RTX;
1685 /* Sequence of insns to perform a normal "call". */
1686 rtx normal_call_insns = NULL_RTX;
 1687   /* Sequence of insns to perform a sibling "call".  */
1688 rtx tail_call_insns = NULL_RTX;
51bbfa0c
RS
1689 /* Data type of the function. */
1690 tree funtype;
1691 /* Declaration of the function being called,
1692 or 0 if the function is computed (not known by name). */
1693 tree fndecl = 0;
1694 char *name = 0;
0a1c58a2 1695#ifdef ACCUMULATE_OUTGOING_ARGS
c2939b57 1696 rtx before_call;
0a1c58a2
JL
1697#endif
1698 rtx insn;
1699 int safe_for_reeval;
1700 int pass;
51bbfa0c
RS
1701
1702 /* Register in which non-BLKmode value will be returned,
1703 or 0 if no value or if value is BLKmode. */
1704 rtx valreg;
1705 /* Address where we should return a BLKmode value;
1706 0 if value not BLKmode. */
1707 rtx structure_value_addr = 0;
1708 /* Nonzero if that address is being passed by treating it as
1709 an extra, implicit first parameter. Otherwise,
1710 it is passed by being copied directly into struct_value_rtx. */
1711 int structure_value_addr_parm = 0;
1712 /* Size of aggregate value wanted, or zero if none wanted
1713 or if we are using the non-reentrant PCC calling convention
1714 or expecting the value in registers. */
e5e809f4 1715 HOST_WIDE_INT struct_value_size = 0;
51bbfa0c
RS
1716 /* Nonzero if called function returns an aggregate in memory PCC style,
1717 by returning the address of where to find it. */
1718 int pcc_struct_value = 0;
1719
1720 /* Number of actual parameters in this call, including struct value addr. */
1721 int num_actuals;
1722 /* Number of named args. Args after this are anonymous ones
1723 and they must all go on the stack. */
1724 int n_named_args;
51bbfa0c
RS
1725
1726 /* Vector of information about each argument.
1727 Arguments are numbered in the order they will be pushed,
1728 not the order they are written. */
1729 struct arg_data *args;
1730
1731 /* Total size in bytes of all the stack-parms scanned so far. */
1732 struct args_size args_size;
1733 /* Size of arguments before any adjustments (such as rounding). */
599f37b6 1734 int unadjusted_args_size;
51bbfa0c
RS
1735 /* Data on reg parms scanned so far. */
1736 CUMULATIVE_ARGS args_so_far;
1737 /* Nonzero if a reg parm has been scanned. */
1738 int reg_parm_seen;
efd65a8b 1739 /* Nonzero if this is an indirect function call. */
51bbfa0c
RS
1740
1741 /* Nonzero if we must avoid push-insns in the args for this call.
1742 If stack space is allocated for register parameters, but not by the
1743 caller, then it is preallocated in the fixed part of the stack frame.
1744 So the entire argument block must then be preallocated (i.e., we
1745 ignore PUSH_ROUNDING in that case). */
1746
51bbfa0c
RS
1747#ifdef PUSH_ROUNDING
1748 int must_preallocate = 0;
1749#else
1750 int must_preallocate = 1;
51bbfa0c
RS
1751#endif
1752
f72aed24 1753 /* Size of the stack reserved for parameter registers. */
6f90e075
JW
1754 int reg_parm_stack_space = 0;
1755
51bbfa0c
RS
1756 /* Address of space preallocated for stack parms
1757 (on machines that lack push insns), or 0 if space not preallocated. */
1758 rtx argblock = 0;
1759
1760 /* Nonzero if it is plausible that this is a call to alloca. */
1761 int may_be_alloca;
9ae8ffe7
JL
1762 /* Nonzero if this is a call to malloc or a related function. */
1763 int is_malloc;
51bbfa0c
RS
1764 /* Nonzero if this is a call to setjmp or a related function. */
1765 int returns_twice;
1766 /* Nonzero if this is a call to `longjmp'. */
1767 int is_longjmp;
fa76d9e0
JR
1768 /* Nonzero if this is a syscall that makes a new process in the image of
1769 the current one. */
1770 int fork_or_exec;
51bbfa0c
RS
1771 /* Nonzero if this is a call to an inline function. */
1772 int is_integrable = 0;
51bbfa0c
RS
1773 /* Nonzero if this is a call to a `const' function.
1774 Note that only explicitly named functions are handled as `const' here. */
1775 int is_const = 0;
1776 /* Nonzero if this is a call to a `volatile' function. */
1777 int is_volatile = 0;
12a22e76
JM
1778 /* Nonzero if this is a call to a function that won't throw an exception. */
1779 int nothrow = TREE_NOTHROW (exp);
51bbfa0c
RS
1780#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1781 /* Define the boundary of the register parm stack space that needs to be
 1782      saved, if any.  */
1783 int low_to_save = -1, high_to_save;
1784 rtx save_area = 0; /* Place that it is saved */
1785#endif
1786
1787#ifdef ACCUMULATE_OUTGOING_ARGS
1788 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1789 char *initial_stack_usage_map = stack_usage_map;
a544cfd2 1790 int old_stack_arg_under_construction = 0;
51bbfa0c
RS
1791#endif
1792
1793 rtx old_stack_level = 0;
79be3418 1794 int old_pending_adj = 0;
51bbfa0c 1795 int old_inhibit_defer_pop = inhibit_defer_pop;
0a1c58a2 1796 rtx call_fusage;
51bbfa0c 1797 register tree p;
21a3b983 1798 register int i;
0a1c58a2 1799 int preferred_stack_boundary;
51bbfa0c 1800
7815214e
RK
1801 /* The value of the function call can be put in a hard register. But
1802 if -fcheck-memory-usage, code which invokes functions (and thus
1803 damages some hard registers) can be inserted before using the value.
1804 So, target is always a pseudo-register in that case. */
7d384cc0 1805 if (current_function_check_memory_usage)
7815214e
RK
1806 target = 0;
1807
51bbfa0c
RS
1808 /* See if we can find a DECL-node for the actual function.
1809 As a result, decide whether this is a call to an integrable function. */
1810
1811 p = TREE_OPERAND (exp, 0);
1812 if (TREE_CODE (p) == ADDR_EXPR)
1813 {
1814 fndecl = TREE_OPERAND (p, 0);
1815 if (TREE_CODE (fndecl) != FUNCTION_DECL)
fdff8c6d 1816 fndecl = 0;
51bbfa0c
RS
1817 else
1818 {
1819 if (!flag_no_inline
1820 && fndecl != current_function_decl
aa10adff 1821 && DECL_INLINE (fndecl)
1cf4f698 1822 && DECL_SAVED_INSNS (fndecl)
49ad7cfa 1823 && DECL_SAVED_INSNS (fndecl)->inlinable)
51bbfa0c
RS
1824 is_integrable = 1;
1825 else if (! TREE_ADDRESSABLE (fndecl))
1826 {
13d39dbc 1827 /* In case this function later becomes inlinable,
51bbfa0c
RS
1828 record that there was already a non-inline call to it.
1829
1830 Use abstraction instead of setting TREE_ADDRESSABLE
1831 directly. */
da8c1713
RK
1832 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1833 && optimize > 0)
1907795e
JM
1834 {
1835 warning_with_decl (fndecl, "can't inline call to `%s'");
1836 warning ("called from here");
1837 }
51bbfa0c
RS
1838 mark_addressable (fndecl);
1839 }
1840
d45cf215
RS
1841 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1842 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
51bbfa0c 1843 is_const = 1;
5e24110e
RS
1844
1845 if (TREE_THIS_VOLATILE (fndecl))
1846 is_volatile = 1;
12a22e76
JM
1847
1848 if (TREE_NOTHROW (fndecl))
1849 nothrow = 1;
51bbfa0c
RS
1850 }
1851 }
1852
fdff8c6d
RK
1853 /* If we don't have specific function to call, see if we have a
1854 constant or `noreturn' function from the type. */
1855 if (fndecl == 0)
1856 {
1857 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1858 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1859 }
1860
6f90e075
JW
1861#ifdef REG_PARM_STACK_SPACE
1862#ifdef MAYBE_REG_PARM_STACK_SPACE
1863 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1864#else
1865 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1866#endif
1867#endif
1868
e5e809f4
JL
1869#if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1870 if (reg_parm_stack_space > 0)
1871 must_preallocate = 1;
1872#endif
1873
51bbfa0c
RS
1874 /* Warn if this value is an aggregate type,
1875 regardless of which calling convention we are using for it. */
05e3bdb9 1876 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
51bbfa0c
RS
1877 warning ("function call has aggregate value");
1878
1879 /* Set up a place to return a structure. */
1880
1881 /* Cater to broken compilers. */
1882 if (aggregate_value_p (exp))
1883 {
1884 /* This call returns a big structure. */
1885 is_const = 0;
1886
1887#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a
RS
1888 {
1889 pcc_struct_value = 1;
0dd532dc
JW
1890 /* Easier than making that case work right. */
1891 if (is_integrable)
1892 {
1893 /* In case this is a static function, note that it has been
1894 used. */
1895 if (! TREE_ADDRESSABLE (fndecl))
1896 mark_addressable (fndecl);
1897 is_integrable = 0;
1898 }
9e7b1d0a
RS
1899 }
1900#else /* not PCC_STATIC_STRUCT_RETURN */
1901 {
1902 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
51bbfa0c 1903
9e7b1d0a
RS
1904 if (target && GET_CODE (target) == MEM)
1905 structure_value_addr = XEXP (target, 0);
1906 else
1907 {
e9a25f70
JL
1908 /* Assign a temporary to hold the value. */
1909 tree d;
51bbfa0c 1910
9e7b1d0a
RS
1911 /* For variable-sized objects, we must be called with a target
1912 specified. If we were to allocate space on the stack here,
1913 we would have no way of knowing when to free it. */
51bbfa0c 1914
002bdd6c
RK
1915 if (struct_value_size < 0)
1916 abort ();
1917
e9a25f70
JL
1918 /* This DECL is just something to feed to mark_addressable;
1919 it doesn't get pushed. */
1920 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1921 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1922 mark_addressable (d);
14a774a9 1923 mark_temp_addr_taken (DECL_RTL (d));
e9a25f70 1924 structure_value_addr = XEXP (DECL_RTL (d), 0);
e5e809f4 1925 TREE_USED (d) = 1;
9e7b1d0a
RS
1926 target = 0;
1927 }
1928 }
1929#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
1930 }
1931
1932 /* If called function is inline, try to integrate it. */
1933
1934 if (is_integrable)
1935 {
1936 rtx temp;
c2939b57 1937
69d4ca36 1938#ifdef ACCUMULATE_OUTGOING_ARGS
c2939b57 1939 before_call = get_last_insn ();
69d4ca36 1940#endif
51bbfa0c
RS
1941
1942 temp = expand_inline_function (fndecl, actparms, target,
1943 ignore, TREE_TYPE (exp),
1944 structure_value_addr);
1945
1946 /* If inlining succeeded, return. */
2e0dd623 1947 if (temp != (rtx) (HOST_WIDE_INT) -1)
51bbfa0c 1948 {
d64f5a78 1949#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
1950 /* If the outgoing argument list must be preserved, push
1951 the stack before executing the inlined function if it
1952 makes any calls. */
1953
1954 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1955 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1956 break;
1957
1958 if (stack_arg_under_construction || i >= 0)
1959 {
a1917650
RK
1960 rtx first_insn
1961 = before_call ? NEXT_INSN (before_call) : get_insns ();
6a651371 1962 rtx insn = NULL_RTX, seq;
2f4aa534 1963
d64f5a78 1964 /* Look for a call in the inline function code.
49ad7cfa 1965 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
d64f5a78
RS
1966 nonzero then there is a call and it is not necessary
1967 to scan the insns. */
1968
49ad7cfa 1969 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
a1917650 1970 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
d64f5a78
RS
1971 if (GET_CODE (insn) == CALL_INSN)
1972 break;
2f4aa534
RS
1973
1974 if (insn)
1975 {
d64f5a78
RS
1976 /* Reserve enough stack space so that the largest
1977 argument list of any function call in the inline
1978 function does not overlap the argument list being
1979 evaluated. This is usually an overestimate because
1980 allocate_dynamic_stack_space reserves space for an
1981 outgoing argument list in addition to the requested
1982 space, but there is no way to ask for stack space such
1983 that an argument list of a certain length can be
e5e809f4 1984 safely constructed.
d64f5a78 1985
e5e809f4
JL
1986 Add the stack space reserved for register arguments, if
1987 any, in the inline function. What is really needed is the
d64f5a78
RS
1988 largest value of reg_parm_stack_space in the inline
1989 function, but that is not available. Using the current
1990 value of reg_parm_stack_space is wrong, but gives
1991 correct results on all supported machines. */
e5e809f4 1992
49ad7cfa 1993 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
e5e809f4
JL
1994 + reg_parm_stack_space);
1995
2f4aa534 1996 start_sequence ();
ccf5d244 1997 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
e5d70561
RK
1998 allocate_dynamic_stack_space (GEN_INT (adjust),
1999 NULL_RTX, BITS_PER_UNIT);
2f4aa534
RS
2000 seq = get_insns ();
2001 end_sequence ();
a1917650 2002 emit_insns_before (seq, first_insn);
e5d70561 2003 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2f4aa534
RS
2004 }
2005 }
d64f5a78 2006#endif
51bbfa0c
RS
2007
2008 /* If the result is equivalent to TARGET, return TARGET to simplify
2009 checks in store_expr. They can be equivalent but not equal in the
2010 case of a function that returns BLKmode. */
2011 if (temp != target && rtx_equal_p (temp, target))
2012 return target;
2013 return temp;
2014 }
2015
2016 /* If inlining failed, mark FNDECL as needing to be compiled
0481a55e
RK
2017 separately after all. If function was declared inline,
2018 give a warning. */
2019 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
da8c1713 2020 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1907795e
JM
2021 {
2022 warning_with_decl (fndecl, "inlining failed in call to `%s'");
2023 warning ("called from here");
2024 }
51bbfa0c
RS
2025 mark_addressable (fndecl);
2026 }
2027
0a1c58a2
JL
2028 currently_expanding_call++;
2029
2030 /* If we're considering tail recursion optimizations, verify that the
2031 arguments are safe for re-evaluation. If we can unsave them, wrap
2032 each argument in an UNSAVE_EXPR. */
2033
2034 safe_for_reeval = 0;
2035 if (optimize >= 2
2036 && currently_expanding_call == 1
e245d3af
RH
2037 && stmt_loop_nest_empty ()
2038 && ! any_pending_cleanups (1))
0a1c58a2
JL
2039 {
2040 /* Verify that each argument is safe for re-evaluation. */
2041 for (p = actparms; p; p = TREE_CHAIN (p))
2042 if (! safe_for_unsave (TREE_VALUE (p)))
2043 break;
2044
2045 if (p == NULL_TREE)
2046 {
2047 tree new_actparms = NULL_TREE, q;
2048
2049 for (p = actparms; p ; p = TREE_CHAIN (p))
2050 {
2051 tree np = build_tree_list (TREE_PURPOSE (p),
2052 unsave_expr (TREE_VALUE (p)));
2053 if (new_actparms)
2054 TREE_CHAIN (q) = np;
2055 else
2056 new_actparms = np;
2057 q = np;
2058 }
2059
2060 actparms = new_actparms;
2061 safe_for_reeval = 1;
2062 }
2063 }
2064
2065 /* Generate a tail recursion sequence when calling ourselves. */
2066
2067 if (safe_for_reeval
2068 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
2069 && TREE_OPERAND (TREE_OPERAND (exp, 0), 0) == current_function_decl)
2070 {
2071 /* We want to emit any pending stack adjustments before the tail
2072 recursion "call". That way we know any adjustment after the tail
2073 recursion call can be ignored if we indeed use the tail recursion
2074 call expansion. */
2075 int save_pending_stack_adjust = pending_stack_adjust;
2076 rtx last;
2077
2078 /* Use a new sequence to hold any RTL we generate. We do not even
2079 know if we will use this RTL yet. The final decision can not be
2080 made until after RTL generation for the entire function is
2081 complete. */
2082 push_to_sequence (0);
2083
2084 /* Emit the pending stack adjustments before we expand any arguments. */
2085 do_pending_stack_adjust ();
2086
2087 optimize_tail_recursion (exp, get_last_insn ());
2088
2089 last = get_last_insn ();
2090 tail_recursion_insns = get_insns ();
2091 end_sequence ();
2092
2093 /* If the last insn on the tail recursion sequence is not a
2094 BARRIER, then tail recursion optimization failed. */
2095 if (last == NULL_RTX || GET_CODE (last) != BARRIER)
2096 tail_recursion_insns = NULL_RTX;
2097
2098 /* Restore the original pending stack adjustment for the sibling and
2099 normal call cases below. */
2100 pending_stack_adjust = save_pending_stack_adjust;
2101 }
2102
51bbfa0c
RS
2103 function_call_count++;
2104
2105 if (fndecl && DECL_NAME (fndecl))
2106 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
2107
0a1c58a2
JL
2108#ifdef PREFERRED_STACK_BOUNDARY
2109 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2110#else
2111 preferred_stack_boundary = STACK_BOUNDARY;
2112#endif
2113
c2f8b491
JH
2114 /* Ensure current function's preferred stack boundary is at least
2115 what we need. We don't have to increase alignment for recursive
2116 functions. */
2117 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2118 && fndecl != current_function_decl)
2119 cfun->preferred_stack_boundary = preferred_stack_boundary;
2120
51bbfa0c 2121 /* See if this is a call to a function that can return more than once
20efdf74 2122 or a call to longjmp or malloc. */
fa76d9e0 2123 special_function_p (fndecl, &returns_twice, &is_longjmp, &fork_or_exec,
20efdf74 2124 &is_malloc, &may_be_alloca);
51bbfa0c 2125
51bbfa0c
RS
2126 if (may_be_alloca)
2127 current_function_calls_alloca = 1;
2128
39842893
JL
2129 /* Operand 0 is a pointer-to-function; get the type of the function. */
2130 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
2131 if (! POINTER_TYPE_P (funtype))
2132 abort ();
2133 funtype = TREE_TYPE (funtype);
2134
0a1c58a2
JL
2135 /* We want to make two insn chains; one for a sibling call, the other
2136 for a normal call. We will select one of the two chains after
2137 initial RTL generation is complete. */
2138 for (pass = 0; pass < 2; pass++)
2139 {
2140 int sibcall_failure = 0;
 2141       /* We want to emit any pending stack adjustments before the tail
2142 recursion "call". That way we know any adjustment after the tail
2143 recursion call can be ignored if we indeed use the tail recursion
2144 call expansion. */
2145 int save_pending_stack_adjust;
2146 rtx insns;
7d167afd 2147 rtx before_call, next_arg_reg;
39842893 2148
0a1c58a2
JL
2149 if (pass == 0)
2150 {
2151 /* Various reasons we can not use a sibling call. */
2152 if (! safe_for_reeval
2153#ifdef HAVE_sibcall_epilogue
2154 || ! HAVE_sibcall_epilogue
2155#else
2156 || 1
2157#endif
2158 /* The structure value address is used and modified in the
2159 loop below. It does not seem worth the effort to save and
2160 restore it as a state variable since few optimizable
2161 sibling calls will return a structure. */
2162 || structure_value_addr != NULL_RTX
2163 /* If the register holding the address is a callee saved
2164 register, then we lose. We have no way to prevent that,
2165 so we only allow calls to named functions. */
2166 || fndecl == NULL_TREE
2167 || ! FUNCTION_OK_FOR_SIBCALL (fndecl))
2168 continue;
51bbfa0c 2169
e245d3af
RH
2170 /* We know at this point that there are not currently any
2171 pending cleanups. If, however, in the process of evaluating
2172 the arguments we were to create some, we'll need to be
2173 able to get rid of them. */
2174 expand_start_target_temps ();
2175
0a1c58a2
JL
2176 /* State variables we need to save and restore between
2177 iterations. */
2178 save_pending_stack_adjust = pending_stack_adjust;
2179 }
51bbfa0c 2180
0a1c58a2
JL
2181 /* Other state variables that we must reinitialize each time
 2182          through the loop (that are not initialized by the loop itself).  */
2183 argblock = 0;
2184 call_fusage = 0;
fa76d9e0 2185
0a1c58a2 2186 /* Start a new sequence for the normal call case.
51bbfa0c 2187
0a1c58a2
JL
2188 From this point on, if the sibling call fails, we want to set
2189 sibcall_failure instead of continuing the loop. */
2190 start_sequence ();
eecb6f50 2191
0a1c58a2
JL
2192 /* When calling a const function, we must pop the stack args right away,
2193 so that the pop is deleted or moved with the call. */
2194 if (is_const)
2195 NO_DEFER_POP;
51bbfa0c 2196
0a1c58a2
JL
2197 /* Don't let pending stack adjusts add up to too much.
2198 Also, do all pending adjustments now if there is any chance
2199 this might be a call to alloca or if we are expanding a sibling
2200 call sequence. */
2201 if (pending_stack_adjust >= 32
2202 || (pending_stack_adjust > 0 && may_be_alloca)
2203 || pass == 0)
2204 do_pending_stack_adjust ();
51bbfa0c 2205
0a1c58a2
JL
2206 if (profile_arc_flag && fork_or_exec)
2207 {
2208 /* A fork duplicates the profile information, and an exec discards
2209 it. We can't rely on fork/exec to be paired. So write out the
2210 profile information we have gathered so far, and clear it. */
2211 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2212 is subject to race conditions, just as with multithreaded
2213 programs. */
2214
2215 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
2216 VOIDmode, 0);
2217 }
2218
2219 /* Push the temporary stack slot level so that we can free any
2220 temporaries we make. */
2221 push_temp_slots ();
51bbfa0c 2222
0a1c58a2 2223 /* Start updating where the next arg would go.
51bbfa0c 2224
0a1c58a2
JL
2225 On some machines (such as the PA) indirect calls have a different
2226 calling convention than normal calls. The last argument in
2227 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2228 or not. */
2229 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2230
2231 /* If struct_value_rtx is 0, it means pass the address
2232 as if it were an extra parameter. */
2233 if (structure_value_addr && struct_value_rtx == 0)
2234 {
2235 /* If structure_value_addr is a REG other than
 2236              virtual_outgoing_args_rtx, we can always use it.  If it
2237 is not a REG, we must always copy it into a register.
2238 If it is virtual_outgoing_args_rtx, we must copy it to another
2239 register in some cases. */
2240 rtx temp = (GET_CODE (structure_value_addr) != REG
2241#ifdef ACCUMULATE_OUTGOING_ARGS
2242 || (stack_arg_under_construction
2243 && structure_value_addr == virtual_outgoing_args_rtx)
2244#endif
2245 ? copy_addr_to_reg (structure_value_addr)
2246 : structure_value_addr);
2247
2248 actparms
2249 = tree_cons (error_mark_node,
2250 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2251 temp),
2252 actparms);
2253 structure_value_addr_parm = 1;
2254 }
2255
2256 /* Count the arguments and set NUM_ACTUALS. */
2257 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
2258 num_actuals = i;
2259
2260 /* Compute number of named args.
2261 Normally, don't include the last named arg if anonymous args follow.
2262 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2263 (If no anonymous args follow, the result of list_length is actually
2264 one too large. This is harmless.)
2265
2266 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2267 zero, this machine will be able to place unnamed args that were
2268 passed in registers into the stack. So treat all args as named.
 2269      This allows the insns emitted for a specific argument list to be
2270 independent of the function declaration.
2271
2272 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2273 reliable way to pass unnamed args in registers, so we must force
2274 them into memory. */
2275
2276 if ((STRICT_ARGUMENT_NAMING
2277 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2278 && TYPE_ARG_TYPES (funtype) != 0)
2279 n_named_args
2280 = (list_length (TYPE_ARG_TYPES (funtype))
2281 /* Don't include the last named arg. */
2282 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2283 /* Count the struct value address, if it is passed as a parm. */
2284 + structure_value_addr_parm);
2285 else
2286 /* If we know nothing, treat all args as named. */
2287 n_named_args = num_actuals;
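      /* Illustrative example (assuming that TYPE_ARG_TYPES for
	 `void f (int a, int b, ...)' lists just the two named parameter
	 types): with !STRICT_ARGUMENT_NAMING and no struct-value parm this
	 gives n_named_args = 2 - 1 + 0 == 1, so only A is treated as named
	 and B plus the anonymous arguments are handled as unnamed; with
	 STRICT_ARGUMENT_NAMING both A and B stay named.  */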
2288
2289 /* Make a vector to hold all the information about each arg. */
2290 args = (struct arg_data *) alloca (num_actuals
2291 * sizeof (struct arg_data));
2292 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
2293
 2294       /* Build up entries in the ARGS array, compute the size of the arguments
2295 into ARGS_SIZE, etc. */
2296 initialize_argument_information (num_actuals, args, &args_size,
2297 n_named_args, actparms, fndecl,
2298 &args_so_far, reg_parm_stack_space,
2299 &old_stack_level, &old_pending_adj,
7d167afd
JJ
2300 &must_preallocate, &is_const,
2301 (pass == 0) ? ECF_SIBCALL : 0);
51bbfa0c 2302
6f90e075 2303#ifdef FINAL_REG_PARM_STACK_SPACE
0a1c58a2
JL
2304 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2305 args_size.var);
6f90e075
JW
2306#endif
2307
0a1c58a2
JL
2308 if (args_size.var)
2309 {
2310 /* If this function requires a variable-sized argument list, don't
2311 try to make a cse'able block for this call. We may be able to
2312 do this eventually, but it is too complicated to keep track of
2313 what insns go in the cse'able block and which don't.
51bbfa0c 2314
0a1c58a2 2315 Also do not make a sibling call. */
e5e809f4 2316
0a1c58a2
JL
2317 is_const = 0;
2318 must_preallocate = 1;
2319 sibcall_failure = 1;
2320 }
2321
7d167afd
JJ
2322 if (args_size.constant > current_function_args_size)
2323 {
2324 /* If this function requires more stack slots than the current
2325 function, we cannot change it into a sibling call. */
2326 sibcall_failure = 1;
2327 }
2328
0a1c58a2
JL
2329 /* Compute the actual size of the argument block required. The variable
2330 and constant sizes must be combined, the size may have to be rounded,
2331 and there may be a minimum required size. When generating a sibcall
2332 pattern, do not round up, since we'll be re-using whatever space our
2333 caller provided. */
2334 unadjusted_args_size
2335 = compute_argument_block_size (reg_parm_stack_space, &args_size,
2336 (pass == 0 ? 0
2337 : preferred_stack_boundary));
2338
2339 /* If the callee pops its own arguments, then it must pop exactly
2340 the same number of arguments as the current function. */
2341 if (RETURN_POPS_ARGS (fndecl, funtype, unadjusted_args_size)
2342 != RETURN_POPS_ARGS (current_function_decl,
2343 TREE_TYPE (current_function_decl),
2344 current_function_args_size))
2345 sibcall_failure = 1;
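      /* For instance (illustrative only): if RETURN_POPS_ARGS says the
	 callee pops 12 bytes of arguments while the current function's own
	 return pops a different amount, a sibling call would return straight
	 to our caller with the wrong stack adjustment, so the sibcall
	 attempt is abandoned here instead.  */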
2346
2347 /* Now make final decision about preallocating stack space. */
2348 must_preallocate = finalize_must_preallocate (must_preallocate,
2349 num_actuals, args,
2350 &args_size);
2351
2352 /* If the structure value address will reference the stack pointer, we
2353 must stabilize it. We don't need to do this if we know that we are
2354 not going to adjust the stack pointer in processing this call. */
2355
2356 if (structure_value_addr
2357 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2358 || reg_mentioned_p (virtual_outgoing_args_rtx,
2359 structure_value_addr))
2360 && (args_size.var
51bbfa0c 2361#ifndef ACCUMULATE_OUTGOING_ARGS
0a1c58a2 2362 || args_size.constant
51bbfa0c 2363#endif
0a1c58a2
JL
2364 ))
2365 structure_value_addr = copy_to_reg (structure_value_addr);
51bbfa0c 2366
0a1c58a2
JL
2367 /* Precompute any arguments as needed. */
2368 precompute_arguments (is_const, must_preallocate, num_actuals,
2369 args, &args_size);
51bbfa0c 2370
0a1c58a2
JL
2371 /* Now we are about to start emitting insns that can be deleted
2372 if a libcall is deleted. */
2373 if (is_const || is_malloc)
2374 start_sequence ();
51bbfa0c 2375
0a1c58a2
JL
2376 /* If we have no actual push instructions, or shouldn't use them,
2377 make space for all args right now. */
51bbfa0c 2378
0a1c58a2 2379 if (args_size.var != 0)
51bbfa0c 2380 {
0a1c58a2
JL
2381 if (old_stack_level == 0)
2382 {
2383 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2384 old_pending_adj = pending_stack_adjust;
2385 pending_stack_adjust = 0;
d64f5a78 2386#ifdef ACCUMULATE_OUTGOING_ARGS
0a1c58a2
JL
2387 /* stack_arg_under_construction says whether a stack arg is
2388 being constructed at the old stack level. Pushing the stack
2389 gets a clean outgoing argument block. */
2390 old_stack_arg_under_construction = stack_arg_under_construction;
2391 stack_arg_under_construction = 0;
d64f5a78 2392#endif
0a1c58a2
JL
2393 }
2394 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
51bbfa0c 2395 }
0a1c58a2
JL
2396 else
2397 {
2398 /* Note that we must go through the motions of allocating an argument
2399 block even if the size is zero because we may be storing args
2400 in the area reserved for register arguments, which may be part of
2401 the stack frame. */
26a258fe 2402
0a1c58a2 2403 int needed = args_size.constant;
51bbfa0c 2404
0a1c58a2
JL
2405 /* Store the maximum argument space used. It will be pushed by
2406 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2407 checking). */
51bbfa0c 2408
0a1c58a2
JL
2409 if (needed > current_function_outgoing_args_size)
2410 current_function_outgoing_args_size = needed;
51bbfa0c 2411
0a1c58a2
JL
2412 if (must_preallocate)
2413 {
26a258fe 2414#ifdef ACCUMULATE_OUTGOING_ARGS
0a1c58a2
JL
2415 /* Since the stack pointer will never be pushed, it is possible
2416 for the evaluation of a parm to clobber something we have
2417 already written to the stack. Since most function calls on
2418 RISC machines do not use the stack, this is uncommon, but
2419 must work correctly.
26a258fe 2420
0a1c58a2
JL
2421 Therefore, we save any area of the stack that was already
2422 written and that we are using. Here we set up to do this by
2423 making a new stack usage map from the old one. The actual
2424 save will be done by store_one_arg.
26a258fe 2425
0a1c58a2
JL
2426 Another approach might be to try to reorder the argument
2427 evaluations to avoid this conflicting stack usage. */
26a258fe 2428
e5e809f4 2429#ifndef OUTGOING_REG_PARM_STACK_SPACE
0a1c58a2
JL
2430 /* Since we will be writing into the entire argument area, the
2431 map must be allocated for its entire size, not just the part
2432 that is the responsibility of the caller. */
2433 needed += reg_parm_stack_space;
51bbfa0c
RS
2434#endif
2435
2436#ifdef ARGS_GROW_DOWNWARD
0a1c58a2
JL
2437 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2438 needed + 1);
51bbfa0c 2439#else
0a1c58a2
JL
2440 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2441 needed);
51bbfa0c 2442#endif
0a1c58a2 2443 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
51bbfa0c 2444
0a1c58a2
JL
2445 if (initial_highest_arg_in_use)
2446 bcopy (initial_stack_usage_map, stack_usage_map,
2447 initial_highest_arg_in_use);
51bbfa0c 2448
0a1c58a2
JL
2449 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2450 bzero (&stack_usage_map[initial_highest_arg_in_use],
2451 (highest_outgoing_arg_in_use
2452 - initial_highest_arg_in_use));
2453 needed = 0;
2f4aa534 2454
0a1c58a2
JL
2455 /* The address of the outgoing argument list must not be copied
2456 to a register here, because argblock would be left pointing
2457 to the wrong place after the call to
2458 allocate_dynamic_stack_space below. */
2f4aa534 2459
0a1c58a2 2460 argblock = virtual_outgoing_args_rtx;
2f4aa534 2461
51bbfa0c 2462#else /* not ACCUMULATE_OUTGOING_ARGS */
0a1c58a2 2463 if (inhibit_defer_pop == 0)
26a258fe 2464 {
0a1c58a2
JL
2465 /* Try to reuse some or all of the pending_stack_adjust
2466 to get this space. Maybe we can avoid any pushing. */
2467 if (needed > pending_stack_adjust)
2468 {
2469 needed -= pending_stack_adjust;
2470 pending_stack_adjust = 0;
2471 }
2472 else
2473 {
2474 pending_stack_adjust -= needed;
2475 needed = 0;
2476 }
26a258fe 2477 }
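	      /* Worked example (illustrative only): if 24 bytes are needed
		 and 32 bytes of pops are pending, the pending adjustment
		 simply drops to 8 and nothing new is pushed; if only 16
		 bytes were pending, the pending adjustment is cleared and
		 the remaining 8 bytes come from push_block below.  */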
0a1c58a2
JL
2478 /* Special case this because overhead of `push_block' in this
2479 case is non-trivial. */
2480 if (needed == 0)
2481 argblock = virtual_outgoing_args_rtx;
26a258fe 2482 else
0a1c58a2
JL
2483 argblock = push_block (GEN_INT (needed), 0, 0);
2484
2485 /* We only really need to call `copy_to_reg' in the case where
2486 push insns are going to be used to pass ARGBLOCK to a function
2487 call in ARGS. In that case, the stack pointer changes value
2488 from the allocation point to the call point, and hence
2489 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2490 But might as well always do it. */
2491 argblock = copy_to_reg (argblock);
51bbfa0c 2492#endif /* not ACCUMULATE_OUTGOING_ARGS */
0a1c58a2
JL
2493 }
2494 }
2495
2496 /* The argument block when performing a sibling call is the
2497 incoming argument block. */
2498 if (pass == 0)
2499 {
2500 rtx temp = plus_constant (arg_pointer_rtx,
2501 FIRST_PARM_OFFSET (current_function_decl));
2502 argblock = force_reg (Pmode, force_operand (temp, NULL_RTX));
26a258fe 2503 }
51bbfa0c 2504
bfbf933a 2505#ifdef ACCUMULATE_OUTGOING_ARGS
0a1c58a2
JL
2506 /* The save/restore code in store_one_arg handles all cases except one:
2507 a constructor call (including a C function returning a BLKmode struct)
2508 to initialize an argument. */
2509 if (stack_arg_under_construction)
2510 {
e5e809f4 2511#ifndef OUTGOING_REG_PARM_STACK_SPACE
0a1c58a2 2512 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
bfbf933a 2513#else
0a1c58a2 2514 rtx push_size = GEN_INT (args_size.constant);
bfbf933a 2515#endif
0a1c58a2
JL
2516 if (old_stack_level == 0)
2517 {
2518 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2519 old_pending_adj = pending_stack_adjust;
2520 pending_stack_adjust = 0;
2521 /* stack_arg_under_construction says whether a stack arg is
2522 being constructed at the old stack level. Pushing the stack
2523 gets a clean outgoing argument block. */
2524 old_stack_arg_under_construction = stack_arg_under_construction;
2525 stack_arg_under_construction = 0;
2526 /* Make a new map for the new argument list. */
2527 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2528 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2529 highest_outgoing_arg_in_use = 0;
2530 }
2531 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
bfbf933a 2532 }
0a1c58a2
JL
2533 /* If argument evaluation might modify the stack pointer, copy the
2534 address of the argument list to a register. */
2535 for (i = 0; i < num_actuals; i++)
2536 if (args[i].pass_on_stack)
2537 {
2538 argblock = copy_addr_to_reg (argblock);
2539 break;
2540 }
bfbf933a
RS
2541#endif
2542
0a1c58a2 2543 compute_argument_addresses (args, argblock, num_actuals);
bfbf933a 2544
51bbfa0c 2545#ifdef PUSH_ARGS_REVERSED
c795bca9 2546#ifdef PREFERRED_STACK_BOUNDARY
0a1c58a2
JL
2547 /* If we push args individually in reverse order, perform stack alignment
2548 before the first push (the last arg). */
2549 if (args_size.constant != unadjusted_args_size)
4e217aed 2550 {
0a1c58a2
JL
2551 /* When the stack adjustment is pending, we get better code
2552 by combining the adjustments. */
2553 if (pending_stack_adjust && ! is_const
2554 && ! inhibit_defer_pop)
2555 {
2556 args_size.constant = (unadjusted_args_size
2557 + ((pending_stack_adjust
2558 + args_size.constant
2559 + arg_space_so_far
2560 - unadjusted_args_size)
2561 % (preferred_stack_boundary
2562 / BITS_PER_UNIT)));
2563 pending_stack_adjust -= (args_size.constant
2564 - unadjusted_args_size);
2565 do_pending_stack_adjust ();
2566 }
2567 else if (argblock == 0)
2568 anti_adjust_stack (GEN_INT (args_size.constant
2569 - unadjusted_args_size));
2570 arg_space_so_far += args_size.constant - unadjusted_args_size;
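	  /* Worked example (illustrative only): with pending_stack_adjust
	     == 12, arg_space_so_far == 0, unadjusted_args_size == 20, an
	     already-rounded args_size.constant of 32 and a 16-byte preferred
	     boundary, the modulo term is (12 + 32 + 0 - 20) % 16 == 8, so
	     args_size.constant becomes 28 and pending_stack_adjust drops to
	     4; the eight not-yet-popped bytes are reused as alignment
	     padding rather than being popped and pushed again.  */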
c2732da3 2571
0a1c58a2
JL
2572 /* Now that the stack is properly aligned, pops can't safely
2573 be deferred during the evaluation of the arguments. */
2574 NO_DEFER_POP;
2575 }
51bbfa0c
RS
2576#endif
2577#endif
2578
0a1c58a2
JL
2579 /* Don't try to defer pops if preallocating, not even from the first arg,
2580 since ARGBLOCK probably refers to the SP. */
2581 if (argblock)
2582 NO_DEFER_POP;
51bbfa0c 2583
0a1c58a2 2584 funexp = rtx_for_function_call (fndecl, exp);
51bbfa0c 2585
0a1c58a2
JL
2586 /* Figure out the register where the value, if any, will come back. */
2587 valreg = 0;
2588 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2589 && ! structure_value_addr)
2590 {
2591 if (pcc_struct_value)
2592 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
7d167afd 2593 fndecl, (pass == 0));
0a1c58a2 2594 else
7d167afd 2595 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
0a1c58a2 2596 }
51bbfa0c 2597
0a1c58a2
JL
2598 /* Precompute all register parameters. It isn't safe to compute anything
2599 once we have started filling any specific hard regs. */
2600 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
51bbfa0c
RS
2601
2602#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
0a1c58a2
JL
2603 /* Save the fixed argument area if it's part of the caller's frame and
2604 is clobbered by argument setup for this call. */
2605 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2606 &low_to_save, &high_to_save);
b94301c2 2607#endif
51bbfa0c 2608
0a1c58a2
JL
2609 /* Now store (and compute if necessary) all non-register parms.
2610 These come before register parms, since they can require block-moves,
2611 which could clobber the registers used for register parms.
2612 Parms which have partial registers are not stored here,
2613 but we do preallocate space here if they want that. */
51bbfa0c 2614
0a1c58a2
JL
2615 for (i = 0; i < num_actuals; i++)
2616 if (args[i].reg == 0 || args[i].pass_on_stack)
2617 store_one_arg (&args[i], argblock, may_be_alloca,
2618 args_size.var != 0, reg_parm_stack_space);
2619
2620 /* If we have a parm that is passed in registers but not in memory
2621 and whose alignment does not permit a direct copy into registers,
2622 make a group of pseudos that correspond to each register that we
2623 will later fill. */
2624 if (STRICT_ALIGNMENT)
2625 store_unaligned_arguments_into_pseudos (args, num_actuals);
2626
2627 /* Now store any partially-in-registers parm.
2628 This is the last place a block-move can happen. */
2629 if (reg_parm_seen)
2630 for (i = 0; i < num_actuals; i++)
2631 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2632 store_one_arg (&args[i], argblock, may_be_alloca,
2633 args_size.var != 0, reg_parm_stack_space);
51bbfa0c
RS
2634
2635#ifndef PUSH_ARGS_REVERSED
c795bca9 2636#ifdef PREFERRED_STACK_BOUNDARY
0a1c58a2
JL
2637 /* If we pushed args in forward order, perform stack alignment
2638 after pushing the last arg. */
2639 /* ??? Fix for arg_space_so_far. */
2640 if (argblock == 0)
2641 anti_adjust_stack (GEN_INT (args_size.constant
2642 - unadjusted_args_size));
51bbfa0c
RS
2643#endif
2644#endif
2645
0a1c58a2
JL
2646 /* If register arguments require space on the stack and stack space
2647 was not preallocated, allocate stack space here for arguments
2648 passed in registers. */
6e716e89 2649#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
0a1c58a2
JL
2650 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2651 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
2652#endif
2653
0a1c58a2
JL
2654 /* Pass the function the address in which to return a
2655 structure value. */
2656 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2657 {
2658 emit_move_insn (struct_value_rtx,
2659 force_reg (Pmode,
2660 force_operand (structure_value_addr,
2661 NULL_RTX)));
2662
2663 /* Mark the memory for the aggregate as write-only. */
2664 if (current_function_check_memory_usage)
2665 emit_library_call (chkr_set_right_libfunc, 1,
2666 VOIDmode, 3,
2667 structure_value_addr, ptr_mode,
2668 GEN_INT (struct_value_size),
2669 TYPE_MODE (sizetype),
2670 GEN_INT (MEMORY_USE_WO),
2671 TYPE_MODE (integer_type_node));
2672
2673 if (GET_CODE (struct_value_rtx) == REG)
2674 use_reg (&call_fusage, struct_value_rtx);
2675 }
c2939b57 2676
0a1c58a2
JL
2677 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
2678 reg_parm_seen);
51bbfa0c 2679
0a1c58a2
JL
2680 load_register_parameters (args, num_actuals, &call_fusage);
2681
2682 /* Perform postincrements before actually calling the function. */
2683 emit_queue ();
51bbfa0c 2684
0a1c58a2
JL
2685 /* Save a pointer to the last insn before the call, so that we can
2686 later safely search backwards to find the CALL_INSN. */
2687 before_call = get_last_insn ();
51bbfa0c 2688
7d167afd
JJ
2689 /* Set up next argument register. For sibling calls on machines
2690 with register windows this should be the incoming register. */
2691#ifdef FUNCTION_INCOMING_ARG
2692 if (pass == 0)
2693 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2694 void_type_node, 1);
2695 else
2696#endif
2697 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2698 void_type_node, 1);
2699
0a1c58a2
JL
2700 /* All arguments and registers used for the call must be set up by
2701 now! */
2702
2703 /* Generate the actual call instruction. */
2704 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2705 args_size.constant, struct_value_size,
7d167afd 2706 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
0a1c58a2
JL
2707 ((is_const ? ECF_IS_CONST : 0)
2708 | (nothrow ? ECF_NOTHROW : 0)
2709 | (pass == 0 ? ECF_SIBCALL : 0)));
2710
2711 /* If call is cse'able, make appropriate pair of reg-notes around it.
2712 Test valreg so we don't crash; may safely ignore `const'
2713 if return type is void. Disable for PARALLEL return values, because
2714 we have no way to move such values into a pseudo register. */
2715 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
9ae8ffe7 2716 {
0a1c58a2
JL
2717 rtx note = 0;
2718 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2719 rtx insns;
9ae8ffe7 2720
0a1c58a2
JL
2721 /* Mark the return value as a pointer if needed. */
2722 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2723 {
2724 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2725 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2726 }
2727
2728 /* Construct an "equal form" for the value which mentions all the
2729 arguments in order as well as the function name. */
51bbfa0c 2730#ifdef PUSH_ARGS_REVERSED
0a1c58a2
JL
2731 for (i = 0; i < num_actuals; i++)
2732 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c 2733#else
0a1c58a2
JL
2734 for (i = num_actuals - 1; i >= 0; i--)
2735 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c 2736#endif
0a1c58a2 2737 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
9ae8ffe7 2738
0a1c58a2
JL
2739 insns = get_insns ();
2740 end_sequence ();
9ae8ffe7 2741
0a1c58a2
JL
2742 emit_libcall_block (insns, temp, valreg, note);
2743
2744 valreg = temp;
2745 }
2746 else if (is_const)
2747 {
2748 /* Otherwise, just write out the sequence without a note. */
2749 rtx insns = get_insns ();
9ae8ffe7 2750
0a1c58a2
JL
2751 end_sequence ();
2752 emit_insns (insns);
2753 }
2754 else if (is_malloc)
2755 {
2756 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2757 rtx last, insns;
2758
2759 /* The return value from a malloc-like function is a pointer. */
2760 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2761 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2762
2763 emit_move_insn (temp, valreg);
2764
2765 /* The return value from a malloc-like function can not alias
2766 anything else. */
2767 last = get_last_insn ();
2768 REG_NOTES (last) =
2769 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2770
2771 /* Write out the sequence. */
2772 insns = get_insns ();
2773 end_sequence ();
2774 emit_insns (insns);
2775 valreg = temp;
2776 }
51bbfa0c 2777
0a1c58a2
JL
2778 /* For calls to `setjmp', etc., inform flow.c it should complain
2779 if nonvolatile values are live. For functions that cannot return,
2780 inform flow that control does not fall through. */
51bbfa0c 2781
0a1c58a2 2782 if (returns_twice || is_volatile || is_longjmp || pass == 0)
c2939b57 2783 {
0a1c58a2
JL
2784 /* The barrier or NOTE_INSN_SETJMP note must be emitted
2785 immediately after the CALL_INSN. Some ports emit more
2786 than just a CALL_INSN above, so we must search for it here. */
51bbfa0c 2787
0a1c58a2
JL
2788 rtx last = get_last_insn ();
2789 while (GET_CODE (last) != CALL_INSN)
2790 {
2791 last = PREV_INSN (last);
2792 /* There was no CALL_INSN? */
2793 if (last == before_call)
2794 abort ();
2795 }
51bbfa0c 2796
0a1c58a2
JL
2797 if (returns_twice)
2798 {
2799 emit_note_after (NOTE_INSN_SETJMP, last);
2800 current_function_calls_setjmp = 1;
2801 sibcall_failure = 1;
2802 }
2803 else
2804 emit_barrier_after (last);
2805 }
51bbfa0c 2806
0a1c58a2
JL
2807 if (is_longjmp)
2808 current_function_calls_longjmp = 1, sibcall_failure = 1;
51bbfa0c 2809
25a1fcb4
RK
2810 /* If this function is returning into a memory location marked as
2811 readonly, it means it is initializing that location. But we normally
2812 treat functions as not clobbering such locations, so we need to
2813 specify that this one does. */
2814 if (target != 0 && GET_CODE (target) == MEM
2815 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
2816 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
2817
0a1c58a2 2818 /* If value type not void, return an rtx for the value. */
51bbfa0c 2819
0a1c58a2
JL
2820 /* If there are cleanups to be called, don't use a hard reg as target.
2821 We need to double check this and see if it matters anymore. */
2822 if (any_pending_cleanups (1)
2823 && target && REG_P (target)
2824 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2825 target = 0, sibcall_failure = 1;
51bbfa0c 2826
0a1c58a2
JL
2827 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2828 || ignore)
29008b51 2829 {
0a1c58a2 2830 target = const0_rtx;
29008b51 2831 }
0a1c58a2
JL
2832 else if (structure_value_addr)
2833 {
2834 if (target == 0 || GET_CODE (target) != MEM)
2835 {
2836 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2837 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2838 structure_value_addr));
2839 MEM_SET_IN_STRUCT_P (target,
2840 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2841 }
2842 }
2843 else if (pcc_struct_value)
cacbd532 2844 {
0a1c58a2
JL
2845 /* This is the special C++ case where we need to
2846 know what the true target was. We take care to
2847 never use this value more than once in one expression. */
2848 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2849 copy_to_reg (valreg));
c6df88cb 2850 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
cacbd532 2851 }
0a1c58a2
JL
2852 /* Handle calls that return values in multiple non-contiguous locations.
2853 The Irix 6 ABI has examples of this. */
2854 else if (GET_CODE (valreg) == PARALLEL)
2855 {
2856 int bytes = int_size_in_bytes (TREE_TYPE (exp));
cacbd532 2857
0a1c58a2
JL
2858 if (target == 0)
2859 {
2860 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)),
2861 bytes, 0);
2862 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2863 preserve_temp_slots (target);
2864 }
2865
2866 if (! rtx_equal_p (target, valreg))
2867 emit_group_store (target, valreg, bytes,
2868 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2869 /* We can not support sibling calls for this case. */
2870 sibcall_failure = 1;
2871 }
2872 else if (target
2873 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2874 && GET_MODE (target) == GET_MODE (valreg))
2875 {
2876 /* TARGET and VALREG cannot be equal at this point because the
2877 latter would not have REG_FUNCTION_VALUE_P true, while the
2878 former would if it were referring to the same register.
2879
2880 If they refer to the same register, this move will be a no-op,
2881 except when function inlining is being done. */
2882 emit_move_insn (target, valreg);
2883 }
2884 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2885 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2886 else
2887 target = copy_to_reg (valreg);
51bbfa0c 2888
84b55618 2889#ifdef PROMOTE_FUNCTION_RETURN
0a1c58a2
JL
2890 /* If we promoted this return value, make the proper SUBREG. TARGET
2891 might be const0_rtx here, so be careful. */
2892 if (GET_CODE (target) == REG
2893 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2894 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2895 {
2896 tree type = TREE_TYPE (exp);
2897 int unsignedp = TREE_UNSIGNED (type);
84b55618 2898
0a1c58a2
JL
2899 /* If we don't promote as expected, something is wrong. */
2900 if (GET_MODE (target)
2901 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2902 abort ();
5d2ac65e 2903
0a1c58a2
JL
2904 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2905 SUBREG_PROMOTED_VAR_P (target) = 1;
2906 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2907 }
84b55618
RK
2908#endif
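      /* Illustrative example: if a `short' return value was promoted to
	 SImode by the target, TARGET is an SImode REG here and the code
	 above rewrites it as (subreg:HI (reg:SI ...) 0) with
	 SUBREG_PROMOTED_VAR_P set, recording whether the promotion was
	 sign- or zero-extended.  */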
2909
0a1c58a2
JL
2910 /* If size of args is variable or this was a constructor call for a stack
2911 argument, restore saved stack-pointer value. */
51bbfa0c 2912
0a1c58a2
JL
2913 if (old_stack_level)
2914 {
2915 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2916 pending_stack_adjust = old_pending_adj;
d64f5a78 2917#ifdef ACCUMULATE_OUTGOING_ARGS
0a1c58a2
JL
2918 stack_arg_under_construction = old_stack_arg_under_construction;
2919 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2920 stack_usage_map = initial_stack_usage_map;
d64f5a78 2921#endif
0a1c58a2
JL
2922 sibcall_failure = 1;
2923 }
51bbfa0c 2924#ifdef ACCUMULATE_OUTGOING_ARGS
0a1c58a2
JL
2925 else
2926 {
51bbfa0c 2927#ifdef REG_PARM_STACK_SPACE
0a1c58a2
JL
2928 if (save_area)
2929 {
2930 restore_fixed_argument_area (save_area, argblock,
2931 high_to_save, low_to_save);
2932 sibcall_failure = 1;
2933 }
b94301c2 2934#endif
51bbfa0c 2935
0a1c58a2
JL
2936 /* If we saved any argument areas, restore them. */
2937 for (i = 0; i < num_actuals; i++)
2938 if (args[i].save_area)
2939 {
2940 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2941 rtx stack_area
2942 = gen_rtx_MEM (save_mode,
2943 memory_address (save_mode,
2944 XEXP (args[i].stack_slot, 0)));
2945
2946 if (save_mode != BLKmode)
2947 emit_move_insn (stack_area, args[i].save_area);
2948 else
2949 emit_block_move (stack_area,
2950 validize_mem (args[i].save_area),
2951 GEN_INT (args[i].size.constant),
2952 PARM_BOUNDARY / BITS_PER_UNIT);
2953 sibcall_failure = 1;
2954 }
51bbfa0c 2955
0a1c58a2
JL
2956 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2957 stack_usage_map = initial_stack_usage_map;
2958 }
51bbfa0c
RS
2959#endif
2960
0a1c58a2
JL
2961 /* If this was alloca, record the new stack level for nonlocal gotos.
2962 Check for the handler slots since we might not have a save area
2963 for non-local gotos. */
59257ff7 2964
0a1c58a2
JL
2965 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2966 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c 2967
0a1c58a2
JL
2968 pop_temp_slots ();
2969
2970 /* Free up storage we no longer need. */
2971 for (i = 0; i < num_actuals; ++i)
2972 if (args[i].aligned_regs)
2973 free (args[i].aligned_regs);
2974
e245d3af
RH
2975 if (pass == 0)
2976 {
2977 /* Undo the fake expand_start_target_temps we did earlier. If
2978 there had been any cleanups created, we've already set
2979 sibcall_failure. */
2980 expand_end_target_temps ();
2981 }
2982
0a1c58a2
JL
2983 insns = get_insns ();
2984 end_sequence ();
2985
2986 if (pass == 0)
2987 {
2988 tail_call_insns = insns;
2989
7d167afd
JJ
2990 /* If something prevents making this a sibling call,
2991 zero out the sequence. */
2992 if (sibcall_failure)
0a1c58a2
JL
2993 tail_call_insns = NULL_RTX;
2994
2995 /* Restore the pending stack adjustment now that we have
2996 finished generating the sibling call sequence. */
2997 pending_stack_adjust = save_pending_stack_adjust;
2998 }
2999 else
3000 normal_call_insns = insns;
3001 }
3002
3003 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3004 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3005 can happen if the arguments to this function call an inline
 3006      function whose expansion contains another CALL_PLACEHOLDER.
3007
3008 If there are any C_Ps in any of these sequences, replace them
3009 with their normal call. */
3010
3011 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3012 if (GET_CODE (insn) == CALL_INSN
3013 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3014 replace_call_placeholder (insn, sibcall_use_normal);
3015
3016 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3017 if (GET_CODE (insn) == CALL_INSN
3018 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3019 replace_call_placeholder (insn, sibcall_use_normal);
3020
3021 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3022 if (GET_CODE (insn) == CALL_INSN
3023 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3024 replace_call_placeholder (insn, sibcall_use_normal);
3025
3026 /* If this was a potential tail recursion site, then emit a
3027 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3028 One of them will be selected later. */
3029 if (tail_recursion_insns || tail_call_insns)
3030 {
3031 /* The tail recursion label must be kept around. We could expose
3032 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3033 and makes determining true tail recursion sites difficult.
3034
3035 So we set LABEL_PRESERVE_P here, then clear it when we select
3036 one of the call sequences after rtl generation is complete. */
3037 if (tail_recursion_insns)
3038 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3039 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3040 tail_call_insns,
3041 tail_recursion_insns,
3042 tail_recursion_label));
3043 }
3044 else
3045 emit_insns (normal_call_insns);
51bbfa0c 3046
0a1c58a2 3047 currently_expanding_call--;
8e6a59fe 3048
51bbfa0c
RS
3049 return target;
3050}
3051\f
12a22e76
JM
3052/* Returns nonzero if FUN is the symbol for a library function which can
3053 not throw. */
3054
3055static int
3056libfunc_nothrow (fun)
3057 rtx fun;
3058{
3059 if (fun == throw_libfunc
3060 || fun == rethrow_libfunc
3061 || fun == sjthrow_libfunc
3062 || fun == sjpopnthrow_libfunc)
3063 return 0;
3064
3065 return 1;
3066}
43bc5f13 3067\f
de76b467
JH
3068/* Output a library call to function FUN (a SYMBOL_REF rtx).
 3069    The RETVAL parameter specifies whether the return value needs to be saved;
 3070    other parameters are documented in the emit_library_call function below.  */
3071static rtx
3072emit_library_call_value_1 (retval, orgfun, value, no_queue, outmode, nargs, p)
3073 int retval;
3074 rtx orgfun;
3075 rtx value;
3076 int no_queue;
3077 enum machine_mode outmode;
3078 int nargs;
3079 va_list p;
43bc5f13 3080{
3c0fca12
RH
3081 /* Total size in bytes of all the stack-parms scanned so far. */
3082 struct args_size args_size;
3083 /* Size of arguments before any adjustments (such as rounding). */
3084 struct args_size original_args_size;
3085 register int argnum;
3086 rtx fun;
3087 int inc;
3088 int count;
3089 struct args_size alignment_pad;
3090 rtx argblock = 0;
3091 CUMULATIVE_ARGS args_so_far;
3092 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3093 struct args_size offset; struct args_size size; rtx save_area; };
3094 struct arg *argvec;
3095 int old_inhibit_defer_pop = inhibit_defer_pop;
3096 rtx call_fusage = 0;
3097 rtx mem_value = 0;
3098 int pcc_struct_value = 0;
3099 int struct_value_size = 0;
3100 int is_const;
3101 int reg_parm_stack_space = 0;
3102 int nothrow;
3103#ifdef ACCUMULATE_OUTGOING_ARGS
3104 int needed;
3105#endif
3106
3107#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3108 /* Define the boundary of the register parm stack space that needs to be
3109 saved, if any. */
3110 int low_to_save = -1, high_to_save = 0;
3111 rtx save_area = 0; /* Place that it is saved */
3112#endif
3113
3114#ifdef ACCUMULATE_OUTGOING_ARGS
3115 /* Size of the stack reserved for parameter registers. */
3116 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3117 char *initial_stack_usage_map = stack_usage_map;
3118#endif
3119
3120#ifdef REG_PARM_STACK_SPACE
3121#ifdef MAYBE_REG_PARM_STACK_SPACE
3122 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3123#else
3124 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3125#endif
3126#endif
3127
3c0fca12
RH
3128 is_const = no_queue;
3129 fun = orgfun;
3130
3131 nothrow = libfunc_nothrow (fun);
3132
3133#ifdef PREFERRED_STACK_BOUNDARY
3134 /* Ensure current function's preferred stack boundary is at least
3135 what we need. */
3136 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3137 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3138#endif
3139
3140 /* If this kind of value comes back in memory,
3141 decide where in memory it should come back. */
de76b467 3142 if (outmode != VOIDmode && aggregate_value_p (type_for_mode (outmode, 0)))
3c0fca12
RH
3143 {
3144#ifdef PCC_STATIC_STRUCT_RETURN
3145 rtx pointer_reg
3146 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3147 0, 0);
3148 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3149 pcc_struct_value = 1;
3150 if (value == 0)
3151 value = gen_reg_rtx (outmode);
3152#else /* not PCC_STATIC_STRUCT_RETURN */
3153 struct_value_size = GET_MODE_SIZE (outmode);
3154 if (value != 0 && GET_CODE (value) == MEM)
3155 mem_value = value;
3156 else
3157 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3158#endif
3159
3160 /* This call returns a big structure. */
3161 is_const = 0;
3162 }
3163
3164 /* ??? Unfinished: must pass the memory address as an argument. */
3165
3166 /* Copy all the libcall-arguments out of the varargs data
3167 and into a vector ARGVEC.
3168
3169 Compute how to pass each argument. We only support a very small subset
3170 of the full argument passing conventions to limit complexity here since
3171 library functions shouldn't have many args. */
3172
3173 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3174 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3175
3176 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3177
3178 args_size.constant = 0;
3179 args_size.var = 0;
3180
3181 count = 0;
3182
3183 push_temp_slots ();
3184
3185 /* If there's a structure value address to be passed,
3186 either pass it in the special place, or pass it as an extra argument. */
3187 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3188 {
3189 rtx addr = XEXP (mem_value, 0);
3190 nargs++;
3191
3192 /* Make sure it is a reasonable operand for a move or push insn. */
3193 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3194 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3195 addr = force_operand (addr, NULL_RTX);
3196
3197 argvec[count].value = addr;
3198 argvec[count].mode = Pmode;
3199 argvec[count].partial = 0;
3200
3201 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3202#ifdef FUNCTION_ARG_PARTIAL_NREGS
3203 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3204 abort ();
3205#endif
3206
3207 locate_and_pad_parm (Pmode, NULL_TREE,
3208 argvec[count].reg && argvec[count].partial == 0,
3209 NULL_TREE, &args_size, &argvec[count].offset,
3210 &argvec[count].size, &alignment_pad);
3211
3212
3213 if (argvec[count].reg == 0 || argvec[count].partial != 0
3214 || reg_parm_stack_space > 0)
3215 args_size.constant += argvec[count].size.constant;
3216
3217 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3218
3219 count++;
3220 }
3221
3222 for (; count < nargs; count++)
3223 {
3224 rtx val = va_arg (p, rtx);
3225 enum machine_mode mode = va_arg (p, enum machine_mode);
3226
3227 /* We cannot convert the arg value to the mode the library wants here;
3228 must do it earlier where we know the signedness of the arg. */
3229 if (mode == BLKmode
3230 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3231 abort ();
3232
3233 /* On some machines, there's no way to pass a float to a library fcn.
3234 Pass it as a double instead. */
3235#ifdef LIBGCC_NEEDS_DOUBLE
3236 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3237 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3238#endif
3239
3240 /* There's no need to call protect_from_queue, because
3241 either emit_move_insn or emit_push_insn will do that. */
3242
3243 /* Make sure it is a reasonable operand for a move or push insn. */
3244 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3245 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3246 val = force_operand (val, NULL_RTX);
3247
3248#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3249 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3250 {
3251 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3252 be viewed as just an efficiency improvement. */
3253 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3254 emit_move_insn (slot, val);
de76b467 3255 val = force_operand (XEXP (slot, 0), NULL_RTX);
3c0fca12
RH
3256 mode = Pmode;
3257 }
3258#endif
3259
3260 argvec[count].value = val;
3261 argvec[count].mode = mode;
3262
3263 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3264
3265#ifdef FUNCTION_ARG_PARTIAL_NREGS
3266 argvec[count].partial
3267 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3268#else
3269 argvec[count].partial = 0;
3270#endif
3271
3272 locate_and_pad_parm (mode, NULL_TREE,
3273 argvec[count].reg && argvec[count].partial == 0,
3274 NULL_TREE, &args_size, &argvec[count].offset,
3275 &argvec[count].size, &alignment_pad);
3276
3277 if (argvec[count].size.var)
3278 abort ();
3279
3280 if (reg_parm_stack_space == 0 && argvec[count].partial)
3281 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3282
3283 if (argvec[count].reg == 0 || argvec[count].partial != 0
3284 || reg_parm_stack_space > 0)
3285 args_size.constant += argvec[count].size.constant;
3286
3287 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3288 }
3c0fca12
RH
3289
3290#ifdef FINAL_REG_PARM_STACK_SPACE
3291 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3292 args_size.var);
3293#endif
3294 /* If this machine requires an external definition for library
3295 functions, write one out. */
3296 assemble_external_libcall (fun);
3297
3298 original_args_size = args_size;
3299#ifdef PREFERRED_STACK_BOUNDARY
3300 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3301 / STACK_BYTES) * STACK_BYTES);
3302#endif
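  /* For example, with a 64-bit PREFERRED_STACK_BOUNDARY, STACK_BYTES is 8
     and a 20-byte argument block is rounded up to 24 bytes here.  */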
3303
3304 args_size.constant = MAX (args_size.constant,
3305 reg_parm_stack_space);
3306
3307#ifndef OUTGOING_REG_PARM_STACK_SPACE
3308 args_size.constant -= reg_parm_stack_space;
3309#endif
3310
3311 if (args_size.constant > current_function_outgoing_args_size)
3312 current_function_outgoing_args_size = args_size.constant;
3313
3314#ifdef ACCUMULATE_OUTGOING_ARGS
3315 /* Since the stack pointer will never be pushed, it is possible for
3316 the evaluation of a parm to clobber something we have already
3317 written to the stack. Since most function calls on RISC machines
3318 do not use the stack, this is uncommon, but must work correctly.
3319
3320 Therefore, we save any area of the stack that was already written
3321 and that we are using. Here we set up to do this by making a new
3322 stack usage map from the old one.
3323
3324 Another approach might be to try to reorder the argument
3325 evaluations to avoid this conflicting stack usage. */
3326
3327 needed = args_size.constant;
3328
3329#ifndef OUTGOING_REG_PARM_STACK_SPACE
3330 /* Since we will be writing into the entire argument area, the
3331 map must be allocated for its entire size, not just the part that
3332 is the responsibility of the caller. */
3333 needed += reg_parm_stack_space;
3334#endif
3335
3336#ifdef ARGS_GROW_DOWNWARD
3337 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3338 needed + 1);
3339#else
3340 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3341 needed);
3342#endif
3343 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3344
3345 if (initial_highest_arg_in_use)
3346 bcopy (initial_stack_usage_map, stack_usage_map,
3347 initial_highest_arg_in_use);
3348
3349 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3350 bzero (&stack_usage_map[initial_highest_arg_in_use],
3351 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3352 needed = 0;
3353
3354 /* The address of the outgoing argument list must not be copied to a
3355 register here, because argblock would be left pointing to the
3356 wrong place after the call to allocate_dynamic_stack_space below.
3357 */
3358
3359 argblock = virtual_outgoing_args_rtx;
3360#else /* not ACCUMULATE_OUTGOING_ARGS */
3361#ifndef PUSH_ROUNDING
3362 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3363#endif
3364#endif
3365
3366#ifdef PUSH_ARGS_REVERSED
3367#ifdef PREFERRED_STACK_BOUNDARY
3368 /* If we push args individually in reverse order, perform stack alignment
3369 before the first push (the last arg). */
3370 if (argblock == 0)
3371 anti_adjust_stack (GEN_INT (args_size.constant
3372 - original_args_size.constant));
3373#endif
3374#endif
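  /* Continuing that example: if the adjustments above grew args_size.constant
     from 20 to 24 bytes, anti_adjust_stack allocates the extra 4 bytes of
     padding here, before any argument is pushed.  */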
3375
3376#ifdef PUSH_ARGS_REVERSED
3377 inc = -1;
3378 argnum = nargs - 1;
3379#else
3380 inc = 1;
3381 argnum = 0;
3382#endif
3383
3384#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3385 /* The argument list is the property of the called routine and it
3386 may clobber it. If the fixed area has been used for previous
3387 parameters, we must save and restore it.
3388
3389 Here we compute the boundary of the area that needs to be saved, if any. */
3390
3391#ifdef ARGS_GROW_DOWNWARD
3392 for (count = 0; count < reg_parm_stack_space + 1; count++)
3393#else
3394 for (count = 0; count < reg_parm_stack_space; count++)
3395#endif
3396 {
3397 if (count >= highest_outgoing_arg_in_use
3398 || stack_usage_map[count] == 0)
3399 continue;
3400
3401 if (low_to_save == -1)
3402 low_to_save = count;
3403
3404 high_to_save = count;
3405 }
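  /* As an illustration: if only bytes 4 through 11 of the fixed register
     parm area are marked in stack_usage_map, the scan above leaves
     low_to_save == 4 and high_to_save == 11, and the code below saves those
     8 bytes either in a single integer register (when a suitably aligned
     integer mode exists) or with a BLKmode block move into a stack temp.  */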
3406
3407 if (low_to_save >= 0)
3408 {
3409 int num_to_save = high_to_save - low_to_save + 1;
3410 enum machine_mode save_mode
3411 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3412 rtx stack_area;
3413
3414 /* If we don't have the required alignment, must do this in BLKmode. */
3415 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3416 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3417 save_mode = BLKmode;
3418
3419#ifdef ARGS_GROW_DOWNWARD
3420 stack_area = gen_rtx_MEM (save_mode,
3421 memory_address (save_mode,
3422 plus_constant (argblock,
3423 - high_to_save)));
3424#else
3425 stack_area = gen_rtx_MEM (save_mode,
3426 memory_address (save_mode,
3427 plus_constant (argblock,
3428 low_to_save)));
3429#endif
3430 if (save_mode == BLKmode)
3431 {
3432 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3433 emit_block_move (validize_mem (save_area), stack_area,
3434 GEN_INT (num_to_save),
3435 PARM_BOUNDARY / BITS_PER_UNIT);
3436 }
3437 else
3438 {
3439 save_area = gen_reg_rtx (save_mode);
3440 emit_move_insn (save_area, stack_area);
3441 }
3442 }
3443#endif
3444
3445 /* Push the args that need to be pushed. */
3446
3447 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3448 are to be pushed. */
3449 for (count = 0; count < nargs; count++, argnum += inc)
3450 {
3451 register enum machine_mode mode = argvec[argnum].mode;
3452 register rtx val = argvec[argnum].value;
3453 rtx reg = argvec[argnum].reg;
3454 int partial = argvec[argnum].partial;
3455#ifdef ACCUMULATE_OUTGOING_ARGS
3456 int lower_bound, upper_bound, i;
3457#endif
3458
3459 if (! (reg != 0 && partial == 0))
3460 {
3461#ifdef ACCUMULATE_OUTGOING_ARGS
3462 /* If this is being stored into a pre-allocated, fixed-size, stack
3463 area, save any previous data at that location. */
3464
3465#ifdef ARGS_GROW_DOWNWARD
3466 /* stack_slot is negative, but we want to index stack_usage_map
3467 with positive values. */
3468 upper_bound = -argvec[argnum].offset.constant + 1;
3469 lower_bound = upper_bound - argvec[argnum].size.constant;
3470#else
3471 lower_bound = argvec[argnum].offset.constant;
3472 upper_bound = lower_bound + argvec[argnum].size.constant;
3473#endif
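	  /* For instance, on an ARGS_GROW_DOWNWARD target an argument at
	     offset -8 with a 4-byte size covers stack_usage_map indices 5
	     through 8 (upper_bound == 9); elsewhere an argument at offset 8
	     of the same size covers indices 8 through 11.  */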
3474
3475 for (i = lower_bound; i < upper_bound; i++)
3476 if (stack_usage_map[i]
3477 /* Don't store things in the fixed argument area at this point;
3478 it has already been saved. */
3479 && i > reg_parm_stack_space)
3480 break;
3481
3482 if (i != upper_bound)
3483 {
3484 /* We need to make a save area. See what mode we can make it. */
3485 enum machine_mode save_mode
3486 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3487 MODE_INT, 1);
3488 rtx stack_area
3489 = gen_rtx_MEM
3490 (save_mode,
3491 memory_address
3492 (save_mode,
3493 plus_constant (argblock,
3494 argvec[argnum].offset.constant)));
3495 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3496
3497 emit_move_insn (argvec[argnum].save_area, stack_area);
3498 }
3499#endif
3500 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3501 argblock, GEN_INT (argvec[argnum].offset.constant),
3502 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3503
3504#ifdef ACCUMULATE_OUTGOING_ARGS
3505 /* Now mark the segment we just used. */
3506 for (i = lower_bound; i < upper_bound; i++)
3507 stack_usage_map[i] = 1;
3508#endif
3509
3510 NO_DEFER_POP;
3511 }
3512 }
3513
3514#ifndef PUSH_ARGS_REVERSED
3515#ifdef PREFERRED_STACK_BOUNDARY
3516 /* If we pushed args in forward order, perform stack alignment
3517 after pushing the last arg. */
3518 if (argblock == 0)
3519 anti_adjust_stack (GEN_INT (args_size.constant
3520 - original_args_size.constant));
3521#endif
3522#endif
3523
3524#ifdef PUSH_ARGS_REVERSED
3525 argnum = nargs - 1;
3526#else
3527 argnum = 0;
3528#endif
3529
3530 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3531
3532 /* Now load any reg parms into their regs. */
3533
3534 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3535 are to be pushed. */
3536 for (count = 0; count < nargs; count++, argnum += inc)
3537 {
3538 register rtx val = argvec[argnum].value;
3539 rtx reg = argvec[argnum].reg;
3540 int partial = argvec[argnum].partial;
3541
3542 /* Handle calls that pass values in multiple non-contiguous
3543 locations. The PA64 has examples of this for library calls. */
3544 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3545 emit_group_load (reg, val,
3546 GET_MODE_SIZE (GET_MODE (val)),
3547 GET_MODE_ALIGNMENT (GET_MODE (val)));
3548 else if (reg != 0 && partial == 0)
3549 emit_move_insn (reg, val);
3550
3551 NO_DEFER_POP;
3552 }
3553
3554#if 0
3555 /* For version 1.37, try deleting this entirely. */
3556 if (! no_queue)
3557 emit_queue ();
3558#endif
3559
3560 /* Any regs containing parms remain in use through the call. */
3561 for (count = 0; count < nargs; count++)
3562 {
3563 rtx reg = argvec[count].reg;
3564 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3565 use_group_regs (&call_fusage, reg);
3566 else if (reg != 0)
3567 use_reg (&call_fusage, reg);
3568 }
3569
3570 /* Pass the function the address in which to return a structure value. */
3571 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3572 {
3573 emit_move_insn (struct_value_rtx,
3574 force_reg (Pmode,
3575 force_operand (XEXP (mem_value, 0),
3576 NULL_RTX)));
3577 if (GET_CODE (struct_value_rtx) == REG)
3578 use_reg (&call_fusage, struct_value_rtx);
3579 }
3580
3581 /* Don't allow popping to be deferred, since then
3582 cse'ing of library calls could delete a call and leave the pop. */
3583 NO_DEFER_POP;
3584
3585 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3586 will set inhibit_defer_pop to that value. */
de76b467
JH
3587 /* The return type is needed to decide how many bytes the function pops.
3588 Signedness plays no role in that, so for simplicity, we pretend it's
3589 always signed. We also assume that the list of arguments passed has
3590 no impact, so we pretend it is unknown. */
3c0fca12
RH
3591
3592 emit_call_1 (fun,
3593 get_identifier (XSTR (orgfun, 0)),
de76b467
JH
3594 build_function_type (outmode == VOIDmode ? void_type_node
3595 : type_for_mode (outmode, 0), NULL_TREE),
3c0fca12
RH
3596 original_args_size.constant, args_size.constant,
3597 struct_value_size,
3598 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
de76b467 3599 mem_value == 0 && outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
0a1c58a2
JL
3600 old_inhibit_defer_pop + 1, call_fusage,
3601 ((is_const ? ECF_IS_CONST : 0)
3602 | (nothrow ? ECF_NOTHROW : 0)));
3c0fca12
RH
3603
3604 /* Now restore inhibit_defer_pop to its actual original value. */
3605 OK_DEFER_POP;
3606
3607 pop_temp_slots ();
3608
3609 /* Copy the value to the right place. */
de76b467 3610 if (outmode != VOIDmode && retval)
3c0fca12
RH
3611 {
3612 if (mem_value)
3613 {
3614 if (value == 0)
3615 value = mem_value;
3616 if (value != mem_value)
3617 emit_move_insn (value, mem_value);
3618 }
3619 else if (value != 0)
3620 emit_move_insn (value, hard_libcall_value (outmode));
3621 else
3622 value = hard_libcall_value (outmode);
3623 }
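  /* So a libcall returning, say, a DFmode value with no VALUE supplied ends
     up with VALUE set to hard_libcall_value (DFmode), the register in which
     the target returns such values.  */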
3624
3625#ifdef ACCUMULATE_OUTGOING_ARGS
3626#ifdef REG_PARM_STACK_SPACE
3627 if (save_area)
3628 {
3629 enum machine_mode save_mode = GET_MODE (save_area);
3630#ifdef ARGS_GROW_DOWNWARD
3631 rtx stack_area
3632 = gen_rtx_MEM (save_mode,
3633 memory_address (save_mode,
3634 plus_constant (argblock,
3635 - high_to_save)));
3636#else
3637 rtx stack_area
3638 = gen_rtx_MEM (save_mode,
3639 memory_address (save_mode,
3640 plus_constant (argblock, low_to_save)));
3641#endif
3642 if (save_mode != BLKmode)
3643 emit_move_insn (stack_area, save_area);
3644 else
3645 emit_block_move (stack_area, validize_mem (save_area),
3646 GEN_INT (high_to_save - low_to_save + 1),
3647 PARM_BOUNDARY / BITS_PER_UNIT);
3648 }
3649#endif
3650
3651 /* If we saved any argument areas, restore them. */
3652 for (count = 0; count < nargs; count++)
3653 if (argvec[count].save_area)
3654 {
3655 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3656 rtx stack_area
3657 = gen_rtx_MEM (save_mode,
3658 memory_address
3659 (save_mode,
3660 plus_constant (argblock,
3661 argvec[count].offset.constant)));
3662
3663 emit_move_insn (stack_area, argvec[count].save_area);
3664 }
3665
3666 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3667 stack_usage_map = initial_stack_usage_map;
3668#endif
43bc5f13 3669
de76b467
JH
3670 return value;
3671
3672}
3673\f
3674/* Output a library call to function FUN (a SYMBOL_REF rtx)
3675 (emitting the queue unless NO_QUEUE is nonzero),
3676 for a value of mode OUTMODE,
3677 with NARGS different arguments, passed as alternating rtx values
3678 and machine_modes to convert them to.
3679 The rtx values should have been passed through protect_from_queue already.
3680
3681 NO_QUEUE will be true if and only if the library call is a `const' call
3682 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
3683 to the variable is_const in expand_call.
3684
3685 NO_QUEUE must be true for const calls, because if it isn't, then
3686 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
3687 and will be lost if the libcall sequence is optimized away.
3688
3689 NO_QUEUE must be false for non-const calls, because if it isn't, the
3690 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
3691 optimized. For instance, the instruction scheduler may incorrectly
3692 move memory references across the non-const call. */
3693
3694void
3695emit_library_call VPARAMS((rtx orgfun, int no_queue, enum machine_mode outmode,
3696 int nargs, ...))
3697{
3698#ifndef ANSI_PROTOTYPES
3699 rtx orgfun;
3700 int no_queue;
3701 enum machine_mode outmode;
3702 int nargs;
3703#endif
3704 va_list p;
3705
3706 VA_START (p, nargs);
3707
3708#ifndef ANSI_PROTOTYPES
3709 orgfun = va_arg (p, rtx);
3710 no_queue = va_arg (p, int);
3711 outmode = va_arg (p, enum machine_mode);
3712 nargs = va_arg (p, int);
3713#endif
3714
3715 emit_library_call_value_1 (0, orgfun, NULL_RTX, no_queue, outmode, nargs, p);
3716
3717 va_end (p);
3718}
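/* A typical use supplies NARGS (rtx value, machine_mode) pairs after the
   fixed arguments.  For instance, store_one_arg below records memory usage
   with roughly

	emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
			   addr, Pmode, size, TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_RW),
			   TYPE_MODE (integer_type_node));

   where ADDR and SIZE stand for the argument's address and size rtxes.  */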
3719\f
3720/* Like emit_library_call except that an extra argument, VALUE,
3721 comes second and says where to store the result.
3722 (If VALUE is zero, this function chooses a convenient way
3723 to return the value.)
3724
3725 This function returns an rtx for where the value is to be found.
3726 If VALUE is nonzero, VALUE is returned. */
3727
3728rtx
3729emit_library_call_value VPARAMS((rtx orgfun, rtx value, int no_queue,
3730 enum machine_mode outmode, int nargs, ...))
3731{
3732#ifndef ANSI_PROTOTYPES
3733 rtx orgfun;
3734 rtx value;
3735 int no_queue;
3736 enum machine_mode outmode;
3737 int nargs;
3738#endif
3739 va_list p;
3740
3741 VA_START (p, nargs);
3742
3743#ifndef ANSI_PROTOTYPES
3744 orgfun = va_arg (p, rtx);
3745 value = va_arg (p, rtx);
3746 no_queue = va_arg (p, int);
3747 outmode = va_arg (p, enum machine_mode);
3748 nargs = va_arg (p, int);
3749#endif
3750
3751 value = emit_library_call_value_1 (1, orgfun, value, no_queue, outmode, nargs, p);
3752
3753 va_end (p);
3754
fac0ad80 3755 return value;
322e3e34
RK
3756}
3757\f
51bbfa0c
RS
3758#if 0
3759/* Return an rtx which represents a suitable home on the stack
3760 given TYPE, the type of the argument looking for a home.
3761 This is called only for BLKmode arguments.
3762
3763 SIZE is the size needed for this target.
3764 ARGS_ADDR is the address of the bottom of the argument block for this call.
3765 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3766 if this machine uses push insns. */
3767
3768static rtx
3769target_for_arg (type, size, args_addr, offset)
3770 tree type;
3771 rtx size;
3772 rtx args_addr;
3773 struct args_size offset;
3774{
3775 rtx target;
3776 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3777
3778 /* We avoid calling memory_address when possible,
3779 because we want to address as close to the stack
3780 as possible. For non-variable sized arguments,
3781 this will be stack-pointer relative addressing. */
3782 if (GET_CODE (offset_rtx) == CONST_INT)
3783 target = plus_constant (args_addr, INTVAL (offset_rtx));
3784 else
3785 {
3786 /* I have no idea how to guarantee that this
3787 will work in the presence of register parameters. */
38a448ca 3788 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
51bbfa0c
RS
3789 target = memory_address (QImode, target);
3790 }
3791
38a448ca 3792 return gen_rtx_MEM (BLKmode, target);
51bbfa0c
RS
3793}
3794#endif
3795\f
3796/* Store a single argument for a function call
3797 into the register or memory area where it must be passed.
3798 *ARG describes the argument value and where to pass it.
3799
3800 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 3801 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
3802
3803 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3804 so must be careful about how the stack is used.
3805
3806 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3807 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS is defined to indicate
3808 that we need not worry about saving and restoring the stack.
3809
3810 FNDECL is the declaration of the function we are calling. */
3811
3812static void
c84e2712 3813store_one_arg (arg, argblock, may_be_alloca, variable_size,
6f90e075 3814 reg_parm_stack_space)
51bbfa0c
RS
3815 struct arg_data *arg;
3816 rtx argblock;
3817 int may_be_alloca;
0f9b3ea6 3818 int variable_size ATTRIBUTE_UNUSED;
6f90e075 3819 int reg_parm_stack_space;
51bbfa0c
RS
3820{
3821 register tree pval = arg->tree_value;
3822 rtx reg = 0;
3823 int partial = 0;
3824 int used = 0;
69d4ca36 3825#ifdef ACCUMULATE_OUTGOING_ARGS
6a651371 3826 int i, lower_bound = 0, upper_bound = 0;
69d4ca36 3827#endif
51bbfa0c
RS
3828
3829 if (TREE_CODE (pval) == ERROR_MARK)
3830 return;
3831
cc79451b
RK
3832 /* Push a new temporary level for any temporaries we make for
3833 this argument. */
3834 push_temp_slots ();
3835
51bbfa0c
RS
3836#ifdef ACCUMULATE_OUTGOING_ARGS
3837 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3838 save any previous data at that location. */
3839 if (argblock && ! variable_size && arg->stack)
3840 {
3841#ifdef ARGS_GROW_DOWNWARD
0f41302f
MS
3842 /* stack_slot is negative, but we want to index stack_usage_map
3843 with positive values. */
51bbfa0c
RS
3844 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3845 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3846 else
50eb43ca 3847 upper_bound = 0;
51bbfa0c
RS
3848
3849 lower_bound = upper_bound - arg->size.constant;
3850#else
3851 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3852 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3853 else
3854 lower_bound = 0;
3855
3856 upper_bound = lower_bound + arg->size.constant;
3857#endif
3858
3859 for (i = lower_bound; i < upper_bound; i++)
3860 if (stack_usage_map[i]
51bbfa0c
RS
3861 /* Don't store things in the fixed argument area at this point;
3862 it has already been saved. */
e5e809f4 3863 && i > reg_parm_stack_space)
51bbfa0c
RS
3864 break;
3865
3866 if (i != upper_bound)
3867 {
3868 /* We need to make a save area. See what mode we can make it. */
3869 enum machine_mode save_mode
3870 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3871 rtx stack_area
38a448ca
RH
3872 = gen_rtx_MEM (save_mode,
3873 memory_address (save_mode,
3874 XEXP (arg->stack_slot, 0)));
51bbfa0c
RS
3875
3876 if (save_mode == BLKmode)
3877 {
3878 arg->save_area = assign_stack_temp (BLKmode,
6fa51029 3879 arg->size.constant, 0);
c6df88cb
MM
3880 MEM_SET_IN_STRUCT_P (arg->save_area,
3881 AGGREGATE_TYPE_P (TREE_TYPE
3882 (arg->tree_value)));
cc79451b 3883 preserve_temp_slots (arg->save_area);
51bbfa0c 3884 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 3885 GEN_INT (arg->size.constant),
51bbfa0c
RS
3886 PARM_BOUNDARY / BITS_PER_UNIT);
3887 }
3888 else
3889 {
3890 arg->save_area = gen_reg_rtx (save_mode);
3891 emit_move_insn (arg->save_area, stack_area);
3892 }
3893 }
3894 }
b564df06
JL
3895
3896 /* Now that we have saved any slots that will be overwritten by this
3897 store, mark all slots this store will use. We must do this before
3898 we actually expand the argument since the expansion itself may
3899 trigger library calls which might need to use the same stack slot. */
3900 if (argblock && ! variable_size && arg->stack)
3901 for (i = lower_bound; i < upper_bound; i++)
3902 stack_usage_map[i] = 1;
51bbfa0c
RS
3903#endif
3904
3905 /* If this isn't going to be placed on both the stack and in registers,
3906 set up the register and number of words. */
3907 if (! arg->pass_on_stack)
3908 reg = arg->reg, partial = arg->partial;
3909
3910 if (reg != 0 && partial == 0)
3911 /* Being passed entirely in a register. We shouldn't be called in
3912 this case. */
3913 abort ();
3914
4ab56118
RK
3915 /* If this arg needs special alignment, don't load the registers
3916 here. */
3917 if (arg->n_aligned_regs != 0)
3918 reg = 0;
4ab56118 3919
4ab56118 3920 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
3921 it directly into its stack slot. Otherwise, we can. */
3922 if (arg->value == 0)
d64f5a78
RS
3923 {
3924#ifdef ACCUMULATE_OUTGOING_ARGS
3925 /* stack_arg_under_construction is nonzero if a function argument is
3926 being evaluated directly into the outgoing argument list and
3927 expand_call must take special action to preserve the argument list
3928 if it is called recursively.
3929
3930 For scalar function arguments stack_usage_map is sufficient to
3931 determine which stack slots must be saved and restored. Scalar
3932 arguments in general have pass_on_stack == 0.
3933
3934 If this argument is initialized by a function which takes the
3935 address of the argument (a C++ constructor or a C function
3936 returning a BLKmode structure), then stack_usage_map is
3937 insufficient and expand_call must push the stack around the
3938 function call. Such arguments have pass_on_stack == 1.
3939
3940 Note that it is always safe to set stack_arg_under_construction,
3941 but this generates suboptimal code if set when not needed. */
3942
3943 if (arg->pass_on_stack)
3944 stack_arg_under_construction++;
3945#endif
3a08477a
RK
3946 arg->value = expand_expr (pval,
3947 (partial
3948 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3949 ? NULL_RTX : arg->stack,
e5d70561 3950 VOIDmode, 0);
1efe6448
RK
3951
3952 /* If we are promoting the object (or if for any other reason the mode
3953 doesn't agree), convert the mode.
3954
7373d92d
RK
3955 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3956 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3957 arg->value, arg->unsignedp);
1efe6448 3958
d64f5a78
RS
3959#ifdef ACCUMULATE_OUTGOING_ARGS
3960 if (arg->pass_on_stack)
3961 stack_arg_under_construction--;
3962#endif
3963 }
51bbfa0c
RS
3964
3965 /* Don't allow anything left on stack from computation
3966 of argument to alloca. */
3967 if (may_be_alloca)
3968 do_pending_stack_adjust ();
3969
3970 if (arg->value == arg->stack)
7815214e 3971 {
c5c76735 3972 /* If the value is already in the stack slot, we are done. */
7d384cc0 3973 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
7815214e 3974 {
7815214e 3975 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3976 XEXP (arg->stack, 0), Pmode,
7d384cc0 3977 ARGS_SIZE_RTX (arg->size),
7815214e 3978 TYPE_MODE (sizetype),
956d6950
JL
3979 GEN_INT (MEMORY_USE_RW),
3980 TYPE_MODE (integer_type_node));
7815214e
RK
3981 }
3982 }
1efe6448 3983 else if (arg->mode != BLKmode)
51bbfa0c
RS
3984 {
3985 register int size;
3986
3987 /* Argument is a scalar, not entirely passed in registers.
3988 (If part is passed in registers, arg->partial says how much
3989 and emit_push_insn will take care of putting it there.)
3990
3991 Push it, and if its size is less than the
3992 amount of space allocated to it,
3993 also bump stack pointer by the additional space.
3994 Note that in C the default argument promotions
3995 will prevent such mismatches. */
3996
1efe6448 3997 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
3998 /* Compute how much space the push instruction will push.
3999 On many machines, pushing a byte will advance the stack
4000 pointer by a halfword. */
4001#ifdef PUSH_ROUNDING
4002 size = PUSH_ROUNDING (size);
4003#endif
4004 used = size;
4005
4006 /* Compute how much space the argument should get:
4007 round up to a multiple of the alignment for arguments. */
1efe6448 4008 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
4009 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4010 / (PARM_BOUNDARY / BITS_PER_UNIT))
4011 * (PARM_BOUNDARY / BITS_PER_UNIT));
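 /* For example, with a 32-bit PARM_BOUNDARY a 2-byte argument whose
 padding direction is not `none' is given a 4-byte slot here. */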
4012
4013 /* This isn't already where we want it on the stack, so put it there.
4014 This can either be done with push or copy insns. */
e5e809f4
JL
4015 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
4016 partial, reg, used - size, argblock,
4fc026cd
CM
4017 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4018 ARGS_SIZE_RTX (arg->alignment_pad));
4019
c2732da3 4020 arg_space_so_far += used;
51bbfa0c
RS
4021 }
4022 else
4023 {
4024 /* BLKmode, at least partly to be pushed. */
4025
4026 register int excess;
4027 rtx size_rtx;
4028
4029 /* Pushing a nonscalar.
4030 If part is passed in registers, PARTIAL says how much
4031 and emit_push_insn will take care of putting it there. */
4032
4033 /* Round its size up to a multiple
4034 of the allocation unit for arguments. */
4035
4036 if (arg->size.var != 0)
4037 {
4038 excess = 0;
4039 size_rtx = ARGS_SIZE_RTX (arg->size);
4040 }
4041 else
4042 {
51bbfa0c
RS
4043 /* PUSH_ROUNDING has no effect on us, because
4044 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 4045 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 4046 + partial * UNITS_PER_WORD);
e4f93898 4047 size_rtx = expr_size (pval);
c2732da3 4048 arg_space_so_far += excess + INTVAL (size_rtx);
51bbfa0c
RS
4049 }
4050
1efe6448 4051 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c 4052 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
e5e809f4 4053 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
4fc026cd
CM
4054 reg_parm_stack_space,
4055 ARGS_SIZE_RTX (arg->alignment_pad));
51bbfa0c
RS
4056 }
4057
4058
4059 /* Unless this is a partially-in-register argument, the argument is now
4060 in the stack.
4061
4062 ??? Note that this can change arg->value from arg->stack to
4063 arg->stack_slot and it matters when they are not the same.
4064 It isn't totally clear that this is correct in all cases. */
4065 if (partial == 0)
3b917a55 4066 arg->value = arg->stack_slot;
51bbfa0c
RS
4067
4068 /* Once we have pushed something, pops can't safely
4069 be deferred during the rest of the arguments. */
4070 NO_DEFER_POP;
4071
4072 /* ANSI doesn't require a sequence point here,
4073 but PCC has one, so this will avoid some problems. */
4074 emit_queue ();
4075
db907e7b
RK
4076 /* Free any temporary slots made in processing this argument. Show
4077 that we might have taken the address of something and pushed that
4078 as an operand. */
4079 preserve_temp_slots (NULL_RTX);
51bbfa0c 4080 free_temp_slots ();
cc79451b 4081 pop_temp_slots ();
51bbfa0c 4082}