/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
22#include "config.h"
670ee920 23#include "system.h"
4977bab6
ZW
24#include "coretypes.h"
25#include "tm.h"
670ee920
KG
26#include "rtl.h"
27#include "tree.h"
28#include "flags.h"
29#include "expr.h"
e78d8e51 30#include "libfuncs.h"
49ad7cfa 31#include "function.h"
670ee920 32#include "regs.h"
5f6da302 33#include "toplev.h"
d6f4ec51 34#include "output.h"
b1474bb7 35#include "tm_p.h"
ea11ca7e 36#include "timevar.h"
c67846f2 37#include "sbitmap.h"
b0c48229 38#include "langhooks.h"
23626154 39#include "target.h"
51bbfa0c
RS
40
41/* Decide whether a function's arguments should be processed
bbc8a071
RK
42 from first to last or from last to first.
43
44 They should if the stack and args grow in opposite directions, but
45 only if we have push insns. */
51bbfa0c 46
51bbfa0c 47#ifdef PUSH_ROUNDING
bbc8a071 48
2da4124d 49#ifndef PUSH_ARGS_REVERSED
40083ddf 50#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
f73ad30e 51#define PUSH_ARGS_REVERSED PUSH_ARGS
51bbfa0c 52#endif
2da4124d 53#endif
bbc8a071 54
51bbfa0c
RS
55#endif
56
f73ad30e
JH
57#ifndef PUSH_ARGS_REVERSED
58#define PUSH_ARGS_REVERSED 0
59#endif
60
c39ada04
DD
61#ifndef STACK_POINTER_OFFSET
62#define STACK_POINTER_OFFSET 0
63#endif
64
c795bca9
BS
65/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
66#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
51bbfa0c
RS
67
68/* Data structure and subroutines used within expand_call. */
69
struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets,
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
  /* The amount that the stack pointer needs to be adjusted to
     force alignment for the next argument.  */
  struct args_size alignment_pad;
};
128
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  Bit is set if the corresponding
   stack location's tail call argument has been already stored into the stack.
   This bitmap is used to prevent sibling call optimization if function tries
   to use parent's incoming argument slots when they have been already
   overwritten with tail call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
51bbfa0c 151
3d994c6b
KG
152static int calls_function PARAMS ((tree, int));
153static int calls_function_1 PARAMS ((tree, int));
0a1c58a2 154
3d994c6b
KG
155static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
156 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
fa5322fa
AO
157 rtx, int, rtx, int,
158 CUMULATIVE_ARGS *));
3d994c6b
KG
159static void precompute_register_parameters PARAMS ((int,
160 struct arg_data *,
161 int *));
4c6b3b2a 162static int store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
3d994c6b
KG
163 int));
164static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
165 int));
166static int finalize_must_preallocate PARAMS ((int, int,
167 struct arg_data *,
168 struct args_size *));
40d6e956
JH
169static void precompute_arguments PARAMS ((int, int,
170 struct arg_data *));
f725a3ec 171static int compute_argument_block_size PARAMS ((int,
c2f8b491
JH
172 struct args_size *,
173 int));
3d994c6b
KG
174static void initialize_argument_information PARAMS ((int,
175 struct arg_data *,
176 struct args_size *,
177 int, tree, tree,
178 CUMULATIVE_ARGS *,
179 int, rtx *, int *,
f2d33f13 180 int *, int *));
3d994c6b
KG
181static void compute_argument_addresses PARAMS ((struct arg_data *,
182 rtx, int));
183static rtx rtx_for_function_call PARAMS ((tree, tree));
184static void load_register_parameters PARAMS ((struct arg_data *,
0cdca92b
DJ
185 int, rtx *, int,
186 int, int *));
ebb1b59a
BS
187static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx,
188 enum libcall_type,
de76b467
JH
189 enum machine_mode,
190 int, va_list));
f2d33f13 191static int special_function_p PARAMS ((tree, int));
f2d33f13
JH
192static rtx try_to_integrate PARAMS ((tree, tree, rtx,
193 int, tree, rtx));
c67846f2 194static int check_sibcall_argument_overlap_1 PARAMS ((rtx));
0cdca92b
DJ
195static int check_sibcall_argument_overlap PARAMS ((rtx, struct arg_data *,
196 int));
c67846f2 197
ce48579b 198static int combine_pending_stack_adjustment_and_call
739fb049 199 PARAMS ((int, struct args_size *, int));
21a3b983 200
f73ad30e 201#ifdef REG_PARM_STACK_SPACE
3d994c6b
KG
202static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
203static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
20efdf74 204#endif
51bbfa0c 205\f
1ce0cb53
JW
206/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
207 `alloca'.
208
209 If WHICH is 0, return 1 if EXP contains a call to any function.
210 Actually, we only need return 1 if evaluating EXP would require pushing
211 arguments on the stack, but that is too difficult to compute, so we just
212 assume any function call might require the stack. */
51bbfa0c 213
1c8d7aef
RS
214static tree calls_function_save_exprs;
215
51bbfa0c 216static int
1ce0cb53 217calls_function (exp, which)
51bbfa0c 218 tree exp;
1ce0cb53 219 int which;
1c8d7aef
RS
220{
221 int val;
8d5e6e25 222
1c8d7aef
RS
223 calls_function_save_exprs = 0;
224 val = calls_function_1 (exp, which);
225 calls_function_save_exprs = 0;
226 return val;
227}
228
8d5e6e25
RK
229/* Recursive function to do the work of above function. */
230
1c8d7aef
RS
static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  int i;
  enum tree_code code = TREE_CODE (exp);
  int class = TREE_CODE_CLASS (code);
  int length = first_rtl_op (code);

  /* If this code is language-specific, we don't know what it will do.
     Conservatively assume it could contain a call.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  switch (code)
    {
    case CALL_EXPR:
      /* WHICH == 0: any call counts.  */
      if (which == 0)
	return 1;
      /* A call to a function whose type says it returns with the stack
	 pointer depressed also counts (it manipulates the stack).  */
      else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
		== FUNCTION_TYPE)
	       && (TYPE_RETURNS_STACK_DEPRESSED
		   (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	return 1;
      /* WHICH == 1: only alloca-like calls count; check the callee by
	 name via special_function_p.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == FUNCTION_DECL)
	       && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				       0)
		   & ECF_MAY_BE_ALLOCA))
	return 1;

      break;

    case CONSTRUCTOR:
      {
	tree tem;

	/* Check every element value of the constructor.  */
	for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
	  if (calls_function_1 (TREE_VALUE (tem), which))
	    return 1;
      }

      return 0;

    case SAVE_EXPR:
      /* Already expanded to RTL: evaluating it again pushes nothing.  */
      if (SAVE_EXPR_RTL (exp) != 0)
	return 0;
      /* Memoize visited SAVE_EXPRs to avoid exponential re-walks of
	 shared subtrees (and to terminate on sharing).  */
      if (value_member (exp, calls_function_save_exprs))
	return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
					     calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
	      && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
	tree local;
	tree subblock;

	/* Initializers of block-local variables may contain calls.  */
	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
	  if (DECL_INITIAL (local) != 0
	      && calls_function_1 (DECL_INITIAL (local), which))
	    return 1;

	/* Recurse into nested blocks.  */
	for (subblock = BLOCK_SUBBLOCKS (exp);
	     subblock;
	     subblock = TREE_CHAIN (subblock))
	  if (calls_function_1 (subblock, which))
	    return 1;
      }
      return 0;

    case TREE_LIST:
      for (; exp != 0; exp = TREE_CHAIN (exp))
	if (calls_function_1 (TREE_VALUE (exp), which))
	  return 1;
      return 0;

    default:
      break;
    }

  /* Only expressions, references, and blocks can contain calls.  */
  if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
    return 0;

  /* Generic case: scan the first LENGTH operands of the node.  */
  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
	&& calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
325\f
326/* Force FUNEXP into a form suitable for the address of a CALL,
327 and return that as an rtx. Also load the static chain register
328 if FNDECL is a nested function.
329
77cac2f2
RK
330 CALL_FUSAGE points to a variable holding the prospective
331 CALL_INSN_FUNCTION_USAGE information. */
51bbfa0c 332
rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
     int sibcallp;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now (force_not_mem), so loading
       it cannot conflict with the parameter registers.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
      /* For a direct (SYMBOL_REF) non-sibling call, optionally copy the
	 address into a register so CSE can share it across calls; skipped
	 for recursive calls when NO_RECURSIVE_FUNCTION_CSE is defined.  */
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      /* Load the static chain register and record the use on the call,
	 so the register allocator knows it is live across the call.  */
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
378
379/* Generate instructions to call function FUNEXP,
380 and optionally pop the results.
381 The CALL_INSN is the first insn generated.
382
607ea900 383 FNDECL is the declaration node of the function. This is given to the
2c8da025
RK
384 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
385
334c4f0f
RK
386 FUNTYPE is the data type of the function. This is given to the macro
387 RETURN_POPS_ARGS to determine whether this function pops its own args.
388 We used to allow an identifier for library functions, but that doesn't
389 work when the return type is an aggregate type and the calling convention
390 says that the pointer to this aggregate is to be popped by the callee.
51bbfa0c
RS
391
392 STACK_SIZE is the number of bytes of arguments on the stack,
c2732da3
JM
393 ROUNDED_STACK_SIZE is that number rounded up to
394 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
395 both to put into the call insn and to generate explicit popping
396 code if necessary.
51bbfa0c
RS
397
398 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
399 It is zero if this call doesn't want a structure value.
400
401 NEXT_ARG_REG is the rtx that results from executing
402 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
403 just after all the args have had their registers assigned.
404 This could be whatever you like, but normally it is the first
405 arg-register beyond those used for args in this call,
406 or 0 if all the arg-registers are used in this call.
407 It is passed on to `gen_call' so you can put this info in the call insn.
408
409 VALREG is a hard register in which a value is returned,
410 or 0 if the call does not return a value.
411
412 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
413 the args to this call were processed.
414 We restore `inhibit_defer_pop' to that value.
415
94b25f81 416 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
6d2f8887 417 denote registers used by the called function. */
f725a3ec 418
static void
emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
	     struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
	     call_fusage, ecf_flags, args_so_far)
     rtx funexp;
     tree fndecl ATTRIBUTE_UNUSED;
     tree funtype ATTRIBUTE_UNUSED;
     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
     HOST_WIDE_INT rounded_stack_size;
     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
     rtx next_arg_reg ATTRIBUTE_UNUSED;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int ecf_flags;
     CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED;
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  /* Nonzero once we have emitted a call pattern that pops the args
     itself, so we must not pop them again below.  */
  int already_popped = 0;
  /* Number of bytes of arguments the callee pops on return, per the
     target's RETURN_POPS_ARGS macro.  */
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  /* The following chain of if/else arms (mostly under conditional
     compilation) picks the most specific call pattern the target
     provides: sibcall_pop, call_pop, sibcall, then plain call.  */
#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_SIBCALL_VALUE_POP (valreg,
				     gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     n_pop);
      else
	pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			       rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_CALL_VALUE_POP (valreg,
				  gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			    rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
	emit_call_insn (GEN_SIBCALL_VALUE (valreg,
					   gen_rtx_MEM (FUNCTION_MODE, funexp),
					   rounded_stack_size_rtx,
					   next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (GEN_CALL_VALUE (valreg,
					gen_rtx_MEM (FUNCTION_MODE, funexp),
					rounded_stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
	(VOIDmode,
	 gen_rtx_USE (VOIDmode,
		      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
	 call_fusage);

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
					       REG_NOTES (call_insn));

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
					       REG_NOTES (call_insn));
  if (ecf_flags & ECF_ALWAYS_RETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
					       REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
						 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      /* If the popping call pattern was not used, note that the callee
	 clobbers the stack pointer.  */
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      /* Whatever the callee pops, we no longer need to pop ourselves.  */
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_SP_DEPRESSED)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On  i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
657
20efdf74
JL
658/* Determine if the function identified by NAME and FNDECL is one with
659 special properties we wish to know about.
660
661 For example, if the function might return more than one time (setjmp), then
662 set RETURNS_TWICE to a nonzero value.
663
   Similarly set LONGJMP if the function is in the longjmp family.
20efdf74 665
f2d33f13 666 Set MALLOC for any of the standard memory allocation functions which
20efdf74
JL
667 allocate from the heap.
668
669 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
670 space from the stack such as alloca. */
671
f2d33f13
JH
static int
special_function_p (fndecl, flags)
     tree fndecl;
     int flags;
{
  /* All the special names we match are at most 17 characters long, so
     cheaply reject longer identifiers before any strcmp work.  */
  if (! (flags & ECF_MALLOC)
      && fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.  */
      && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
	    && name[0] == 'a'
	    && ! strcmp (name, "alloca"))
	   || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
	       && name[0] == '_'
	       && ! strcmp (name, "__builtin_alloca"))))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      /* The leading-character tests below are cheap filters in front of
	 the full strcmp comparisons.  */
      if (tname[0] == 's')
	{
	  if ((tname[1] == 'e'
	       && (! strcmp (tname, "setjmp")
		   || ! strcmp (tname, "setjmp_syscall")))
	      || (tname[1] == 'i'
		  && ! strcmp (tname, "sigsetjmp"))
	      || (tname[1] == 'a'
		  && ! strcmp (tname, "savectx")))
	    flags |= ECF_RETURNS_TWICE;

	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    flags |= ECF_LONGJMP;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork")))
	flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	flags |= ECF_LONGJMP;

      else if ((tname[0] == 'f' && tname[1] == 'o'
		&& ! strcmp (tname, "fork"))
	       /* Linux specific: __clone.  check NAME to insist on the
		  leading underscores, to avoid polluting the ISO / POSIX
		  namespace.  */
	       || (name[0] == '_' && name[1] == '_'
		   && ! strcmp (tname, "clone"))
	       /* The exec family: execl, execv, execlp, execle, execvp,
		  execve.  */
	       || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
		   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
		   && (tname[5] == '\0'
		       || ((tname[5] == 'p' || tname[5] == 'e')
			   && tname[6] == '\0'))))
	flags |= ECF_FORK_OR_EXEC;

      /* Do not add any more malloc-like functions to this list,
	 instead mark them as malloc functions using the malloc attribute.
	 Note, realloc is not suitable for attribute malloc since
	 it may return the same address across multiple calls.
	 C++ operator new is not suitable because it is not required
	 to return a unique pointer; indeed, the standard placement new
	 just returns its argument.  */
      else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
	       && (! strcmp (tname, "malloc")
		   || ! strcmp (tname, "calloc")
		   || ! strcmp (tname, "strdup")))
	flags |= ECF_MALLOC;
    }
  return flags;
}
764
/* Return nonzero when FNDECL is a setjmp-like function that may return more than once.  */
7393c642 766
f2d33f13
JH
767int
768setjmp_call_p (fndecl)
769 tree fndecl;
770{
771 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
772}
773
c986baf6
JH
774/* Return true when exp contains alloca call. */
775bool
776alloca_call_p (exp)
777 tree exp;
778{
779 if (TREE_CODE (exp) == CALL_EXPR
780 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
781 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
782 == FUNCTION_DECL)
783 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
784 0) & ECF_MAY_BE_ALLOCA))
785 return true;
786 return false;
787}
788
b5cd4ed4 789/* Detect flags (function attributes) from the function decl or type node. */
7393c642 790
int
flags_from_decl_or_type (exp)
     tree exp;
{
  int flags = 0;
  /* If EXP is a decl, TYPE is replaced by its type below; otherwise EXP
     is itself the type node.  */
  tree type = exp;
  /* ??? We can't set IS_MALLOC for function types?  */
  if (DECL_P (exp))
    {
      type = TREE_TYPE (exp);

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_P (exp) && DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_P (exp) && DECL_IS_PURE (exp))
	flags |= ECF_PURE | ECF_LIBCALL_BLOCK;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;
    }

  /* TREE_READONLY on a function decl/type marks it `const'.  */
  if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST | ECF_LIBCALL_BLOCK;

  /* TREE_THIS_VOLATILE on a function marks it as never returning.  */
  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case, so this must
     clear those flags AFTER the settings above.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
    }

  return flags;
}
830
20efdf74
JL
831/* Precompute all register parameters as described by ARGS, storing values
832 into fields within the ARGS array.
833
834 NUM_ACTUALS indicates the total number elements in the ARGS array.
835
836 Set REG_PARM_SEEN if we encounter a register parameter. */
837
static void
precompute_register_parameters (num_actuals, args, reg_parm_seen)
     int num_actuals;
     struct arg_data *args;
     int *reg_parm_seen;
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	/* Expand the argument to RTL if it has not been precomputed.  */
	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
					 VOIDmode, 0);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();

	    /* ANSI doesn't require a sequence point here,
	       but PCC has one, so this will avoid some problems.  */
	    emit_queue ();
	  }

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !LEGITIMATE_CONSTANT_P (args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	if ((! (GET_CODE (args[i].value) == REG
		|| (GET_CODE (args[i].value) == SUBREG
		    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
	    && args[i].mode != BLKmode
	    && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
	    && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
		|| preserve_subexpressions_p ()))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
899
#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.

     Returns the rtx holding the saved copy (a pseudo or a stack
     temporary), or NULL_RTX if nothing in the fixed area was live.
     *LOW_TO_SAVE and *HIGH_TO_SAVE receive the byte range saved;
     the caller is expected to initialize *LOW_TO_SAVE to -1.  */

static rtx
save_fixed_argument_area (reg_parm_stack_space, argblock,
			  low_to_save, high_to_save)
     int reg_parm_stack_space;
     rtx argblock;
     int *low_to_save;
     int *high_to_save;
{
  int i;
  rtx save_area = NULL_RTX;

  /* Compute the boundary of the area that needs to be saved, if any,
     by scanning the outgoing-argument usage map.  */
#ifdef ARGS_GROW_DOWNWARD
  for (i = 0; i < reg_parm_stack_space + 1; i++)
#else
  for (i = 0; i < reg_parm_stack_space; i++)
#endif
    {
      if (i >= highest_outgoing_arg_in_use
	  || stack_usage_map[i] == 0)
	continue;

      if (*low_to_save == -1)
	*low_to_save = i;

      *high_to_save = i;
    }

  if (*low_to_save >= 0)
    {
      int num_to_save = *high_to_save - *low_to_save + 1;
      enum machine_mode save_mode
	= mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
      rtx stack_area;

      /* If we don't have the required alignment, must do this in BLKmode.  */
      if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
				BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	save_mode = BLKmode;

      /* Address the live part of the fixed area; the sign of the offset
	 depends on the direction arguments grow.  */
#ifdef ARGS_GROW_DOWNWARD
      stack_area
	= gen_rtx_MEM (save_mode,
		       memory_address (save_mode,
				       plus_constant (argblock,
						      - *high_to_save)));
#else
      stack_area = gen_rtx_MEM (save_mode,
				memory_address (save_mode,
						plus_constant (argblock,
							       *low_to_save)));
#endif

      set_mem_align (stack_area, PARM_BOUNDARY);
      if (save_mode == BLKmode)
	{
	  /* Unaligned case: copy the bytes into a stack temporary.  */
	  save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	  emit_block_move (validize_mem (save_area), stack_area,
			   GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	}
      else
	{
	  /* Aligned case: a single register-sized move suffices.  */
	  save_area = gen_reg_rtx (save_mode);
	  emit_move_insn (save_area, stack_area);
	}
    }

  return save_area;
}
975
/* Restore the fixed argument area saved by save_fixed_argument_area.
   SAVE_AREA is the pseudo or stack temporary it returned; ARGBLOCK,
   HIGH_TO_SAVE and LOW_TO_SAVE describe the byte range to put back.  */

static void
restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
     rtx save_area;
     rtx argblock;
     int high_to_save;
     int low_to_save;
{
  enum machine_mode save_mode = GET_MODE (save_area);
  /* Recompute the same address save_fixed_argument_area used.  */
#ifdef ARGS_GROW_DOWNWARD
  rtx stack_area
    = gen_rtx_MEM (save_mode,
		   memory_address (save_mode,
				   plus_constant (argblock,
						  - high_to_save)));
#else
  rtx stack_area
    = gen_rtx_MEM (save_mode,
		   memory_address (save_mode,
				   plus_constant (argblock,
						  low_to_save)));
#endif

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
f725a3ec 1006
20efdf74
JL
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers.  Copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (args, num_actuals)
     struct arg_data *args;
     int num_actuals;
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& args[i].mode == BLKmode
	&& (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int big_endian_correction = 0;

	/* One pseudo per word, unless a partial count was supplied.  */
	args[i].n_aligned_regs
	  = args[i].partial ? args[i].partial
	    : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

	args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
						* args[i].n_aligned_regs);

	/* Structures smaller than a word are aligned to the least
	   significant byte (to the right).  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (BYTES_BIG_ENDIAN
	    && bytes < UNITS_PER_WORD)
	  big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    /* BYTES shrinks each iteration, so the last word may be
	       narrower than BITS_PER_WORD.  */
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We use to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, big_endian_correction, word_mode,
			     extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
						word_mode, word_mode,
						BITS_PER_WORD),
			     BITS_PER_WORD);
	  }
      }
}
1077
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree code for the target of this call (if known)

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.  */

static void
initialize_argument_information (num_actuals, args, args_size, n_named_args,
				 actparms, fndecl, args_so_far,
				 reg_parm_stack_space, old_stack_level,
				 old_pending_adj, must_preallocate,
				 ecf_flags)
     int num_actuals ATTRIBUTE_UNUSED;
     struct arg_data *args;
     struct args_size *args_size;
     int n_named_args ATTRIBUTE_UNUSED;
     tree actparms;
     tree fndecl;
     CUMULATIVE_ARGS *args_so_far;
     int reg_parm_stack_space;
     rtx *old_stack_level;
     int *old_pending_adj;
     int *must_preallocate;
     int *ecf_flags;
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  struct args_size alignment_pad;
  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
	 so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (type)))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
	  /* If we're compiling a thunk, pass through invisible
	     references instead of making a copy.  */
	  if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
	      || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
					      type, argpos < n_named_args)
		  /* If it's in a register, we must make a copy of it too.  */
		  /* ??? Is this a sufficient test?  Is there a better one? */
		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		       && REG_P (DECL_RTL (args[i].tree_value)))
		  && ! TREE_ADDRESSABLE (type))
#endif
	      )
	    {
	      /* C++ uses a TARGET_EXPR to indicate that we want to make a
		 new object from the argument.  If we are passing by
		 invisible reference, the callee will do that for us, so we
		 can strip off the TARGET_EXPR.  This is not always safe,
		 but it is safe in the only case where this is a useful
		 optimization; namely, when the argument is a plain object.
		 In that case, the frontend is just asking the backend to
		 make a bitwise copy of the argument.  */

	      if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
		  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
		args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
	    {
	      /* In the V3 C++ ABI, parameters are destroyed in the caller.
		 We implement this by passing the address of the temporary
		 rather than expanding it into another allocated slot.  */
	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
						STACK_CHECK_MAX_VAR_SIZE))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  if (*old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx_MEM (BLKmode,
				      allocate_dynamic_stack_space
				      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
		  set_mem_attributes (copy, type, 1);
		}
	      else
		copy = assign_temp (type, 0, 1, 0);

	      store_expr (args[i].tree_value, copy, 0);
	      /* The copy is a side effect, so the call can no longer be
		 treated as const/pure or grouped into a libcall block.  */
	      *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
	 register window has to be unwinded before calling the routine, so
	 arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
						     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
	args[i].partial
	  = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
					argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	*ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     fndecl, args_size, &args[i].offset,
			     &args[i].size, &alignment_pad);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;
#endif

      args[i].alignment_pad = alignment_pad;

      /* If a part of the arg was put into registers,
	 don't include that part in the amount pushed.  */
      if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
				  / (PARM_BOUNDARY / BITS_PER_UNIT)
				  * (PARM_BOUNDARY / BITS_PER_UNIT));

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].size.constant;
      if (args[i].size.var)
	{
	  ADD_PARM_SIZE (*args_size, args[i].size.var);
	}

      /* Since the slot offset points to the bottom of the slot,
	 we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;

      args[i].slot_offset.constant = -args_size->constant;
      if (args_size->var)
	SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }
}
1376
599f37b6
JL
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (reg_parm_stack_space, args_size,
			     preferred_stack_boundary)
     int reg_parm_stack_space;
     struct args_size *args_size;
     int preferred_stack_boundary ATTRIBUTE_UNUSED;
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      /* Variable-sized block: fold the constant part into the tree and
	 round the tree expression up to the boundary.  */
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  if (stack_pointer_delta & (preferred_stack_boundary - 1))
	    abort ();
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}

      if (reg_parm_stack_space > 0)
	{
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size->var
	    = size_binop (MINUS_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));
#endif
	}
    }
  else
    {
      /* Constant-sized block: round (constant + delta) up to the
	 boundary, then remove the delta again.  */
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
	args_size->constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
1459
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (flags, num_actuals, args)
     int flags;
     int num_actuals;
     struct arg_data *args;
{
  int i;

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such case by saving the outgoing stack arguments, but it results in
     worse code) */

  for (i = 0; i < num_actuals; i++)
    if ((flags & ECF_LIBCALL_BLOCK)
	|| calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
      {
	enum machine_mode mode;

	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].value, 0);

	/* Promote the value to the mode chosen for the argument if the
	   declared type's mode differs.  */
	mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
	if (mode != args[i].mode)
	  {
	    args[i].value
	      = convert_modes (args[i].mode, mode,
			       args[i].value, args[i].unsignedp);
#ifdef PROMOTE_FOR_CALL_ONLY
	    /* CSE will replace this only if it contains args[i].value
	       pseudo, so convert it down to the declared mode using
	       a SUBREG.  */
	    if (GET_CODE (args[i].value) == REG
		&& GET_MODE_CLASS (args[i].mode) == MODE_INT)
	      {
		args[i].initial_value
		  = gen_lowpart_SUBREG (mode, args[i].value);
		SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
		SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
					      args[i].unsignedp);
	      }
#endif
	  }
      }
}
1536
0f9b3ea6
JL
1537/* Given the current state of MUST_PREALLOCATE and information about
1538 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1539 compute and return the final value for MUST_PREALLOCATE. */
1540
1541static int
1542finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1543 int must_preallocate;
1544 int num_actuals;
1545 struct arg_data *args;
1546 struct args_size *args_size;
1547{
1548 /* See if we have or want to preallocate stack space.
1549
1550 If we would have to push a partially-in-regs parm
1551 before other stack parms, preallocate stack space instead.
1552
1553 If the size of some parm is not a multiple of the required stack
1554 alignment, we must preallocate.
1555
1556 If the total size of arguments that would otherwise create a copy in
1557 a temporary (such as a CALL) is more than half the total argument list
1558 size, preallocation is faster.
1559
1560 Another reason to preallocate is if we have a machine (like the m88k)
1561 where stack alignment is required to be maintained between every
1562 pair of insns, not just when the call is made. However, we assume here
1563 that such machines either do not have push insns (and hence preallocation
1564 would occur anyway) or the problem is taken care of with
1565 PUSH_ROUNDING. */
1566
1567 if (! must_preallocate)
1568 {
1569 int partial_seen = 0;
1570 int copy_to_evaluate_size = 0;
1571 int i;
1572
1573 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1574 {
1575 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1576 partial_seen = 1;
1577 else if (partial_seen && args[i].reg == 0)
1578 must_preallocate = 1;
1579
1580 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1581 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1582 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1583 || TREE_CODE (args[i].tree_value) == COND_EXPR
1584 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1585 copy_to_evaluate_size
1586 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1587 }
1588
1589 if (copy_to_evaluate_size * 2 >= args_size->constant
1590 && args_size->constant > 0)
1591 must_preallocate = 1;
1592 }
1593 return must_preallocate;
1594}
599f37b6 1595
a45bdd02
JL
1596/* If we preallocated stack space, compute the address of each argument
1597 and store it into the ARGS array.
1598
f725a3ec 1599 We need not ensure it is a valid memory address here; it will be
a45bdd02
JL
1600 validized when it is used.
1601
1602 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1603
1604static void
1605compute_argument_addresses (args, argblock, num_actuals)
1606 struct arg_data *args;
1607 rtx argblock;
1608 int num_actuals;
1609{
1610 if (argblock)
1611 {
1612 rtx arg_reg = argblock;
1613 int i, arg_offset = 0;
1614
1615 if (GET_CODE (argblock) == PLUS)
1616 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1617
1618 for (i = 0; i < num_actuals; i++)
1619 {
1620 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1621 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1622 rtx addr;
1623
1624 /* Skip this parm if it will not be passed on the stack. */
1625 if (! args[i].pass_on_stack && args[i].reg != 0)
1626 continue;
1627
1628 if (GET_CODE (offset) == CONST_INT)
1629 addr = plus_constant (arg_reg, INTVAL (offset));
1630 else
1631 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1632
1633 addr = plus_constant (addr, arg_offset);
1634 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
3bdf5ad1
RK
1635 set_mem_attributes (args[i].stack,
1636 TREE_TYPE (args[i].tree_value), 1);
a45bdd02
JL
1637
1638 if (GET_CODE (slot_offset) == CONST_INT)
1639 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1640 else
1641 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1642
1643 addr = plus_constant (addr, arg_offset);
1644 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
3bdf5ad1
RK
1645 set_mem_attributes (args[i].stack_slot,
1646 TREE_TYPE (args[i].tree_value), 1);
7ab923cc
JJ
1647
1648 /* Function incoming arguments may overlap with sibling call
1649 outgoing arguments and we cannot allow reordering of reads
1650 from function arguments with stores to outgoing arguments
1651 of sibling calls. */
ba4828e0
RK
1652 set_mem_alias_set (args[i].stack, 0);
1653 set_mem_alias_set (args[i].stack_slot, 0);
a45bdd02
JL
1654 }
1655 }
1656}
f725a3ec 1657
a45bdd02
JL
1658/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1659 in a call instruction.
1660
1661 FNDECL is the tree node for the target function. For an indirect call
1662 FNDECL will be NULL_TREE.
1663
1664 EXP is the CALL_EXPR for this call. */
1665
1666static rtx
1667rtx_for_function_call (fndecl, exp)
1668 tree fndecl;
1669 tree exp;
1670{
1671 rtx funexp;
1672
1673 /* Get the function to call, in the form of RTL. */
1674 if (fndecl)
1675 {
1676 /* If this is the first use of the function, see if we need to
1677 make an external definition for it. */
1678 if (! TREE_USED (fndecl))
1679 {
1680 assemble_external (fndecl);
1681 TREE_USED (fndecl) = 1;
1682 }
1683
1684 /* Get a SYMBOL_REF rtx for the function address. */
1685 funexp = XEXP (DECL_RTL (fndecl), 0);
1686 }
1687 else
1688 /* Generate an rtx (probably a pseudo-register) for the address. */
1689 {
1690 push_temp_slots ();
4977bab6 1691 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
f725a3ec 1692 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
a45bdd02
JL
1693 emit_queue ();
1694 }
1695 return funexp;
1696}
1697
21a3b983
JL
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (args, num_actuals, call_fusage, flags,
			  is_sibcall, sibcall_failure)
     struct arg_data *args;
     int num_actuals;
     rtx *call_fusage;
     int flags;
     int is_sibcall;
     int *sibcall_failure;
{
  int i, j;

#ifdef LOAD_ARGS_REVERSED
  for (i = num_actuals - 1; i >= 0; i--)
#else
  for (i = 0; i < num_actuals; i++)
#endif
    {
      /* For sibling calls use the incoming registers computed earlier.  */
      rtx reg = ((flags & ECF_SIBCALL)
		 ? args[i].tail_call_reg : args[i].reg);
      int partial = args[i].partial;
      int nregs;

      if (reg)
	{
	  rtx before_arg = get_last_insn ();
	  /* Set to non-negative if must move a word at a time, even if just
	     one word (e.g, partial == 1 && mode == DFmode).  Set to -1 if
	     we just use a normal move insn.  This value can be zero if the
	     argument is a zero size structure with no fields.  */
	  nregs = (partial ? partial
		   : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
		      ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
			  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		      : -1));

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */

	  if (GET_CODE (reg) == PARALLEL)
	    emit_group_load (reg, args[i].value,
			     int_size_in_bytes (TREE_TYPE (args[i].tree_value)));

	  /* If simple case, just do move.  If normal partial, store_one_arg
	     has already loaded the register for us.  In all other cases,
	     load the register(s) from memory.  */

	  else if (nregs == -1)
	    emit_move_insn (reg, args[i].value);

	  /* If we have pre-computed the values to put in the registers in
	     the case of non-aligned structures, copy them in now.  */

	  else if (args[i].n_aligned_regs != 0)
	    for (j = 0; j < args[i].n_aligned_regs; j++)
	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
			      args[i].aligned_regs[j]);

	  else if (partial == 0 || args[i].pass_on_stack)
	    move_block_to_reg (REGNO (reg),
			       validize_mem (args[i].value), nregs,
			       args[i].mode);

	  /* When a parameter is a block, and perhaps in other cases, it is
	     possible that it did a load from an argument slot that was
	     already clobbered.  */
	  if (is_sibcall
	      && check_sibcall_argument_overlap (before_arg, &args[i], 0))
	    *sibcall_failure = 1;

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (reg) == PARALLEL)
	    use_group_regs (call_fusage, reg);
	  else if (nregs == -1)
	    use_reg (call_fusage, reg);
	  else
	    use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
	}
    }
}
1789
ea11ca7e 1790/* Try to integrate function. See expand_inline_function for documentation
f2d33f13
JH
1791 about the parameters. */
1792
1793static rtx
1794try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1795 tree fndecl;
1796 tree actparms;
1797 rtx target;
1798 int ignore;
1799 tree type;
1800 rtx structure_value_addr;
1801{
1802 rtx temp;
1803 rtx before_call;
1804 int i;
1805 rtx old_stack_level = 0;
7657ad0a 1806 int reg_parm_stack_space = 0;
f2d33f13
JH
1807
1808#ifdef REG_PARM_STACK_SPACE
1809#ifdef MAYBE_REG_PARM_STACK_SPACE
1810 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1811#else
1812 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1813#endif
1814#endif
1815
1816 before_call = get_last_insn ();
1817
ea11ca7e
JM
1818 timevar_push (TV_INTEGRATION);
1819
f2d33f13
JH
1820 temp = expand_inline_function (fndecl, actparms, target,
1821 ignore, type,
1822 structure_value_addr);
1823
ea11ca7e
JM
1824 timevar_pop (TV_INTEGRATION);
1825
f2d33f13 1826 /* If inlining succeeded, return. */
60e8b9f0 1827 if (temp != (rtx) (size_t) - 1)
f2d33f13
JH
1828 {
1829 if (ACCUMULATE_OUTGOING_ARGS)
1830 {
1831 /* If the outgoing argument list must be preserved, push
1832 the stack before executing the inlined function if it
1833 makes any calls. */
1834
1835 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1836 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1837 break;
1838
1839 if (stack_arg_under_construction || i >= 0)
1840 {
1841 rtx first_insn
1842 = before_call ? NEXT_INSN (before_call) : get_insns ();
1843 rtx insn = NULL_RTX, seq;
1844
1845 /* Look for a call in the inline function code.
1846 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1847 nonzero then there is a call and it is not necessary
1848 to scan the insns. */
1849
1850 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1851 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1852 if (GET_CODE (insn) == CALL_INSN)
1853 break;
1854
1855 if (insn)
1856 {
1857 /* Reserve enough stack space so that the largest
1858 argument list of any function call in the inline
1859 function does not overlap the argument list being
1860 evaluated. This is usually an overestimate because
1861 allocate_dynamic_stack_space reserves space for an
1862 outgoing argument list in addition to the requested
1863 space, but there is no way to ask for stack space such
1864 that an argument list of a certain length can be
f725a3ec 1865 safely constructed.
f2d33f13
JH
1866
1867 Add the stack space reserved for register arguments, if
1868 any, in the inline function. What is really needed is the
1869 largest value of reg_parm_stack_space in the inline
1870 function, but that is not available. Using the current
1871 value of reg_parm_stack_space is wrong, but gives
1872 correct results on all supported machines. */
1873
1874 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1875 + reg_parm_stack_space);
1876
1877 start_sequence ();
1878 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1879 allocate_dynamic_stack_space (GEN_INT (adjust),
1880 NULL_RTX, BITS_PER_UNIT);
1881 seq = get_insns ();
1882 end_sequence ();
2f937369 1883 emit_insn_before (seq, first_insn);
f2d33f13
JH
1884 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1885 }
1886 }
1887 }
1888
1889 /* If the result is equivalent to TARGET, return TARGET to simplify
1890 checks in store_expr. They can be equivalent but not equal in the
1891 case of a function that returns BLKmode. */
1892 if (temp != target && rtx_equal_p (temp, target))
1893 return target;
1894 return temp;
1895 }
1896
1897 /* If inlining failed, mark FNDECL as needing to be compiled
1898 separately after all. If function was declared inline,
1899 give a warning. */
1900 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1901 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1902 {
1903 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1904 warning ("called from here");
1905 }
dffd7eb6 1906 (*lang_hooks.mark_addressable) (fndecl);
60e8b9f0 1907 return (rtx) (size_t) - 1;
f2d33f13
JH
1908}
1909
739fb049
MM
1910/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1911 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1912 bytes, then we would need to push some additional bytes to pad the
ce48579b
RH
1913 arguments. So, we compute an adjust to the stack pointer for an
1914 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1915 bytes. Then, when the arguments are pushed the stack will be perfectly
1916 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1917 be popped after the call. Returns the adjustment. */
739fb049 1918
ce48579b 1919static int
739fb049
MM
1920combine_pending_stack_adjustment_and_call (unadjusted_args_size,
1921 args_size,
1922 preferred_unit_stack_boundary)
1923 int unadjusted_args_size;
1924 struct args_size *args_size;
1925 int preferred_unit_stack_boundary;
1926{
1927 /* The number of bytes to pop so that the stack will be
1928 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1929 HOST_WIDE_INT adjustment;
1930 /* The alignment of the stack after the arguments are pushed, if we
1931 just pushed the arguments without adjust the stack here. */
1932 HOST_WIDE_INT unadjusted_alignment;
1933
f725a3ec 1934 unadjusted_alignment
739fb049
MM
1935 = ((stack_pointer_delta + unadjusted_args_size)
1936 % preferred_unit_stack_boundary);
1937
1938 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1939 as possible -- leaving just enough left to cancel out the
1940 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1941 PENDING_STACK_ADJUST is non-negative, and congruent to
1942 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1943
1944 /* Begin by trying to pop all the bytes. */
f725a3ec
KH
1945 unadjusted_alignment
1946 = (unadjusted_alignment
739fb049
MM
1947 - (pending_stack_adjust % preferred_unit_stack_boundary));
1948 adjustment = pending_stack_adjust;
1949 /* Push enough additional bytes that the stack will be aligned
1950 after the arguments are pushed. */
e079dcdb
HB
1951 if (preferred_unit_stack_boundary > 1)
1952 {
3e555c7d 1953 if (unadjusted_alignment > 0)
f725a3ec 1954 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
e079dcdb 1955 else
f725a3ec 1956 adjustment += unadjusted_alignment;
e079dcdb 1957 }
f725a3ec 1958
739fb049
MM
1959 /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of
1960 bytes after the call. The right number is the entire
1961 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1962 by the arguments in the first place. */
f725a3ec 1963 args_size->constant
739fb049
MM
1964 = pending_stack_adjust - adjustment + unadjusted_args_size;
1965
ce48579b 1966 return adjustment;
739fb049
MM
1967}
1968
c67846f2
JJ
1969/* Scan X expression if it does not dereference any argument slots
1970 we already clobbered by tail call arguments (as noted in stored_args_map
1971 bitmap).
da7d8304 1972 Return nonzero if X expression dereferences such argument slots,
c67846f2
JJ
1973 zero otherwise. */
1974
1975static int
1976check_sibcall_argument_overlap_1 (x)
1977 rtx x;
1978{
1979 RTX_CODE code;
1980 int i, j;
1981 unsigned int k;
1982 const char *fmt;
1983
1984 if (x == NULL_RTX)
1985 return 0;
1986
1987 code = GET_CODE (x);
1988
1989 if (code == MEM)
1990 {
1991 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1992 i = 0;
4c6b3b2a
JJ
1993 else if (GET_CODE (XEXP (x, 0)) == PLUS
1994 && XEXP (XEXP (x, 0), 0) ==
1995 current_function_internal_arg_pointer
1996 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
c67846f2
JJ
1997 i = INTVAL (XEXP (XEXP (x, 0), 1));
1998 else
1999 return 0;
2000
d60eab50
AO
2001#ifdef ARGS_GROW_DOWNWARD
2002 i = -i - GET_MODE_SIZE (GET_MODE (x));
2003#endif
2004
c67846f2
JJ
2005 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
2006 if (i + k < stored_args_map->n_bits
2007 && TEST_BIT (stored_args_map, i + k))
2008 return 1;
2009
2010 return 0;
2011 }
2012
f725a3ec 2013 /* Scan all subexpressions. */
c67846f2
JJ
2014 fmt = GET_RTX_FORMAT (code);
2015 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2016 {
2017 if (*fmt == 'e')
f725a3ec
KH
2018 {
2019 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2020 return 1;
2021 }
c67846f2 2022 else if (*fmt == 'E')
f725a3ec
KH
2023 {
2024 for (j = 0; j < XVECLEN (x, i); j++)
2025 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2026 return 1;
2027 }
c67846f2
JJ
2028 }
2029 return 0;
c67846f2
JJ
2030}
2031
2032/* Scan sequence after INSN if it does not dereference any argument slots
2033 we already clobbered by tail call arguments (as noted in stored_args_map
0cdca92b
DJ
2034 bitmap). If MARK_STORED_ARGS_MAP, add stack slots for ARG to
2035 stored_args_map bitmap afterwards (when ARG is a register MARK_STORED_ARGS_MAP
2036 should be 0). Return nonzero if sequence after INSN dereferences such argument
2037 slots, zero otherwise. */
c67846f2
JJ
2038
2039static int
0cdca92b 2040check_sibcall_argument_overlap (insn, arg, mark_stored_args_map)
c67846f2
JJ
2041 rtx insn;
2042 struct arg_data *arg;
0cdca92b 2043 int mark_stored_args_map;
f725a3ec 2044{
c67846f2
JJ
2045 int low, high;
2046
2047 if (insn == NULL_RTX)
2048 insn = get_insns ();
2049 else
2050 insn = NEXT_INSN (insn);
2051
2052 for (; insn; insn = NEXT_INSN (insn))
f725a3ec
KH
2053 if (INSN_P (insn)
2054 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
c67846f2
JJ
2055 break;
2056
0cdca92b
DJ
2057 if (mark_stored_args_map)
2058 {
d60eab50 2059#ifdef ARGS_GROW_DOWNWARD
0cdca92b 2060 low = -arg->slot_offset.constant - arg->size.constant;
d60eab50 2061#else
0cdca92b 2062 low = arg->slot_offset.constant;
d60eab50
AO
2063#endif
2064
0cdca92b
DJ
2065 for (high = low + arg->size.constant; low < high; low++)
2066 SET_BIT (stored_args_map, low);
2067 }
c67846f2
JJ
2068 return insn != NULL_RTX;
2069}
2070
51bbfa0c
RS
2071/* Generate all the code for a function call
2072 and return an rtx for its value.
2073 Store the value in TARGET (specified as an rtx) if convenient.
2074 If the value is stored in TARGET then TARGET is returned.
2075 If IGNORE is nonzero, then we ignore the value of the function call. */
2076
2077rtx
8129842c 2078expand_call (exp, target, ignore)
51bbfa0c
RS
2079 tree exp;
2080 rtx target;
2081 int ignore;
51bbfa0c 2082{
0a1c58a2
JL
2083 /* Nonzero if we are currently expanding a call. */
2084 static int currently_expanding_call = 0;
2085
51bbfa0c
RS
2086 /* List of actual parameters. */
2087 tree actparms = TREE_OPERAND (exp, 1);
2088 /* RTX for the function to be called. */
2089 rtx funexp;
0a1c58a2
JL
2090 /* Sequence of insns to perform a tail recursive "call". */
2091 rtx tail_recursion_insns = NULL_RTX;
2092 /* Sequence of insns to perform a normal "call". */
2093 rtx normal_call_insns = NULL_RTX;
2094 /* Sequence of insns to perform a tail recursive "call". */
2095 rtx tail_call_insns = NULL_RTX;
51bbfa0c
RS
2096 /* Data type of the function. */
2097 tree funtype;
2098 /* Declaration of the function being called,
2099 or 0 if the function is computed (not known by name). */
2100 tree fndecl = 0;
0a1c58a2 2101 rtx insn;
099e9712
JH
2102 int try_tail_call = 1;
2103 int try_tail_recursion = 1;
0a1c58a2 2104 int pass;
51bbfa0c
RS
2105
2106 /* Register in which non-BLKmode value will be returned,
2107 or 0 if no value or if value is BLKmode. */
2108 rtx valreg;
2109 /* Address where we should return a BLKmode value;
2110 0 if value not BLKmode. */
2111 rtx structure_value_addr = 0;
2112 /* Nonzero if that address is being passed by treating it as
2113 an extra, implicit first parameter. Otherwise,
2114 it is passed by being copied directly into struct_value_rtx. */
2115 int structure_value_addr_parm = 0;
2116 /* Size of aggregate value wanted, or zero if none wanted
2117 or if we are using the non-reentrant PCC calling convention
2118 or expecting the value in registers. */
e5e809f4 2119 HOST_WIDE_INT struct_value_size = 0;
51bbfa0c
RS
2120 /* Nonzero if called function returns an aggregate in memory PCC style,
2121 by returning the address of where to find it. */
2122 int pcc_struct_value = 0;
2123
2124 /* Number of actual parameters in this call, including struct value addr. */
2125 int num_actuals;
2126 /* Number of named args. Args after this are anonymous ones
2127 and they must all go on the stack. */
2128 int n_named_args;
51bbfa0c
RS
2129
2130 /* Vector of information about each argument.
2131 Arguments are numbered in the order they will be pushed,
2132 not the order they are written. */
2133 struct arg_data *args;
2134
2135 /* Total size in bytes of all the stack-parms scanned so far. */
2136 struct args_size args_size;
099e9712 2137 struct args_size adjusted_args_size;
51bbfa0c 2138 /* Size of arguments before any adjustments (such as rounding). */
599f37b6 2139 int unadjusted_args_size;
51bbfa0c
RS
2140 /* Data on reg parms scanned so far. */
2141 CUMULATIVE_ARGS args_so_far;
2142 /* Nonzero if a reg parm has been scanned. */
2143 int reg_parm_seen;
efd65a8b 2144 /* Nonzero if this is an indirect function call. */
51bbfa0c 2145
f725a3ec 2146 /* Nonzero if we must avoid push-insns in the args for this call.
51bbfa0c
RS
2147 If stack space is allocated for register parameters, but not by the
2148 caller, then it is preallocated in the fixed part of the stack frame.
2149 So the entire argument block must then be preallocated (i.e., we
2150 ignore PUSH_ROUNDING in that case). */
2151
f73ad30e 2152 int must_preallocate = !PUSH_ARGS;
51bbfa0c 2153
f72aed24 2154 /* Size of the stack reserved for parameter registers. */
6f90e075
JW
2155 int reg_parm_stack_space = 0;
2156
51bbfa0c
RS
2157 /* Address of space preallocated for stack parms
2158 (on machines that lack push insns), or 0 if space not preallocated. */
2159 rtx argblock = 0;
2160
f2d33f13
JH
2161 /* Mask of ECF_ flags. */
2162 int flags = 0;
51bbfa0c
RS
2163 /* Nonzero if this is a call to an inline function. */
2164 int is_integrable = 0;
f73ad30e 2165#ifdef REG_PARM_STACK_SPACE
51bbfa0c
RS
2166 /* Define the boundary of the register parm stack space that needs to be
2167 save, if any. */
2168 int low_to_save = -1, high_to_save;
2169 rtx save_area = 0; /* Place that it is saved */
2170#endif
2171
51bbfa0c
RS
2172 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2173 char *initial_stack_usage_map = stack_usage_map;
a544cfd2 2174 int old_stack_arg_under_construction = 0;
51bbfa0c
RS
2175
2176 rtx old_stack_level = 0;
79be3418 2177 int old_pending_adj = 0;
51bbfa0c 2178 int old_inhibit_defer_pop = inhibit_defer_pop;
1503a7ec 2179 int old_stack_allocated;
0a1c58a2 2180 rtx call_fusage;
b3694847
SS
2181 tree p = TREE_OPERAND (exp, 0);
2182 int i;
739fb049
MM
2183 /* The alignment of the stack, in bits. */
2184 HOST_WIDE_INT preferred_stack_boundary;
2185 /* The alignment of the stack, in bytes. */
2186 HOST_WIDE_INT preferred_unit_stack_boundary;
51bbfa0c 2187
f2d33f13
JH
2188 /* See if this is "nothrow" function call. */
2189 if (TREE_NOTHROW (exp))
2190 flags |= ECF_NOTHROW;
2191
51bbfa0c
RS
2192 /* See if we can find a DECL-node for the actual function.
2193 As a result, decide whether this is a call to an integrable function. */
2194
39b0dce7
JM
2195 fndecl = get_callee_fndecl (exp);
2196 if (fndecl)
51bbfa0c 2197 {
39b0dce7
JM
2198 if (!flag_no_inline
2199 && fndecl != current_function_decl
2200 && DECL_INLINE (fndecl)
2201 && DECL_SAVED_INSNS (fndecl)
2202 && DECL_SAVED_INSNS (fndecl)->inlinable)
2203 is_integrable = 1;
2204 else if (! TREE_ADDRESSABLE (fndecl))
51bbfa0c 2205 {
39b0dce7
JM
2206 /* In case this function later becomes inlinable,
2207 record that there was already a non-inline call to it.
51bbfa0c 2208
39b0dce7
JM
2209 Use abstraction instead of setting TREE_ADDRESSABLE
2210 directly. */
2211 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2212 && optimize > 0)
2213 {
2214 warning_with_decl (fndecl, "can't inline call to `%s'");
2215 warning ("called from here");
51bbfa0c 2216 }
dffd7eb6 2217 (*lang_hooks.mark_addressable) (fndecl);
51bbfa0c 2218 }
39b0dce7
JM
2219
2220 flags |= flags_from_decl_or_type (fndecl);
51bbfa0c
RS
2221 }
2222
f725a3ec 2223 /* If we don't have specific function to call, see if we have a
f2d33f13 2224 attributes set in the type. */
39b0dce7 2225 else
7393c642
RK
2226 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2227
6f90e075
JW
2228#ifdef REG_PARM_STACK_SPACE
2229#ifdef MAYBE_REG_PARM_STACK_SPACE
2230 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2231#else
2232 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2233#endif
2234#endif
2235
f73ad30e
JH
2236#ifndef OUTGOING_REG_PARM_STACK_SPACE
2237 if (reg_parm_stack_space > 0 && PUSH_ARGS)
e5e809f4
JL
2238 must_preallocate = 1;
2239#endif
2240
51bbfa0c
RS
2241 /* Warn if this value is an aggregate type,
2242 regardless of which calling convention we are using for it. */
05e3bdb9 2243 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
51bbfa0c
RS
2244 warning ("function call has aggregate value");
2245
2246 /* Set up a place to return a structure. */
2247
2248 /* Cater to broken compilers. */
2249 if (aggregate_value_p (exp))
2250 {
2251 /* This call returns a big structure. */
53d4257f 2252 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
51bbfa0c
RS
2253
2254#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a
RS
2255 {
2256 pcc_struct_value = 1;
0dd532dc
JW
2257 /* Easier than making that case work right. */
2258 if (is_integrable)
2259 {
2260 /* In case this is a static function, note that it has been
2261 used. */
2262 if (! TREE_ADDRESSABLE (fndecl))
dffd7eb6 2263 (*lang_hooks.mark_addressable) (fndecl);
0dd532dc
JW
2264 is_integrable = 0;
2265 }
9e7b1d0a
RS
2266 }
2267#else /* not PCC_STATIC_STRUCT_RETURN */
2268 {
2269 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
51bbfa0c 2270
4977bab6 2271 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
89ea02fb 2272 {
f32ac70d
JM
2273 /* The structure value address arg is already in actparms.
2274 Pull it out. It might be nice to just leave it there, but
2275 we need to set structure_value_addr. */
2276 tree return_arg = TREE_VALUE (actparms);
2277 actparms = TREE_CHAIN (actparms);
2278 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2279 VOIDmode, EXPAND_NORMAL);
89ea02fb 2280 }
4977bab6 2281 else if (target && GET_CODE (target) == MEM)
9e7b1d0a
RS
2282 structure_value_addr = XEXP (target, 0);
2283 else
2284 {
9e7b1d0a
RS
2285 /* For variable-sized objects, we must be called with a target
2286 specified. If we were to allocate space on the stack here,
2287 we would have no way of knowing when to free it. */
1da68f56 2288 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
51bbfa0c 2289
4361b41d
MM
2290 mark_temp_addr_taken (d);
2291 structure_value_addr = XEXP (d, 0);
9e7b1d0a
RS
2292 target = 0;
2293 }
2294 }
2295#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
2296 }
2297
2298 /* If called function is inline, try to integrate it. */
2299
2300 if (is_integrable)
2301 {
f2d33f13
JH
2302 rtx temp = try_to_integrate (fndecl, actparms, target,
2303 ignore, TREE_TYPE (exp),
2304 structure_value_addr);
60e8b9f0 2305 if (temp != (rtx) (size_t) - 1)
f2d33f13 2306 return temp;
51bbfa0c
RS
2307 }
2308
099e9712 2309 /* Figure out the amount to which the stack should be aligned. */
099e9712 2310 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
099e9712
JH
2311
2312 /* Operand 0 is a pointer-to-function; get the type of the function. */
2313 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
2314 if (! POINTER_TYPE_P (funtype))
2315 abort ();
2316 funtype = TREE_TYPE (funtype);
2317
2318 /* See if this is a call to a function that can return more than once
2319 or a call to longjmp or malloc. */
2320 flags |= special_function_p (fndecl, flags);
2321
2322 if (flags & ECF_MAY_BE_ALLOCA)
2323 current_function_calls_alloca = 1;
2324
2325 /* If struct_value_rtx is 0, it means pass the address
2326 as if it were an extra parameter. */
2327 if (structure_value_addr && struct_value_rtx == 0)
2328 {
2329 /* If structure_value_addr is a REG other than
2330 virtual_outgoing_args_rtx, we can use always use it. If it
2331 is not a REG, we must always copy it into a register.
2332 If it is virtual_outgoing_args_rtx, we must copy it to another
2333 register in some cases. */
2334 rtx temp = (GET_CODE (structure_value_addr) != REG
2335 || (ACCUMULATE_OUTGOING_ARGS
2336 && stack_arg_under_construction
2337 && structure_value_addr == virtual_outgoing_args_rtx)
2338 ? copy_addr_to_reg (structure_value_addr)
2339 : structure_value_addr);
2340
2341 actparms
2342 = tree_cons (error_mark_node,
2343 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2344 temp),
2345 actparms);
2346 structure_value_addr_parm = 1;
2347 }
2348
2349 /* Count the arguments and set NUM_ACTUALS. */
2350 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2351 num_actuals++;
2352
2353 /* Compute number of named args.
2354 Normally, don't include the last named arg if anonymous args follow.
2355 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2356 (If no anonymous args follow, the result of list_length is actually
2357 one too large. This is harmless.)
2358
2359 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2360 zero, this machine will be able to place unnamed args that were
2361 passed in registers into the stack. So treat all args as named.
2362 This allows the insns emitting for a specific argument list to be
2363 independent of the function declaration.
2364
2365 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2366 reliable way to pass unnamed args in registers, so we must force
2367 them into memory. */
2368
2369 if ((STRICT_ARGUMENT_NAMING
2370 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2371 && TYPE_ARG_TYPES (funtype) != 0)
2372 n_named_args
2373 = (list_length (TYPE_ARG_TYPES (funtype))
2374 /* Don't include the last named arg. */
2375 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2376 /* Count the struct value address, if it is passed as a parm. */
2377 + structure_value_addr_parm);
2378 else
2379 /* If we know nothing, treat all args as named. */
2380 n_named_args = num_actuals;
2381
2382 /* Start updating where the next arg would go.
2383
2384 On some machines (such as the PA) indirect calls have a different
2385 calling convention than normal calls. The last argument in
2386 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2387 or not. */
2388 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2389
099e9712 2390 /* Make a vector to hold all the information about each arg. */
f725a3ec 2391 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
961192e1 2392 memset ((char *) args, 0, num_actuals * sizeof (struct arg_data));
099e9712 2393
d80d2d2a
KH
2394 /* Build up entries in the ARGS array, compute the size of the
2395 arguments into ARGS_SIZE, etc. */
099e9712
JH
2396 initialize_argument_information (num_actuals, args, &args_size,
2397 n_named_args, actparms, fndecl,
2398 &args_so_far, reg_parm_stack_space,
2399 &old_stack_level, &old_pending_adj,
2400 &must_preallocate, &flags);
2401
2402 if (args_size.var)
2403 {
2404 /* If this function requires a variable-sized argument list, don't
2405 try to make a cse'able block for this call. We may be able to
2406 do this eventually, but it is too complicated to keep track of
6d2f8887 2407 what insns go in the cse'able block and which don't. */
099e9712 2408
53d4257f 2409 flags &= ~ECF_LIBCALL_BLOCK;
099e9712
JH
2410 must_preallocate = 1;
2411 }
2412
2413 /* Now make final decision about preallocating stack space. */
2414 must_preallocate = finalize_must_preallocate (must_preallocate,
2415 num_actuals, args,
2416 &args_size);
2417
2418 /* If the structure value address will reference the stack pointer, we
2419 must stabilize it. We don't need to do this if we know that we are
2420 not going to adjust the stack pointer in processing this call. */
2421
2422 if (structure_value_addr
2423 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2424 || reg_mentioned_p (virtual_outgoing_args_rtx,
2425 structure_value_addr))
2426 && (args_size.var
2427 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2428 structure_value_addr = copy_to_reg (structure_value_addr);
0a1c58a2 2429
194c7c45
RH
2430 /* Tail calls can make things harder to debug, and we're traditionally
2431 pushed these optimizations into -O2. Don't try if we're already
fb158467
JH
2432 expanding a call, as that means we're an argument. Don't try if
2433 there's cleanups, as we know there's code to follow the call.
e2ee9912 2434
f725a3ec 2435 If rtx_equal_function_value_matters is false, that means we've
e2ee9912
RH
2436 finished with regular parsing. Which means that some of the
2437 machinery we use to generate tail-calls is no longer in place.
2438 This is most often true of sjlj-exceptions, which we couldn't
2439 tail-call to anyway. */
0a1c58a2 2440
099e9712
JH
2441 if (currently_expanding_call++ != 0
2442 || !flag_optimize_sibling_calls
2443 || !rtx_equal_function_value_matters
099e9712
JH
2444 || any_pending_cleanups (1)
2445 || args_size.var)
2446 try_tail_call = try_tail_recursion = 0;
2447
2448 /* Tail recursion fails, when we are not dealing with recursive calls. */
2449 if (!try_tail_recursion
2450 || TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
2451 || TREE_OPERAND (TREE_OPERAND (exp, 0), 0) != current_function_decl)
2452 try_tail_recursion = 0;
2453
2454 /* Rest of purposes for tail call optimizations to fail. */
2455 if (
2456#ifdef HAVE_sibcall_epilogue
2457 !HAVE_sibcall_epilogue
2458#else
2459 1
2460#endif
2461 || !try_tail_call
2462 /* Doing sibling call optimization needs some work, since
2463 structure_value_addr can be allocated on the stack.
2464 It does not seem worth the effort since few optimizable
2465 sibling calls will return a structure. */
2466 || structure_value_addr != NULL_RTX
4977bab6
ZW
2467 /* Check whether the target is able to optimize the call
2468 into a sibcall. */
2469 || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
2470 /* Functions that do not return exactly once may not be sibcall
2471 optimized. */
db655634 2472 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
4977bab6 2473 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
099e9712
JH
2474 /* If this function requires more stack slots than the current
2475 function, we cannot change it into a sibling call. */
2476 || args_size.constant > current_function_args_size
2477 /* If the callee pops its own arguments, then it must pop exactly
2478 the same number of arguments as the current function. */
2479 || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2480 != RETURN_POPS_ARGS (current_function_decl,
2481 TREE_TYPE (current_function_decl),
2482 current_function_args_size))
e6f64875 2483 try_tail_call = 0;
497eb8c3 2484
099e9712
JH
2485 if (try_tail_call || try_tail_recursion)
2486 {
2487 int end, inc;
2488 actparms = NULL_TREE;
194c7c45
RH
2489 /* Ok, we're going to give the tail call the old college try.
2490 This means we're going to evaluate the function arguments
2491 up to three times. There are two degrees of badness we can
2492 encounter, those that can be unsaved and those that can't.
2493 (See unsafe_for_reeval commentary for details.)
2494
2495 Generate a new argument list. Pass safe arguments through
f725a3ec 2496 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
194c7c45 2497 For hard badness, evaluate them now and put their resulting
099e9712
JH
2498 rtx in a temporary VAR_DECL.
2499
2500 initialize_argument_information has ordered the array for the
2501 order to be pushed, and we must remember this when reconstructing
f5143c46 2502 the original argument order. */
4d393a0b 2503
099e9712
JH
2504 if (PUSH_ARGS_REVERSED)
2505 {
2506 inc = 1;
2507 i = 0;
2508 end = num_actuals;
2509 }
2510 else
f725a3ec 2511 {
099e9712
JH
2512 inc = -1;
2513 i = num_actuals - 1;
2514 end = -1;
2515 }
2516
2517 for (; i != end; i += inc)
2518 {
2519 switch (unsafe_for_reeval (args[i].tree_value))
2520 {
f725a3ec
KH
2521 case 0: /* Safe. */
2522 break;
497eb8c3 2523
f725a3ec
KH
2524 case 1: /* Mildly unsafe. */
2525 args[i].tree_value = unsave_expr (args[i].tree_value);
2526 break;
497eb8c3 2527
f725a3ec
KH
2528 case 2: /* Wildly unsafe. */
2529 {
2530 tree var = build_decl (VAR_DECL, NULL_TREE,
099e9712 2531 TREE_TYPE (args[i].tree_value));
19e7881c
MM
2532 SET_DECL_RTL (var,
2533 expand_expr (args[i].tree_value, NULL_RTX,
2534 VOIDmode, EXPAND_NORMAL));
099e9712 2535 args[i].tree_value = var;
f725a3ec
KH
2536 }
2537 break;
0a1c58a2 2538
f725a3ec
KH
2539 default:
2540 abort ();
2541 }
099e9712
JH
2542 /* We need to build actparms for optimize_tail_recursion. We can
2543 safely trash away TREE_PURPOSE, since it is unused by this
2544 function. */
2545 if (try_tail_recursion)
2546 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2547 }
194c7c45
RH
2548 /* Expanding one of those dangerous arguments could have added
2549 cleanups, but otherwise give it a whirl. */
099e9712
JH
2550 if (any_pending_cleanups (1))
2551 try_tail_call = try_tail_recursion = 0;
0a1c58a2
JL
2552 }
2553
2554 /* Generate a tail recursion sequence when calling ourselves. */
2555
099e9712 2556 if (try_tail_recursion)
0a1c58a2
JL
2557 {
2558 /* We want to emit any pending stack adjustments before the tail
2559 recursion "call". That way we know any adjustment after the tail
2560 recursion call can be ignored if we indeed use the tail recursion
2561 call expansion. */
2562 int save_pending_stack_adjust = pending_stack_adjust;
1503a7ec 2563 int save_stack_pointer_delta = stack_pointer_delta;
0a1c58a2 2564
41c39533
RH
2565 /* Emit any queued insns now; otherwise they would end up in
2566 only one of the alternates. */
2567 emit_queue ();
2568
0a1c58a2
JL
2569 /* Use a new sequence to hold any RTL we generate. We do not even
2570 know if we will use this RTL yet. The final decision can not be
2571 made until after RTL generation for the entire function is
2572 complete. */
b06775f9 2573 start_sequence ();
f5e846c8
MM
2574 /* If expanding any of the arguments creates cleanups, we can't
2575 do a tailcall. So, we'll need to pop the pending cleanups
2576 list. If, however, all goes well, and there are no cleanups
2577 then the call to expand_start_target_temps will have no
2578 effect. */
2579 expand_start_target_temps ();
b06775f9 2580 if (optimize_tail_recursion (actparms, get_last_insn ()))
f5e846c8
MM
2581 {
2582 if (any_pending_cleanups (1))
2583 try_tail_call = try_tail_recursion = 0;
2584 else
2585 tail_recursion_insns = get_insns ();
2586 }
2587 expand_end_target_temps ();
0a1c58a2
JL
2588 end_sequence ();
2589
0a1c58a2
JL
2590 /* Restore the original pending stack adjustment for the sibling and
2591 normal call cases below. */
2592 pending_stack_adjust = save_pending_stack_adjust;
1503a7ec 2593 stack_pointer_delta = save_stack_pointer_delta;
0a1c58a2
JL
2594 }
2595
099e9712
JH
2596 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2597 {
2598 /* A fork duplicates the profile information, and an exec discards
2599 it. We can't rely on fork/exec to be paired. So write out the
2600 profile information we have gathered so far, and clear it. */
2601 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2602 is subject to race conditions, just as with multithreaded
2603 programs. */
2604
4977bab6 2605 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__gcov_flush"),
9d98f8f9 2606 LCT_ALWAYS_RETURN,
099e9712
JH
2607 VOIDmode, 0);
2608 }
0a1c58a2 2609
c2f8b491
JH
2610 /* Ensure current function's preferred stack boundary is at least
2611 what we need. We don't have to increase alignment for recursive
2612 functions. */
2613 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2614 && fndecl != current_function_decl)
2615 cfun->preferred_stack_boundary = preferred_stack_boundary;
2616
099e9712 2617 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
497eb8c3 2618
099e9712 2619 function_call_count++;
39842893 2620
0a1c58a2
JL
2621 /* We want to make two insn chains; one for a sibling call, the other
2622 for a normal call. We will select one of the two chains after
2623 initial RTL generation is complete. */
2624 for (pass = 0; pass < 2; pass++)
2625 {
2626 int sibcall_failure = 0;
f5143c46 2627 /* We want to emit any pending stack adjustments before the tail
0a1c58a2
JL
2628 recursion "call". That way we know any adjustment after the tail
2629 recursion call can be ignored if we indeed use the tail recursion
2630 call expansion. */
5ac9118e
KG
2631 int save_pending_stack_adjust = 0;
2632 int save_stack_pointer_delta = 0;
0a1c58a2 2633 rtx insns;
7d167afd 2634 rtx before_call, next_arg_reg;
39842893 2635
0a1c58a2
JL
2636 if (pass == 0)
2637 {
099e9712 2638 if (! try_tail_call)
0a1c58a2 2639 continue;
51bbfa0c 2640
1c81f9fe
JM
2641 /* Emit any queued insns now; otherwise they would end up in
2642 only one of the alternates. */
2643 emit_queue ();
2644
0a1c58a2
JL
2645 /* State variables we need to save and restore between
2646 iterations. */
2647 save_pending_stack_adjust = pending_stack_adjust;
1503a7ec 2648 save_stack_pointer_delta = stack_pointer_delta;
0a1c58a2 2649 }
f2d33f13
JH
2650 if (pass)
2651 flags &= ~ECF_SIBCALL;
2652 else
2653 flags |= ECF_SIBCALL;
51bbfa0c 2654
0a1c58a2 2655 /* Other state variables that we must reinitialize each time
f2d33f13 2656 through the loop (that are not initialized by the loop itself). */
0a1c58a2
JL
2657 argblock = 0;
2658 call_fusage = 0;
fa76d9e0 2659
f725a3ec 2660 /* Start a new sequence for the normal call case.
51bbfa0c 2661
0a1c58a2
JL
2662 From this point on, if the sibling call fails, we want to set
2663 sibcall_failure instead of continuing the loop. */
2664 start_sequence ();
eecb6f50 2665
b8d254e6
JJ
2666 if (pass == 0)
2667 {
2668 /* We know at this point that there are not currently any
2669 pending cleanups. If, however, in the process of evaluating
2670 the arguments we were to create some, we'll need to be
2671 able to get rid of them. */
2672 expand_start_target_temps ();
2673 }
2674
0a1c58a2
JL
2675 /* Don't let pending stack adjusts add up to too much.
2676 Also, do all pending adjustments now if there is any chance
2677 this might be a call to alloca or if we are expanding a sibling
b5cd4ed4
RK
2678 call sequence or if we are calling a function that is to return
2679 with stack pointer depressed. */
0a1c58a2 2680 if (pending_stack_adjust >= 32
b5cd4ed4
RK
2681 || (pending_stack_adjust > 0
2682 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
0a1c58a2
JL
2683 || pass == 0)
2684 do_pending_stack_adjust ();
51bbfa0c 2685
54fef245
RH
2686 /* When calling a const function, we must pop the stack args right away,
2687 so that the pop is deleted or moved with the call. */
53d4257f 2688 if (pass && (flags & ECF_LIBCALL_BLOCK))
54fef245
RH
2689 NO_DEFER_POP;
2690
6f90e075 2691#ifdef FINAL_REG_PARM_STACK_SPACE
0a1c58a2
JL
2692 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2693 args_size.var);
6f90e075 2694#endif
0a1c58a2 2695 /* Precompute any arguments as needed. */
f8a097cd
JH
2696 if (pass)
2697 precompute_arguments (flags, num_actuals, args);
51bbfa0c 2698
0a1c58a2
JL
2699 /* Now we are about to start emitting insns that can be deleted
2700 if a libcall is deleted. */
53d4257f 2701 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
0a1c58a2 2702 start_sequence ();
51bbfa0c 2703
099e9712 2704 adjusted_args_size = args_size;
ce48579b
RH
2705 /* Compute the actual size of the argument block required. The variable
2706 and constant sizes must be combined, the size may have to be rounded,
2707 and there may be a minimum required size. When generating a sibcall
2708 pattern, do not round up, since we'll be re-using whatever space our
2709 caller provided. */
2710 unadjusted_args_size
f725a3ec
KH
2711 = compute_argument_block_size (reg_parm_stack_space,
2712 &adjusted_args_size,
ce48579b
RH
2713 (pass == 0 ? 0
2714 : preferred_stack_boundary));
2715
f725a3ec 2716 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
ce48579b 2717
f8a097cd
JH
2718 /* The argument block when performing a sibling call is the
2719 incoming argument block. */
2720 if (pass == 0)
c67846f2
JJ
2721 {
2722 argblock = virtual_incoming_args_rtx;
fcae219a
R
2723 argblock
2724#ifdef STACK_GROWS_DOWNWARD
2725 = plus_constant (argblock, current_function_pretend_args_size);
2726#else
2727 = plus_constant (argblock, -current_function_pretend_args_size);
2728#endif
c67846f2
JJ
2729 stored_args_map = sbitmap_alloc (args_size.constant);
2730 sbitmap_zero (stored_args_map);
2731 }
ce48579b 2732
0a1c58a2
JL
2733 /* If we have no actual push instructions, or shouldn't use them,
2734 make space for all args right now. */
099e9712 2735 else if (adjusted_args_size.var != 0)
51bbfa0c 2736 {
0a1c58a2
JL
2737 if (old_stack_level == 0)
2738 {
2739 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2740 old_pending_adj = pending_stack_adjust;
2741 pending_stack_adjust = 0;
0a1c58a2
JL
2742 /* stack_arg_under_construction says whether a stack arg is
2743 being constructed at the old stack level. Pushing the stack
2744 gets a clean outgoing argument block. */
2745 old_stack_arg_under_construction = stack_arg_under_construction;
2746 stack_arg_under_construction = 0;
0a1c58a2 2747 }
099e9712 2748 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
51bbfa0c 2749 }
0a1c58a2
JL
2750 else
2751 {
2752 /* Note that we must go through the motions of allocating an argument
2753 block even if the size is zero because we may be storing args
2754 in the area reserved for register arguments, which may be part of
2755 the stack frame. */
26a258fe 2756
099e9712 2757 int needed = adjusted_args_size.constant;
51bbfa0c 2758
0a1c58a2
JL
2759 /* Store the maximum argument space used. It will be pushed by
2760 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2761 checking). */
51bbfa0c 2762
0a1c58a2
JL
2763 if (needed > current_function_outgoing_args_size)
2764 current_function_outgoing_args_size = needed;
51bbfa0c 2765
0a1c58a2
JL
2766 if (must_preallocate)
2767 {
f73ad30e
JH
2768 if (ACCUMULATE_OUTGOING_ARGS)
2769 {
f8a097cd
JH
2770 /* Since the stack pointer will never be pushed, it is
2771 possible for the evaluation of a parm to clobber
2772 something we have already written to the stack.
2773 Since most function calls on RISC machines do not use
2774 the stack, this is uncommon, but must work correctly.
26a258fe 2775
f73ad30e 2776 Therefore, we save any area of the stack that was already
f8a097cd
JH
2777 written and that we are using. Here we set up to do this
2778 by making a new stack usage map from the old one. The
f725a3ec 2779 actual save will be done by store_one_arg.
26a258fe 2780
f73ad30e
JH
2781 Another approach might be to try to reorder the argument
2782 evaluations to avoid this conflicting stack usage. */
26a258fe 2783
e5e809f4 2784#ifndef OUTGOING_REG_PARM_STACK_SPACE
f8a097cd
JH
2785 /* Since we will be writing into the entire argument area,
2786 the map must be allocated for its entire size, not just
2787 the part that is the responsibility of the caller. */
f73ad30e 2788 needed += reg_parm_stack_space;
51bbfa0c
RS
2789#endif
2790
2791#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
2792 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2793 needed + 1);
51bbfa0c 2794#else
f73ad30e
JH
2795 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2796 needed);
51bbfa0c 2797#endif
f8a097cd
JH
2798 stack_usage_map
2799 = (char *) alloca (highest_outgoing_arg_in_use);
51bbfa0c 2800
f73ad30e 2801 if (initial_highest_arg_in_use)
2e09e75a
JM
2802 memcpy (stack_usage_map, initial_stack_usage_map,
2803 initial_highest_arg_in_use);
2f4aa534 2804
f73ad30e 2805 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 2806 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
2807 (highest_outgoing_arg_in_use
2808 - initial_highest_arg_in_use));
2809 needed = 0;
2f4aa534 2810
f8a097cd
JH
2811 /* The address of the outgoing argument list must not be
2812 copied to a register here, because argblock would be left
2813 pointing to the wrong place after the call to
f725a3ec 2814 allocate_dynamic_stack_space below. */
2f4aa534 2815
f73ad30e 2816 argblock = virtual_outgoing_args_rtx;
f725a3ec 2817 }
f73ad30e 2818 else
26a258fe 2819 {
f73ad30e 2820 if (inhibit_defer_pop == 0)
0a1c58a2 2821 {
f73ad30e 2822 /* Try to reuse some or all of the pending_stack_adjust
ce48579b
RH
2823 to get this space. */
2824 needed
f725a3ec 2825 = (combine_pending_stack_adjustment_and_call
ce48579b 2826 (unadjusted_args_size,
099e9712 2827 &adjusted_args_size,
ce48579b
RH
2828 preferred_unit_stack_boundary));
2829
2830 /* combine_pending_stack_adjustment_and_call computes
2831 an adjustment before the arguments are allocated.
2832 Account for them and see whether or not the stack
2833 needs to go up or down. */
2834 needed = unadjusted_args_size - needed;
2835
2836 if (needed < 0)
f73ad30e 2837 {
ce48579b
RH
2838 /* We're releasing stack space. */
2839 /* ??? We can avoid any adjustment at all if we're
2840 already aligned. FIXME. */
2841 pending_stack_adjust = -needed;
2842 do_pending_stack_adjust ();
f73ad30e
JH
2843 needed = 0;
2844 }
f725a3ec 2845 else
ce48579b
RH
2846 /* We need to allocate space. We'll do that in
2847 push_block below. */
2848 pending_stack_adjust = 0;
0a1c58a2 2849 }
ce48579b
RH
2850
2851 /* Special case this because overhead of `push_block' in
2852 this case is non-trivial. */
f73ad30e
JH
2853 if (needed == 0)
2854 argblock = virtual_outgoing_args_rtx;
0a1c58a2 2855 else
f73ad30e
JH
2856 argblock = push_block (GEN_INT (needed), 0, 0);
2857
f8a097cd
JH
2858 /* We only really need to call `copy_to_reg' in the case
2859 where push insns are going to be used to pass ARGBLOCK
2860 to a function call in ARGS. In that case, the stack
2861 pointer changes value from the allocation point to the
2862 call point, and hence the value of
2863 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2864 as well always do it. */
f73ad30e 2865 argblock = copy_to_reg (argblock);
0a1c58a2 2866
f8a097cd 2867 /* The save/restore code in store_one_arg handles all
ce48579b
RH
2868 cases except one: a constructor call (including a C
2869 function returning a BLKmode struct) to initialize
2870 an argument. */
f8a097cd
JH
2871 if (stack_arg_under_construction)
2872 {
e5e809f4 2873#ifndef OUTGOING_REG_PARM_STACK_SPACE
ce48579b 2874 rtx push_size = GEN_INT (reg_parm_stack_space
099e9712 2875 + adjusted_args_size.constant);
bfbf933a 2876#else
099e9712 2877 rtx push_size = GEN_INT (adjusted_args_size.constant);
bfbf933a 2878#endif
f8a097cd
JH
2879 if (old_stack_level == 0)
2880 {
ce48579b
RH
2881 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2882 NULL_RTX);
f8a097cd
JH
2883 old_pending_adj = pending_stack_adjust;
2884 pending_stack_adjust = 0;
ce48579b
RH
2885 /* stack_arg_under_construction says whether a stack
2886 arg is being constructed at the old stack level.
2887 Pushing the stack gets a clean outgoing argument
2888 block. */
2889 old_stack_arg_under_construction
2890 = stack_arg_under_construction;
f8a097cd
JH
2891 stack_arg_under_construction = 0;
2892 /* Make a new map for the new argument list. */
ce48579b
RH
2893 stack_usage_map = (char *)
2894 alloca (highest_outgoing_arg_in_use);
961192e1 2895 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
f8a097cd
JH
2896 highest_outgoing_arg_in_use = 0;
2897 }
ce48579b
RH
2898 allocate_dynamic_stack_space (push_size, NULL_RTX,
2899 BITS_PER_UNIT);
f8a097cd 2900 }
ce48579b
RH
2901 /* If argument evaluation might modify the stack pointer,
2902 copy the address of the argument list to a register. */
f8a097cd
JH
2903 for (i = 0; i < num_actuals; i++)
2904 if (args[i].pass_on_stack)
2905 {
2906 argblock = copy_addr_to_reg (argblock);
2907 break;
2908 }
f73ad30e 2909 }
0a1c58a2 2910 }
bfbf933a 2911 }
bfbf933a 2912
0a1c58a2 2913 compute_argument_addresses (args, argblock, num_actuals);
bfbf933a 2914
0a1c58a2
JL
2915 /* If we push args individually in reverse order, perform stack alignment
2916 before the first push (the last arg). */
f73ad30e 2917 if (PUSH_ARGS_REVERSED && argblock == 0
099e9712 2918 && adjusted_args_size.constant != unadjusted_args_size)
4e217aed 2919 {
0a1c58a2
JL
2920 /* When the stack adjustment is pending, we get better code
2921 by combining the adjustments. */
f725a3ec 2922 if (pending_stack_adjust
53d4257f 2923 && ! (flags & ECF_LIBCALL_BLOCK)
0a1c58a2 2924 && ! inhibit_defer_pop)
ce48579b
RH
2925 {
2926 pending_stack_adjust
f725a3ec 2927 = (combine_pending_stack_adjustment_and_call
ce48579b 2928 (unadjusted_args_size,
099e9712 2929 &adjusted_args_size,
ce48579b
RH
2930 preferred_unit_stack_boundary));
2931 do_pending_stack_adjust ();
2932 }
0a1c58a2 2933 else if (argblock == 0)
099e9712 2934 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
0a1c58a2 2935 - unadjusted_args_size));
0a1c58a2 2936 }
ebcd0b57
JH
2937 /* Now that the stack is properly aligned, pops can't safely
2938 be deferred during the evaluation of the arguments. */
2939 NO_DEFER_POP;
51bbfa0c 2940
0a1c58a2 2941 funexp = rtx_for_function_call (fndecl, exp);
51bbfa0c 2942
0a1c58a2
JL
2943 /* Figure out the register where the value, if any, will come back. */
2944 valreg = 0;
2945 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2946 && ! structure_value_addr)
2947 {
2948 if (pcc_struct_value)
2949 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
7d167afd 2950 fndecl, (pass == 0));
0a1c58a2 2951 else
7d167afd 2952 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
0a1c58a2 2953 }
51bbfa0c 2954
0a1c58a2
JL
2955 /* Precompute all register parameters. It isn't safe to compute anything
2956 once we have started filling any specific hard regs. */
2957 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
51bbfa0c 2958
f73ad30e 2959#ifdef REG_PARM_STACK_SPACE
0a1c58a2
JL
2960 /* Save the fixed argument area if it's part of the caller's frame and
2961 is clobbered by argument setup for this call. */
f8a097cd 2962 if (ACCUMULATE_OUTGOING_ARGS && pass)
f73ad30e
JH
2963 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2964 &low_to_save, &high_to_save);
b94301c2 2965#endif
51bbfa0c 2966
0a1c58a2
JL
2967 /* Now store (and compute if necessary) all non-register parms.
2968 These come before register parms, since they can require block-moves,
2969 which could clobber the registers used for register parms.
2970 Parms which have partial registers are not stored here,
2971 but we do preallocate space here if they want that. */
51bbfa0c 2972
0a1c58a2
JL
2973 for (i = 0; i < num_actuals; i++)
2974 if (args[i].reg == 0 || args[i].pass_on_stack)
c67846f2
JJ
2975 {
2976 rtx before_arg = get_last_insn ();
2977
4c6b3b2a
JJ
2978 if (store_one_arg (&args[i], argblock, flags,
2979 adjusted_args_size.var != 0,
2980 reg_parm_stack_space)
2981 || (pass == 0
2982 && check_sibcall_argument_overlap (before_arg,
0cdca92b 2983 &args[i], 1)))
c67846f2
JJ
2984 sibcall_failure = 1;
2985 }
0a1c58a2
JL
2986
2987 /* If we have a parm that is passed in registers but not in memory
2988 and whose alignment does not permit a direct copy into registers,
2989 make a group of pseudos that correspond to each register that we
2990 will later fill. */
2991 if (STRICT_ALIGNMENT)
2992 store_unaligned_arguments_into_pseudos (args, num_actuals);
2993
2994 /* Now store any partially-in-registers parm.
2995 This is the last place a block-move can happen. */
2996 if (reg_parm_seen)
2997 for (i = 0; i < num_actuals; i++)
2998 if (args[i].partial != 0 && ! args[i].pass_on_stack)
c67846f2
JJ
2999 {
3000 rtx before_arg = get_last_insn ();
3001
4c6b3b2a
JJ
3002 if (store_one_arg (&args[i], argblock, flags,
3003 adjusted_args_size.var != 0,
3004 reg_parm_stack_space)
3005 || (pass == 0
3006 && check_sibcall_argument_overlap (before_arg,
0cdca92b 3007 &args[i], 1)))
c67846f2
JJ
3008 sibcall_failure = 1;
3009 }
51bbfa0c 3010
0a1c58a2
JL
3011 /* If we pushed args in forward order, perform stack alignment
3012 after pushing the last arg. */
f73ad30e 3013 if (!PUSH_ARGS_REVERSED && argblock == 0)
099e9712 3014 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
0a1c58a2 3015 - unadjusted_args_size));
51bbfa0c 3016
0a1c58a2
JL
3017 /* If register arguments require space on the stack and stack space
3018 was not preallocated, allocate stack space here for arguments
3019 passed in registers. */
f73ad30e
JH
3020#ifdef OUTGOING_REG_PARM_STACK_SPACE
3021 if (!ACCUMULATE_OUTGOING_ARGS
f725a3ec 3022 && must_preallocate == 0 && reg_parm_stack_space > 0)
0a1c58a2 3023 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
3024#endif
3025
0a1c58a2
JL
3026 /* Pass the function the address in which to return a
3027 structure value. */
3028 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3029 {
3030 emit_move_insn (struct_value_rtx,
3031 force_reg (Pmode,
3032 force_operand (structure_value_addr,
3033 NULL_RTX)));
3034
0a1c58a2
JL
3035 if (GET_CODE (struct_value_rtx) == REG)
3036 use_reg (&call_fusage, struct_value_rtx);
3037 }
c2939b57 3038
0a1c58a2 3039 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3affaf29 3040 reg_parm_seen, pass == 0);
51bbfa0c 3041
0cdca92b
DJ
3042 load_register_parameters (args, num_actuals, &call_fusage, flags,
3043 pass == 0, &sibcall_failure);
f725a3ec 3044
0a1c58a2
JL
3045 /* Perform postincrements before actually calling the function. */
3046 emit_queue ();
51bbfa0c 3047
0a1c58a2
JL
3048 /* Save a pointer to the last insn before the call, so that we can
3049 later safely search backwards to find the CALL_INSN. */
3050 before_call = get_last_insn ();
51bbfa0c 3051
7d167afd
JJ
3052 /* Set up next argument register. For sibling calls on machines
3053 with register windows this should be the incoming register. */
3054#ifdef FUNCTION_INCOMING_ARG
3055 if (pass == 0)
3056 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3057 void_type_node, 1);
3058 else
3059#endif
3060 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3061 void_type_node, 1);
3062
0a1c58a2
JL
3063 /* All arguments and registers used for the call must be set up by
3064 now! */
3065
ce48579b
RH
3066 /* Stack must be properly aligned now. */
3067 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
d9a7d592 3068 abort ();
ebcd0b57 3069
0a1c58a2
JL
3070 /* Generate the actual call instruction. */
3071 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
099e9712 3072 adjusted_args_size.constant, struct_value_size,
7d167afd 3073 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
fa5322fa 3074 flags, & args_so_far);
0a1c58a2 3075
1503a7ec
JH
3076 /* Verify that we've deallocated all the stack we used. */
3077 if (pass
f725a3ec
KH
3078 && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
3079 abort ();
1503a7ec 3080
0a1c58a2
JL
3081 /* If call is cse'able, make appropriate pair of reg-notes around it.
3082 Test valreg so we don't crash; may safely ignore `const'
3083 if return type is void. Disable for PARALLEL return values, because
3084 we have no way to move such values into a pseudo register. */
53d4257f 3085 if (pass && (flags & ECF_LIBCALL_BLOCK))
9ae8ffe7 3086 {
0a1c58a2 3087 rtx insns;
9ae8ffe7 3088
e4abc3d5
RH
3089 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
3090 {
3091 insns = get_insns ();
3092 end_sequence ();
2f937369 3093 emit_insn (insns);
e4abc3d5
RH
3094 }
3095 else
3096 {
3097 rtx note = 0;
3098 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3099
3100 /* Mark the return value as a pointer if needed. */
3101 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3102 mark_reg_pointer (temp,
3103 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3104
3105 /* Construct an "equal form" for the value which mentions all the
3106 arguments in order as well as the function name. */
3107 for (i = 0; i < num_actuals; i++)
3108 note = gen_rtx_EXPR_LIST (VOIDmode,
3109 args[i].initial_value, note);
3110 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3111
3112 insns = get_insns ();
3113 end_sequence ();
3114
3115 if (flags & ECF_PURE)
3116 note = gen_rtx_EXPR_LIST (VOIDmode,
3117 gen_rtx_USE (VOIDmode,
3118 gen_rtx_MEM (BLKmode,
3119 gen_rtx_SCRATCH (VOIDmode))),
3120 note);
3121
3122 emit_libcall_block (insns, temp, valreg, note);
3123
3124 valreg = temp;
3125 }
0a1c58a2 3126 }
53d4257f 3127 else if (pass && (flags & ECF_MALLOC))
0a1c58a2
JL
3128 {
3129 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3130 rtx last, insns;
3131
f725a3ec 3132 /* The return value from a malloc-like function is a pointer. */
0a1c58a2 3133 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
bdb429a5 3134 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
0a1c58a2
JL
3135
3136 emit_move_insn (temp, valreg);
3137
3138 /* The return value from a malloc-like function can not alias
3139 anything else. */
3140 last = get_last_insn ();
f725a3ec 3141 REG_NOTES (last) =
0a1c58a2
JL
3142 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3143
3144 /* Write out the sequence. */
3145 insns = get_insns ();
3146 end_sequence ();
2f937369 3147 emit_insn (insns);
0a1c58a2
JL
3148 valreg = temp;
3149 }
51bbfa0c 3150
0a1c58a2
JL
3151 /* For calls to `setjmp', etc., inform flow.c it should complain
3152 if nonvolatile values are live. For functions that cannot return,
3153 inform flow that control does not fall through. */
51bbfa0c 3154
570a98eb 3155 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
c2939b57 3156 {
570a98eb 3157 /* The barrier must be emitted
0a1c58a2
JL
3158 immediately after the CALL_INSN. Some ports emit more
3159 than just a CALL_INSN above, so we must search for it here. */
51bbfa0c 3160
0a1c58a2
JL
3161 rtx last = get_last_insn ();
3162 while (GET_CODE (last) != CALL_INSN)
3163 {
3164 last = PREV_INSN (last);
3165 /* There was no CALL_INSN? */
3166 if (last == before_call)
3167 abort ();
3168 }
51bbfa0c 3169
570a98eb 3170 emit_barrier_after (last);
0a1c58a2 3171 }
51bbfa0c 3172
f2d33f13 3173 if (flags & ECF_LONGJMP)
099e9712 3174 current_function_calls_longjmp = 1;
51bbfa0c 3175
25a1fcb4
RK
3176 /* If this function is returning into a memory location marked as
3177 readonly, it means it is initializing that location. But we normally
3178 treat functions as not clobbering such locations, so we need to
3179 specify that this one does. */
3180 if (target != 0 && GET_CODE (target) == MEM
3181 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
3182 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3183
0a1c58a2 3184 /* If value type not void, return an rtx for the value. */
51bbfa0c 3185
0a1c58a2
JL
3186 /* If there are cleanups to be called, don't use a hard reg as target.
3187 We need to double check this and see if it matters anymore. */
194c7c45
RH
3188 if (any_pending_cleanups (1))
3189 {
3190 if (target && REG_P (target)
3191 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3192 target = 0;
3193 sibcall_failure = 1;
3194 }
51bbfa0c 3195
0a1c58a2
JL
3196 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3197 || ignore)
b5cd4ed4 3198 target = const0_rtx;
0a1c58a2
JL
3199 else if (structure_value_addr)
3200 {
3201 if (target == 0 || GET_CODE (target) != MEM)
3202 {
3bdf5ad1
RK
3203 target
3204 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3205 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3206 structure_value_addr));
3207 set_mem_attributes (target, exp, 1);
0a1c58a2
JL
3208 }
3209 }
3210 else if (pcc_struct_value)
cacbd532 3211 {
0a1c58a2
JL
3212 /* This is the special C++ case where we need to
3213 know what the true target was. We take care to
3214 never use this value more than once in one expression. */
3215 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3216 copy_to_reg (valreg));
3bdf5ad1 3217 set_mem_attributes (target, exp, 1);
cacbd532 3218 }
0a1c58a2
JL
3219 /* Handle calls that return values in multiple non-contiguous locations.
3220 The Irix 6 ABI has examples of this. */
3221 else if (GET_CODE (valreg) == PARALLEL)
3222 {
0a1c58a2
JL
3223 if (target == 0)
3224 {
1da68f56
RK
3225 /* This will only be assigned once, so it can be readonly. */
3226 tree nt = build_qualified_type (TREE_TYPE (exp),
3227 (TYPE_QUALS (TREE_TYPE (exp))
3228 | TYPE_QUAL_CONST));
3229
3230 target = assign_temp (nt, 0, 1, 1);
0a1c58a2
JL
3231 preserve_temp_slots (target);
3232 }
3233
3234 if (! rtx_equal_p (target, valreg))
1da68f56 3235 emit_group_store (target, valreg,
04050c69 3236 int_size_in_bytes (TREE_TYPE (exp)));
19caa751 3237
0a1c58a2
JL
3238 /* We can not support sibling calls for this case. */
3239 sibcall_failure = 1;
3240 }
3241 else if (target
3242 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3243 && GET_MODE (target) == GET_MODE (valreg))
3244 {
3245 /* TARGET and VALREG cannot be equal at this point because the
3246 latter would not have REG_FUNCTION_VALUE_P true, while the
3247 former would if it were referring to the same register.
3248
3249 If they refer to the same register, this move will be a no-op,
3250 except when function inlining is being done. */
3251 emit_move_insn (target, valreg);
3252 }
3253 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
8eb99146
RH
3254 {
3255 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3256
3257 /* We can not support sibling calls for this case. */
3258 sibcall_failure = 1;
3259 }
0a1c58a2
JL
3260 else
3261 target = copy_to_reg (valreg);
51bbfa0c 3262
84b55618 3263#ifdef PROMOTE_FUNCTION_RETURN
0a1c58a2
JL
3264 /* If we promoted this return value, make the proper SUBREG. TARGET
3265 might be const0_rtx here, so be careful. */
3266 if (GET_CODE (target) == REG
3267 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3268 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3269 {
3270 tree type = TREE_TYPE (exp);
3271 int unsignedp = TREE_UNSIGNED (type);
ddef6bc7 3272 int offset = 0;
84b55618 3273
0a1c58a2
JL
3274 /* If we don't promote as expected, something is wrong. */
3275 if (GET_MODE (target)
3276 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3277 abort ();
5d2ac65e 3278
ddef6bc7
JJ
3279 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3280 && GET_MODE_SIZE (GET_MODE (target))
3281 > GET_MODE_SIZE (TYPE_MODE (type)))
3282 {
3283 offset = GET_MODE_SIZE (GET_MODE (target))
3284 - GET_MODE_SIZE (TYPE_MODE (type));
3285 if (! BYTES_BIG_ENDIAN)
3286 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3287 else if (! WORDS_BIG_ENDIAN)
3288 offset %= UNITS_PER_WORD;
3289 }
3290 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
0a1c58a2 3291 SUBREG_PROMOTED_VAR_P (target) = 1;
7879b81e 3292 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
0a1c58a2 3293 }
84b55618
RK
3294#endif
3295
0a1c58a2
JL
3296 /* If size of args is variable or this was a constructor call for a stack
3297 argument, restore saved stack-pointer value. */
51bbfa0c 3298
7393c642 3299 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
0a1c58a2
JL
3300 {
3301 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3302 pending_stack_adjust = old_pending_adj;
0a1c58a2
JL
3303 stack_arg_under_construction = old_stack_arg_under_construction;
3304 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3305 stack_usage_map = initial_stack_usage_map;
0a1c58a2
JL
3306 sibcall_failure = 1;
3307 }
f8a097cd 3308 else if (ACCUMULATE_OUTGOING_ARGS && pass)
0a1c58a2 3309 {
51bbfa0c 3310#ifdef REG_PARM_STACK_SPACE
0a1c58a2
JL
3311 if (save_area)
3312 {
3313 restore_fixed_argument_area (save_area, argblock,
3314 high_to_save, low_to_save);
0a1c58a2 3315 }
b94301c2 3316#endif
51bbfa0c 3317
0a1c58a2
JL
3318 /* If we saved any argument areas, restore them. */
3319 for (i = 0; i < num_actuals; i++)
3320 if (args[i].save_area)
3321 {
3322 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3323 rtx stack_area
3324 = gen_rtx_MEM (save_mode,
3325 memory_address (save_mode,
3326 XEXP (args[i].stack_slot, 0)));
3327
3328 if (save_mode != BLKmode)
3329 emit_move_insn (stack_area, args[i].save_area);
3330 else
44bb111a
RH
3331 emit_block_move (stack_area, args[i].save_area,
3332 GEN_INT (args[i].size.constant),
3333 BLOCK_OP_CALL_PARM);
0a1c58a2 3334 }
51bbfa0c 3335
0a1c58a2
JL
3336 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3337 stack_usage_map = initial_stack_usage_map;
3338 }
51bbfa0c 3339
f725a3ec 3340 /* If this was alloca, record the new stack level for nonlocal gotos.
0a1c58a2
JL
3341 Check for the handler slots since we might not have a save area
3342 for non-local gotos. */
59257ff7 3343
f2d33f13 3344 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
0a1c58a2 3345 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c 3346
0a1c58a2
JL
3347 /* Free up storage we no longer need. */
3348 for (i = 0; i < num_actuals; ++i)
3349 if (args[i].aligned_regs)
3350 free (args[i].aligned_regs);
3351
e245d3af
RH
3352 if (pass == 0)
3353 {
3354 /* Undo the fake expand_start_target_temps we did earlier. If
3355 there had been any cleanups created, we've already set
3356 sibcall_failure. */
3357 expand_end_target_temps ();
3358 }
3359
0a1c58a2
JL
3360 insns = get_insns ();
3361 end_sequence ();
3362
3363 if (pass == 0)
3364 {
3365 tail_call_insns = insns;
3366
0a1c58a2
JL
3367 /* Restore the pending stack adjustment now that we have
3368 finished generating the sibling call sequence. */
1503a7ec 3369
0a1c58a2 3370 pending_stack_adjust = save_pending_stack_adjust;
1503a7ec 3371 stack_pointer_delta = save_stack_pointer_delta;
099e9712
JH
3372
3373 /* Prepare arg structure for next iteration. */
f725a3ec 3374 for (i = 0; i < num_actuals; i++)
099e9712
JH
3375 {
3376 args[i].value = 0;
3377 args[i].aligned_regs = 0;
3378 args[i].stack = 0;
3379 }
c67846f2
JJ
3380
3381 sbitmap_free (stored_args_map);
0a1c58a2
JL
3382 }
3383 else
3384 normal_call_insns = insns;
fadb729c
JJ
3385
3386 /* If something prevents making this a sibling call,
3387 zero out the sequence. */
3388 if (sibcall_failure)
3389 tail_call_insns = NULL_RTX;
0a1c58a2
JL
3390 }
3391
3392 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3393 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3394 can happen if the arguments to this function call an inline
3395 function who's expansion contains another CALL_PLACEHOLDER.
3396
3397 If there are any C_Ps in any of these sequences, replace them
f725a3ec 3398 with their normal call. */
0a1c58a2
JL
3399
3400 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3401 if (GET_CODE (insn) == CALL_INSN
3402 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3403 replace_call_placeholder (insn, sibcall_use_normal);
3404
3405 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3406 if (GET_CODE (insn) == CALL_INSN
3407 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3408 replace_call_placeholder (insn, sibcall_use_normal);
3409
3410 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3411 if (GET_CODE (insn) == CALL_INSN
3412 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3413 replace_call_placeholder (insn, sibcall_use_normal);
3414
3415 /* If this was a potential tail recursion site, then emit a
3416 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3417 One of them will be selected later. */
3418 if (tail_recursion_insns || tail_call_insns)
3419 {
3420 /* The tail recursion label must be kept around. We could expose
3421 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3422 and makes determining true tail recursion sites difficult.
3423
3424 So we set LABEL_PRESERVE_P here, then clear it when we select
3425 one of the call sequences after rtl generation is complete. */
3426 if (tail_recursion_insns)
3427 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3428 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3429 tail_call_insns,
3430 tail_recursion_insns,
3431 tail_recursion_label));
3432 }
3433 else
2f937369 3434 emit_insn (normal_call_insns);
51bbfa0c 3435
0a1c58a2 3436 currently_expanding_call--;
8e6a59fe 3437
7393c642
RK
3438 /* If this function returns with the stack pointer depressed, ensure
3439 this block saves and restores the stack pointer, show it was
3440 changed, and adjust for any outgoing arg space. */
3441 if (flags & ECF_SP_DEPRESSED)
3442 {
3443 clear_pending_stack_adjust ();
3444 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3445 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3446 save_stack_pointer ();
3447 }
3448
51bbfa0c
RS
3449 return target;
3450}
3451\f
de76b467 3452/* Output a library call to function FUN (a SYMBOL_REF rtx).
f725a3ec 3453 The RETVAL parameter specifies whether return value needs to be saved, other
0407c02b 3454 parameters are documented in the emit_library_call function below. */
8ac61af7 3455
de76b467 3456static rtx
2a8f6b90 3457emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
de76b467
JH
3458 int retval;
3459 rtx orgfun;
3460 rtx value;
ebb1b59a 3461 enum libcall_type fn_type;
de76b467
JH
3462 enum machine_mode outmode;
3463 int nargs;
3464 va_list p;
43bc5f13 3465{
3c0fca12
RH
3466 /* Total size in bytes of all the stack-parms scanned so far. */
3467 struct args_size args_size;
3468 /* Size of arguments before any adjustments (such as rounding). */
3469 struct args_size original_args_size;
b3694847 3470 int argnum;
3c0fca12
RH
3471 rtx fun;
3472 int inc;
3473 int count;
3474 struct args_size alignment_pad;
3475 rtx argblock = 0;
3476 CUMULATIVE_ARGS args_so_far;
f725a3ec
KH
3477 struct arg
3478 {
3479 rtx value;
3480 enum machine_mode mode;
3481 rtx reg;
3482 int partial;
3483 struct args_size offset;
3484 struct args_size size;
3485 rtx save_area;
3486 };
3c0fca12
RH
3487 struct arg *argvec;
3488 int old_inhibit_defer_pop = inhibit_defer_pop;
3489 rtx call_fusage = 0;
3490 rtx mem_value = 0;
5591ee6f 3491 rtx valreg;
3c0fca12
RH
3492 int pcc_struct_value = 0;
3493 int struct_value_size = 0;
52a11cbf 3494 int flags;
3c0fca12 3495 int reg_parm_stack_space = 0;
3c0fca12 3496 int needed;
695ee791 3497 rtx before_call;
b0c48229 3498 tree tfom; /* type_for_mode (outmode, 0) */
3c0fca12 3499
f73ad30e 3500#ifdef REG_PARM_STACK_SPACE
3c0fca12
RH
3501 /* Define the boundary of the register parm stack space that needs to be
3502 save, if any. */
3503 int low_to_save = -1, high_to_save = 0;
f725a3ec 3504 rtx save_area = 0; /* Place that it is saved. */
3c0fca12
RH
3505#endif
3506
3c0fca12
RH
3507 /* Size of the stack reserved for parameter registers. */
3508 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3509 char *initial_stack_usage_map = stack_usage_map;
3c0fca12
RH
3510
3511#ifdef REG_PARM_STACK_SPACE
3512#ifdef MAYBE_REG_PARM_STACK_SPACE
3513 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3514#else
3515 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3516#endif
3517#endif
3518
9555a122 3519 /* By default, library functions can not throw. */
52a11cbf
RH
3520 flags = ECF_NOTHROW;
3521
9555a122
RH
3522 switch (fn_type)
3523 {
3524 case LCT_NORMAL:
53d4257f 3525 break;
9555a122 3526 case LCT_CONST:
53d4257f
JH
3527 flags |= ECF_CONST;
3528 break;
9555a122 3529 case LCT_PURE:
53d4257f 3530 flags |= ECF_PURE;
9555a122
RH
3531 break;
3532 case LCT_CONST_MAKE_BLOCK:
53d4257f 3533 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
9555a122
RH
3534 break;
3535 case LCT_PURE_MAKE_BLOCK:
53d4257f 3536 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
9555a122
RH
3537 break;
3538 case LCT_NORETURN:
3539 flags |= ECF_NORETURN;
3540 break;
3541 case LCT_THROW:
3542 flags = ECF_NORETURN;
3543 break;
9d98f8f9
JH
3544 case LCT_ALWAYS_RETURN:
3545 flags = ECF_ALWAYS_RETURN;
3546 break;
9defc9b7
RH
3547 case LCT_RETURNS_TWICE:
3548 flags = ECF_RETURNS_TWICE;
3549 break;
9555a122 3550 }
3c0fca12
RH
3551 fun = orgfun;
3552
3c0fca12
RH
3553 /* Ensure current function's preferred stack boundary is at least
3554 what we need. */
3555 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3556 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3c0fca12
RH
3557
3558 /* If this kind of value comes back in memory,
3559 decide where in memory it should come back. */
b0c48229 3560 if (outmode != VOIDmode)
3c0fca12 3561 {
b0c48229
NB
3562 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3563 if (aggregate_value_p (tfom))
3564 {
3c0fca12 3565#ifdef PCC_STATIC_STRUCT_RETURN
b0c48229
NB
3566 rtx pointer_reg
3567 = hard_function_value (build_pointer_type (tfom), 0, 0);
3568 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3569 pcc_struct_value = 1;
3570 if (value == 0)
3571 value = gen_reg_rtx (outmode);
3c0fca12 3572#else /* not PCC_STATIC_STRUCT_RETURN */
b0c48229
NB
3573 struct_value_size = GET_MODE_SIZE (outmode);
3574 if (value != 0 && GET_CODE (value) == MEM)
3575 mem_value = value;
3576 else
3577 mem_value = assign_temp (tfom, 0, 1, 1);
3c0fca12 3578#endif
b0c48229
NB
3579 /* This call returns a big structure. */
3580 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3581 }
3c0fca12 3582 }
b0c48229
NB
3583 else
3584 tfom = void_type_node;
3c0fca12
RH
3585
3586 /* ??? Unfinished: must pass the memory address as an argument. */
3587
3588 /* Copy all the libcall-arguments out of the varargs data
3589 and into a vector ARGVEC.
3590
3591 Compute how to pass each argument. We only support a very small subset
3592 of the full argument passing conventions to limit complexity here since
3593 library functions shouldn't have many args. */
3594
3595 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
961192e1 3596 memset ((char *) argvec, 0, (nargs + 1) * sizeof (struct arg));
3c0fca12 3597
97fc4caf
AO
3598#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3599 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3600#else
3c0fca12 3601 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
97fc4caf 3602#endif
3c0fca12
RH
3603
3604 args_size.constant = 0;
3605 args_size.var = 0;
3606
3607 count = 0;
3608
ebb1b59a
BS
3609 /* Now we are about to start emitting insns that can be deleted
3610 if a libcall is deleted. */
53d4257f 3611 if (flags & ECF_LIBCALL_BLOCK)
ebb1b59a
BS
3612 start_sequence ();
3613
3c0fca12
RH
3614 push_temp_slots ();
3615
3616 /* If there's a structure value address to be passed,
3617 either pass it in the special place, or pass it as an extra argument. */
3618 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3619 {
3620 rtx addr = XEXP (mem_value, 0);
3621 nargs++;
3622
3623 /* Make sure it is a reasonable operand for a move or push insn. */
3624 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3625 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3626 addr = force_operand (addr, NULL_RTX);
3627
3628 argvec[count].value = addr;
3629 argvec[count].mode = Pmode;
3630 argvec[count].partial = 0;
3631
3632 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3633#ifdef FUNCTION_ARG_PARTIAL_NREGS
3634 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3635 abort ();
3636#endif
3637
3638 locate_and_pad_parm (Pmode, NULL_TREE,
a4d5044f
CM
3639#ifdef STACK_PARMS_IN_REG_PARM_AREA
3640 1,
3641#else
3642 argvec[count].reg != 0,
3643#endif
3c0fca12
RH
3644 NULL_TREE, &args_size, &argvec[count].offset,
3645 &argvec[count].size, &alignment_pad);
3646
3c0fca12
RH
3647 if (argvec[count].reg == 0 || argvec[count].partial != 0
3648 || reg_parm_stack_space > 0)
3649 args_size.constant += argvec[count].size.constant;
3650
3651 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3652
3653 count++;
3654 }
3655
3656 for (; count < nargs; count++)
3657 {
3658 rtx val = va_arg (p, rtx);
3659 enum machine_mode mode = va_arg (p, enum machine_mode);
3660
3661 /* We cannot convert the arg value to the mode the library wants here;
3662 must do it earlier where we know the signedness of the arg. */
3663 if (mode == BLKmode
3664 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3665 abort ();
3666
3667 /* On some machines, there's no way to pass a float to a library fcn.
3668 Pass it as a double instead. */
3669#ifdef LIBGCC_NEEDS_DOUBLE
3670 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3671 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3672#endif
3673
3674 /* There's no need to call protect_from_queue, because
3675 either emit_move_insn or emit_push_insn will do that. */
3676
3677 /* Make sure it is a reasonable operand for a move or push insn. */
3678 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3679 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3680 val = force_operand (val, NULL_RTX);
3681
3682#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3683 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3684 {
f474c6f8
AO
3685 rtx slot;
3686 int must_copy = 1
3687#ifdef FUNCTION_ARG_CALLEE_COPIES
3688 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3689 NULL_TREE, 1)
3690#endif
3691 ;
3692
a0dc500c
R
3693 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3694 functions, so we have to pretend this isn't such a function. */
3695 if (flags & ECF_LIBCALL_BLOCK)
3696 {
3697 rtx insns = get_insns ();
3698 end_sequence ();
3699 emit_insn (insns);
3700 }
3701 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3702
99a32567
DM
3703 /* If this was a CONST function, it is now PURE since
3704 it now reads memory. */
3705 if (flags & ECF_CONST)
3706 {
3707 flags &= ~ECF_CONST;
3708 flags |= ECF_PURE;
3709 }
3710
f474c6f8
AO
3711 if (GET_MODE (val) == MEM && ! must_copy)
3712 slot = val;
3713 else if (must_copy)
3714 {
b0c48229
NB
3715 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3716 0, 1, 1);
f474c6f8
AO
3717 emit_move_insn (slot, val);
3718 }
3719 else
3720 {
b0c48229 3721 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
f474c6f8 3722
4cfc6042 3723 slot = gen_rtx_MEM (mode,
f474c6f8
AO
3724 expand_expr (build1 (ADDR_EXPR,
3725 build_pointer_type
3726 (type),
3727 make_tree (type, val)),
3728 NULL_RTX, VOIDmode, 0));
3729 }
1da68f56 3730
6b5273c3
AO
3731 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3732 gen_rtx_USE (VOIDmode, slot),
3733 call_fusage);
f474c6f8
AO
3734 if (must_copy)
3735 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3736 gen_rtx_CLOBBER (VOIDmode,
3737 slot),
3738 call_fusage);
3739
3c0fca12 3740 mode = Pmode;
f474c6f8 3741 val = force_operand (XEXP (slot, 0), NULL_RTX);
3c0fca12
RH
3742 }
3743#endif
3744
3745 argvec[count].value = val;
3746 argvec[count].mode = mode;
3747
3748 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3749
3750#ifdef FUNCTION_ARG_PARTIAL_NREGS
3751 argvec[count].partial
3752 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3753#else
3754 argvec[count].partial = 0;
3755#endif
3756
3757 locate_and_pad_parm (mode, NULL_TREE,
a4d5044f 3758#ifdef STACK_PARMS_IN_REG_PARM_AREA
f725a3ec 3759 1,
a4d5044f
CM
3760#else
3761 argvec[count].reg != 0,
3762#endif
3c0fca12
RH
3763 NULL_TREE, &args_size, &argvec[count].offset,
3764 &argvec[count].size, &alignment_pad);
3765
3766 if (argvec[count].size.var)
3767 abort ();
3768
3769 if (reg_parm_stack_space == 0 && argvec[count].partial)
3770 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3771
3772 if (argvec[count].reg == 0 || argvec[count].partial != 0
3773 || reg_parm_stack_space > 0)
3774 args_size.constant += argvec[count].size.constant;
3775
3776 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3777 }
3c0fca12
RH
3778
3779#ifdef FINAL_REG_PARM_STACK_SPACE
3780 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3781 args_size.var);
3782#endif
3783 /* If this machine requires an external definition for library
3784 functions, write one out. */
3785 assemble_external_libcall (fun);
3786
3787 original_args_size = args_size;
1503a7ec
JH
3788 args_size.constant = (((args_size.constant
3789 + stack_pointer_delta
3790 + STACK_BYTES - 1)
3791 / STACK_BYTES
3792 * STACK_BYTES)
3793 - stack_pointer_delta);
3c0fca12
RH
3794
3795 args_size.constant = MAX (args_size.constant,
3796 reg_parm_stack_space);
3797
3798#ifndef OUTGOING_REG_PARM_STACK_SPACE
3799 args_size.constant -= reg_parm_stack_space;
3800#endif
3801
3802 if (args_size.constant > current_function_outgoing_args_size)
3803 current_function_outgoing_args_size = args_size.constant;
3804
f73ad30e
JH
3805 if (ACCUMULATE_OUTGOING_ARGS)
3806 {
3807 /* Since the stack pointer will never be pushed, it is possible for
3808 the evaluation of a parm to clobber something we have already
3809 written to the stack. Since most function calls on RISC machines
3810 do not use the stack, this is uncommon, but must work correctly.
3c0fca12 3811
f73ad30e
JH
3812 Therefore, we save any area of the stack that was already written
3813 and that we are using. Here we set up to do this by making a new
3814 stack usage map from the old one.
3c0fca12 3815
f73ad30e
JH
3816 Another approach might be to try to reorder the argument
3817 evaluations to avoid this conflicting stack usage. */
3c0fca12 3818
f73ad30e 3819 needed = args_size.constant;
3c0fca12
RH
3820
3821#ifndef OUTGOING_REG_PARM_STACK_SPACE
f73ad30e
JH
3822 /* Since we will be writing into the entire argument area, the
3823 map must be allocated for its entire size, not just the part that
3824 is the responsibility of the caller. */
3825 needed += reg_parm_stack_space;
3c0fca12
RH
3826#endif
3827
3828#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
3829 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3830 needed + 1);
3c0fca12 3831#else
f73ad30e
JH
3832 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3833 needed);
3c0fca12 3834#endif
f73ad30e 3835 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3c0fca12 3836
f73ad30e 3837 if (initial_highest_arg_in_use)
2e09e75a
JM
3838 memcpy (stack_usage_map, initial_stack_usage_map,
3839 initial_highest_arg_in_use);
3c0fca12 3840
f73ad30e 3841 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 3842 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
3843 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3844 needed = 0;
3c0fca12 3845
c39ada04
DD
3846 /* We must be careful to use virtual regs before they're instantiated,
3847 and real regs afterwards. Loop optimization, for example, can create
3848 new libcalls after we've instantiated the virtual regs, and if we
3849 use virtuals anyway, they won't match the rtl patterns. */
3c0fca12 3850
c39ada04
DD
3851 if (virtuals_instantiated)
3852 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3853 else
3854 argblock = virtual_outgoing_args_rtx;
f73ad30e
JH
3855 }
3856 else
3857 {
3858 if (!PUSH_ARGS)
3859 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3860 }
3c0fca12 3861
3c0fca12
RH
3862 /* If we push args individually in reverse order, perform stack alignment
3863 before the first push (the last arg). */
f73ad30e 3864 if (argblock == 0 && PUSH_ARGS_REVERSED)
3c0fca12
RH
3865 anti_adjust_stack (GEN_INT (args_size.constant
3866 - original_args_size.constant));
3c0fca12 3867
f73ad30e
JH
3868 if (PUSH_ARGS_REVERSED)
3869 {
3870 inc = -1;
3871 argnum = nargs - 1;
3872 }
3873 else
3874 {
3875 inc = 1;
3876 argnum = 0;
3877 }
3c0fca12 3878
f73ad30e
JH
3879#ifdef REG_PARM_STACK_SPACE
3880 if (ACCUMULATE_OUTGOING_ARGS)
3881 {
3882 /* The argument list is the property of the called routine and it
3883 may clobber it. If the fixed area has been used for previous
3884 parameters, we must save and restore it.
3c0fca12 3885
f73ad30e 3886 Here we compute the boundary of the that needs to be saved, if any. */
3c0fca12
RH
3887
3888#ifdef ARGS_GROW_DOWNWARD
f73ad30e 3889 for (count = 0; count < reg_parm_stack_space + 1; count++)
3c0fca12 3890#else
f73ad30e 3891 for (count = 0; count < reg_parm_stack_space; count++)
3c0fca12 3892#endif
f73ad30e 3893 {
f725a3ec 3894 if (count >= highest_outgoing_arg_in_use
f73ad30e
JH
3895 || stack_usage_map[count] == 0)
3896 continue;
3c0fca12 3897
f73ad30e
JH
3898 if (low_to_save == -1)
3899 low_to_save = count;
3c0fca12 3900
f73ad30e
JH
3901 high_to_save = count;
3902 }
3c0fca12 3903
f73ad30e
JH
3904 if (low_to_save >= 0)
3905 {
3906 int num_to_save = high_to_save - low_to_save + 1;
3907 enum machine_mode save_mode
3908 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3909 rtx stack_area;
3c0fca12 3910
f73ad30e
JH
3911 /* If we don't have the required alignment, must do this in BLKmode. */
3912 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3913 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3914 save_mode = BLKmode;
3c0fca12
RH
3915
3916#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
3917 stack_area = gen_rtx_MEM (save_mode,
3918 memory_address (save_mode,
3919 plus_constant (argblock,
f725a3ec 3920 -high_to_save)));
3c0fca12 3921#else
f73ad30e
JH
3922 stack_area = gen_rtx_MEM (save_mode,
3923 memory_address (save_mode,
3924 plus_constant (argblock,
3925 low_to_save)));
3c0fca12 3926#endif
f73ad30e
JH
3927 if (save_mode == BLKmode)
3928 {
3929 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
8ac61af7 3930 set_mem_align (save_area, PARM_BOUNDARY);
44bb111a
RH
3931 emit_block_move (save_area, stack_area, GEN_INT (num_to_save),
3932 BLOCK_OP_CALL_PARM);
f73ad30e
JH
3933 }
3934 else
3935 {
3936 save_area = gen_reg_rtx (save_mode);
3937 emit_move_insn (save_area, stack_area);
3938 }
3c0fca12
RH
3939 }
3940 }
3941#endif
f725a3ec 3942
3c0fca12
RH
3943 /* Push the args that need to be pushed. */
3944
3945 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3946 are to be pushed. */
3947 for (count = 0; count < nargs; count++, argnum += inc)
3948 {
b3694847
SS
3949 enum machine_mode mode = argvec[argnum].mode;
3950 rtx val = argvec[argnum].value;
3c0fca12
RH
3951 rtx reg = argvec[argnum].reg;
3952 int partial = argvec[argnum].partial;
f73ad30e 3953 int lower_bound = 0, upper_bound = 0, i;
3c0fca12
RH
3954
3955 if (! (reg != 0 && partial == 0))
3956 {
f73ad30e
JH
3957 if (ACCUMULATE_OUTGOING_ARGS)
3958 {
f8a097cd
JH
3959 /* If this is being stored into a pre-allocated, fixed-size,
3960 stack area, save any previous data at that location. */
3c0fca12
RH
3961
3962#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
3963 /* stack_slot is negative, but we want to index stack_usage_map
3964 with positive values. */
3965 upper_bound = -argvec[argnum].offset.constant + 1;
3966 lower_bound = upper_bound - argvec[argnum].size.constant;
3c0fca12 3967#else
f73ad30e
JH
3968 lower_bound = argvec[argnum].offset.constant;
3969 upper_bound = lower_bound + argvec[argnum].size.constant;
3c0fca12
RH
3970#endif
3971
f73ad30e
JH
3972 for (i = lower_bound; i < upper_bound; i++)
3973 if (stack_usage_map[i]
f8a097cd
JH
3974 /* Don't store things in the fixed argument area at this
3975 point; it has already been saved. */
f73ad30e
JH
3976 && i > reg_parm_stack_space)
3977 break;
3c0fca12 3978
f73ad30e
JH
3979 if (i != upper_bound)
3980 {
f8a097cd 3981 /* We need to make a save area. See what mode we can make
f725a3ec 3982 it. */
f73ad30e 3983 enum machine_mode save_mode
f8a097cd
JH
3984 = mode_for_size (argvec[argnum].size.constant
3985 * BITS_PER_UNIT,
f73ad30e
JH
3986 MODE_INT, 1);
3987 rtx stack_area
3988 = gen_rtx_MEM
3989 (save_mode,
3990 memory_address
3991 (save_mode,
3992 plus_constant (argblock,
3993 argvec[argnum].offset.constant)));
3994 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3995
3996 emit_move_insn (argvec[argnum].save_area, stack_area);
3997 }
3c0fca12 3998 }
19caa751 3999
44bb111a
RH
4000 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
4001 partial, reg, 0, argblock,
4002 GEN_INT (argvec[argnum].offset.constant),
3c0fca12
RH
4003 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
4004
3c0fca12 4005 /* Now mark the segment we just used. */
f73ad30e
JH
4006 if (ACCUMULATE_OUTGOING_ARGS)
4007 for (i = lower_bound; i < upper_bound; i++)
4008 stack_usage_map[i] = 1;
3c0fca12
RH
4009
4010 NO_DEFER_POP;
4011 }
4012 }
4013
3c0fca12
RH
4014 /* If we pushed args in forward order, perform stack alignment
4015 after pushing the last arg. */
f73ad30e 4016 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3c0fca12
RH
4017 anti_adjust_stack (GEN_INT (args_size.constant
4018 - original_args_size.constant));
3c0fca12 4019
f73ad30e
JH
4020 if (PUSH_ARGS_REVERSED)
4021 argnum = nargs - 1;
4022 else
4023 argnum = 0;
3c0fca12 4024
3affaf29 4025 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
3c0fca12
RH
4026
4027 /* Now load any reg parms into their regs. */
4028
4029 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4030 are to be pushed. */
4031 for (count = 0; count < nargs; count++, argnum += inc)
4032 {
b3694847 4033 rtx val = argvec[argnum].value;
3c0fca12
RH
4034 rtx reg = argvec[argnum].reg;
4035 int partial = argvec[argnum].partial;
4036
4037 /* Handle calls that pass values in multiple non-contiguous
4038 locations. The PA64 has examples of this for library calls. */
4039 if (reg != 0 && GET_CODE (reg) == PARALLEL)
04050c69 4040 emit_group_load (reg, val, GET_MODE_SIZE (GET_MODE (val)));
3c0fca12
RH
4041 else if (reg != 0 && partial == 0)
4042 emit_move_insn (reg, val);
4043
4044 NO_DEFER_POP;
4045 }
4046
3c0fca12
RH
4047 /* Any regs containing parms remain in use through the call. */
4048 for (count = 0; count < nargs; count++)
4049 {
4050 rtx reg = argvec[count].reg;
4051 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4052 use_group_regs (&call_fusage, reg);
4053 else if (reg != 0)
4054 use_reg (&call_fusage, reg);
4055 }
4056
4057 /* Pass the function the address in which to return a structure value. */
4058 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
4059 {
4060 emit_move_insn (struct_value_rtx,
4061 force_reg (Pmode,
4062 force_operand (XEXP (mem_value, 0),
4063 NULL_RTX)));
4064 if (GET_CODE (struct_value_rtx) == REG)
f725a3ec 4065 use_reg (&call_fusage, struct_value_rtx);
3c0fca12
RH
4066 }
4067
4068 /* Don't allow popping to be deferred, since then
4069 cse'ing of library calls could delete a call and leave the pop. */
4070 NO_DEFER_POP;
5591ee6f
JH
4071 valreg = (mem_value == 0 && outmode != VOIDmode
4072 ? hard_libcall_value (outmode) : NULL_RTX);
3c0fca12 4073
ce48579b 4074 /* Stack must be properly aligned now. */
ebcd0b57 4075 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
f725a3ec 4076 abort ();
ebcd0b57 4077
695ee791
RH
4078 before_call = get_last_insn ();
4079
3c0fca12
RH
4080 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4081 will set inhibit_defer_pop to that value. */
de76b467
JH
4082 /* The return type is needed to decide how many bytes the function pops.
4083 Signedness plays no role in that, so for simplicity, we pretend it's
4084 always signed. We also assume that the list of arguments passed has
4085 no impact, so we pretend it is unknown. */
3c0fca12 4086
f725a3ec
KH
4087 emit_call_1 (fun,
4088 get_identifier (XSTR (orgfun, 0)),
b0c48229 4089 build_function_type (tfom, NULL_TREE),
f725a3ec 4090 original_args_size.constant, args_size.constant,
3c0fca12
RH
4091 struct_value_size,
4092 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
5591ee6f 4093 valreg,
fa5322fa 4094 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3c0fca12 4095
695ee791
RH
4096 /* For calls to `setjmp', etc., inform flow.c it should complain
4097 if nonvolatile values are live. For functions that cannot return,
4098 inform flow that control does not fall through. */
4099
570a98eb 4100 if (flags & (ECF_NORETURN | ECF_LONGJMP))
695ee791 4101 {
570a98eb 4102 /* The barrier note must be emitted
695ee791
RH
4103 immediately after the CALL_INSN. Some ports emit more than
4104 just a CALL_INSN above, so we must search for it here. */
4105
4106 rtx last = get_last_insn ();
4107 while (GET_CODE (last) != CALL_INSN)
4108 {
4109 last = PREV_INSN (last);
4110 /* There was no CALL_INSN? */
4111 if (last == before_call)
4112 abort ();
4113 }
4114
570a98eb 4115 emit_barrier_after (last);
695ee791
RH
4116 }
4117
3c0fca12
RH
4118 /* Now restore inhibit_defer_pop to its actual original value. */
4119 OK_DEFER_POP;
4120
ebb1b59a
BS
4121 /* If call is cse'able, make appropriate pair of reg-notes around it.
4122 Test valreg so we don't crash; may safely ignore `const'
4123 if return type is void. Disable for PARALLEL return values, because
4124 we have no way to move such values into a pseudo register. */
53d4257f 4125 if (flags & ECF_LIBCALL_BLOCK)
ebb1b59a 4126 {
ebb1b59a 4127 rtx insns;
ebb1b59a 4128
e4abc3d5
RH
4129 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
4130 {
4131 insns = get_insns ();
4132 end_sequence ();
2f937369 4133 emit_insn (insns);
e4abc3d5
RH
4134 }
4135 else
4136 {
4137 rtx note = 0;
4138 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4139 int i;
ebb1b59a 4140
e4abc3d5
RH
4141 /* Construct an "equal form" for the value which mentions all the
4142 arguments in order as well as the function name. */
4143 for (i = 0; i < nargs; i++)
4144 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4145 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
ebb1b59a 4146
e4abc3d5
RH
4147 insns = get_insns ();
4148 end_sequence ();
ebb1b59a 4149
e4abc3d5
RH
4150 if (flags & ECF_PURE)
4151 note = gen_rtx_EXPR_LIST (VOIDmode,
4152 gen_rtx_USE (VOIDmode,
4153 gen_rtx_MEM (BLKmode,
4154 gen_rtx_SCRATCH (VOIDmode))),
4155 note);
4156
4157 emit_libcall_block (insns, temp, valreg, note);
ebb1b59a 4158
e4abc3d5
RH
4159 valreg = temp;
4160 }
ebb1b59a 4161 }
3c0fca12
RH
4162 pop_temp_slots ();
4163
4164 /* Copy the value to the right place. */
de76b467 4165 if (outmode != VOIDmode && retval)
3c0fca12
RH
4166 {
4167 if (mem_value)
4168 {
4169 if (value == 0)
4170 value = mem_value;
4171 if (value != mem_value)
4172 emit_move_insn (value, mem_value);
4173 }
4174 else if (value != 0)
d57551c7 4175 emit_move_insn (value, valreg);
3c0fca12 4176 else
d57551c7 4177 value = valreg;
3c0fca12
RH
4178 }
4179
f73ad30e 4180 if (ACCUMULATE_OUTGOING_ARGS)
3c0fca12 4181 {
f73ad30e
JH
4182#ifdef REG_PARM_STACK_SPACE
4183 if (save_area)
4184 {
4185 enum machine_mode save_mode = GET_MODE (save_area);
3c0fca12 4186#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
4187 rtx stack_area
4188 = gen_rtx_MEM (save_mode,
4189 memory_address (save_mode,
4190 plus_constant (argblock,
4191 - high_to_save)));
3c0fca12 4192#else
f73ad30e
JH
4193 rtx stack_area
4194 = gen_rtx_MEM (save_mode,
4195 memory_address (save_mode,
4196 plus_constant (argblock, low_to_save)));
3c0fca12 4197#endif
8ac61af7
RK
4198
4199 set_mem_align (stack_area, PARM_BOUNDARY);
f73ad30e
JH
4200 if (save_mode != BLKmode)
4201 emit_move_insn (stack_area, save_area);
4202 else
44bb111a
RH
4203 emit_block_move (stack_area, save_area,
4204 GEN_INT (high_to_save - low_to_save + 1),
4205 BLOCK_OP_CALL_PARM);
f73ad30e 4206 }
3c0fca12 4207#endif
f725a3ec 4208
f73ad30e
JH
4209 /* If we saved any argument areas, restore them. */
4210 for (count = 0; count < nargs; count++)
4211 if (argvec[count].save_area)
4212 {
4213 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4214 rtx stack_area
4215 = gen_rtx_MEM (save_mode,
4216 memory_address
4217 (save_mode,
4218 plus_constant (argblock,
4219 argvec[count].offset.constant)));
4220
4221 emit_move_insn (stack_area, argvec[count].save_area);
4222 }
3c0fca12 4223
f73ad30e
JH
4224 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4225 stack_usage_map = initial_stack_usage_map;
4226 }
43bc5f13 4227
de76b467
JH
4228 return value;
4229
4230}
4231\f
4232/* Output a library call to function FUN (a SYMBOL_REF rtx)
4233 (emitting the queue unless NO_QUEUE is nonzero),
4234 for a value of mode OUTMODE,
4235 with NARGS different arguments, passed as alternating rtx values
4236 and machine_modes to convert them to.
4237 The rtx values should have been passed through protect_from_queue already.
4238
1258ee80
JJ
4239 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4240 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4241 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4242 LCT_PURE_MAKE_BLOCK for `purep' calls which should be enclosed in
4243 REG_LIBCALL/REG_RETVAL notes with extra (use (memory (scratch)),
4244 or other LCT_ value for other types of library calls. */
de76b467
JH
4245
4246void
ebb1b59a
BS
4247emit_library_call VPARAMS((rtx orgfun, enum libcall_type fn_type,
4248 enum machine_mode outmode, int nargs, ...))
de76b467 4249{
79e8ec0e
AJ
4250 VA_OPEN (p, nargs);
4251 VA_FIXEDARG (p, rtx, orgfun);
4252 VA_FIXEDARG (p, int, fn_type);
4253 VA_FIXEDARG (p, enum machine_mode, outmode);
4254 VA_FIXEDARG (p, int, nargs);
de76b467 4255
2a8f6b90 4256 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
de76b467 4257
79e8ec0e 4258 VA_CLOSE (p);
de76b467
JH
4259}
4260\f
4261/* Like emit_library_call except that an extra argument, VALUE,
4262 comes second and says where to store the result.
4263 (If VALUE is zero, this function chooses a convenient way
4264 to return the value.
4265
4266 This function returns an rtx for where the value is to be found.
4267 If VALUE is nonzero, VALUE is returned. */
4268
4269rtx
ebb1b59a
BS
4270emit_library_call_value VPARAMS((rtx orgfun, rtx value,
4271 enum libcall_type fn_type,
de76b467
JH
4272 enum machine_mode outmode, int nargs, ...))
4273{
6268b922
KG
4274 rtx result;
4275
79e8ec0e
AJ
4276 VA_OPEN (p, nargs);
4277 VA_FIXEDARG (p, rtx, orgfun);
4278 VA_FIXEDARG (p, rtx, value);
4279 VA_FIXEDARG (p, int, fn_type);
4280 VA_FIXEDARG (p, enum machine_mode, outmode);
4281 VA_FIXEDARG (p, int, nargs);
de76b467 4282
6268b922
KG
4283 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4284 nargs, p);
de76b467 4285
79e8ec0e 4286 VA_CLOSE (p);
de76b467 4287
6268b922 4288 return result;
322e3e34
RK
4289}
4290\f
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   FLAGS is a mask of ECF_* call flags.  ECF_MAY_BE_ALLOCA nonzero says
   this could be a call to `alloca' so we must be careful about how the
   stack is used; ECF_SIBCALL says this argument is being set up for a
   sibling (tail) call.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   REG_PARM_STACK_SPACE is the size of the area reserved for arguments
   passed in registers; slots at or below it are already saved elsewhere.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
     struct arg_data *arg;
     rtx argblock;
     int flags;
     int variable_size ATTRIBUTE_UNUSED;
     int reg_parm_stack_space;
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
	 save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
	{
#ifdef ARGS_GROW_DOWNWARD
	  /* stack_slot is negative, but we want to index stack_usage_map
	     with positive values.  */
	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	    upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
	  else
	    upper_bound = 0;

	  lower_bound = upper_bound - arg->size.constant;
#else
	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	    lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
	  else
	    lower_bound = 0;

	  upper_bound = lower_bound + arg->size.constant;
#endif

	  /* Scan for any slot in [lower_bound, upper_bound) that is
	     already in use and would therefore be clobbered.  */
	  for (i = lower_bound; i < upper_bound; i++)
	    if (stack_usage_map[i]
		/* Don't store things in the fixed argument area at this point;
		   it has already been saved.  */
		&& i > reg_parm_stack_space)
	      break;

	  if (i != upper_bound)
	    {
	      /* We need to make a save area.  See what mode we can make it.  */
	      enum machine_mode save_mode
		= mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
	      rtx stack_area
		= gen_rtx_MEM (save_mode,
			       memory_address (save_mode,
					       XEXP (arg->stack_slot, 0)));

	      if (save_mode == BLKmode)
		{
		  /* No integer mode covers the slot; save it into a
		     const-qualified temporary via a block move.  */
		  tree ot = TREE_TYPE (arg->tree_value);
		  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
						       | TYPE_QUAL_CONST));

		  arg->save_area = assign_temp (nt, 0, 1, 1);
		  preserve_temp_slots (arg->save_area);
		  emit_block_move (validize_mem (arg->save_area), stack_area,
				   expr_size (arg->tree_value),
				   BLOCK_OP_CALL_PARM);
		}
	      else
		{
		  /* The slot fits in a register-sized mode; a plain move
		     into a pseudo is enough to save it.  */
		  arg->save_area = gen_reg_rtx (save_mode);
		  emit_move_insn (arg->save_area, stack_area);
		}
	    }
	}
      /* Now that we have saved any slots that will be overwritten by this
	 store, mark all slots this store will use.  We must do this before
	 we actually expand the argument since the expansion itself may
	 trigger library calls which might need to use the same stack slot.  */
      if (argblock && ! variable_size && arg->stack)
	for (i = lower_bound; i < upper_bound; i++)
	  stack_usage_map[i] = 1;
    }

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
	reg = arg->tail_call_reg;
      else
	reg = arg->reg;
      partial = arg->partial;
    }

  if (reg != 0 && partial == 0)
    /* Being passed entirely in a register.  We shouldn't be called in
       this case.  */
    abort ();

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
	 being evaluated directly into the outgoing argument list and
	 expand_call must take special action to preserve the argument list
	 if it is called recursively.

	 For scalar function arguments stack_usage_map is sufficient to
	 determine which stack slots must be saved and restored.  Scalar
	 arguments in general have pass_on_stack == 0.

	 If this argument is initialized by a function which takes the
	 address of the argument (a C++ constructor or a C function
	 returning a BLKmode structure), then stack_usage_map is
	 insufficient and expand_call must push the stack around the
	 function call.  Such arguments have pass_on_stack == 1.

	 Note that it is always safe to set stack_arg_under_construction,
	 but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
	stack_arg_under_construction++;

      /* Expand straight into the stack slot only when the value is not
	 split between registers and stack and the modes agree.  */
      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, 0);

      /* If we are promoting object (or for any other reason) the mode
	 doesn't agree, convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
	stack_arg_under_construction--;
    }

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;

      /* Argument is a scalar, not entirely passed in registers.
	 (If part is passed in registers, arg->partial says how much
	 and emit_push_insn will take care of putting it there.)

	 Push it, and if its size is less than the
	 amount of space allocated to it,
	 also bump stack pointer by the additional space.
	 Note that in C the default argument promotions
	 will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
	 On many machines, pushing a byte will advance the stack
	 pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
	used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
		 / (PARM_BOUNDARY / BITS_PER_UNIT))
		* (PARM_BOUNDARY / BITS_PER_UNIT));

      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
		      PARM_BOUNDARY, partial, reg, used - size, argblock,
		      ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
      if (partial == 0)
	arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because
	     emit_push_insn for BLKmode is careful to avoid it.  */
	  excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
		    + partial * UNITS_PER_WORD);
	  size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype), 0);
	}

      /* Some types will require stricter alignment, which will be
	 provided for elsewhere in argument layout.  */
      parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));

      /* When an argument is padded down, the block is aligned to
	 PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  if (arg->size.var)
	    parm_align = BITS_PER_UNIT;
	  else if (excess)
	    {
	      /* (excess & -excess) isolates the lowest set bit, i.e. the
		 largest power of two dividing EXCESS.  */
	      unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, excess_align);
	    }
	}

      if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
	{
	  /* emit_push_insn might not work properly if arg->value and
	     argblock + arg->offset areas overlap.  For a sibcall the
	     source may live in the caller's own argument area, so check
	     whether the source and destination ranges intersect and, if
	     they do, force the sibcall to fail.  */
	  rtx x = arg->value;
	  int i = 0;

	  if (XEXP (x, 0) == current_function_internal_arg_pointer
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) ==
		     current_function_internal_arg_pointer
		  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
	    {
	      if (XEXP (x, 0) != current_function_internal_arg_pointer)
		i = INTVAL (XEXP (XEXP (x, 0), 1));

	      /* expand_call should ensure this */
	      if (arg->offset.var || GET_CODE (size_rtx) != CONST_INT)
		abort ();

	      if (arg->offset.constant > i)
		{
		  if (arg->offset.constant < i + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	      else if (arg->offset.constant < i)
		{
		  if (i < arg->offset.constant + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	    }
	}

      /* Special handling is required if part of the parameter lies in the
	 register parameter area.  The argument may be copied into the stack
	 slot using memcpy(), but the original contents of the register
	 parameter area will be restored after the memcpy() call.

	 To ensure that the part that lies in the register parameter area
	 is copied correctly, we emit a separate push for that part.  This
	 push should be small enough to avoid a call to memcpy().  */
#ifndef STACK_PARMS_IN_REG_PARM_AREA
      if (arg->reg && arg->pass_on_stack)
#else
      if (1)
#endif
	{
	  if (arg->offset.constant < reg_parm_stack_space && arg->offset.var)
	    error ("variable offset is passed partially in stack and in reg");
	  else if (arg->offset.constant < reg_parm_stack_space && arg->size.var)
	    error ("variable size is passed partially in stack and in reg");
	  else if (arg->offset.constant < reg_parm_stack_space
		   && ((arg->offset.constant + arg->size.constant)
		       > reg_parm_stack_space))
	    {
	      /* Push only the slice that overlaps the register parameter
		 area; the main push below handles the full argument.  */
	      rtx size_rtx1 = GEN_INT (reg_parm_stack_space - arg->offset.constant);
	      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx1,
			      parm_align, partial, reg, excess, argblock,
			      ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
			      ARGS_SIZE_RTX (arg->alignment_pad));
	    }
	}


      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
		      parm_align, partial, reg, excess, argblock,
		      ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.

	 ??? Unlike the case above, in which we want the actual
	 address of the data, so that we can load it directly into a
	 register, here we want the address of the stack slot, so that
	 it's properly aligned for word-by-word copying or something
	 like that.  It's not clear that this is always correct.  */
      if (partial == 0)
	arg->value = arg->stack_slot;
    }

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* ANSI doesn't require a sequence point here,
     but PCC has one, so this will avoid some problems.  */
  emit_queue ();

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}
This page took 1.908367 seconds and 5 git commands to generate.