/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "function.h"
#include "regs.h"
#include "insn-flags.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED   /* If it's last to first.  */
#endif

#endif

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
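
/* As an illustration (the figures below are only an example, not a
   requirement of any particular target): with PREFERRED_STACK_BOUNDARY
   of 64 and BITS_PER_UNIT of 8, STACK_BYTES works out to 8, i.e. the
   outgoing argument block is kept 8-byte aligned.  */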

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Non-zero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets;
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
#endif
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
  /* The amount that the stack pointer needs to be adjusted to
     force alignment for the next argument.  */
  struct args_size alignment_pad;
};

#ifdef ACCUMULATE_OUTGOING_ARGS
/* A vector of one char per byte of stack space.  A byte is non-zero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
#endif
static int calls_function PARAMS ((tree, int));
static int calls_function_1 PARAMS ((tree, int));
static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
                                 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
                                 rtx, int, rtx, int, int));
static void precompute_register_parameters PARAMS ((int,
                                                    struct arg_data *,
                                                    int *));
static void store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
                                   int));
static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
                                                            int));
static int finalize_must_preallocate PARAMS ((int, int,
                                              struct arg_data *,
                                              struct args_size *));
static void precompute_arguments PARAMS ((int, int, int,
                                          struct arg_data *,
                                          struct args_size *));
static int compute_argument_block_size PARAMS ((int,
                                                struct args_size *,
                                                int));
static void initialize_argument_information PARAMS ((int,
                                                     struct arg_data *,
                                                     struct args_size *,
                                                     int, tree, tree,
                                                     CUMULATIVE_ARGS *,
                                                     int, rtx *, int *,
                                                     int *, int *));
static void compute_argument_addresses PARAMS ((struct arg_data *,
                                                rtx, int));
static rtx rtx_for_function_call PARAMS ((tree, tree));
static void load_register_parameters PARAMS ((struct arg_data *,
                                              int, rtx *));
static int libfunc_nothrow PARAMS ((rtx));

#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
#endif

/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we only need to return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

static tree calls_function_save_exprs;

static int
calls_function (exp, which)
     tree exp;
     int which;
{
  int val;
  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}

static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  register int i;
  enum tree_code code = TREE_CODE (exp);
  int type = TREE_CODE_CLASS (code);
  int length = tree_code_length[(int) code];

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  /* Only expressions and references can contain calls.  */
  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
      && type != 'b')
    return 0;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
        return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                   == FUNCTION_DECL))
        {
          tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

          if ((DECL_BUILT_IN (fndecl)
               && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
               && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
              || (DECL_SAVED_INSNS (fndecl)
                  && DECL_SAVED_INSNS (fndecl)->calls_alloca))
            return 1;
        }

      /* Third operand is RTL.  */
      length = 2;
      break;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return 0;
      if (value_member (exp, calls_function_save_exprs))
        return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
                                             calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
              && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
        register tree local;

        for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
          if (DECL_INITIAL (local) != 0
              && calls_function_1 (DECL_INITIAL (local), which))
            return 1;
      }
      {
        register tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (exp);
             subblock;
             subblock = TREE_CHAIN (subblock))
          if (calls_function_1 (subblock, which))
            return 1;
      }
      return 0;

    case METHOD_CALL_EXPR:
      length = 3;
      break;

    case WITH_CLEANUP_EXPR:
      length = 1;
      break;

    case RTL_EXPR:
      return 0;

    default:
      break;
    }

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
        && calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
        if (fndecl != current_function_decl)
#endif
          funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
        use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
   FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.

   IS_CONST is true if this is a `const' call.  NOTHROW is true if the
   called function is known not to throw.  */

static void
emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
             struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
             call_fusage, is_const, nothrow)
     rtx funexp;
     tree fndecl ATTRIBUTE_UNUSED;
     tree funtype ATTRIBUTE_UNUSED;
     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
     HOST_WIDE_INT rounded_stack_size;
     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
     rtx next_arg_reg;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int is_const, nothrow;
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx = GEN_INT (struct_value_size);
#endif
  rtx call_insn;
#ifndef ACCUMULATE_OUTGOING_ARGS
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#ifndef ACCUMULATE_OUTGOING_ARGS
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
/* If the target has "call" or "call_value" insns, then prefer them
   if no arguments are actually popped.  If the target does not have
   "call" or "call_value" insns, then we must use the popping versions
   even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = gen_call_value_pop (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (gen_call_value (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (is_const)
    CONST_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (nothrow)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

#ifndef ACCUMULATE_OUTGOING_ARGS
  /* If returning from the subroutine does not automatically pop the args,
     we need an instruction to pop them sooner or later.
     Perhaps do it now; perhaps just record how much space to pop later.

     If returning from the subroutine does pop the args, indicate that the
     stack pointer will be changed.  */

  /* The space for the args is no longer waiting for the call; either it
     was popped by the call, or it'll be popped below.  */
  arg_space_so_far -= rounded_stack_size;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
    }

  if (rounded_stack_size != 0)
    {
      if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
        pending_stack_adjust += rounded_stack_size;
      else
        adjust_stack (rounded_stack_size_rtx);
    }
#endif
}

/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set IS_LONGJMP if the function is in the longjmp family.

   Set IS_MALLOC for any of the standard memory allocation functions which
   allocate from the heap.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

void
special_function_p (fndecl, returns_twice, is_longjmp, fork_or_exec,
                    is_malloc, may_be_alloca)
     tree fndecl;
     int *returns_twice;
     int *is_longjmp;
     int *fork_or_exec;
     int *is_malloc;
     int *may_be_alloca;
{
  *returns_twice = 0;
  *is_longjmp = 0;
  *fork_or_exec = 0;
  *may_be_alloca = 0;

  /* The function decl may have the `malloc' attribute.  */
  *is_malloc = fndecl && DECL_IS_MALLOC (fndecl);

  if (! *is_malloc
      && fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.  */
      && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
    {
      char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      *may_be_alloca
        = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
             && name[0] == 'a'
             && ! strcmp (name, "alloca"))
            || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
                && name[0] == '_'
                && ! strcmp (name, "__builtin_alloca"))));

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
        {
          if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

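      /* Illustrative note (example names only): after stripping, "_setjmp",
         "__setjmp" and "__xsetjmp" all compare as "setjmp" below.  */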
      if (tname[0] == 's')
        {
          *returns_twice
            = ((tname[1] == 'e'
                && (! strcmp (tname, "setjmp")
                    || ! strcmp (tname, "setjmp_syscall")))
               || (tname[1] == 'i'
                   && ! strcmp (tname, "sigsetjmp"))
               || (tname[1] == 'a'
                   && ! strcmp (tname, "savectx")));
          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            *is_longjmp = 1;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork")))
        *returns_twice = 1;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        *is_longjmp = 1;

      else if ((tname[0] == 'f' && tname[1] == 'o'
                && ! strcmp (tname, "fork"))
               /* Linux specific: __clone.  Check NAME to insist on the
                  leading underscores, to avoid polluting the ISO / POSIX
                  namespace.  */
               || (name[0] == '_' && name[1] == '_'
                   && ! strcmp (tname, "clone"))
               || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
                   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
                   && (tname[5] == '\0'
                       || ((tname[5] == 'p' || tname[5] == 'e')
                           && tname[6] == '\0'))))
        *fork_or_exec = 1;

      /* Do not add any more malloc-like functions to this list;
         instead mark them as malloc functions using the malloc attribute.
         Note that realloc is not suitable for attribute malloc since
         it may return the same address across multiple calls.
         C++ operator new is not suitable because it is not required
         to return a unique pointer; indeed, the standard placement new
         just returns its argument.  */
      else if (! strcmp (tname, "malloc")
               || ! strcmp (tname, "calloc")
               || ! strcmp (tname, "strdup"))
        *is_malloc = 1;
    }
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (num_actuals, args, reg_parm_seen)
     int num_actuals;
     struct arg_data *args;
     int *reg_parm_seen;
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
                                         VOIDmode, 0);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();

            /* ANSI doesn't require a sequence point here,
               but PCC has one, so this will avoid some problems.  */
            emit_queue ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  */

        if ((! (GET_CODE (args[i].value) == REG
                || (GET_CODE (args[i].value) == SUBREG
                    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
            && args[i].mode != BLKmode
            && rtx_cost (args[i].value, SET) > 2
            && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
                || preserve_subexpressions_p ()))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */
static rtx
save_fixed_argument_area (reg_parm_stack_space, argblock,
                          low_to_save, high_to_save)
     int reg_parm_stack_space;
     rtx argblock;
     int *low_to_save;
     int *high_to_save;
{
  int i;
  rtx save_area = NULL_RTX;

  /* Compute the boundary of the area that needs to be saved, if any.  */
#ifdef ARGS_GROW_DOWNWARD
  for (i = 0; i < reg_parm_stack_space + 1; i++)
#else
  for (i = 0; i < reg_parm_stack_space; i++)
#endif
    {
      if (i >= highest_outgoing_arg_in_use
          || stack_usage_map[i] == 0)
        continue;

      if (*low_to_save == -1)
        *low_to_save = i;

      *high_to_save = i;
    }

  if (*low_to_save >= 0)
    {
      int num_to_save = *high_to_save - *low_to_save + 1;
      enum machine_mode save_mode
        = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
      rtx stack_area;

      /* If we don't have the required alignment, must do this in BLKmode.  */
      if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
                                BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
        save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
      stack_area = gen_rtx_MEM (save_mode,
                                memory_address (save_mode,
                                                plus_constant (argblock,
                                                               - *high_to_save)));
#else
      stack_area = gen_rtx_MEM (save_mode,
                                memory_address (save_mode,
                                                plus_constant (argblock,
                                                               *low_to_save)));
#endif
      if (save_mode == BLKmode)
        {
          save_area = assign_stack_temp (BLKmode, num_to_save, 0);
          /* Cannot use emit_block_move here because it might be implemented
             as a library call, which would lead back into this code and
             cause infinite recursion.  */
          move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
                          PARM_BOUNDARY / BITS_PER_UNIT);
        }
      else
        {
          save_area = gen_reg_rtx (save_mode);
          emit_move_insn (save_area, stack_area);
        }
    }
  return save_area;
}

static void
restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
     rtx save_area;
     rtx argblock;
     int high_to_save;
     int low_to_save;
{
  enum machine_mode save_mode = GET_MODE (save_area);
#ifdef ARGS_GROW_DOWNWARD
  rtx stack_area
    = gen_rtx_MEM (save_mode,
                   memory_address (save_mode,
                                   plus_constant (argblock,
                                                  - high_to_save)));
#else
  rtx stack_area
    = gen_rtx_MEM (save_mode,
                   memory_address (save_mode,
                                   plus_constant (argblock,
                                                  low_to_save)));
#endif

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    /* Cannot use emit_block_move here because it might be implemented
       as a library call, which would lead back into this code and
       cause infinite recursion.  */
    move_by_pieces (stack_area, validize_mem (save_area),
                    high_to_save - low_to_save + 1,
                    PARM_BOUNDARY / BITS_PER_UNIT);
}
#endif

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (args, num_actuals)
     struct arg_data *args;
     int num_actuals;
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int big_endian_correction = 0;

        args[i].n_aligned_regs
          = args[i].partial ? args[i].partial
            : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

        args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
                                                * args[i].n_aligned_regs);

        /* Structures smaller than a word are aligned to the least
           significant byte (to the right).  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
          big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));

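        /* Illustrative example (hypothetical target values): for a 3-byte
           structure on a 32-bit big-endian machine, the correction is
           32 - 24 = 8 bits, so the value ends up in the low-order
           (rightmost) bytes of the word, as described above.  */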
        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
            int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));

            args[i].aligned_regs[j] = reg;

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, big_endian_correction, word_mode,
                             extract_bit_field (word, bitsize, 0, 1,
                                                NULL_RTX, word_mode,
                                                word_mode,
                                                bitalign / BITS_PER_UNIT,
                                                BITS_PER_WORD),
                             bitalign / BITS_PER_UNIT, BITS_PER_WORD);
          }
      }
}

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
   flags which may be modified by this routine.  */

static void
initialize_argument_information (num_actuals, args, args_size, n_named_args,
                                 actparms, fndecl, args_so_far,
                                 reg_parm_stack_space, old_stack_level,
                                 old_pending_adj, must_preallocate, is_const)
     int num_actuals ATTRIBUTE_UNUSED;
     struct arg_data *args;
     struct args_size *args_size;
     int n_named_args ATTRIBUTE_UNUSED;
     tree actparms;
     tree fndecl;
     CUMULATIVE_ARGS *args_so_far;
     int reg_parm_stack_space;
     rtx *old_stack_level;
     int *old_pending_adj;
     int *must_preallocate;
     int *is_const;
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  struct args_size alignment_pad;
  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

#ifdef PUSH_ARGS_REVERSED
  i = num_actuals - 1, inc = -1;
  /* In this case, must reverse order of args
     so that we compute and push the last arg first.  */
#else
  i = 0, inc = 1;
#endif

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || TYPE_SIZE (type) == 0)
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many words are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
           && contains_placeholder_p (TYPE_SIZE (type)))
          || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
          || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
                                             type, argpos < n_named_args)
#endif
          )
        {
          /* If we're compiling a thunk, pass through invisible
             references instead of making a copy.  */
          if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
              || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
                                              type, argpos < n_named_args)
                  /* If it's in a register, we must make a copy of it too.  */
                  /* ??? Is this a sufficient test?  Is there a better one?  */
                  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
                       && REG_P (DECL_RTL (args[i].tree_value)))
                  && ! TREE_ADDRESSABLE (type))
#endif
              )
            {
              /* C++ uses a TARGET_EXPR to indicate that we want to make a
                 new object from the argument.  If we are passing by
                 invisible reference, the callee will do that for us, so we
                 can strip off the TARGET_EXPR.  This is not always safe,
                 but it is safe in the only case where this is a useful
                 optimization; namely, when the argument is a plain object.
                 In that case, the frontend is just asking the backend to
                 make a bitwise copy of the argument.  */

              if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
                                                  (args[i].tree_value, 1)))
                      == 'd')
                  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
                args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           args[i].tree_value);
              type = build_pointer_type (type);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (TYPE_SIZE (type) == 0
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
                  || (flag_stack_check && ! STACK_CHECK_BUILTIN
                      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
                                                STACK_CHECK_MAX_VAR_SIZE))))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (TREE_VALUE (p));

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space (size_rtx,
                                                                    NULL_RTX,
                                                                    TYPE_ALIGN (type)));
                }
              else
                {
                  int size = int_size_in_bytes (type);
                  copy = assign_stack_temp (TYPE_MODE (type), size, 0);
                }

              MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));

              store_expr (args[i].tree_value, copy, 0);
              *is_const = 0;

              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           make_tree (type, copy));
              type = build_pointer_type (type);
            }
        }

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;
      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
        args[i].partial
          = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
                                        argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        *is_const = 0;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             fndecl, args_size, &args[i].offset,
                             &args[i].size, &alignment_pad);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;
#endif

      args[i].alignment_pad = alignment_pad;

      /* If a part of the arg was put into registers,
         don't include that part in the amount pushed.  */
      if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
        args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
                                  / (PARM_BOUNDARY / BITS_PER_UNIT)
                                  * (PARM_BOUNDARY / BITS_PER_UNIT));

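      /* Example with hypothetical target values: partial == 2,
         UNITS_PER_WORD == 4 and PARM_BOUNDARY == 32 give 8 bytes passed in
         registers, which is rounded down to a multiple of the 4-byte
         parameter boundary and subtracted, so only the stack-resident part
         of the argument is counted in the size.  */
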
      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].size.constant;
      if (args[i].size.var)
        {
          ADD_PARM_SIZE (*args_size, args[i].size.var);
        }

      /* Since the slot offset points to the bottom of the slot,
         we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;

      args[i].slot_offset.constant = -args_size->constant;
      if (args_size->var)
        SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (reg_parm_stack_space, args_size,
                             preferred_stack_boundary)
     int reg_parm_stack_space;
     struct args_size *args_size;
     int preferred_stack_boundary ATTRIBUTE_UNUSED;
{
  int unadjusted_args_size = args_size->constant;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

#ifdef PREFERRED_STACK_BOUNDARY
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        args_size->var = round_up (args_size->var, preferred_stack_boundary);
#endif

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          args_size->var
            = size_binop (MINUS_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));
#endif
        }
    }
  else
    {
#ifdef PREFERRED_STACK_BOUNDARY
      preferred_stack_boundary /= BITS_PER_UNIT;
      args_size->constant = (((args_size->constant
                               + arg_space_so_far
                               + pending_stack_adjust
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - arg_space_so_far
                             - pending_stack_adjust);
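      /* Worked example with hypothetical values: args_size->constant == 20,
         arg_space_so_far == 0, pending_stack_adjust == 4 and a 16-byte
         preferred boundary give (20 + 0 + 4 + 15) / 16 * 16 = 32, so the
         constant becomes 32 - 0 - 4 = 28; the 28 bytes pushed here plus the
         4 pending bytes keep the stack 16-byte aligned.  */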
#endif

      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
        args_size->constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}

/* Precompute parameters as needed for a function call.

   IS_CONST indicates the target function is a pure function.

   MUST_PREALLOCATE indicates that we must preallocate stack space for
   any stack arguments.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this routine
   fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.

   ARGS_SIZE contains information about the size of the arg list.  */

static void
precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
     int is_const;
     int must_preallocate;
     int num_actuals;
     struct arg_data *args;
     struct args_size *args_size;
{
  int i;

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  */

  for (i = 0; i < num_actuals; i++)
    if (is_const
        || ((args_size->var != 0 || args_size->constant != 0)
            && calls_function (args[i].tree_value, 1))
        || (must_preallocate
            && (args_size->var != 0 || args_size->constant != 0)
            && calls_function (args[i].tree_value, 0)))
      {
        /* If this is an addressable type, we cannot pre-evaluate it.  */
        if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
          abort ();

        push_temp_slots ();

        args[i].value
          = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

        preserve_temp_slots (args[i].value);
        pop_temp_slots ();

        /* ANSI doesn't require a sequence point here,
           but PCC has one, so this will avoid some problems.  */
        emit_queue ();

        args[i].initial_value = args[i].value
          = protect_from_queue (args[i].value, 0);

        if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
          {
            args[i].value
              = convert_modes (args[i].mode,
                               TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                               args[i].value, args[i].unsignedp);
#ifdef PROMOTE_FOR_CALL_ONLY
            /* CSE will replace this only if it contains args[i].value
               pseudo, so convert it down to the declared mode using
               a SUBREG.  */
            if (GET_CODE (args[i].value) == REG
                && GET_MODE_CLASS (args[i].mode) == MODE_INT)
              {
                args[i].initial_value
                  = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                                    args[i].value, 0);
                SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
                SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
                  = args[i].unsignedp;
              }
#endif
          }
      }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
     int must_preallocate;
     int num_actuals;
     struct arg_data *args;
     struct args_size *args_size;
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (args, argblock, num_actuals)
     struct arg_data *args;
     rtx argblock;
     int num_actuals;
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
          rtx addr;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack && args[i].reg != 0)
            continue;

          if (GET_CODE (offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack = gen_rtx_MEM (args[i].mode, addr);
          MEM_SET_IN_STRUCT_P
            (args[i].stack,
             AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
        }
    }
}

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   EXP is the CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (fndecl, exp)
     tree fndecl;
     tree exp;
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (! TREE_USED (fndecl))
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      rtx funaddr;
      push_temp_slots ();
      funaddr = funexp =
        expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */

      /* Check that the function is executable.  */
      if (current_function_check_memory_usage)
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          /* It might be OK to convert funexp in place, but there's
             a lot going on between here and when it happens naturally
             that this seems safer.  */
          funaddr = convert_memory_address (Pmode, funexp);
#endif
          emit_library_call (chkr_check_exec_libfunc, 1,
                             VOIDmode, 1,
                             funaddr, Pmode);
        }
      emit_queue ();
    }
  return funexp;
}

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.  */

static void
load_register_parameters (args, num_actuals, call_fusage)
     struct arg_data *args;
     int num_actuals;
     rtx *call_fusage;
{
  int i, j;

#ifdef LOAD_ARGS_REVERSED
  for (i = num_actuals - 1; i >= 0; i--)
#else
  for (i = 0; i < num_actuals; i++)
#endif
    {
      rtx reg = args[i].reg;
      int partial = args[i].partial;
      int nregs;

      if (reg)
        {
          /* Set to non-negative if must move a word at a time, even if just
             one word (e.g., partial == 1 && mode == DFmode).  Set to -1 if
             we just use a normal move insn.  This value can be zero if the
             argument is a zero size structure with no fields.  */
          nregs = (partial ? partial
                   : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
                      ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
                          + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                      : -1));

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            {
              emit_group_load (reg, args[i].value,
                               int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
                               (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
                                / BITS_PER_UNIT));
            }

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            emit_move_insn (reg, args[i].value);

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            move_block_to_reg (REGNO (reg),
                               validize_mem (args[i].value), nregs,
                               args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);
          else
            use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
        }
    }
}

/* Generate all the code for a function call
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (exp, target, ignore)
     tree exp;
     rtx target;
     int ignore;
{
  /* List of actual parameters.  */
  tree actparms = TREE_OPERAND (exp, 1);
  /* RTX for the function to be called.  */
  rtx funexp;
  /* Data type of the function.  */
  tree funtype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  char *name = 0;
  rtx before_call;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  int unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

#ifdef PUSH_ROUNDING
  int must_preallocate = 0;
#else
  int must_preallocate = 1;
#endif

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Nonzero if it is plausible that this is a call to alloca.  */
  int may_be_alloca;
  /* Nonzero if this is a call to malloc or a related function.  */
  int is_malloc;
  /* Nonzero if this is a call to setjmp or a related function.  */
  int returns_twice;
  /* Nonzero if this is a call to `longjmp'.  */
  int is_longjmp;
  /* Nonzero if this is a syscall that makes a new process in the image of
     the current one.  */
  int fork_or_exec;
  /* Nonzero if this is a call to an inline function.  */
  int is_integrable = 0;
  /* Nonzero if this is a call to a `const' function.
     Note that only explicitly named functions are handled as `const' here.  */
  int is_const = 0;
  /* Nonzero if this is a call to a `volatile' function.  */
  int is_volatile = 0;
1690 /* Nonzero if this is a call to a `volatile' function. */
1691 int is_volatile = 0;
12a22e76
JM
1692 /* Nonzero if this is a call to a function that won't throw an exception. */
1693 int nothrow = TREE_NOTHROW (exp);
51bbfa0c
RS
1694#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1695 /* Define the boundary of the register parm stack space that needs to be
1696 saved, if any. */
1697 int low_to_save = -1, high_to_save;
1698 rtx save_area = 0; /* Place that it is saved */
1699#endif
1700
1701#ifdef ACCUMULATE_OUTGOING_ARGS
1702 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1703 char *initial_stack_usage_map = stack_usage_map;
a544cfd2 1704 int old_stack_arg_under_construction = 0;
51bbfa0c
RS
1705#endif
1706
1707 rtx old_stack_level = 0;
79be3418 1708 int old_pending_adj = 0;
51bbfa0c 1709 int old_inhibit_defer_pop = inhibit_defer_pop;
774e6b37 1710 int old_arg_space_so_far = arg_space_so_far;
77cac2f2 1711 rtx call_fusage = 0;
51bbfa0c 1712 register tree p;
21a3b983 1713 register int i;
c2f8b491
JH
1714#ifdef PREFERRED_STACK_BOUNDARY
1715 int preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1716#else
1717 /* In this case the preferred_stack_boundary variable is meaningless.
1718 It is used only in order to keep ifdef noise down when calling
1719 compute_argument_block_size. */
1720 int preferred_stack_boundary = 0;
1721#endif
51bbfa0c 1722
7815214e
RK
1723 /* The value of the function call can be put in a hard register. But
1724 if -fcheck-memory-usage, code which invokes functions (and thus
1725 damages some hard registers) can be inserted before using the value.
1726 So, target is always a pseudo-register in that case. */
7d384cc0 1727 if (current_function_check_memory_usage)
7815214e
RK
1728 target = 0;
1729
51bbfa0c
RS
1730 /* See if we can find a DECL-node for the actual function.
1731 As a result, decide whether this is a call to an integrable function. */
1732
1733 p = TREE_OPERAND (exp, 0);
1734 if (TREE_CODE (p) == ADDR_EXPR)
1735 {
1736 fndecl = TREE_OPERAND (p, 0);
1737 if (TREE_CODE (fndecl) != FUNCTION_DECL)
fdff8c6d 1738 fndecl = 0;
51bbfa0c
RS
1739 else
1740 {
1741 if (!flag_no_inline
1742 && fndecl != current_function_decl
aa10adff 1743 && DECL_INLINE (fndecl)
1cf4f698 1744 && DECL_SAVED_INSNS (fndecl)
49ad7cfa 1745 && DECL_SAVED_INSNS (fndecl)->inlinable)
51bbfa0c
RS
1746 is_integrable = 1;
1747 else if (! TREE_ADDRESSABLE (fndecl))
1748 {
13d39dbc 1749 /* In case this function later becomes inlinable,
51bbfa0c
RS
1750 record that there was already a non-inline call to it.
1751
1752 Use abstraction instead of setting TREE_ADDRESSABLE
1753 directly. */
da8c1713
RK
1754 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1755 && optimize > 0)
1907795e
JM
1756 {
1757 warning_with_decl (fndecl, "can't inline call to `%s'");
1758 warning ("called from here");
1759 }
51bbfa0c
RS
1760 mark_addressable (fndecl);
1761 }
1762
d45cf215
RS
1763 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1764 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
51bbfa0c 1765 is_const = 1;
5e24110e
RS
1766
1767 if (TREE_THIS_VOLATILE (fndecl))
1768 is_volatile = 1;
12a22e76
JM
1769
1770 if (TREE_NOTHROW (fndecl))
1771 nothrow = 1;
51bbfa0c
RS
1772 }
1773 }
1774
fdff8c6d
RK
1775 /* If we don't have specific function to call, see if we have a
1776 constant or `noreturn' function from the type. */
1777 if (fndecl == 0)
1778 {
1779 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1780 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1781 }
1782
6f90e075
JW
1783#ifdef REG_PARM_STACK_SPACE
1784#ifdef MAYBE_REG_PARM_STACK_SPACE
1785 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1786#else
1787 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1788#endif
1789#endif
1790
e5e809f4
JL
1791#if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1792 if (reg_parm_stack_space > 0)
1793 must_preallocate = 1;
1794#endif
1795
51bbfa0c
RS
1796 /* Warn if this value is an aggregate type,
1797 regardless of which calling convention we are using for it. */
05e3bdb9 1798 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
51bbfa0c
RS
1799 warning ("function call has aggregate value");
1800
1801 /* Set up a place to return a structure. */
1802
1803 /* Cater to broken compilers. */
1804 if (aggregate_value_p (exp))
1805 {
1806 /* This call returns a big structure. */
1807 is_const = 0;
1808
1809#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a
RS
1810 {
1811 pcc_struct_value = 1;
0dd532dc
JW
1812 /* Easier than making that case work right. */
1813 if (is_integrable)
1814 {
1815 /* In case this is a static function, note that it has been
1816 used. */
1817 if (! TREE_ADDRESSABLE (fndecl))
1818 mark_addressable (fndecl);
1819 is_integrable = 0;
1820 }
9e7b1d0a
RS
1821 }
1822#else /* not PCC_STATIC_STRUCT_RETURN */
1823 {
1824 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
51bbfa0c 1825
9e7b1d0a
RS
1826 if (target && GET_CODE (target) == MEM)
1827 structure_value_addr = XEXP (target, 0);
1828 else
1829 {
e9a25f70
JL
1830 /* Assign a temporary to hold the value. */
1831 tree d;
51bbfa0c 1832
9e7b1d0a
RS
1833 /* For variable-sized objects, we must be called with a target
1834 specified. If we were to allocate space on the stack here,
1835 we would have no way of knowing when to free it. */
51bbfa0c 1836
002bdd6c
RK
1837 if (struct_value_size < 0)
1838 abort ();
1839
e9a25f70
JL
1840 /* This DECL is just something to feed to mark_addressable;
1841 it doesn't get pushed. */
1842 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1843 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1844 mark_addressable (d);
14a774a9 1845 mark_temp_addr_taken (DECL_RTL (d));
e9a25f70 1846 structure_value_addr = XEXP (DECL_RTL (d), 0);
e5e809f4 1847 TREE_USED (d) = 1;
9e7b1d0a
RS
1848 target = 0;
1849 }
1850 }
1851#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
1852 }
1853
1854 /* If called function is inline, try to integrate it. */
1855
1856 if (is_integrable)
1857 {
1858 rtx temp;
c2939b57 1859
69d4ca36 1860#ifdef ACCUMULATE_OUTGOING_ARGS
c2939b57 1861 before_call = get_last_insn ();
69d4ca36 1862#endif
51bbfa0c
RS
1863
1864 temp = expand_inline_function (fndecl, actparms, target,
1865 ignore, TREE_TYPE (exp),
1866 structure_value_addr);
1867
1868 /* If inlining succeeded, return. */
2e0dd623 1869 if (temp != (rtx) (HOST_WIDE_INT) -1)
51bbfa0c 1870 {
d64f5a78 1871#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
1872 /* If the outgoing argument list must be preserved, push
1873 the stack before executing the inlined function if it
1874 makes any calls. */
1875
1876 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1877 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1878 break;
1879
1880 if (stack_arg_under_construction || i >= 0)
1881 {
a1917650
RK
1882 rtx first_insn
1883 = before_call ? NEXT_INSN (before_call) : get_insns ();
6a651371 1884 rtx insn = NULL_RTX, seq;
2f4aa534 1885
d64f5a78 1886 /* Look for a call in the inline function code.
49ad7cfa 1887 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
d64f5a78
RS
1888 nonzero then there is a call and it is not necessary
1889 to scan the insns. */
1890
49ad7cfa 1891 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
a1917650 1892 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
d64f5a78
RS
1893 if (GET_CODE (insn) == CALL_INSN)
1894 break;
2f4aa534
RS
1895
1896 if (insn)
1897 {
d64f5a78
RS
1898 /* Reserve enough stack space so that the largest
1899 argument list of any function call in the inline
1900 function does not overlap the argument list being
1901 evaluated. This is usually an overestimate because
1902 allocate_dynamic_stack_space reserves space for an
1903 outgoing argument list in addition to the requested
1904 space, but there is no way to ask for stack space such
1905 that an argument list of a certain length can be
e5e809f4 1906 safely constructed.
d64f5a78 1907
e5e809f4
JL
1908 Add the stack space reserved for register arguments, if
1909 any, in the inline function. What is really needed is the
d64f5a78
RS
1910 largest value of reg_parm_stack_space in the inline
1911 function, but that is not available. Using the current
1912 value of reg_parm_stack_space is wrong, but gives
1913 correct results on all supported machines. */
e5e809f4 1914
49ad7cfa 1915 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
e5e809f4
JL
1916 + reg_parm_stack_space);
1917
2f4aa534 1918 start_sequence ();
ccf5d244 1919 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
e5d70561
RK
1920 allocate_dynamic_stack_space (GEN_INT (adjust),
1921 NULL_RTX, BITS_PER_UNIT);
2f4aa534
RS
1922 seq = get_insns ();
1923 end_sequence ();
a1917650 1924 emit_insns_before (seq, first_insn);
e5d70561 1925 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2f4aa534
RS
1926 }
1927 }
d64f5a78 1928#endif
51bbfa0c
RS
1929
1930 /* If the result is equivalent to TARGET, return TARGET to simplify
1931 checks in store_expr. They can be equivalent but not equal in the
1932 case of a function that returns BLKmode. */
1933 if (temp != target && rtx_equal_p (temp, target))
1934 return target;
1935 return temp;
1936 }
1937
1938 /* If inlining failed, mark FNDECL as needing to be compiled
0481a55e
RK
1939 separately after all. If function was declared inline,
1940 give a warning. */
1941 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
da8c1713 1942 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1907795e
JM
1943 {
1944 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1945 warning ("called from here");
1946 }
51bbfa0c
RS
1947 mark_addressable (fndecl);
1948 }
1949
51bbfa0c
RS
1950 function_call_count++;
1951
1952 if (fndecl && DECL_NAME (fndecl))
1953 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1954
c2f8b491
JH
1955 /* Ensure current function's preferred stack boundary is at least
1956 what we need. We don't have to increase alignment for recursive
1957 functions. */
1958 if (cfun->preferred_stack_boundary < preferred_stack_boundary
1959 && fndecl != current_function_decl)
1960 cfun->preferred_stack_boundary = preferred_stack_boundary;
1961
51bbfa0c 1962 /* See if this is a call to a function that can return more than once
20efdf74 1963 or a call to longjmp or malloc. */
fa76d9e0 1964 special_function_p (fndecl, &returns_twice, &is_longjmp, &fork_or_exec,
20efdf74 1965 &is_malloc, &may_be_alloca);
51bbfa0c 1966
51bbfa0c
RS
1967 if (may_be_alloca)
1968 current_function_calls_alloca = 1;
1969
39842893
JL
1970 /* Operand 0 is a pointer-to-function; get the type of the function. */
1971 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1972 if (! POINTER_TYPE_P (funtype))
1973 abort ();
1974 funtype = TREE_TYPE (funtype);
1975
1976 /* When calling a const function, we must pop the stack args right away,
1977 so that the pop is deleted or moved with the call. */
1978 if (is_const)
1979 NO_DEFER_POP;
1980
51bbfa0c
RS
1981 /* Don't let pending stack adjusts add up to too much.
1982 Also, do all pending adjustments now
1983 if there is any chance this might be a call to alloca. */
1984
1985 if (pending_stack_adjust >= 32
1986 || (pending_stack_adjust > 0 && may_be_alloca))
1987 do_pending_stack_adjust ();
1988
fa76d9e0
JR
1989 if (profile_arc_flag && fork_or_exec)
1990 {
1991 /* A fork duplicates the profile information, and an exec discards
1992 it. We can't rely on fork/exec to be paired. So write out the
1993 profile information we have gathered so far, and clear it. */
1994 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
1995 VOIDmode, 0);
1996
1997 /* ??? When __clone is called with CLONE_VM set, profiling is
1998 subject to race conditions, just as with multithreaded programs. */
1999 }
2000
cc79451b
RK
2001 /* Push the temporary stack slot level so that we can free any temporaries
2002 we make. */
51bbfa0c
RS
2003 push_temp_slots ();
2004
eecb6f50
JL
2005 /* Start updating where the next arg would go.
2006
2007 On some machines (such as the PA) indirect calls have a different
2008 calling convention than normal calls. The last argument in
2009 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2010 or not. */
2011 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
51bbfa0c
RS
2012
2013 /* If struct_value_rtx is 0, it means pass the address
2014 as if it were an extra parameter. */
2015 if (structure_value_addr && struct_value_rtx == 0)
2016 {
5582b006
RK
2017 /* If structure_value_addr is a REG other than
2018 virtual_outgoing_args_rtx, we can always use it. If it
2019 is not a REG, we must always copy it into a register.
2020 If it is virtual_outgoing_args_rtx, we must copy it to another
2021 register in some cases. */
2022 rtx temp = (GET_CODE (structure_value_addr) != REG
d64f5a78 2023#ifdef ACCUMULATE_OUTGOING_ARGS
5582b006
RK
2024 || (stack_arg_under_construction
2025 && structure_value_addr == virtual_outgoing_args_rtx)
d64f5a78 2026#endif
5582b006
RK
2027 ? copy_addr_to_reg (structure_value_addr)
2028 : structure_value_addr);
d64f5a78 2029
51bbfa0c
RS
2030 actparms
2031 = tree_cons (error_mark_node,
2032 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2f4aa534 2033 temp),
51bbfa0c
RS
2034 actparms);
2035 structure_value_addr_parm = 1;
2036 }
2037
2038 /* Count the arguments and set NUM_ACTUALS. */
2039 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
2040 num_actuals = i;
2041
2042 /* Compute number of named args.
2043 Normally, don't include the last named arg if anonymous args follow.
e5e809f4 2044 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
469225d8
JW
2045 (If no anonymous args follow, the result of list_length is actually
2046 one too large. This is harmless.)
51bbfa0c 2047
9ab70a9b
R
2048 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2049 zero, this machine will be able to place unnamed args that were passed in
469225d8
JW
2050 registers into the stack. So treat all args as named. This allows the
2051 insns emitting for a specific argument list to be independent of the
2052 function declaration.
51bbfa0c 2053
9ab70a9b 2054 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
51bbfa0c
RS
2055 way to pass unnamed args in registers, so we must force them into
2056 memory. */
e5e809f4
JL
2057
2058 if ((STRICT_ARGUMENT_NAMING
9ab70a9b 2059 || ! PRETEND_OUTGOING_VARARGS_NAMED)
e5e809f4 2060 && TYPE_ARG_TYPES (funtype) != 0)
51bbfa0c 2061 n_named_args
0ee902cb 2062 = (list_length (TYPE_ARG_TYPES (funtype))
0ee902cb 2063 /* Don't include the last named arg. */
d0f9021a 2064 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
0ee902cb
RM
2065 /* Count the struct value address, if it is passed as a parm. */
2066 + structure_value_addr_parm);
51bbfa0c 2067 else
51bbfa0c
RS
2068 /* If we know nothing, treat all args as named. */
2069 n_named_args = num_actuals;
2070
2071 /* Make a vector to hold all the information about each arg. */
2072 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
4c9a05bc 2073 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
51bbfa0c 2074
d7cdf113
JL
2075 /* Build up entries in the ARGS array, compute the size of the arguments
2076 into ARGS_SIZE, etc. */
2077 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
959f3a06 2078 actparms, fndecl, &args_so_far,
d7cdf113
JL
2079 reg_parm_stack_space, &old_stack_level,
2080 &old_pending_adj, &must_preallocate,
2081 &is_const);
51bbfa0c 2082
6f90e075
JW
2083#ifdef FINAL_REG_PARM_STACK_SPACE
2084 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2085 args_size.var);
2086#endif
2087
51bbfa0c
RS
2088 if (args_size.var)
2089 {
2090 /* If this function requires a variable-sized argument list, don't try to
2091 make a cse'able block for this call. We may be able to do this
2092 eventually, but it is too complicated to keep track of what insns go
2093 in the cse'able block and which don't. */
2094
2095 is_const = 0;
2096 must_preallocate = 1;
51bbfa0c 2097 }
e5e809f4 2098
599f37b6
JL
2099 /* Compute the actual size of the argument block required. The variable
2100 and constant sizes must be combined, the size may have to be rounded,
2101 and there may be a minimum required size. */
2102 unadjusted_args_size
c2f8b491
JH
2103 = compute_argument_block_size (reg_parm_stack_space, &args_size,
2104 preferred_stack_boundary);
51bbfa0c 2105
0f9b3ea6
JL
2106 /* Now make final decision about preallocating stack space. */
2107 must_preallocate = finalize_must_preallocate (must_preallocate,
2108 num_actuals, args, &args_size);
51bbfa0c
RS
2109
2110 /* If the structure value address will reference the stack pointer, we must
2111 stabilize it. We don't need to do this if we know that we are not going
2112 to adjust the stack pointer in processing this call. */
2113
2114 if (structure_value_addr
2115 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2116 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2117 && (args_size.var
2118#ifndef ACCUMULATE_OUTGOING_ARGS
2119 || args_size.constant
2120#endif
2121 ))
2122 structure_value_addr = copy_to_reg (structure_value_addr);
2123
cc0b1adc
JL
2124 /* Precompute any arguments as needed. */
2125 precompute_arguments (is_const, must_preallocate, num_actuals,
2126 args, &args_size);
51bbfa0c
RS
2127
2128 /* Now we are about to start emitting insns that can be deleted
2129 if a libcall is deleted. */
9ae8ffe7 2130 if (is_const || is_malloc)
51bbfa0c
RS
2131 start_sequence ();
2132
2133 /* If we have no actual push instructions, or shouldn't use them,
2134 make space for all args right now. */
2135
2136 if (args_size.var != 0)
2137 {
2138 if (old_stack_level == 0)
2139 {
e5d70561 2140 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
51bbfa0c
RS
2141 old_pending_adj = pending_stack_adjust;
2142 pending_stack_adjust = 0;
d64f5a78 2143#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2144 /* stack_arg_under_construction says whether a stack arg is
2145 being constructed at the old stack level. Pushing the stack
2146 gets a clean outgoing argument block. */
2147 old_stack_arg_under_construction = stack_arg_under_construction;
2148 stack_arg_under_construction = 0;
d64f5a78 2149#endif
51bbfa0c
RS
2150 }
2151 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2152 }
26a258fe 2153 else
51bbfa0c
RS
2154 {
2155 /* Note that we must go through the motions of allocating an argument
2156 block even if the size is zero because we may be storing args
2157 in the area reserved for register arguments, which may be part of
2158 the stack frame. */
26a258fe 2159
51bbfa0c
RS
2160 int needed = args_size.constant;
2161
0f41302f
MS
2162 /* Store the maximum argument space used. It will be pushed by
2163 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2164 checking). */
51bbfa0c
RS
2165
2166 if (needed > current_function_outgoing_args_size)
2167 current_function_outgoing_args_size = needed;
2168
26a258fe
PB
2169 if (must_preallocate)
2170 {
2171#ifdef ACCUMULATE_OUTGOING_ARGS
2172 /* Since the stack pointer will never be pushed, it is possible for
2173 the evaluation of a parm to clobber something we have already
2174 written to the stack. Since most function calls on RISC machines
2175 do not use the stack, this is uncommon, but must work correctly.
2176
2177 Therefore, we save any area of the stack that was already written
2178 and that we are using. Here we set up to do this by making a new
2179 stack usage map from the old one. The actual save will be done
2180 by store_one_arg.
2181
2182 Another approach might be to try to reorder the argument
2183 evaluations to avoid this conflicting stack usage. */
2184
e5e809f4 2185#ifndef OUTGOING_REG_PARM_STACK_SPACE
26a258fe
PB
2186 /* Since we will be writing into the entire argument area, the
2187 map must be allocated for its entire size, not just the part that
2188 is the responsibility of the caller. */
2189 needed += reg_parm_stack_space;
51bbfa0c
RS
2190#endif
2191
2192#ifdef ARGS_GROW_DOWNWARD
26a258fe
PB
2193 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2194 needed + 1);
51bbfa0c 2195#else
26a258fe
PB
2196 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2197 needed);
51bbfa0c 2198#endif
26a258fe 2199 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
51bbfa0c 2200
26a258fe
PB
2201 if (initial_highest_arg_in_use)
2202 bcopy (initial_stack_usage_map, stack_usage_map,
2203 initial_highest_arg_in_use);
51bbfa0c 2204
26a258fe
PB
2205 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2206 bzero (&stack_usage_map[initial_highest_arg_in_use],
2207 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2208 needed = 0;
2f4aa534 2209
26a258fe
PB
2210 /* The address of the outgoing argument list must not be copied to a
2211 register here, because argblock would be left pointing to the
2212 wrong place after the call to allocate_dynamic_stack_space below.
2213 */
2f4aa534 2214
26a258fe 2215 argblock = virtual_outgoing_args_rtx;
2f4aa534 2216
51bbfa0c 2217#else /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 2218 if (inhibit_defer_pop == 0)
51bbfa0c 2219 {
26a258fe
PB
2220 /* Try to reuse some or all of the pending_stack_adjust
2221 to get this space. Maybe we can avoid any pushing. */
2222 if (needed > pending_stack_adjust)
2223 {
2224 needed -= pending_stack_adjust;
2225 pending_stack_adjust = 0;
2226 }
2227 else
2228 {
2229 pending_stack_adjust -= needed;
2230 needed = 0;
2231 }
51bbfa0c 2232 }
26a258fe
PB
2233 /* Special case this because overhead of `push_block' in this
2234 case is non-trivial. */
2235 if (needed == 0)
2236 argblock = virtual_outgoing_args_rtx;
51bbfa0c 2237 else
26a258fe
PB
2238 argblock = push_block (GEN_INT (needed), 0, 0);
2239
2240 /* We only really need to call `copy_to_reg' in the case where push
2241 insns are going to be used to pass ARGBLOCK to a function
2242 call in ARGS. In that case, the stack pointer changes value
2243 from the allocation point to the call point, and hence
2244 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2245 But might as well always do it. */
2246 argblock = copy_to_reg (argblock);
51bbfa0c 2247#endif /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 2248 }
51bbfa0c
RS
2249 }
2250
bfbf933a
RS
2251#ifdef ACCUMULATE_OUTGOING_ARGS
2252 /* The save/restore code in store_one_arg handles all cases except one:
2253 a constructor call (including a C function returning a BLKmode struct)
2254 to initialize an argument. */
2255 if (stack_arg_under_construction)
2256 {
e5e809f4 2257#ifndef OUTGOING_REG_PARM_STACK_SPACE
e5d70561 2258 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
bfbf933a 2259#else
e5d70561 2260 rtx push_size = GEN_INT (args_size.constant);
bfbf933a
RS
2261#endif
2262 if (old_stack_level == 0)
2263 {
e5d70561 2264 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bfbf933a
RS
2265 old_pending_adj = pending_stack_adjust;
2266 pending_stack_adjust = 0;
2267 /* stack_arg_under_construction says whether a stack arg is
2268 being constructed at the old stack level. Pushing the stack
2269 gets a clean outgoing argument block. */
2270 old_stack_arg_under_construction = stack_arg_under_construction;
2271 stack_arg_under_construction = 0;
2272 /* Make a new map for the new argument list. */
2273 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2274 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2275 highest_outgoing_arg_in_use = 0;
2276 }
e5d70561 2277 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
bfbf933a
RS
2278 }
2279 /* If argument evaluation might modify the stack pointer, copy the
2280 address of the argument list to a register. */
2281 for (i = 0; i < num_actuals; i++)
2282 if (args[i].pass_on_stack)
2283 {
2284 argblock = copy_addr_to_reg (argblock);
2285 break;
2286 }
2287#endif
2288
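  /* Now that ARGBLOCK is settled, record for each argument the stack
     address where it will be stored (ARGBLOCK plus the offsets computed
     above).  */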
a45bdd02 2289 compute_argument_addresses (args, argblock, num_actuals);
bfbf933a 2290
51bbfa0c 2291#ifdef PUSH_ARGS_REVERSED
c795bca9 2292#ifdef PREFERRED_STACK_BOUNDARY
51bbfa0c
RS
2293 /* If we push args individually in reverse order, perform stack alignment
2294 before the first push (the last arg). */
4e217aed
JH
2295 if (args_size.constant != unadjusted_args_size)
2296 {
2297 /* When the stack adjustment is pending,
2298 we get better code by combining the adjustments. */
42f602d8
JH
2299 if (pending_stack_adjust && !is_const
2300 && !inhibit_defer_pop)
4e217aed
JH
2301 {
2302 args_size.constant = (unadjusted_args_size
2303 + ((pending_stack_adjust + args_size.constant
c2732da3 2304 + arg_space_so_far
4e217aed
JH
2305 - unadjusted_args_size)
2306 % (preferred_stack_boundary / BITS_PER_UNIT)));
2307 pending_stack_adjust -= args_size.constant - unadjusted_args_size;
2308 do_pending_stack_adjust ();
2309 }
2310 else if (argblock == 0)
2311 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
c2732da3
JM
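      /* Account for the alignment padding just pushed; the sanity check
         after emit_call_1 below requires arg_space_so_far to return to
         its pre-call value.  */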
2312 arg_space_so_far += args_size.constant - unadjusted_args_size;
2313
2314 /* Now that the stack is properly aligned, pops can't safely
2315 be deferred during the evaluation of the arguments. */
2316 NO_DEFER_POP;
4e217aed 2317 }
51bbfa0c
RS
2318#endif
2319#endif
2320
2321 /* Don't try to defer pops if preallocating, not even from the first arg,
2322 since ARGBLOCK probably refers to the SP. */
2323 if (argblock)
2324 NO_DEFER_POP;
2325
a45bdd02 2326 funexp = rtx_for_function_call (fndecl, exp);
51bbfa0c
RS
2327
2328 /* Figure out the register where the value, if any, will come back. */
2329 valreg = 0;
2330 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2331 && ! structure_value_addr)
2332 {
2333 if (pcc_struct_value)
2334 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
4dc07bd7 2335 fndecl, 0);
51bbfa0c 2336 else
4dc07bd7 2337 valreg = hard_function_value (TREE_TYPE (exp), fndecl, 0);
51bbfa0c
RS
2338 }
2339
2340 /* Precompute all register parameters. It isn't safe to compute anything
0f41302f 2341 once we have started filling any specific hard regs. */
20efdf74 2342 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
51bbfa0c
RS
2343
2344#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
e5e809f4 2345
20efdf74
JL
2346 /* Save the fixed argument area if it's part of the caller's frame and
2347 is clobbered by argument setup for this call. */
2348 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2349 &low_to_save, &high_to_save);
b94301c2 2350#endif
20efdf74 2351
51bbfa0c
RS
2352
2353 /* Now store (and compute if necessary) all non-register parms.
2354 These come before register parms, since they can require block-moves,
2355 which could clobber the registers used for register parms.
2356 Parms which have partial registers are not stored here,
2357 but we do preallocate space here if they want that. */
2358
2359 for (i = 0; i < num_actuals; i++)
2360 if (args[i].reg == 0 || args[i].pass_on_stack)
2361 store_one_arg (&args[i], argblock, may_be_alloca,
c84e2712 2362 args_size.var != 0, reg_parm_stack_space);
51bbfa0c 2363
4ab56118
RK
2364 /* If we have a parm that is passed in registers but not in memory
2365 and whose alignment does not permit a direct copy into registers,
2366 make a group of pseudos that correspond to each register that we
2367 will later fill. */
45d44c98 2368 if (STRICT_ALIGNMENT)
20efdf74 2369 store_unaligned_arguments_into_pseudos (args, num_actuals);
4ab56118 2370
51bbfa0c
RS
2371 /* Now store any partially-in-registers parm.
2372 This is the last place a block-move can happen. */
2373 if (reg_parm_seen)
2374 for (i = 0; i < num_actuals; i++)
2375 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2376 store_one_arg (&args[i], argblock, may_be_alloca,
c84e2712 2377 args_size.var != 0, reg_parm_stack_space);
51bbfa0c
RS
2378
2379#ifndef PUSH_ARGS_REVERSED
c795bca9 2380#ifdef PREFERRED_STACK_BOUNDARY
51bbfa0c
RS
2381 /* If we pushed args in forward order, perform stack alignment
2382 after pushing the last arg. */
2383 if (argblock == 0)
774e6b37
JH
2384 {
2385 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2386 arg_space_so_far += args_size.constant - unadjusted_args_size;
2387 }
51bbfa0c
RS
2388#endif
2389#endif
2390
756e0e12
RS
2391 /* If register arguments require space on the stack and stack space
2392 was not preallocated, allocate stack space here for arguments
2393 passed in registers. */
6e716e89 2394#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
756e0e12 2395 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 2396 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
2397#endif
2398
51bbfa0c
RS
2399 /* Pass the function the address in which to return a structure value. */
2400 if (structure_value_addr && ! structure_value_addr_parm)
2401 {
2402 emit_move_insn (struct_value_rtx,
2403 force_reg (Pmode,
e5d70561
RK
2404 force_operand (structure_value_addr,
2405 NULL_RTX)));
7815214e
RK
2406
2407 /* Mark the memory for the aggregate as write-only. */
7d384cc0 2408 if (current_function_check_memory_usage)
7815214e
RK
2409 emit_library_call (chkr_set_right_libfunc, 1,
2410 VOIDmode, 3,
6a9c4aed 2411 structure_value_addr, Pmode,
7815214e 2412 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
956d6950
JL
2413 GEN_INT (MEMORY_USE_WO),
2414 TYPE_MODE (integer_type_node));
7815214e 2415
51bbfa0c 2416 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 2417 use_reg (&call_fusage, struct_value_rtx);
51bbfa0c
RS
2418 }
2419
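  /* Put the function address into a form the call insn can accept,
     forcing it into a register for an indirect call if necessary, and
     noting in CALL_FUSAGE any registers this requires.  */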
77cac2f2 2420 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
8b0f9101 2421
21a3b983 2422 load_register_parameters (args, num_actuals, &call_fusage);
51bbfa0c
RS
2423
2424 /* Perform postincrements before actually calling the function. */
2425 emit_queue ();
2426
c2939b57
JW
2427 /* Save a pointer to the last insn before the call, so that we can
2428 later safely search backwards to find the CALL_INSN. */
2429 before_call = get_last_insn ();
2430
51bbfa0c
RS
2431 /* All arguments and registers used for the call must be set up by now! */
2432
51bbfa0c 2433 /* Generate the actual call instruction. */
fb5eebb9
RH
2434 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2435 args_size.constant, struct_value_size,
51bbfa0c 2436 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
12a22e76 2437 valreg, old_inhibit_defer_pop, call_fusage, is_const, nothrow);
51bbfa0c 2438
774e6b37
JH
2439 /* Stack pointer ought to be restored to the value before call. */
2440 if (old_arg_space_so_far != arg_space_so_far)
2441 abort ();
2442
51bbfa0c
RS
2443 /* If call is cse'able, make appropriate pair of reg-notes around it.
2444 Test valreg so we don't crash; may safely ignore `const'
80a3ad45
JW
2445 if return type is void. Disable for PARALLEL return values, because
2446 we have no way to move such values into a pseudo register. */
2447 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
51bbfa0c
RS
2448 {
2449 rtx note = 0;
2450 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2451 rtx insns;
2452
9ae8ffe7
JL
2453 /* Mark the return value as a pointer if needed. */
2454 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2455 {
2456 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2457 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2458 }
2459
51bbfa0c
RS
2460 /* Construct an "equal form" for the value which mentions all the
2461 arguments in order as well as the function name. */
2462#ifdef PUSH_ARGS_REVERSED
2463 for (i = 0; i < num_actuals; i++)
38a448ca 2464 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c
RS
2465#else
2466 for (i = num_actuals - 1; i >= 0; i--)
38a448ca 2467 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c 2468#endif
38a448ca 2469 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
51bbfa0c
RS
2470
2471 insns = get_insns ();
2472 end_sequence ();
2473
2474 emit_libcall_block (insns, temp, valreg, note);
2475
2476 valreg = temp;
2477 }
4f48d56a
RK
2478 else if (is_const)
2479 {
2480 /* Otherwise, just write out the sequence without a note. */
2481 rtx insns = get_insns ();
2482
2483 end_sequence ();
2484 emit_insns (insns);
2485 }
9ae8ffe7
JL
2486 else if (is_malloc)
2487 {
2488 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2489 rtx last, insns;
2490
2491 /* The return value from a malloc-like function is a pointer. */
2492 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2493 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2494
2495 emit_move_insn (temp, valreg);
2496
2497 /* The return value from a malloc-like function cannot alias
2498 anything else. */
2499 last = get_last_insn ();
2500 REG_NOTES (last) =
38a448ca 2501 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
9ae8ffe7
JL
2502
2503 /* Write out the sequence. */
2504 insns = get_insns ();
2505 end_sequence ();
2506 emit_insns (insns);
2507 valreg = temp;
2508 }
51bbfa0c
RS
2509
2510 /* For calls to `setjmp', etc., inform flow.c it should complain
2511 if nonvolatile values are live. */
2512
2513 if (returns_twice)
2514 {
c2939b57
JW
2515 /* The NOTE_INSN_SETJMP note must be emitted immediately after the
2516 CALL_INSN. Some ports emit more than just a CALL_INSN above, so
2517 we must search for it here. */
2518 rtx last = get_last_insn ();
2519 while (GET_CODE (last) != CALL_INSN)
2520 {
2521 last = PREV_INSN (last);
2522 /* There was no CALL_INSN? */
2523 if (last == before_call)
2524 abort ();
2525 }
2526 emit_note_after (NOTE_INSN_SETJMP, last);
51bbfa0c
RS
2527 current_function_calls_setjmp = 1;
2528 }
2529
2530 if (is_longjmp)
2531 current_function_calls_longjmp = 1;
2532
2533 /* Notice functions that cannot return.
2534 If optimizing, insns emitted below will be dead.
2535 If not optimizing, they will exist, which is useful
2536 if the user uses the `return' command in the debugger. */
2537
2538 if (is_volatile || is_longjmp)
2539 emit_barrier ();
2540
51bbfa0c
RS
2541 /* If value type not void, return an rtx for the value. */
2542
e976b8b2
MS
2543 /* If there are cleanups to be called, don't use a hard reg as target.
2544 We need to double check this and see if it matters anymore. */
e9a25f70 2545 if (any_pending_cleanups (1)
51bbfa0c
RS
2546 && target && REG_P (target)
2547 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2548 target = 0;
2549
2550 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2551 || ignore)
2552 {
2553 target = const0_rtx;
2554 }
2555 else if (structure_value_addr)
2556 {
2557 if (target == 0 || GET_CODE (target) != MEM)
29008b51 2558 {
38a448ca
RH
2559 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2560 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2561 structure_value_addr));
c6df88cb
MM
2562 MEM_SET_IN_STRUCT_P (target,
2563 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
29008b51 2564 }
51bbfa0c
RS
2565 }
2566 else if (pcc_struct_value)
2567 {
f78b5ca1
JL
2568 /* This is the special C++ case where we need to
2569 know what the true target was. We take care to
2570 never use this value more than once in one expression. */
38a448ca
RH
2571 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2572 copy_to_reg (valreg));
c6df88cb 2573 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
51bbfa0c 2574 }
cacbd532
JW
2575 /* Handle calls that return values in multiple non-contiguous locations.
2576 The Irix 6 ABI has examples of this. */
2577 else if (GET_CODE (valreg) == PARALLEL)
2578 {
aac5cc16
RH
2579 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2580
cacbd532
JW
2581 if (target == 0)
2582 {
2b4092f2 2583 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
c6df88cb 2584 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
cacbd532
JW
2585 preserve_temp_slots (target);
2586 }
2587
c5c76735
JL
2588 if (! rtx_equal_p (target, valreg))
2589 emit_group_store (target, valreg, bytes,
2590 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
cacbd532 2591 }
059c3d84
JW
2592 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2593 && GET_MODE (target) == GET_MODE (valreg))
2594 /* TARGET and VALREG cannot be equal at this point because the latter
2595 would not have REG_FUNCTION_VALUE_P true, while the former would if
2596 it were referring to the same register.
2597
2598 If they refer to the same register, this move will be a no-op, except
2599 when function inlining is being done. */
2600 emit_move_insn (target, valreg);
766b19fb 2601 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
c36fce9a 2602 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
51bbfa0c
RS
2603 else
2604 target = copy_to_reg (valreg);
2605
84b55618 2606#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
2607 /* If we promoted this return value, make the proper SUBREG. TARGET
2608 might be const0_rtx here, so be careful. */
2609 if (GET_CODE (target) == REG
766b19fb 2610 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
5d2ac65e 2611 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 2612 {
321e0bba
RK
2613 tree type = TREE_TYPE (exp);
2614 int unsignedp = TREE_UNSIGNED (type);
84b55618 2615
321e0bba
RK
2616 /* If we don't promote as expected, something is wrong. */
2617 if (GET_MODE (target)
2618 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
5d2ac65e
RK
2619 abort ();
2620
38a448ca 2621 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
84b55618
RK
2622 SUBREG_PROMOTED_VAR_P (target) = 1;
2623 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2624 }
2625#endif
2626
2f4aa534
RS
2627 /* If size of args is variable or this was a constructor call for a stack
2628 argument, restore saved stack-pointer value. */
51bbfa0c
RS
2629
2630 if (old_stack_level)
2631 {
e5d70561 2632 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 2633 pending_stack_adjust = old_pending_adj;
d64f5a78 2634#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2635 stack_arg_under_construction = old_stack_arg_under_construction;
2636 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2637 stack_usage_map = initial_stack_usage_map;
d64f5a78 2638#endif
51bbfa0c 2639 }
51bbfa0c
RS
2640#ifdef ACCUMULATE_OUTGOING_ARGS
2641 else
2642 {
2643#ifdef REG_PARM_STACK_SPACE
2644 if (save_area)
20efdf74
JL
2645 restore_fixed_argument_area (save_area, argblock,
2646 high_to_save, low_to_save);
b94301c2 2647#endif
51bbfa0c 2648
51bbfa0c
RS
2649 /* If we saved any argument areas, restore them. */
2650 for (i = 0; i < num_actuals; i++)
2651 if (args[i].save_area)
2652 {
2653 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2654 rtx stack_area
38a448ca
RH
2655 = gen_rtx_MEM (save_mode,
2656 memory_address (save_mode,
2657 XEXP (args[i].stack_slot, 0)));
51bbfa0c
RS
2658
2659 if (save_mode != BLKmode)
2660 emit_move_insn (stack_area, args[i].save_area);
2661 else
2662 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 2663 GEN_INT (args[i].size.constant),
51bbfa0c
RS
2664 PARM_BOUNDARY / BITS_PER_UNIT);
2665 }
2666
2667 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2668 stack_usage_map = initial_stack_usage_map;
2669 }
2670#endif
2671
59257ff7
RK
2672 /* If this was alloca, record the new stack level for nonlocal gotos.
2673 Check for the handler slots since we might not have a save area
0f41302f 2674 for non-local gotos. */
59257ff7 2675
ba716ac9 2676 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
e5d70561 2677 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
2678
2679 pop_temp_slots ();
2680
8e6a59fe
MM
2681 /* Free up storage we no longer need. */
2682 for (i = 0; i < num_actuals; ++i)
2683 if (args[i].aligned_regs)
2684 free (args[i].aligned_regs);
2685
51bbfa0c
RS
2686 return target;
2687}
2688\f
12a22e76
JM
2689/* Returns nonzero if FUN is the symbol for a library function which
2690 cannot throw. */
2691
2692static int
2693libfunc_nothrow (fun)
2694 rtx fun;
2695{
2696 if (fun == throw_libfunc
2697 || fun == rethrow_libfunc
2698 || fun == sjthrow_libfunc
2699 || fun == sjpopnthrow_libfunc)
2700 return 0;
2701
2702 return 1;
2703}
2704
322e3e34
RK
2705/* Output a library call to function FUN (a SYMBOL_REF rtx)
2706 (emitting the queue unless NO_QUEUE is nonzero),
2707 for a value of mode OUTMODE,
2708 with NARGS different arguments, passed as alternating rtx values
2709 and machine_modes to convert them to.
2710 The rtx values should have been passed through protect_from_queue already.
2711
2712 NO_QUEUE will be true if and only if the library call is a `const' call
2713 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2714 to the variable is_const in expand_call.
2715
2716 NO_QUEUE must be true for const calls, because if it isn't, then
2717 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2718 and will be lost if the libcall sequence is optimized away.
2719
2720 NO_QUEUE must be false for non-const calls, because if it isn't, the
2721 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2722 optimized. For instance, the instruction scheduler may incorrectly
2723 move memory references across the non-const call. */
2724
2725void
3d994c6b
KG
2726emit_library_call VPARAMS((rtx orgfun, int no_queue, enum machine_mode outmode,
2727 int nargs, ...))
322e3e34 2728{
5148a72b 2729#ifndef ANSI_PROTOTYPES
4f90e4a0
RK
2730 rtx orgfun;
2731 int no_queue;
2732 enum machine_mode outmode;
2733 int nargs;
2734#endif
322e3e34
RK
2735 va_list p;
2736 /* Total size in bytes of all the stack-parms scanned so far. */
2737 struct args_size args_size;
2738 /* Size of arguments before any adjustments (such as rounding). */
2739 struct args_size original_args_size;
2740 register int argnum;
322e3e34 2741 rtx fun;
322e3e34
RK
2742 int inc;
2743 int count;
fbb57b2a 2744 struct args_size alignment_pad;
322e3e34
RK
2745 rtx argblock = 0;
2746 CUMULATIVE_ARGS args_so_far;
2747 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 2748 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
2749 struct arg *argvec;
2750 int old_inhibit_defer_pop = inhibit_defer_pop;
774e6b37 2751 int old_arg_space_so_far = arg_space_so_far;
77cac2f2 2752 rtx call_fusage = 0;
e5e809f4 2753 int reg_parm_stack_space = 0;
12a22e76 2754 int nothrow;
f046b3cc
JL
2755#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2756 /* Define the boundary of the register parm stack space that needs to be
2757 save, if any. */
6a651371 2758 int low_to_save = -1, high_to_save = 0;
f046b3cc
JL
2759 rtx save_area = 0; /* Place that it is saved */
2760#endif
2761
2762#ifdef ACCUMULATE_OUTGOING_ARGS
2763 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2764 char *initial_stack_usage_map = stack_usage_map;
2765 int needed;
2766#endif
2767
2768#ifdef REG_PARM_STACK_SPACE
69d4ca36 2769 /* Size of the stack reserved for parameter registers. */
f046b3cc
JL
2770#ifdef MAYBE_REG_PARM_STACK_SPACE
2771 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2772#else
ab87f8c8 2773 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
f046b3cc
JL
2774#endif
2775#endif
322e3e34 2776
4f90e4a0
RK
2777 VA_START (p, nargs);
2778
5148a72b 2779#ifndef ANSI_PROTOTYPES
4f90e4a0 2780 orgfun = va_arg (p, rtx);
322e3e34
RK
2781 no_queue = va_arg (p, int);
2782 outmode = va_arg (p, enum machine_mode);
2783 nargs = va_arg (p, int);
4f90e4a0
RK
2784#endif
2785
2786 fun = orgfun;
322e3e34 2787
12a22e76
JM
2788 nothrow = libfunc_nothrow (fun);
2789
322e3e34
RK
2790 /* Copy all the libcall-arguments out of the varargs data
2791 and into a vector ARGVEC.
2792
2793 Compute how to pass each argument. We only support a very small subset
2794 of the full argument passing conventions to limit complexity here since
2795 library functions shouldn't have many args. */
2796
2797 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
f046b3cc
JL
2798 bzero ((char *) argvec, nargs * sizeof (struct arg));
2799
322e3e34 2800
eecb6f50 2801 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2802
2803 args_size.constant = 0;
2804 args_size.var = 0;
2805
888aa7a9
RS
2806 push_temp_slots ();
2807
c2f8b491
JH
2808#ifdef PREFERRED_STACK_BOUNDARY
2809 /* Ensure current function's preferred stack boundary is at least
2810 what we need. */
2811 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
2812 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2813#endif
2814
322e3e34
RK
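  /* Walk the (value, mode) pairs supplied by the caller and work out
     how each argument is to be passed, filling in ARGVEC.  */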
2815 for (count = 0; count < nargs; count++)
2816 {
2817 rtx val = va_arg (p, rtx);
2818 enum machine_mode mode = va_arg (p, enum machine_mode);
2819
2820 /* We cannot convert the arg value to the mode the library wants here;
2821 must do it earlier where we know the signedness of the arg. */
2822 if (mode == BLKmode
2823 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2824 abort ();
2825
2826 /* On some machines, there's no way to pass a float to a library fcn.
2827 Pass it as a double instead. */
2828#ifdef LIBGCC_NEEDS_DOUBLE
2829 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2830 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2831#endif
2832
2833 /* There's no need to call protect_from_queue, because
2834 either emit_move_insn or emit_push_insn will do that. */
2835
2836 /* Make sure it is a reasonable operand for a move or push insn. */
2837 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2838 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2839 val = force_operand (val, NULL_RTX);
2840
322e3e34
RK
2841#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2842 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2843 {
a44492f0
RK
2844 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2845 be viewed as just an efficiency improvement. */
888aa7a9
RS
2846 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2847 emit_move_insn (slot, val);
8301b6e2 2848 val = force_operand (XEXP (slot, 0), NULL_RTX);
a44492f0 2849 mode = Pmode;
888aa7a9 2850 }
322e3e34
RK
2851#endif
2852
888aa7a9
RS
2853 argvec[count].value = val;
2854 argvec[count].mode = mode;
2855
322e3e34 2856 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
bf44874e 2857
322e3e34
RK
2858#ifdef FUNCTION_ARG_PARTIAL_NREGS
2859 argvec[count].partial
2860 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2861#else
2862 argvec[count].partial = 0;
2863#endif
2864
2865 locate_and_pad_parm (mode, NULL_TREE,
2866 argvec[count].reg && argvec[count].partial == 0,
2867 NULL_TREE, &args_size, &argvec[count].offset,
4fc026cd 2868 &argvec[count].size, &alignment_pad);
322e3e34
RK
2869
2870 if (argvec[count].size.var)
2871 abort ();
2872
e5e809f4 2873 if (reg_parm_stack_space == 0 && argvec[count].partial)
322e3e34 2874 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
322e3e34
RK
2875
2876 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 2877 || reg_parm_stack_space > 0)
322e3e34
RK
2878 args_size.constant += argvec[count].size.constant;
2879
0f41302f 2880 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
2881 }
2882 va_end (p);
2883
f046b3cc
JL
2884#ifdef FINAL_REG_PARM_STACK_SPACE
2885 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2886 args_size.var);
2887#endif
2888
322e3e34
RK
2889 /* If this machine requires an external definition for library
2890 functions, write one out. */
2891 assemble_external_libcall (fun);
2892
2893 original_args_size = args_size;
c795bca9 2894#ifdef PREFERRED_STACK_BOUNDARY
774e6b37
JH
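  /* Round so that the new arguments plus whatever is already outstanding
     (arg_space_so_far + pending_stack_adjust) total a multiple of
     STACK_BYTES.  For example, with STACK_BYTES == 16, 20 bytes of
     arguments and 8 bytes outstanding round up to 24, since 8 + 24 == 32.  */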
2895 args_size.constant = (((args_size.constant
2896 + arg_space_so_far
2897 + pending_stack_adjust
2898 + STACK_BYTES - 1)
2899 / STACK_BYTES
2900 * STACK_BYTES)
2901 - arg_space_so_far
2902 - pending_stack_adjust);
322e3e34
RK
2903#endif
2904
322e3e34 2905 args_size.constant = MAX (args_size.constant,
f046b3cc 2906 reg_parm_stack_space);
e5e809f4 2907
322e3e34 2908#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc 2909 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
2910#endif
2911
322e3e34
RK
2912 if (args_size.constant > current_function_outgoing_args_size)
2913 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2914
2915#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
2916 /* Since the stack pointer will never be pushed, it is possible for
2917 the evaluation of a parm to clobber something we have already
2918 written to the stack. Since most function calls on RISC machines
2919 do not use the stack, this is uncommon, but must work correctly.
2920
2921 Therefore, we save any area of the stack that was already written
2922 and that we are using. Here we set up to do this by making a new
2923 stack usage map from the old one.
2924
2925 Another approach might be to try to reorder the argument
2926 evaluations to avoid this conflicting stack usage. */
2927
2928 needed = args_size.constant;
e5e809f4
JL
2929
2930#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc
JL
2931 /* Since we will be writing into the entire argument area, the
2932 map must be allocated for its entire size, not just the part that
2933 is the responsibility of the caller. */
2934 needed += reg_parm_stack_space;
2935#endif
2936
2937#ifdef ARGS_GROW_DOWNWARD
2938 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2939 needed + 1);
2940#else
2941 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2942 needed);
322e3e34 2943#endif
f046b3cc
JL
2944 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2945
2946 if (initial_highest_arg_in_use)
2947 bcopy (initial_stack_usage_map, stack_usage_map,
2948 initial_highest_arg_in_use);
2949
2950 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2951 bzero (&stack_usage_map[initial_highest_arg_in_use],
2952 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2953 needed = 0;
322e3e34 2954
f046b3cc
JL
2955 /* The address of the outgoing argument list must not be copied to a
2956 register here, because argblock would be left pointing to the
2957 wrong place after the call to allocate_dynamic_stack_space below.
2958 */
2959
2960 argblock = virtual_outgoing_args_rtx;
2961#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
2962#ifndef PUSH_ROUNDING
2963 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2964#endif
f046b3cc 2965#endif
322e3e34
RK
2966
2967#ifdef PUSH_ARGS_REVERSED
c795bca9 2968#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
2969 /* If we push args individually in reverse order, perform stack alignment
2970 before the first push (the last arg). */
2971 if (argblock == 0)
774e6b37
JH
2972 {
2973 anti_adjust_stack (GEN_INT (args_size.constant
2974 - original_args_size.constant));
2975 arg_space_so_far += args_size.constant - original_args_size.constant;
2976 }
322e3e34
RK
2977#endif
2978#endif
2979
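  /* Decide which end of ARGVEC to start from: push the arguments
     last-to-first when PUSH_ARGS_REVERSED is defined, first-to-last
     otherwise.  */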
2980#ifdef PUSH_ARGS_REVERSED
2981 inc = -1;
2982 argnum = nargs - 1;
2983#else
2984 inc = 1;
2985 argnum = 0;
2986#endif
2987
f046b3cc
JL
2988#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2989 /* The argument list is the property of the called routine and it
2990 may clobber it. If the fixed area has been used for previous
2991 parameters, we must save and restore it.
2992
2994 Here we compute the boundary of the area that needs to be saved, if any. */
2994
2995#ifdef ARGS_GROW_DOWNWARD
2996 for (count = 0; count < reg_parm_stack_space + 1; count++)
2997#else
2998 for (count = 0; count < reg_parm_stack_space; count++)
2999#endif
3000 {
3001 if (count >= highest_outgoing_arg_in_use
3002 || stack_usage_map[count] == 0)
3003 continue;
3004
3005 if (low_to_save == -1)
3006 low_to_save = count;
3007
3008 high_to_save = count;
3009 }
3010
3011 if (low_to_save >= 0)
3012 {
3013 int num_to_save = high_to_save - low_to_save + 1;
3014 enum machine_mode save_mode
3015 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3016 rtx stack_area;
3017
3018 /* If we don't have the required alignment, must do this in BLKmode. */
3019 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3020 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3021 save_mode = BLKmode;
3022
ceb83206 3023#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
3024 stack_area = gen_rtx_MEM (save_mode,
3025 memory_address (save_mode,
38a448ca 3026 plus_constant (argblock,
ceb83206 3027 - high_to_save)));
f046b3cc 3028#else
ceb83206
JL
3029 stack_area = gen_rtx_MEM (save_mode,
3030 memory_address (save_mode,
38a448ca 3031 plus_constant (argblock,
ceb83206 3032 low_to_save)));
f046b3cc 3033#endif
f046b3cc
JL
3034 if (save_mode == BLKmode)
3035 {
3036 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
f046b3cc
JL
3037 emit_block_move (validize_mem (save_area), stack_area,
3038 GEN_INT (num_to_save),
3039 PARM_BOUNDARY / BITS_PER_UNIT);
3040 }
3041 else
3042 {
3043 save_area = gen_reg_rtx (save_mode);
3044 emit_move_insn (save_area, stack_area);
3045 }
3046 }
3047#endif
3048
322e3e34
RK
3049 /* Push the args that need to be pushed. */
3050
5e26979c
JL
3051 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3052 are to be pushed. */
322e3e34
RK
3053 for (count = 0; count < nargs; count++, argnum += inc)
3054 {
3055 register enum machine_mode mode = argvec[argnum].mode;
3056 register rtx val = argvec[argnum].value;
3057 rtx reg = argvec[argnum].reg;
3058 int partial = argvec[argnum].partial;
69d4ca36 3059#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 3060 int lower_bound, upper_bound, i;
69d4ca36 3061#endif
322e3e34
RK
3062
3063 if (! (reg != 0 && partial == 0))
f046b3cc
JL
3064 {
3065#ifdef ACCUMULATE_OUTGOING_ARGS
3066 /* If this is being stored into a pre-allocated, fixed-size, stack
3067 area, save any previous data at that location. */
3068
3069#ifdef ARGS_GROW_DOWNWARD
 3070 /* The argument's stack offset is negative, but we want to index
 3071 stack_usage_map with positive values. */
5e26979c
JL
3072 upper_bound = -argvec[argnum].offset.constant + 1;
3073 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 3074#else
5e26979c
JL
3075 lower_bound = argvec[argnum].offset.constant;
3076 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
3077#endif
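 /* Illustration only: under ARGS_GROW_DOWNWARD an argument at offset -16
    with size 8 gives upper_bound == 17 and lower_bound == 9, so the loop
    below scans stack_usage_map[9..16]; on other targets an offset of 16
    with size 8 scans stack_usage_map[16..23].  */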
3078
3079 for (i = lower_bound; i < upper_bound; i++)
3080 if (stack_usage_map[i]
f046b3cc
JL
3081 /* Don't store things in the fixed argument area at this point;
3082 it has already been saved. */
e5e809f4 3083 && i > reg_parm_stack_space)
f046b3cc
JL
3084 break;
3085
3086 if (i != upper_bound)
3087 {
e5e809f4 3088 /* We need to make a save area. See what mode we can make it. */
f046b3cc 3089 enum machine_mode save_mode
5e26979c 3090 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
3091 MODE_INT, 1);
3092 rtx stack_area
c5c76735
JL
3093 = gen_rtx_MEM
3094 (save_mode,
3095 memory_address
3096 (save_mode,
3097 plus_constant (argblock,
3098 argvec[argnum].offset.constant)));
3099
5e26979c
JL
3100 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3101 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
3102 }
3103#endif
3104 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
e5e809f4 3105 argblock, GEN_INT (argvec[argnum].offset.constant),
4fc026cd 3106 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
774e6b37 3107 arg_space_so_far += argvec[argnum].size.constant;
f046b3cc
JL
3108
3109#ifdef ACCUMULATE_OUTGOING_ARGS
3110 /* Now mark the segment we just used. */
3111 for (i = lower_bound; i < upper_bound; i++)
3112 stack_usage_map[i] = 1;
3113#endif
3114
3115 NO_DEFER_POP;
3116 }
322e3e34
RK
3117 }
3118
3119#ifndef PUSH_ARGS_REVERSED
c795bca9 3120#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
3121 /* If we pushed args in forward order, perform stack alignment
3122 after pushing the last arg. */
3123 if (argblock == 0)
774e6b37
JH
3124 {
3125 anti_adjust_stack (GEN_INT (args_size.constant
3126 - original_args_size.constant));
3127 arg_space_so_far += args_size.constant - original_args_size.constant;
3128 }
322e3e34
RK
3129#endif
3130#endif
3131
3132#ifdef PUSH_ARGS_REVERSED
3133 argnum = nargs - 1;
3134#else
3135 argnum = 0;
3136#endif
3137
77cac2f2 3138 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 3139
322e3e34
RK
3140 /* Now load any reg parms into their regs. */
3141
5e26979c
JL
3142 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3143 are to be pushed. */
322e3e34
RK
3144 for (count = 0; count < nargs; count++, argnum += inc)
3145 {
322e3e34
RK
3146 register rtx val = argvec[argnum].value;
3147 rtx reg = argvec[argnum].reg;
3148 int partial = argvec[argnum].partial;
3149
bf44874e
JL
3150 /* Handle calls that pass values in multiple non-contiguous
3151 locations. The PA64 has examples of this for library calls. */
19e3f61a 3152 if (reg != 0 && GET_CODE (reg) == PARALLEL)
bf44874e
JL
3153 emit_group_load (reg, val,
3154 GET_MODE_SIZE (GET_MODE (val)),
3155 GET_MODE_ALIGNMENT (GET_MODE (val)));
3156 else if (reg != 0 && partial == 0)
322e3e34 3157 emit_move_insn (reg, val);
bf44874e 3158
322e3e34
RK
3159 NO_DEFER_POP;
3160 }
3161
3162 /* For version 1.37, try deleting this entirely. */
3163 if (! no_queue)
3164 emit_queue ();
3165
3166 /* Any regs containing parms remain in use through the call. */
322e3e34 3167 for (count = 0; count < nargs; count++)
bf44874e 3168 {
19e3f61a
JM
3169 rtx reg = argvec[count].reg;
3170 if (reg != 0 && GET_CODE (argvec[count].reg) == PARALLEL)
3171 use_group_regs (&call_fusage, reg);
3172 else if (reg != 0)
3173 use_reg (&call_fusage, reg);
bf44874e 3174 }
322e3e34 3175
322e3e34
RK
3176 /* Don't allow popping to be deferred, since then
3177 cse'ing of library calls could delete a call and leave the pop. */
3178 NO_DEFER_POP;
3179
3180 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3181 will set inhibit_defer_pop to that value. */
3182
334c4f0f
RK
3183 /* The return type is needed to decide how many bytes the function pops.
3184 Signedness plays no role in that, so for simplicity, we pretend it's
3185 always signed. We also assume that the list of arguments passed has
3186 no impact, so we pretend it is unknown. */
3187
2c8da025
RK
3188 emit_call_1 (fun,
3189 get_identifier (XSTR (orgfun, 0)),
b3776927
RK
3190 build_function_type (outmode == VOIDmode ? void_type_node
3191 : type_for_mode (outmode, 0), NULL_TREE),
fb5eebb9 3192 original_args_size.constant, args_size.constant, 0,
322e3e34
RK
3193 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3194 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
12a22e76 3195 old_inhibit_defer_pop + 1, call_fusage, no_queue, nothrow);
322e3e34 3196
888aa7a9
RS
3197 pop_temp_slots ();
3198
774e6b37
JH
 3199 /* The stack pointer ought to have been restored to the value it had before the call. */
 3200 if (old_arg_space_so_far != arg_space_so_far)
 3201 abort ();
3202
322e3e34
RK
3203 /* Now restore inhibit_defer_pop to its actual original value. */
3204 OK_DEFER_POP;
f046b3cc
JL
3205
3206#ifdef ACCUMULATE_OUTGOING_ARGS
3207#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
3208 if (save_area)
3209 {
3210 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 3211#ifdef ARGS_GROW_DOWNWARD
e9a25f70 3212 rtx stack_area
38a448ca
RH
3213 = gen_rtx_MEM (save_mode,
3214 memory_address (save_mode,
ceb83206
JL
3215 plus_constant (argblock,
3216 - high_to_save)));
f046b3cc 3217#else
ceb83206
JL
3218 rtx stack_area
3219 = gen_rtx_MEM (save_mode,
3220 memory_address (save_mode,
3221 plus_constant (argblock, low_to_save)));
f046b3cc 3222#endif
f046b3cc 3223
e9a25f70
JL
3224 if (save_mode != BLKmode)
3225 emit_move_insn (stack_area, save_area);
3226 else
3227 emit_block_move (stack_area, validize_mem (save_area),
3228 GEN_INT (high_to_save - low_to_save + 1),
3229 PARM_BOUNDARY / BITS_PER_UNIT);
3230 }
f046b3cc
JL
3231#endif
3232
3233 /* If we saved any argument areas, restore them. */
3234 for (count = 0; count < nargs; count++)
3235 if (argvec[count].save_area)
3236 {
3237 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3238 rtx stack_area
38a448ca 3239 = gen_rtx_MEM (save_mode,
c5c76735
JL
3240 memory_address
3241 (save_mode,
3242 plus_constant (argblock,
3243 argvec[count].offset.constant)));
f046b3cc
JL
3244
3245 emit_move_insn (stack_area, argvec[count].save_area);
3246 }
3247
3248 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3249 stack_usage_map = initial_stack_usage_map;
3250#endif
322e3e34
RK
3251}
3252\f
3253/* Like emit_library_call except that an extra argument, VALUE,
3254 comes second and says where to store the result.
fac0ad80
RS
3255 (If VALUE is zero, this function chooses a convenient way
3256 to return the value.
322e3e34 3257
fac0ad80
RS
3258 This function returns an rtx for where the value is to be found.
3259 If VALUE is nonzero, VALUE is returned. */
3260
3261rtx
3d994c6b
KG
3262emit_library_call_value VPARAMS((rtx orgfun, rtx value, int no_queue,
3263 enum machine_mode outmode, int nargs, ...))
322e3e34 3264{
5148a72b 3265#ifndef ANSI_PROTOTYPES
4f90e4a0
RK
3266 rtx orgfun;
3267 rtx value;
3268 int no_queue;
3269 enum machine_mode outmode;
3270 int nargs;
3271#endif
322e3e34
RK
3272 va_list p;
3273 /* Total size in bytes of all the stack-parms scanned so far. */
3274 struct args_size args_size;
3275 /* Size of arguments before any adjustments (such as rounding). */
3276 struct args_size original_args_size;
3277 register int argnum;
322e3e34 3278 rtx fun;
322e3e34
RK
3279 int inc;
3280 int count;
4fc026cd 3281 struct args_size alignment_pad;
322e3e34
RK
3282 rtx argblock = 0;
3283 CUMULATIVE_ARGS args_so_far;
3284 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 3285 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
3286 struct arg *argvec;
3287 int old_inhibit_defer_pop = inhibit_defer_pop;
774e6b37 3288 int old_arg_space_so_far = arg_space_so_far;
77cac2f2 3289 rtx call_fusage = 0;
322e3e34 3290 rtx mem_value = 0;
fac0ad80 3291 int pcc_struct_value = 0;
4f389214 3292 int struct_value_size = 0;
d61bee95 3293 int is_const;
e5e809f4 3294 int reg_parm_stack_space = 0;
12a22e76 3295 int nothrow;
69d4ca36 3296#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 3297 int needed;
69d4ca36 3298#endif
f046b3cc
JL
3299
3300#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3301 /* Define the boundary of the register parm stack space that needs to be
 3302 saved, if any. */
6a651371 3303 int low_to_save = -1, high_to_save = 0;
f046b3cc
JL
3304 rtx save_area = 0; /* Place that it is saved */
3305#endif
3306
3307#ifdef ACCUMULATE_OUTGOING_ARGS
69d4ca36 3308 /* Size of the stack reserved for parameter registers. */
f046b3cc
JL
3309 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3310 char *initial_stack_usage_map = stack_usage_map;
3311#endif
3312
3313#ifdef REG_PARM_STACK_SPACE
3314#ifdef MAYBE_REG_PARM_STACK_SPACE
3315 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3316#else
ab87f8c8 3317 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
f046b3cc
JL
3318#endif
3319#endif
322e3e34 3320
4f90e4a0
RK
3321 VA_START (p, nargs);
3322
5148a72b 3323#ifndef ANSI_PROTOTYPES
4f90e4a0 3324 orgfun = va_arg (p, rtx);
322e3e34
RK
3325 value = va_arg (p, rtx);
3326 no_queue = va_arg (p, int);
3327 outmode = va_arg (p, enum machine_mode);
3328 nargs = va_arg (p, int);
4f90e4a0
RK
3329#endif
3330
d61bee95 3331 is_const = no_queue;
4f90e4a0 3332 fun = orgfun;
322e3e34 3333
12a22e76
JM
3334 nothrow = libfunc_nothrow (fun);
3335
c2f8b491
JH
3336#ifdef PREFERRED_STACK_BOUNDARY
3337 /* Ensure current function's preferred stack boundary is at least
3338 what we need. */
3339 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3340 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3341#endif
3342
322e3e34
RK
3343 /* If this kind of value comes back in memory,
3344 decide where in memory it should come back. */
fac0ad80 3345 if (aggregate_value_p (type_for_mode (outmode, 0)))
322e3e34 3346 {
fac0ad80
RS
3347#ifdef PCC_STATIC_STRUCT_RETURN
3348 rtx pointer_reg
3349 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
4dc07bd7 3350 0, 0);
38a448ca 3351 mem_value = gen_rtx_MEM (outmode, pointer_reg);
fac0ad80
RS
3352 pcc_struct_value = 1;
3353 if (value == 0)
3354 value = gen_reg_rtx (outmode);
3355#else /* not PCC_STATIC_STRUCT_RETURN */
4f389214 3356 struct_value_size = GET_MODE_SIZE (outmode);
fac0ad80 3357 if (value != 0 && GET_CODE (value) == MEM)
322e3e34
RK
3358 mem_value = value;
3359 else
3360 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
fac0ad80 3361#endif
779c643a
JW
3362
3363 /* This call returns a big structure. */
3364 is_const = 0;
322e3e34
RK
3365 }
3366
3367 /* ??? Unfinished: must pass the memory address as an argument. */
3368
3369 /* Copy all the libcall-arguments out of the varargs data
3370 and into a vector ARGVEC.
3371
3372 Compute how to pass each argument. We only support a very small subset
3373 of the full argument passing conventions to limit complexity here since
3374 library functions shouldn't have many args. */
3375
3376 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
d3c4e2ab 3377 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
322e3e34 3378
eecb6f50 3379 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
3380
3381 args_size.constant = 0;
3382 args_size.var = 0;
3383
3384 count = 0;
3385
888aa7a9
RS
3386 push_temp_slots ();
3387
322e3e34
RK
3388 /* If there's a structure value address to be passed,
3389 either pass it in the special place, or pass it as an extra argument. */
fac0ad80 3390 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
322e3e34
RK
3391 {
3392 rtx addr = XEXP (mem_value, 0);
fac0ad80 3393 nargs++;
322e3e34 3394
fac0ad80
RS
3395 /* Make sure it is a reasonable operand for a move or push insn. */
3396 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3397 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3398 addr = force_operand (addr, NULL_RTX);
322e3e34 3399
fac0ad80 3400 argvec[count].value = addr;
4fc3dcd5 3401 argvec[count].mode = Pmode;
fac0ad80 3402 argvec[count].partial = 0;
322e3e34 3403
4fc3dcd5 3404 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
322e3e34 3405#ifdef FUNCTION_ARG_PARTIAL_NREGS
4fc3dcd5 3406 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
fac0ad80 3407 abort ();
322e3e34
RK
3408#endif
3409
4fc3dcd5 3410 locate_and_pad_parm (Pmode, NULL_TREE,
fac0ad80
RS
3411 argvec[count].reg && argvec[count].partial == 0,
3412 NULL_TREE, &args_size, &argvec[count].offset,
4fc026cd 3413 &argvec[count].size, &alignment_pad);
322e3e34
RK
3414
3415
fac0ad80 3416 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 3417 || reg_parm_stack_space > 0)
fac0ad80 3418 args_size.constant += argvec[count].size.constant;
322e3e34 3419
0f41302f 3420 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
fac0ad80
RS
3421
3422 count++;
322e3e34
RK
3423 }
3424
3425 for (; count < nargs; count++)
3426 {
3427 rtx val = va_arg (p, rtx);
3428 enum machine_mode mode = va_arg (p, enum machine_mode);
3429
3430 /* We cannot convert the arg value to the mode the library wants here;
3431 must do it earlier where we know the signedness of the arg. */
3432 if (mode == BLKmode
3433 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3434 abort ();
3435
3436 /* On some machines, there's no way to pass a float to a library fcn.
3437 Pass it as a double instead. */
3438#ifdef LIBGCC_NEEDS_DOUBLE
3439 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 3440 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
3441#endif
3442
3443 /* There's no need to call protect_from_queue, because
3444 either emit_move_insn or emit_push_insn will do that. */
3445
3446 /* Make sure it is a reasonable operand for a move or push insn. */
3447 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3448 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3449 val = force_operand (val, NULL_RTX);
3450
322e3e34
RK
3451#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3452 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 3453 {
a44492f0
RK
3454 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3455 be viewed as just an efficiency improvement. */
888aa7a9
RS
3456 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3457 emit_move_insn (slot, val);
3458 val = XEXP (slot, 0);
3459 mode = Pmode;
3460 }
322e3e34
RK
3461#endif
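 /* Illustration only, assuming a hypothetical target whose
    FUNCTION_ARG_PASS_BY_REFERENCE accepts a DImode libcall operand: the
    value is spilled to a fresh stack temporary here and the Pmode address
    of that slot is what is pushed or loaded into a register below, in
    place of the value itself.  */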
3462
888aa7a9
RS
3463 argvec[count].value = val;
3464 argvec[count].mode = mode;
3465
322e3e34 3466 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
bf44874e 3467
322e3e34
RK
3468#ifdef FUNCTION_ARG_PARTIAL_NREGS
3469 argvec[count].partial
3470 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3471#else
3472 argvec[count].partial = 0;
3473#endif
3474
3475 locate_and_pad_parm (mode, NULL_TREE,
3476 argvec[count].reg && argvec[count].partial == 0,
3477 NULL_TREE, &args_size, &argvec[count].offset,
4fc026cd 3478 &argvec[count].size, &alignment_pad);
322e3e34
RK
3479
3480 if (argvec[count].size.var)
3481 abort ();
3482
e5e809f4 3483 if (reg_parm_stack_space == 0 && argvec[count].partial)
322e3e34 3484 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
322e3e34
RK
3485
3486 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 3487 || reg_parm_stack_space > 0)
322e3e34
RK
3488 args_size.constant += argvec[count].size.constant;
3489
0f41302f 3490 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
3491 }
3492 va_end (p);
3493
f046b3cc
JL
3494#ifdef FINAL_REG_PARM_STACK_SPACE
3495 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3496 args_size.var);
3497#endif
322e3e34
RK
3498 /* If this machine requires an external definition for library
3499 functions, write one out. */
3500 assemble_external_libcall (fun);
3501
3502 original_args_size = args_size;
c795bca9 3503#ifdef PREFERRED_STACK_BOUNDARY
774e6b37
JH
3504 args_size.constant = (((args_size.constant
3505 + arg_space_so_far
3506 + pending_stack_adjust
3507 + STACK_BYTES - 1)
3508 / STACK_BYTES
3509 * STACK_BYTES)
3510 - arg_space_so_far
3511 - pending_stack_adjust);
322e3e34
RK
3512#endif
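 /* A worked example of the rounding above, with illustrative numbers: if
    STACK_BYTES == 16, arg_space_so_far == 8, there is no pending stack
    adjustment and the unrounded size is 20, then 8 + 20 = 28 is rounded up
    to 32 and the 8 bytes already in use are subtracted, leaving
    args_size.constant == 24 so the total outgoing space stays 16-byte
    aligned.  */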
3513
322e3e34 3514 args_size.constant = MAX (args_size.constant,
f046b3cc 3515 reg_parm_stack_space);
e5e809f4 3516
322e3e34 3517#ifndef OUTGOING_REG_PARM_STACK_SPACE
fc990856 3518 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
3519#endif
3520
322e3e34
RK
3521 if (args_size.constant > current_function_outgoing_args_size)
3522 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
3523
3524#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
3525 /* Since the stack pointer will never be pushed, it is possible for
3526 the evaluation of a parm to clobber something we have already
3527 written to the stack. Since most function calls on RISC machines
3528 do not use the stack, this is uncommon, but must work correctly.
3529
3530 Therefore, we save any area of the stack that was already written
3531 and that we are using. Here we set up to do this by making a new
3532 stack usage map from the old one.
3533
3534 Another approach might be to try to reorder the argument
3535 evaluations to avoid this conflicting stack usage. */
3536
3537 needed = args_size.constant;
e5e809f4
JL
3538
3539#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc
JL
3540 /* Since we will be writing into the entire argument area, the
3541 map must be allocated for its entire size, not just the part that
3542 is the responsibility of the caller. */
3543 needed += reg_parm_stack_space;
3544#endif
3545
3546#ifdef ARGS_GROW_DOWNWARD
3547 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3548 needed + 1);
3549#else
3550 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3551 needed);
322e3e34 3552#endif
f046b3cc
JL
3553 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3554
3555 if (initial_highest_arg_in_use)
3556 bcopy (initial_stack_usage_map, stack_usage_map,
3557 initial_highest_arg_in_use);
3558
3559 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3560 bzero (&stack_usage_map[initial_highest_arg_in_use],
3561 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3562 needed = 0;
322e3e34 3563
f046b3cc
JL
3564 /* The address of the outgoing argument list must not be copied to a
3565 register here, because argblock would be left pointing to the
3566 wrong place after the call to allocate_dynamic_stack_space below.
3567 */
3568
3569 argblock = virtual_outgoing_args_rtx;
3570#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
3571#ifndef PUSH_ROUNDING
3572 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3573#endif
f046b3cc 3574#endif
322e3e34
RK
3575
3576#ifdef PUSH_ARGS_REVERSED
c795bca9 3577#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
3578 /* If we push args individually in reverse order, perform stack alignment
3579 before the first push (the last arg). */
3580 if (argblock == 0)
774e6b37
JH
3581 {
3582 anti_adjust_stack (GEN_INT (args_size.constant
3583 - original_args_size.constant));
3584 arg_space_so_far += args_size.constant - original_args_size.constant;
3585 }
322e3e34
RK
3586#endif
3587#endif
3588
3589#ifdef PUSH_ARGS_REVERSED
3590 inc = -1;
3591 argnum = nargs - 1;
3592#else
3593 inc = 1;
3594 argnum = 0;
3595#endif
3596
f046b3cc
JL
3597#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3598 /* The argument list is the property of the called routine and it
3599 may clobber it. If the fixed area has been used for previous
3600 parameters, we must save and restore it.
3601
 3602 Here we compute the boundary of the area that needs to be saved, if any. */
3603
3604#ifdef ARGS_GROW_DOWNWARD
3605 for (count = 0; count < reg_parm_stack_space + 1; count++)
3606#else
3607 for (count = 0; count < reg_parm_stack_space; count++)
3608#endif
3609 {
3610 if (count >= highest_outgoing_arg_in_use
3611 || stack_usage_map[count] == 0)
3612 continue;
3613
3614 if (low_to_save == -1)
3615 low_to_save = count;
3616
3617 high_to_save = count;
3618 }
3619
3620 if (low_to_save >= 0)
3621 {
3622 int num_to_save = high_to_save - low_to_save + 1;
3623 enum machine_mode save_mode
3624 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3625 rtx stack_area;
3626
3627 /* If we don't have the required alignment, must do this in BLKmode. */
3628 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3629 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3630 save_mode = BLKmode;
3631
ceb83206 3632#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
3633 stack_area = gen_rtx_MEM (save_mode,
3634 memory_address (save_mode,
38a448ca 3635 plus_constant (argblock,
ceb83206 3636 - high_to_save)));
f046b3cc 3637#else
ceb83206
JL
3638 stack_area = gen_rtx_MEM (save_mode,
3639 memory_address (save_mode,
38a448ca 3640 plus_constant (argblock,
ceb83206 3641 low_to_save)));
f046b3cc 3642#endif
f046b3cc
JL
3643 if (save_mode == BLKmode)
3644 {
3645 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
f046b3cc
JL
3646 emit_block_move (validize_mem (save_area), stack_area,
3647 GEN_INT (num_to_save),
3648 PARM_BOUNDARY / BITS_PER_UNIT);
3649 }
3650 else
3651 {
3652 save_area = gen_reg_rtx (save_mode);
3653 emit_move_insn (save_area, stack_area);
3654 }
3655 }
3656#endif
3657
322e3e34
RK
3658 /* Push the args that need to be pushed. */
3659
5e26979c
JL
3660 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3661 are to be pushed. */
322e3e34
RK
3662 for (count = 0; count < nargs; count++, argnum += inc)
3663 {
3664 register enum machine_mode mode = argvec[argnum].mode;
3665 register rtx val = argvec[argnum].value;
3666 rtx reg = argvec[argnum].reg;
3667 int partial = argvec[argnum].partial;
69d4ca36 3668#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 3669 int lower_bound, upper_bound, i;
69d4ca36 3670#endif
322e3e34
RK
3671
3672 if (! (reg != 0 && partial == 0))
f046b3cc
JL
3673 {
3674#ifdef ACCUMULATE_OUTGOING_ARGS
3675 /* If this is being stored into a pre-allocated, fixed-size, stack
3676 area, save any previous data at that location. */
3677
3678#ifdef ARGS_GROW_DOWNWARD
 3679 /* The argument's stack offset is negative, but we want to index
 3680 stack_usage_map with positive values. */
5e26979c
JL
3681 upper_bound = -argvec[argnum].offset.constant + 1;
3682 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 3683#else
5e26979c
JL
3684 lower_bound = argvec[argnum].offset.constant;
3685 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
3686#endif
3687
3688 for (i = lower_bound; i < upper_bound; i++)
3689 if (stack_usage_map[i]
f046b3cc
JL
3690 /* Don't store things in the fixed argument area at this point;
3691 it has already been saved. */
e5e809f4 3692 && i > reg_parm_stack_space)
f046b3cc
JL
3693 break;
3694
3695 if (i != upper_bound)
3696 {
e5e809f4 3697 /* We need to make a save area. See what mode we can make it. */
f046b3cc 3698 enum machine_mode save_mode
5e26979c 3699 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
3700 MODE_INT, 1);
3701 rtx stack_area
c5c76735
JL
3702 = gen_rtx_MEM
3703 (save_mode,
3704 memory_address
3705 (save_mode,
3706 plus_constant (argblock,
3707 argvec[argnum].offset.constant)));
5e26979c 3708 argvec[argnum].save_area = gen_reg_rtx (save_mode);
c5c76735 3709
5e26979c 3710 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
3711 }
3712#endif
3713 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
e5e809f4 3714 argblock, GEN_INT (argvec[argnum].offset.constant),
4fc026cd 3715 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
774e6b37 3716 arg_space_so_far += argvec[argnum].size.constant;
f046b3cc
JL
3717
3718#ifdef ACCUMULATE_OUTGOING_ARGS
3719 /* Now mark the segment we just used. */
3720 for (i = lower_bound; i < upper_bound; i++)
3721 stack_usage_map[i] = 1;
3722#endif
3723
3724 NO_DEFER_POP;
3725 }
322e3e34
RK
3726 }
3727
3728#ifndef PUSH_ARGS_REVERSED
c795bca9 3729#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
3730 /* If we pushed args in forward order, perform stack alignment
3731 after pushing the last arg. */
3732 if (argblock == 0)
774e6b37
JH
3733 {
3734 anti_adjust_stack (GEN_INT (args_size.constant
3735 - original_args_size.constant));
 3736 arg_space_so_far += args_size.constant - original_args_size.constant;
3737 }
322e3e34
RK
3738#endif
3739#endif
3740
3741#ifdef PUSH_ARGS_REVERSED
3742 argnum = nargs - 1;
3743#else
3744 argnum = 0;
3745#endif
3746
77cac2f2 3747 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 3748
322e3e34
RK
3749 /* Now load any reg parms into their regs. */
3750
5e26979c
JL
3751 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3752 are to be pushed. */
322e3e34
RK
3753 for (count = 0; count < nargs; count++, argnum += inc)
3754 {
322e3e34
RK
3755 register rtx val = argvec[argnum].value;
3756 rtx reg = argvec[argnum].reg;
3757 int partial = argvec[argnum].partial;
3758
bf44874e
JL
3759 /* Handle calls that pass values in multiple non-contiguous
3760 locations. The PA64 has examples of this for library calls. */
19e3f61a 3761 if (reg != 0 && GET_CODE (reg) == PARALLEL)
bf44874e
JL
3762 emit_group_load (reg, val,
3763 GET_MODE_SIZE (GET_MODE (val)),
3764 GET_MODE_ALIGNMENT (GET_MODE (val)));
3765 else if (reg != 0 && partial == 0)
322e3e34 3766 emit_move_insn (reg, val);
bf44874e 3767
322e3e34
RK
3768 NO_DEFER_POP;
3769 }
3770
3771#if 0
3772 /* For version 1.37, try deleting this entirely. */
3773 if (! no_queue)
3774 emit_queue ();
3775#endif
3776
3777 /* Any regs containing parms remain in use through the call. */
322e3e34 3778 for (count = 0; count < nargs; count++)
bf44874e 3779 {
19e3f61a
JM
3780 rtx reg = argvec[count].reg;
3781 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3782 use_group_regs (&call_fusage, reg);
3783 else if (reg != 0)
3784 use_reg (&call_fusage, reg);
bf44874e 3785 }
322e3e34 3786
fac0ad80
RS
3787 /* Pass the function the address in which to return a structure value. */
3788 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3789 {
3790 emit_move_insn (struct_value_rtx,
3791 force_reg (Pmode,
3792 force_operand (XEXP (mem_value, 0),
3793 NULL_RTX)));
3794 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 3795 use_reg (&call_fusage, struct_value_rtx);
fac0ad80
RS
3796 }
3797
322e3e34
RK
3798 /* Don't allow popping to be deferred, since then
3799 cse'ing of library calls could delete a call and leave the pop. */
3800 NO_DEFER_POP;
3801
3802 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3803 will set inhibit_defer_pop to that value. */
334c4f0f
RK
3804 /* See the comment in emit_library_call about the function type we build
3805 and pass here. */
322e3e34 3806
2c8da025
RK
3807 emit_call_1 (fun,
3808 get_identifier (XSTR (orgfun, 0)),
334c4f0f 3809 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
fb5eebb9
RH
3810 original_args_size.constant, args_size.constant,
3811 struct_value_size,
322e3e34 3812 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4d6a19ff 3813 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
12a22e76 3814 old_inhibit_defer_pop + 1, call_fusage, is_const, nothrow);
322e3e34
RK
3815
3816 /* Now restore inhibit_defer_pop to its actual original value. */
3817 OK_DEFER_POP;
3818
888aa7a9
RS
3819 pop_temp_slots ();
3820
774e6b37
JH
 3821 /* The stack pointer ought to have been restored to the value it had before the call. */
 3822 if (old_arg_space_so_far != arg_space_so_far)
 3823 abort ();
3824
322e3e34
RK
3825 /* Copy the value to the right place. */
3826 if (outmode != VOIDmode)
3827 {
3828 if (mem_value)
3829 {
3830 if (value == 0)
fac0ad80 3831 value = mem_value;
322e3e34
RK
3832 if (value != mem_value)
3833 emit_move_insn (value, mem_value);
3834 }
3835 else if (value != 0)
3836 emit_move_insn (value, hard_libcall_value (outmode));
fac0ad80
RS
3837 else
3838 value = hard_libcall_value (outmode);
322e3e34 3839 }
fac0ad80 3840
f046b3cc
JL
3841#ifdef ACCUMULATE_OUTGOING_ARGS
3842#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
3843 if (save_area)
3844 {
3845 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 3846#ifdef ARGS_GROW_DOWNWARD
e9a25f70 3847 rtx stack_area
38a448ca
RH
3848 = gen_rtx_MEM (save_mode,
3849 memory_address (save_mode,
ceb83206
JL
3850 plus_constant (argblock,
3851 - high_to_save)));
f046b3cc 3852#else
ceb83206
JL
3853 rtx stack_area
3854 = gen_rtx_MEM (save_mode,
3855 memory_address (save_mode,
3856 plus_constant (argblock, low_to_save)));
f046b3cc 3857#endif
e9a25f70
JL
3858 if (save_mode != BLKmode)
3859 emit_move_insn (stack_area, save_area);
3860 else
3861 emit_block_move (stack_area, validize_mem (save_area),
3862 GEN_INT (high_to_save - low_to_save + 1),
f046b3cc 3863 PARM_BOUNDARY / BITS_PER_UNIT);
e9a25f70 3864 }
f046b3cc
JL
3865#endif
3866
3867 /* If we saved any argument areas, restore them. */
3868 for (count = 0; count < nargs; count++)
3869 if (argvec[count].save_area)
3870 {
3871 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3872 rtx stack_area
38a448ca 3873 = gen_rtx_MEM (save_mode,
c5c76735
JL
3874 memory_address
3875 (save_mode,
3876 plus_constant (argblock,
3877 argvec[count].offset.constant)));
f046b3cc
JL
3878
3879 emit_move_insn (stack_area, argvec[count].save_area);
3880 }
3881
3882 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3883 stack_usage_map = initial_stack_usage_map;
3884#endif
3885
fac0ad80 3886 return value;
322e3e34
RK
3887}
3888\f
51bbfa0c
RS
3889#if 0
3890/* Return an rtx which represents a suitable home on the stack
3891 given TYPE, the type of the argument looking for a home.
3892 This is called only for BLKmode arguments.
3893
3894 SIZE is the size needed for this target.
3895 ARGS_ADDR is the address of the bottom of the argument block for this call.
3896 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3897 if this machine uses push insns. */
3898
3899static rtx
3900target_for_arg (type, size, args_addr, offset)
3901 tree type;
3902 rtx size;
3903 rtx args_addr;
3904 struct args_size offset;
3905{
3906 rtx target;
3907 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3908
3909 /* We do not call memory_address if possible,
3910 because we want to address as close to the stack
3911 as possible. For non-variable sized arguments,
3912 this will be stack-pointer relative addressing. */
3913 if (GET_CODE (offset_rtx) == CONST_INT)
3914 target = plus_constant (args_addr, INTVAL (offset_rtx));
3915 else
3916 {
3917 /* I have no idea how to guarantee that this
3918 will work in the presence of register parameters. */
38a448ca 3919 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
51bbfa0c
RS
3920 target = memory_address (QImode, target);
3921 }
3922
38a448ca 3923 return gen_rtx_MEM (BLKmode, target);
51bbfa0c
RS
3924}
3925#endif
3926\f
3927/* Store a single argument for a function call
3928 into the register or memory area where it must be passed.
3929 *ARG describes the argument value and where to pass it.
3930
3931 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 3932 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
3933
3934 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3935 so must be careful about how the stack is used.
3936
 3937 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
 3938 argument stack. This is used when ACCUMULATE_OUTGOING_ARGS is defined
 3939 to indicate that we need not worry about saving and restoring the stack.
3940
3941 FNDECL is the declaration of the function we are calling. */
3942
3943static void
c84e2712 3944store_one_arg (arg, argblock, may_be_alloca, variable_size,
6f90e075 3945 reg_parm_stack_space)
51bbfa0c
RS
3946 struct arg_data *arg;
3947 rtx argblock;
3948 int may_be_alloca;
0f9b3ea6 3949 int variable_size ATTRIBUTE_UNUSED;
6f90e075 3950 int reg_parm_stack_space;
51bbfa0c
RS
3951{
3952 register tree pval = arg->tree_value;
3953 rtx reg = 0;
3954 int partial = 0;
3955 int used = 0;
69d4ca36 3956#ifdef ACCUMULATE_OUTGOING_ARGS
6a651371 3957 int i, lower_bound = 0, upper_bound = 0;
69d4ca36 3958#endif
51bbfa0c
RS
3959
3960 if (TREE_CODE (pval) == ERROR_MARK)
3961 return;
3962
cc79451b
RK
3963 /* Push a new temporary level for any temporaries we make for
3964 this argument. */
3965 push_temp_slots ();
3966
51bbfa0c
RS
3967#ifdef ACCUMULATE_OUTGOING_ARGS
3968 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3969 save any previous data at that location. */
3970 if (argblock && ! variable_size && arg->stack)
3971 {
3972#ifdef ARGS_GROW_DOWNWARD
0f41302f
MS
3973 /* stack_slot is negative, but we want to index stack_usage_map
3974 with positive values. */
51bbfa0c
RS
3975 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3976 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3977 else
50eb43ca 3978 upper_bound = 0;
51bbfa0c
RS
3979
3980 lower_bound = upper_bound - arg->size.constant;
3981#else
3982 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3983 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3984 else
3985 lower_bound = 0;
3986
3987 upper_bound = lower_bound + arg->size.constant;
3988#endif
3989
3990 for (i = lower_bound; i < upper_bound; i++)
3991 if (stack_usage_map[i]
51bbfa0c
RS
3992 /* Don't store things in the fixed argument area at this point;
3993 it has already been saved. */
e5e809f4 3994 && i > reg_parm_stack_space)
51bbfa0c
RS
3995 break;
3996
3997 if (i != upper_bound)
3998 {
3999 /* We need to make a save area. See what mode we can make it. */
4000 enum machine_mode save_mode
4001 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4002 rtx stack_area
38a448ca
RH
4003 = gen_rtx_MEM (save_mode,
4004 memory_address (save_mode,
4005 XEXP (arg->stack_slot, 0)));
51bbfa0c
RS
4006
4007 if (save_mode == BLKmode)
4008 {
4009 arg->save_area = assign_stack_temp (BLKmode,
6fa51029 4010 arg->size.constant, 0);
c6df88cb
MM
4011 MEM_SET_IN_STRUCT_P (arg->save_area,
4012 AGGREGATE_TYPE_P (TREE_TYPE
4013 (arg->tree_value)));
cc79451b 4014 preserve_temp_slots (arg->save_area);
51bbfa0c 4015 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 4016 GEN_INT (arg->size.constant),
51bbfa0c
RS
4017 PARM_BOUNDARY / BITS_PER_UNIT);
4018 }
4019 else
4020 {
4021 arg->save_area = gen_reg_rtx (save_mode);
4022 emit_move_insn (arg->save_area, stack_area);
4023 }
4024 }
4025 }
b564df06
JL
4026
4027 /* Now that we have saved any slots that will be overwritten by this
4028 store, mark all slots this store will use. We must do this before
4029 we actually expand the argument since the expansion itself may
4030 trigger library calls which might need to use the same stack slot. */
4031 if (argblock && ! variable_size && arg->stack)
4032 for (i = lower_bound; i < upper_bound; i++)
4033 stack_usage_map[i] = 1;
51bbfa0c
RS
4034#endif
4035
4036 /* If this isn't going to be placed on both the stack and in registers,
4037 set up the register and number of words. */
4038 if (! arg->pass_on_stack)
4039 reg = arg->reg, partial = arg->partial;
4040
4041 if (reg != 0 && partial == 0)
4042 /* Being passed entirely in a register. We shouldn't be called in
4043 this case. */
4044 abort ();
4045
4ab56118
RK
4046 /* If this arg needs special alignment, don't load the registers
4047 here. */
4048 if (arg->n_aligned_regs != 0)
4049 reg = 0;
4ab56118 4050
4ab56118 4051 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
4052 it directly into its stack slot. Otherwise, we can. */
4053 if (arg->value == 0)
d64f5a78
RS
4054 {
4055#ifdef ACCUMULATE_OUTGOING_ARGS
4056 /* stack_arg_under_construction is nonzero if a function argument is
4057 being evaluated directly into the outgoing argument list and
4058 expand_call must take special action to preserve the argument list
4059 if it is called recursively.
4060
4061 For scalar function arguments stack_usage_map is sufficient to
4062 determine which stack slots must be saved and restored. Scalar
4063 arguments in general have pass_on_stack == 0.
4064
4065 If this argument is initialized by a function which takes the
4066 address of the argument (a C++ constructor or a C function
4067 returning a BLKmode structure), then stack_usage_map is
4068 insufficient and expand_call must push the stack around the
4069 function call. Such arguments have pass_on_stack == 1.
4070
4071 Note that it is always safe to set stack_arg_under_construction,
4072 but this generates suboptimal code if set when not needed. */
4073
4074 if (arg->pass_on_stack)
4075 stack_arg_under_construction++;
4076#endif
3a08477a
RK
4077 arg->value = expand_expr (pval,
4078 (partial
4079 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4080 ? NULL_RTX : arg->stack,
e5d70561 4081 VOIDmode, 0);
1efe6448
RK
4082
4083 /* If we are promoting object (or for any other reason) the mode
4084 doesn't agree, convert the mode. */
4085
7373d92d
RK
4086 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4087 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4088 arg->value, arg->unsignedp);
1efe6448 4089
d64f5a78
RS
4090#ifdef ACCUMULATE_OUTGOING_ARGS
4091 if (arg->pass_on_stack)
4092 stack_arg_under_construction--;
4093#endif
4094 }
51bbfa0c
RS
4095
4096 /* Don't allow anything left on stack from computation
4097 of argument to alloca. */
4098 if (may_be_alloca)
4099 do_pending_stack_adjust ();
4100
4101 if (arg->value == arg->stack)
7815214e 4102 {
c5c76735 4103 /* If the value is already in the stack slot, we are done. */
7d384cc0 4104 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
7815214e 4105 {
7815214e 4106 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 4107 XEXP (arg->stack, 0), Pmode,
7d384cc0 4108 ARGS_SIZE_RTX (arg->size),
7815214e 4109 TYPE_MODE (sizetype),
956d6950
JL
4110 GEN_INT (MEMORY_USE_RW),
4111 TYPE_MODE (integer_type_node));
7815214e
RK
4112 }
4113 }
1efe6448 4114 else if (arg->mode != BLKmode)
51bbfa0c
RS
4115 {
4116 register int size;
4117
4118 /* Argument is a scalar, not entirely passed in registers.
4119 (If part is passed in registers, arg->partial says how much
4120 and emit_push_insn will take care of putting it there.)
4121
4122 Push it, and if its size is less than the
4123 amount of space allocated to it,
4124 also bump stack pointer by the additional space.
4125 Note that in C the default argument promotions
4126 will prevent such mismatches. */
4127
1efe6448 4128 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
4129 /* Compute how much space the push instruction will push.
4130 On many machines, pushing a byte will advance the stack
4131 pointer by a halfword. */
4132#ifdef PUSH_ROUNDING
4133 size = PUSH_ROUNDING (size);
4134#endif
4135 used = size;
4136
4137 /* Compute how much space the argument should get:
4138 round up to a multiple of the alignment for arguments. */
1efe6448 4139 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
4140 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4141 / (PARM_BOUNDARY / BITS_PER_UNIT))
4142 * (PARM_BOUNDARY / BITS_PER_UNIT));
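 /* Illustrative numbers only: with PARM_BOUNDARY == 64 (8 bytes), a 2-byte
    HImode argument, and FUNCTION_ARG_PADDING other than `none', size == 2
    but USED becomes 8, and emit_push_insn below is asked to bump the stack
    by the extra used - size == 6 bytes.  */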
4143
4144 /* This isn't already where we want it on the stack, so put it there.
4145 This can either be done with push or copy insns. */
e5e809f4
JL
4146 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
4147 partial, reg, used - size, argblock,
4fc026cd
CM
4148 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4149 ARGS_SIZE_RTX (arg->alignment_pad));
4150
c2732da3 4151 arg_space_so_far += used;
51bbfa0c
RS
4152 }
4153 else
4154 {
4155 /* BLKmode, at least partly to be pushed. */
4156
4157 register int excess;
4158 rtx size_rtx;
4159
4160 /* Pushing a nonscalar.
4161 If part is passed in registers, PARTIAL says how much
4162 and emit_push_insn will take care of putting it there. */
4163
4164 /* Round its size up to a multiple
4165 of the allocation unit for arguments. */
4166
4167 if (arg->size.var != 0)
4168 {
4169 excess = 0;
4170 size_rtx = ARGS_SIZE_RTX (arg->size);
4171 }
4172 else
4173 {
51bbfa0c
RS
4174 /* PUSH_ROUNDING has no effect on us, because
4175 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 4176 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 4177 + partial * UNITS_PER_WORD);
e4f93898 4178 size_rtx = expr_size (pval);
c2732da3 4179 arg_space_so_far += excess + INTVAL (size_rtx);
51bbfa0c
RS
4180 }
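 /* Illustration only: a 10-byte BLKmode argument whose stack slot was
    rounded up to 16 bytes, with none of it passed in registers, gives
    excess == 6; excess plus the 10-byte expr_size accounts for the full
    16 bytes added to arg_space_so_far above.  */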
4181
1efe6448 4182 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c 4183 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
e5e809f4 4184 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
4fc026cd
CM
4185 reg_parm_stack_space,
4186 ARGS_SIZE_RTX (arg->alignment_pad));
51bbfa0c
RS
4187 }
4188
4189
4190 /* Unless this is a partially-in-register argument, the argument is now
4191 in the stack.
4192
4193 ??? Note that this can change arg->value from arg->stack to
4194 arg->stack_slot and it matters when they are not the same.
4195 It isn't totally clear that this is correct in all cases. */
4196 if (partial == 0)
3b917a55 4197 arg->value = arg->stack_slot;
51bbfa0c
RS
4198
4199 /* Once we have pushed something, pops can't safely
4200 be deferred during the rest of the arguments. */
4201 NO_DEFER_POP;
4202
4203 /* ANSI doesn't require a sequence point here,
4204 but PCC has one, so this will avoid some problems. */
4205 emit_queue ();
4206
db907e7b
RK
4207 /* Free any temporary slots made in processing this argument. Show
4208 that we might have taken the address of something and pushed that
4209 as an operand. */
4210 preserve_temp_slots (NULL_RTX);
51bbfa0c 4211 free_temp_slots ();
cc79451b 4212 pop_temp_slots ();
51bbfa0c 4213}