gcc/calls.c
51bbfa0c 1/* Convert function calls to rtl insns, for GNU C compiler.
3c71940f
JL
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
51bbfa0c
RS
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA. */
51bbfa0c
RS
21
22#include "config.h"
670ee920
KG
23#include "system.h"
24#include "rtl.h"
25#include "tree.h"
26#include "flags.h"
27#include "expr.h"
49ad7cfa 28#include "function.h"
670ee920 29#include "regs.h"
51bbfa0c 30#include "insn-flags.h"
5f6da302 31#include "toplev.h"
d6f4ec51 32#include "output.h"
b1474bb7 33#include "tm_p.h"
51bbfa0c 34
c795bca9
BS
35#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
36#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
37#endif
38
51bbfa0c 39/* Decide whether a function's arguments should be processed
bbc8a071
RK
40 from first to last or from last to first.
41
42 They should if the stack and args grow in opposite directions, but
43 only if we have push insns. */
51bbfa0c 44
51bbfa0c 45#ifdef PUSH_ROUNDING
bbc8a071 46
40083ddf 47#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
51bbfa0c
RS
48#define PUSH_ARGS_REVERSED /* If it's last to first */
49#endif
bbc8a071 50
51bbfa0c
RS
51#endif
52
c795bca9
BS
53/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
54#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
51bbfa0c
RS
55
56/* Data structure and subroutines used within expand_call. */
57
58struct arg_data
59{
60 /* Tree node for this argument. */
61 tree tree_value;
1efe6448
RK
62 /* Mode for value; TYPE_MODE unless promoted. */
63 enum machine_mode mode;
51bbfa0c
RS
64 /* Current RTL value for argument, or 0 if it isn't precomputed. */
65 rtx value;
 66 /* Initially-computed RTL value for argument; only for const functions. */
67 rtx initial_value;
 68 /* Register to pass this argument in, 0 if passed on stack, or a
cacbd532 69 PARALLEL if the arg is to be copied into multiple non-contiguous
51bbfa0c
RS
70 registers. */
71 rtx reg;
84b55618
RK
72 /* If REG was promoted from the actual mode of the argument expression,
73 indicates whether the promotion is sign- or zero-extended. */
74 int unsignedp;
51bbfa0c
RS
75 /* Number of registers to use. 0 means put the whole arg in registers.
76 Also 0 if not passed in registers. */
77 int partial;
d64f5a78
RS
78 /* Non-zero if argument must be passed on stack.
79 Note that some arguments may be passed on the stack
80 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
81 pass_on_stack identifies arguments that *cannot* go in registers. */
51bbfa0c
RS
82 int pass_on_stack;
83 /* Offset of this argument from beginning of stack-args. */
84 struct args_size offset;
85 /* Similar, but offset to the start of the stack slot. Different from
86 OFFSET if this arg pads downward. */
87 struct args_size slot_offset;
 88 /* Size of this argument on the stack, rounded up for any padding it gets;
89 parts of the argument passed in registers do not count.
90 If REG_PARM_STACK_SPACE is defined, then register parms
91 are counted here as well. */
92 struct args_size size;
93 /* Location on the stack at which parameter should be stored. The store
94 has already been done if STACK == VALUE. */
95 rtx stack;
96 /* Location on the stack of the start of this argument slot. This can
97 differ from STACK if this arg pads downward. This location is known
98 to be aligned to FUNCTION_ARG_BOUNDARY. */
99 rtx stack_slot;
100#ifdef ACCUMULATE_OUTGOING_ARGS
101 /* Place that this stack area has been saved, if needed. */
102 rtx save_area;
103#endif
4ab56118
RK
104 /* If an argument's alignment does not permit direct copying into registers,
105 copy in smaller-sized pieces into pseudos. These are stored in a
106 block pointed to by this field. The next field says how many
107 word-sized pseudos we made. */
108 rtx *aligned_regs;
109 int n_aligned_regs;
4fc026cd
CM
110 /* The amount that the stack pointer needs to be adjusted to
111 force alignment for the next argument. */
112 struct args_size alignment_pad;
51bbfa0c
RS
113};
114
115#ifdef ACCUMULATE_OUTGOING_ARGS
b94301c2 116/* A vector of one char per byte of stack space. A byte is non-zero if
51bbfa0c
RS
117 the corresponding stack location has been used.
118 This vector is used to prevent a function call within an argument from
119 clobbering any stack already set up. */
120static char *stack_usage_map;
121
122/* Size of STACK_USAGE_MAP. */
123static int highest_outgoing_arg_in_use;
2f4aa534
RS
124
125/* stack_arg_under_construction is nonzero when an argument may be
126 initialized with a constructor call (including a C function that
127 returns a BLKmode struct) and expand_call must take special action
128 to make sure the object being constructed does not overlap the
129 argument list for the constructor call. */
130int stack_arg_under_construction;
51bbfa0c
RS
131#endif
132
3d994c6b
KG
133static int calls_function PARAMS ((tree, int));
134static int calls_function_1 PARAMS ((tree, int));
135static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
136 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
12a22e76 137 rtx, int, rtx, int, int));
3d994c6b
KG
138static void precompute_register_parameters PARAMS ((int,
139 struct arg_data *,
140 int *));
141static void store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
142 int));
143static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
144 int));
145static int finalize_must_preallocate PARAMS ((int, int,
146 struct arg_data *,
147 struct args_size *));
148static void precompute_arguments PARAMS ((int, int, int,
149 struct arg_data *,
150 struct args_size *));
151static int compute_argument_block_size PARAMS ((int,
c2f8b491
JH
152 struct args_size *,
153 int));
3d994c6b
KG
154static void initialize_argument_information PARAMS ((int,
155 struct arg_data *,
156 struct args_size *,
157 int, tree, tree,
158 CUMULATIVE_ARGS *,
159 int, rtx *, int *,
160 int *, int *));
161static void compute_argument_addresses PARAMS ((struct arg_data *,
162 rtx, int));
163static rtx rtx_for_function_call PARAMS ((tree, tree));
164static void load_register_parameters PARAMS ((struct arg_data *,
165 int, rtx *));
12a22e76 166static int libfunc_nothrow PARAMS ((rtx));
21a3b983 167
20efdf74 168#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3d994c6b
KG
169static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
170static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
20efdf74 171#endif
51bbfa0c 172\f
1ce0cb53
JW
173/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
174 `alloca'.
175
176 If WHICH is 0, return 1 if EXP contains a call to any function.
 177 Actually, we only need to return 1 if evaluating EXP would require pushing
178 arguments on the stack, but that is too difficult to compute, so we just
179 assume any function call might require the stack. */
51bbfa0c 180
1c8d7aef
RS
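/* Chain of SAVE_EXPRs already examined by calls_function_1, so that a
   shared SAVE_EXPR is walked only once per top-level calls_function call.
   Reset by calls_function before and after each use.  */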
181static tree calls_function_save_exprs;
182
51bbfa0c 183static int
1ce0cb53 184calls_function (exp, which)
51bbfa0c 185 tree exp;
1ce0cb53 186 int which;
1c8d7aef
RS
187{
188 int val;
189 calls_function_save_exprs = 0;
190 val = calls_function_1 (exp, which);
191 calls_function_save_exprs = 0;
192 return val;
193}
194
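/* Recursive worker for calls_function.  EXP and WHICH are as for
   calls_function; SAVE_EXPRs already visited are recorded in
   calls_function_save_exprs so they are not walked twice.  */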
195static int
196calls_function_1 (exp, which)
197 tree exp;
198 int which;
51bbfa0c
RS
199{
200 register int i;
0207efa2
RK
201 enum tree_code code = TREE_CODE (exp);
202 int type = TREE_CODE_CLASS (code);
203 int length = tree_code_length[(int) code];
51bbfa0c 204
ddd5a7c1 205 /* If this code is language-specific, we don't know what it will do. */
0207efa2
RK
206 if ((int) code >= NUM_TREE_CODES)
207 return 1;
51bbfa0c 208
0207efa2 209 /* Only expressions and references can contain calls. */
3b59a331
RS
210 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
211 && type != 'b')
51bbfa0c
RS
212 return 0;
213
0207efa2 214 switch (code)
51bbfa0c
RS
215 {
216 case CALL_EXPR:
1ce0cb53
JW
217 if (which == 0)
218 return 1;
219 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
220 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
0207efa2
RK
221 == FUNCTION_DECL))
222 {
223 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
224
225 if ((DECL_BUILT_IN (fndecl)
95815af9 226 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
0207efa2
RK
227 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
228 || (DECL_SAVED_INSNS (fndecl)
49ad7cfa 229 && DECL_SAVED_INSNS (fndecl)->calls_alloca))
0207efa2
RK
230 return 1;
231 }
51bbfa0c
RS
232
233 /* Third operand is RTL. */
234 length = 2;
235 break;
236
237 case SAVE_EXPR:
238 if (SAVE_EXPR_RTL (exp) != 0)
239 return 0;
1c8d7aef
RS
240 if (value_member (exp, calls_function_save_exprs))
241 return 0;
242 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
243 calls_function_save_exprs);
244 return (TREE_OPERAND (exp, 0) != 0
245 && calls_function_1 (TREE_OPERAND (exp, 0), which));
51bbfa0c
RS
246
247 case BLOCK:
ef03bc85
CH
248 {
249 register tree local;
250
251 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
1ce0cb53 252 if (DECL_INITIAL (local) != 0
1c8d7aef 253 && calls_function_1 (DECL_INITIAL (local), which))
ef03bc85
CH
254 return 1;
255 }
256 {
257 register tree subblock;
258
259 for (subblock = BLOCK_SUBBLOCKS (exp);
260 subblock;
261 subblock = TREE_CHAIN (subblock))
1c8d7aef 262 if (calls_function_1 (subblock, which))
ef03bc85
CH
263 return 1;
264 }
265 return 0;
51bbfa0c
RS
266
267 case METHOD_CALL_EXPR:
268 length = 3;
269 break;
270
271 case WITH_CLEANUP_EXPR:
272 length = 1;
273 break;
274
275 case RTL_EXPR:
276 return 0;
e9a25f70
JL
277
278 default:
279 break;
51bbfa0c
RS
280 }
281
282 for (i = 0; i < length; i++)
283 if (TREE_OPERAND (exp, i) != 0
1c8d7aef 284 && calls_function_1 (TREE_OPERAND (exp, i), which))
51bbfa0c
RS
285 return 1;
286
287 return 0;
288}
289\f
290/* Force FUNEXP into a form suitable for the address of a CALL,
291 and return that as an rtx. Also load the static chain register
292 if FNDECL is a nested function.
293
77cac2f2
RK
294 CALL_FUSAGE points to a variable holding the prospective
295 CALL_INSN_FUNCTION_USAGE information. */
51bbfa0c 296
03dacb02 297rtx
77cac2f2 298prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
51bbfa0c
RS
299 rtx funexp;
300 tree fndecl;
77cac2f2 301 rtx *call_fusage;
01368078 302 int reg_parm_seen;
51bbfa0c
RS
303{
304 rtx static_chain_value = 0;
305
306 funexp = protect_from_queue (funexp, 0);
307
308 if (fndecl != 0)
0f41302f 309 /* Get possible static chain value for nested function in C. */
51bbfa0c
RS
310 static_chain_value = lookup_static_chain (fndecl);
311
 312 /* Make a valid memory address and copy constants through pseudo-regs,
313 but not for a constant address if -fno-function-cse. */
314 if (GET_CODE (funexp) != SYMBOL_REF)
01368078 315 /* If we are using registers for parameters, force the
e9a25f70
JL
316 function address into a register now. */
317 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
318 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
319 : memory_address (FUNCTION_MODE, funexp));
51bbfa0c
RS
320 else
321 {
322#ifndef NO_FUNCTION_CSE
323 if (optimize && ! flag_no_function_cse)
324#ifdef NO_RECURSIVE_FUNCTION_CSE
325 if (fndecl != current_function_decl)
326#endif
327 funexp = force_reg (Pmode, funexp);
328#endif
329 }
330
331 if (static_chain_value != 0)
332 {
333 emit_move_insn (static_chain_rtx, static_chain_value);
334
f991a240
RK
335 if (GET_CODE (static_chain_rtx) == REG)
336 use_reg (call_fusage, static_chain_rtx);
51bbfa0c
RS
337 }
338
339 return funexp;
340}
341
342/* Generate instructions to call function FUNEXP,
343 and optionally pop the results.
344 The CALL_INSN is the first insn generated.
345
607ea900 346 FNDECL is the declaration node of the function. This is given to the
2c8da025
RK
347 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
348
334c4f0f
RK
349 FUNTYPE is the data type of the function. This is given to the macro
350 RETURN_POPS_ARGS to determine whether this function pops its own args.
351 We used to allow an identifier for library functions, but that doesn't
352 work when the return type is an aggregate type and the calling convention
353 says that the pointer to this aggregate is to be popped by the callee.
51bbfa0c
RS
354
 355 STACK_SIZE is the number of bytes of arguments on the stack;
c2732da3
JM
356 ROUNDED_STACK_SIZE is that number rounded up to
357 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
358 both to put into the call insn and to generate explicit popping
359 code if necessary.
51bbfa0c
RS
360
361 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
362 It is zero if this call doesn't want a structure value.
363
364 NEXT_ARG_REG is the rtx that results from executing
365 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
366 just after all the args have had their registers assigned.
367 This could be whatever you like, but normally it is the first
368 arg-register beyond those used for args in this call,
369 or 0 if all the arg-registers are used in this call.
370 It is passed on to `gen_call' so you can put this info in the call insn.
371
372 VALREG is a hard register in which a value is returned,
373 or 0 if the call does not return a value.
374
375 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
376 the args to this call were processed.
377 We restore `inhibit_defer_pop' to that value.
378
94b25f81
RK
379 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
380 denote registers used by the called function.
51bbfa0c
RS
381
 382 IS_CONST is true if this is a `const' call; NOTHROW if the call cannot throw. */
383
322e3e34 384static void
fb5eebb9
RH
385emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
386 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
12a22e76 387 call_fusage, is_const, nothrow)
51bbfa0c 388 rtx funexp;
c84e2712
KG
389 tree fndecl ATTRIBUTE_UNUSED;
390 tree funtype ATTRIBUTE_UNUSED;
6a651371 391 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
fb5eebb9 392 HOST_WIDE_INT rounded_stack_size;
962f1324 393 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
51bbfa0c
RS
394 rtx next_arg_reg;
395 rtx valreg;
396 int old_inhibit_defer_pop;
77cac2f2 397 rtx call_fusage;
12a22e76 398 int is_const, nothrow;
51bbfa0c 399{
062e7fd8 400 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
57bed152 401#if defined (HAVE_call) && defined (HAVE_call_value)
e5d70561 402 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
57bed152 403#endif
51bbfa0c 404 rtx call_insn;
081f5e7e 405#ifndef ACCUMULATE_OUTGOING_ARGS
51bbfa0c 406 int already_popped = 0;
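  /* Number of argument bytes that the callee itself pops on return,
     according to the target's RETURN_POPS_ARGS convention.  */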
fb5eebb9 407 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
081f5e7e 408#endif
51bbfa0c
RS
409
410 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
411 and we don't want to load it into a register as an optimization,
412 because prepare_call_address already did it if it should be done. */
413 if (GET_CODE (funexp) != SYMBOL_REF)
414 funexp = memory_address (FUNCTION_MODE, funexp);
415
416#ifndef ACCUMULATE_OUTGOING_ARGS
417#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
8bcafee3
JDA
418/* If the target has "call" or "call_value" insns, then prefer them
419 if no arguments are actually popped. If the target does not have
420 "call" or "call_value" insns, then we must use the popping versions
421 even if the call has no arguments to pop. */
422#if defined (HAVE_call) && defined (HAVE_call_value)
423 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
424 && n_popped > 0)
425#else
426 if (HAVE_call_pop && HAVE_call_value_pop)
427#endif
51bbfa0c 428 {
fb5eebb9 429 rtx n_pop = GEN_INT (n_popped);
51bbfa0c
RS
430 rtx pat;
431
432 /* If this subroutine pops its own args, record that in the call insn
433 if possible, for the sake of frame pointer elimination. */
2c8da025 434
51bbfa0c
RS
435 if (valreg)
436 pat = gen_call_value_pop (valreg,
38a448ca 437 gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 438 rounded_stack_size_rtx, next_arg_reg, n_pop);
51bbfa0c 439 else
38a448ca 440 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 441 rounded_stack_size_rtx, next_arg_reg, n_pop);
51bbfa0c
RS
442
443 emit_call_insn (pat);
444 already_popped = 1;
445 }
446 else
447#endif
448#endif
449
450#if defined (HAVE_call) && defined (HAVE_call_value)
451 if (HAVE_call && HAVE_call_value)
452 {
453 if (valreg)
454 emit_call_insn (gen_call_value (valreg,
38a448ca 455 gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 456 rounded_stack_size_rtx, next_arg_reg,
e992302c 457 NULL_RTX));
51bbfa0c 458 else
38a448ca 459 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 460 rounded_stack_size_rtx, next_arg_reg,
51bbfa0c
RS
461 struct_value_size_rtx));
462 }
463 else
464#endif
465 abort ();
466
77cac2f2 467 /* Find the CALL insn we just emitted. */
51bbfa0c
RS
468 for (call_insn = get_last_insn ();
469 call_insn && GET_CODE (call_insn) != CALL_INSN;
470 call_insn = PREV_INSN (call_insn))
471 ;
472
473 if (! call_insn)
474 abort ();
475
e59e60a7
RK
476 /* Put the register usage information on the CALL. If there is already
477 some usage information, put ours at the end. */
478 if (CALL_INSN_FUNCTION_USAGE (call_insn))
479 {
480 rtx link;
481
482 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
483 link = XEXP (link, 1))
484 ;
485
486 XEXP (link, 1) = call_fusage;
487 }
488 else
489 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
51bbfa0c
RS
490
491 /* If this is a const call, then set the insn's unchanging bit. */
492 if (is_const)
493 CONST_CALL_P (call_insn) = 1;
494
12a22e76
JM
495 /* If this call can't throw, attach a REG_EH_REGION reg note to that
496 effect. */
497 if (nothrow)
54cea123 498 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
12a22e76
JM
499 REG_NOTES (call_insn));
500
b1e64e0d
RS
501 /* Restore this now, so that we do defer pops for this call's args
502 if the context of the call as a whole permits. */
503 inhibit_defer_pop = old_inhibit_defer_pop;
504
51bbfa0c
RS
505#ifndef ACCUMULATE_OUTGOING_ARGS
506 /* If returning from the subroutine does not automatically pop the args,
507 we need an instruction to pop them sooner or later.
508 Perhaps do it now; perhaps just record how much space to pop later.
509
510 If returning from the subroutine does pop the args, indicate that the
511 stack pointer will be changed. */
512
c2732da3
JM
513 /* The space for the args is no longer waiting for the call; either it
514 was popped by the call, or it'll be popped below. */
515 arg_space_so_far -= rounded_stack_size;
516
fb5eebb9 517 if (n_popped > 0)
51bbfa0c
RS
518 {
519 if (!already_popped)
e3da301d 520 CALL_INSN_FUNCTION_USAGE (call_insn)
38a448ca
RH
521 = gen_rtx_EXPR_LIST (VOIDmode,
522 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
523 CALL_INSN_FUNCTION_USAGE (call_insn));
fb5eebb9 524 rounded_stack_size -= n_popped;
062e7fd8 525 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
51bbfa0c
RS
526 }
527
fb5eebb9 528 if (rounded_stack_size != 0)
51bbfa0c 529 {
70a73141 530 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
fb5eebb9 531 pending_stack_adjust += rounded_stack_size;
51bbfa0c 532 else
062e7fd8 533 adjust_stack (rounded_stack_size_rtx);
51bbfa0c
RS
534 }
535#endif
536}
537
20efdf74
JL
538/* Determine if the function identified by NAME and FNDECL is one with
539 special properties we wish to know about.
540
541 For example, if the function might return more than one time (setjmp), then
542 set RETURNS_TWICE to a nonzero value.
543
 544 Similarly, set IS_LONGJMP if the function is in the longjmp family.
545
546 Set IS_MALLOC for any of the standard memory allocation functions which
547 allocate from the heap.
548
549 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
550 space from the stack such as alloca. */
551
3a8c995b 552void
fa76d9e0 553special_function_p (fndecl, returns_twice, is_longjmp, fork_or_exec,
20efdf74 554 is_malloc, may_be_alloca)
20efdf74
JL
555 tree fndecl;
556 int *returns_twice;
557 int *is_longjmp;
fa76d9e0 558 int *fork_or_exec;
20efdf74
JL
559 int *is_malloc;
560 int *may_be_alloca;
561{
562 *returns_twice = 0;
563 *is_longjmp = 0;
fa76d9e0 564 *fork_or_exec = 0;
20efdf74
JL
565 *may_be_alloca = 0;
566
140592a0
AG
567 /* The function decl may have the `malloc' attribute. */
568 *is_malloc = fndecl && DECL_IS_MALLOC (fndecl);
569
3a8c995b
MM
570 if (! *is_malloc
571 && fndecl && DECL_NAME (fndecl)
140592a0 572 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
20efdf74
JL
573 /* Exclude functions not at the file scope, or not `extern',
574 since they are not the magic functions we would otherwise
575 think they are. */
576 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
577 {
3a8c995b 578 char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
20efdf74
JL
579 char *tname = name;
580
ca54603f
JL
581 /* We assume that alloca will always be called by name. It
582 makes no sense to pass it as a pointer-to-function to
583 anything that does not understand its behavior. */
584 *may_be_alloca
585 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
586 && name[0] == 'a'
587 && ! strcmp (name, "alloca"))
588 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
589 && name[0] == '_'
590 && ! strcmp (name, "__builtin_alloca"))));
591
20efdf74
JL
592 /* Disregard prefix _, __ or __x. */
593 if (name[0] == '_')
594 {
595 if (name[1] == '_' && name[2] == 'x')
596 tname += 3;
597 else if (name[1] == '_')
598 tname += 2;
599 else
600 tname += 1;
601 }
602
603 if (tname[0] == 's')
604 {
605 *returns_twice
606 = ((tname[1] == 'e'
607 && (! strcmp (tname, "setjmp")
608 || ! strcmp (tname, "setjmp_syscall")))
609 || (tname[1] == 'i'
610 && ! strcmp (tname, "sigsetjmp"))
611 || (tname[1] == 'a'
612 && ! strcmp (tname, "savectx")));
613 if (tname[1] == 'i'
614 && ! strcmp (tname, "siglongjmp"))
615 *is_longjmp = 1;
616 }
617 else if ((tname[0] == 'q' && tname[1] == 's'
618 && ! strcmp (tname, "qsetjmp"))
619 || (tname[0] == 'v' && tname[1] == 'f'
620 && ! strcmp (tname, "vfork")))
621 *returns_twice = 1;
622
623 else if (tname[0] == 'l' && tname[1] == 'o'
624 && ! strcmp (tname, "longjmp"))
625 *is_longjmp = 1;
fa76d9e0
JR
626
627 else if ((tname[0] == 'f' && tname[1] == 'o'
628 && ! strcmp (tname, "fork"))
 629 /* Linux specific: __clone. Check NAME to insist on the
630 leading underscores, to avoid polluting the ISO / POSIX
631 namespace. */
632 || (name[0] == '_' && name[1] == '_'
633 && ! strcmp (tname, "clone"))
634 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
635 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
636 && (tname[5] == '\0'
637 || ((tname[5] == 'p' || tname[5] == 'e')
638 && tname[6] == '\0'))))
639 *fork_or_exec = 1;
640
140592a0 641 /* Do not add any more malloc-like functions to this list;
82514696
KG
642 instead mark them as malloc functions using the malloc attribute.
643 Note, realloc is not suitable for attribute malloc since
1e5a1107
JM
644 it may return the same address across multiple calls.
645 C++ operator new is not suitable because it is not required
646 to return a unique pointer; indeed, the standard placement new
647 just returns its argument. */
91d024d5
ML
648 else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
649 && (! strcmp (tname, "malloc")
650 || ! strcmp (tname, "calloc")
651 || ! strcmp (tname, "strdup")))
20efdf74
JL
652 *is_malloc = 1;
653 }
654}
655
656/* Precompute all register parameters as described by ARGS, storing values
657 into fields within the ARGS array.
658
 659 NUM_ACTUALS indicates the total number of elements in the ARGS array.
660
661 Set REG_PARM_SEEN if we encounter a register parameter. */
662
663static void
664precompute_register_parameters (num_actuals, args, reg_parm_seen)
665 int num_actuals;
666 struct arg_data *args;
667 int *reg_parm_seen;
668{
669 int i;
670
671 *reg_parm_seen = 0;
672
673 for (i = 0; i < num_actuals; i++)
674 if (args[i].reg != 0 && ! args[i].pass_on_stack)
675 {
676 *reg_parm_seen = 1;
677
678 if (args[i].value == 0)
679 {
680 push_temp_slots ();
681 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
682 VOIDmode, 0);
683 preserve_temp_slots (args[i].value);
684 pop_temp_slots ();
685
686 /* ANSI doesn't require a sequence point here,
687 but PCC has one, so this will avoid some problems. */
688 emit_queue ();
689 }
690
691 /* If we are to promote the function arg to a wider mode,
692 do it now. */
693
694 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
695 args[i].value
696 = convert_modes (args[i].mode,
697 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
698 args[i].value, args[i].unsignedp);
699
700 /* If the value is expensive, and we are inside an appropriately
701 short loop, put the value into a pseudo and then put the pseudo
702 into the hard reg.
703
704 For small register classes, also do this if this call uses
705 register parameters. This is to avoid reload conflicts while
 706 loading the parameter registers. */
707
708 if ((! (GET_CODE (args[i].value) == REG
709 || (GET_CODE (args[i].value) == SUBREG
710 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
711 && args[i].mode != BLKmode
712 && rtx_cost (args[i].value, SET) > 2
713 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
714 || preserve_subexpressions_p ()))
715 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
716 }
717}
718
719#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
720
 721 /* The argument list is the property of the called routine, which
 722 may clobber it. If the fixed area has been used for previous
 723 parameters, we must save and restore it. */
724static rtx
725save_fixed_argument_area (reg_parm_stack_space, argblock,
726 low_to_save, high_to_save)
727 int reg_parm_stack_space;
728 rtx argblock;
729 int *low_to_save;
730 int *high_to_save;
731{
732 int i;
733 rtx save_area = NULL_RTX;
734
 735 /* Compute the boundary of the area that needs to be saved, if any. */
736#ifdef ARGS_GROW_DOWNWARD
737 for (i = 0; i < reg_parm_stack_space + 1; i++)
738#else
739 for (i = 0; i < reg_parm_stack_space; i++)
740#endif
741 {
742 if (i >= highest_outgoing_arg_in_use
743 || stack_usage_map[i] == 0)
744 continue;
745
746 if (*low_to_save == -1)
747 *low_to_save = i;
748
749 *high_to_save = i;
750 }
751
752 if (*low_to_save >= 0)
753 {
754 int num_to_save = *high_to_save - *low_to_save + 1;
755 enum machine_mode save_mode
756 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
757 rtx stack_area;
758
759 /* If we don't have the required alignment, must do this in BLKmode. */
760 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
761 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
762 save_mode = BLKmode;
763
764#ifdef ARGS_GROW_DOWNWARD
765 stack_area = gen_rtx_MEM (save_mode,
766 memory_address (save_mode,
767 plus_constant (argblock,
768 - *high_to_save)));
769#else
770 stack_area = gen_rtx_MEM (save_mode,
771 memory_address (save_mode,
772 plus_constant (argblock,
773 *low_to_save)));
774#endif
775 if (save_mode == BLKmode)
776 {
777 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
04572513
JJ
 778 /* Cannot use emit_block_move here because it may be implemented as a
 779 library call, which would reach this code again and cause infinite
 780 recursion. */
781 move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
782 PARM_BOUNDARY / BITS_PER_UNIT);
20efdf74
JL
783 }
784 else
785 {
786 save_area = gen_reg_rtx (save_mode);
787 emit_move_insn (save_area, stack_area);
788 }
789 }
790 return save_area;
791}
792
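/* Undo the effect of save_fixed_argument_area: copy the contents of
   SAVE_AREA back into the register-parameter area of ARGBLOCK delimited
   by LOW_TO_SAVE and HIGH_TO_SAVE.  */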
793static void
794restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
795 rtx save_area;
796 rtx argblock;
797 int high_to_save;
798 int low_to_save;
799{
800 enum machine_mode save_mode = GET_MODE (save_area);
801#ifdef ARGS_GROW_DOWNWARD
802 rtx stack_area
803 = gen_rtx_MEM (save_mode,
804 memory_address (save_mode,
805 plus_constant (argblock,
806 - high_to_save)));
807#else
808 rtx stack_area
809 = gen_rtx_MEM (save_mode,
810 memory_address (save_mode,
811 plus_constant (argblock,
812 low_to_save)));
813#endif
814
815 if (save_mode != BLKmode)
816 emit_move_insn (stack_area, save_area);
817 else
04572513
JJ
 818 /* Cannot use emit_block_move here because it may be implemented as a
 819 library call, which would reach this code again and cause infinite
 820 recursion. */
821 move_by_pieces (stack_area, validize_mem (save_area),
822 high_to_save - low_to_save + 1,
823 PARM_BOUNDARY / BITS_PER_UNIT);
20efdf74
JL
824}
825#endif
826
 827/* If any elements in ARGS refer to parameters that are to be passed in
 828 registers, but not in memory, and whose alignment does not permit a
 829 direct copy into registers, copy the values into a group of pseudos
8e6a59fe
MM
830 which we will later copy into the appropriate hard registers.
831
832 Pseudos for each unaligned argument will be stored into the array
833 args[argnum].aligned_regs. The caller is responsible for deallocating
834 the aligned_regs array if it is nonzero. */
835
20efdf74
JL
836static void
837store_unaligned_arguments_into_pseudos (args, num_actuals)
838 struct arg_data *args;
839 int num_actuals;
840{
841 int i, j;
842
843 for (i = 0; i < num_actuals; i++)
844 if (args[i].reg != 0 && ! args[i].pass_on_stack
845 && args[i].mode == BLKmode
846 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
847 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
848 {
849 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
850 int big_endian_correction = 0;
851
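	 /* Use one word-sized pseudo per word of the argument, or, when only
	    part of the argument is passed in registers, one pseudo per
	    register word.  */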
852 args[i].n_aligned_regs
853 = args[i].partial ? args[i].partial
854 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
855
8e6a59fe
MM
856 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
857 * args[i].n_aligned_regs);
20efdf74
JL
858
859 /* Structures smaller than a word are aligned to the least
860 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
861 this means we must skip the empty high order bytes when
862 calculating the bit offset. */
863 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
864 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
865
866 for (j = 0; j < args[i].n_aligned_regs; j++)
867 {
868 rtx reg = gen_reg_rtx (word_mode);
869 rtx word = operand_subword_force (args[i].value, j, BLKmode);
870 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
871 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
872
873 args[i].aligned_regs[j] = reg;
874
875 /* There is no need to restrict this code to loading items
876 in TYPE_ALIGN sized hunks. The bitfield instructions can
877 load up entire word sized registers efficiently.
878
879 ??? This may not be needed anymore.
 880 We used to emit a clobber here but that doesn't let later
881 passes optimize the instructions we emit. By storing 0 into
882 the register later passes know the first AND to zero out the
883 bitfield being set in the register is unnecessary. The store
884 of 0 will be deleted as will at least the first AND. */
885
886 emit_move_insn (reg, const0_rtx);
887
888 bytes -= bitsize / BITS_PER_UNIT;
889 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
890 extract_bit_field (word, bitsize, 0, 1,
891 NULL_RTX, word_mode,
892 word_mode,
893 bitalign / BITS_PER_UNIT,
894 BITS_PER_WORD),
895 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
896 }
897 }
898}
899
d7cdf113
JL
900/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
901 ACTPARMS.
902
903 NUM_ACTUALS is the total number of parameters.
904
905 N_NAMED_ARGS is the total number of named arguments.
906
 907 FNDECL is the tree node for the target of this call (if known).
908
909 ARGS_SO_FAR holds state needed by the target to know where to place
910 the next argument.
911
912 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
913 for arguments which are passed in registers.
914
 915 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
916 and may be modified by this routine.
917
918 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
 919 flags which may be modified by this routine. */
920
921static void
922initialize_argument_information (num_actuals, args, args_size, n_named_args,
923 actparms, fndecl, args_so_far,
924 reg_parm_stack_space, old_stack_level,
925 old_pending_adj, must_preallocate, is_const)
91813b28 926 int num_actuals ATTRIBUTE_UNUSED;
d7cdf113
JL
927 struct arg_data *args;
928 struct args_size *args_size;
91813b28 929 int n_named_args ATTRIBUTE_UNUSED;
d7cdf113
JL
930 tree actparms;
931 tree fndecl;
959f3a06 932 CUMULATIVE_ARGS *args_so_far;
d7cdf113
JL
933 int reg_parm_stack_space;
934 rtx *old_stack_level;
935 int *old_pending_adj;
936 int *must_preallocate;
937 int *is_const;
938{
939 /* 1 if scanning parms front to back, -1 if scanning back to front. */
940 int inc;
941
942 /* Count arg position in order args appear. */
943 int argpos;
944
4fc026cd 945 struct args_size alignment_pad;
d7cdf113
JL
946 int i;
947 tree p;
948
949 args_size->constant = 0;
950 args_size->var = 0;
951
952 /* In this loop, we consider args in the order they are written.
953 We fill up ARGS from the front or from the back if necessary
954 so that in any case the first arg to be pushed ends up at the front. */
955
956#ifdef PUSH_ARGS_REVERSED
957 i = num_actuals - 1, inc = -1;
958 /* In this case, must reverse order of args
959 so that we compute and push the last arg first. */
960#else
961 i = 0, inc = 1;
962#endif
963
964 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
965 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
966 {
967 tree type = TREE_TYPE (TREE_VALUE (p));
968 int unsignedp;
969 enum machine_mode mode;
970
971 args[i].tree_value = TREE_VALUE (p);
972
973 /* Replace erroneous argument with constant zero. */
974 if (type == error_mark_node || TYPE_SIZE (type) == 0)
975 args[i].tree_value = integer_zero_node, type = integer_type_node;
976
977 /* If TYPE is a transparent union, pass things the way we would
978 pass the first field of the union. We have already verified that
979 the modes are the same. */
980 if (TYPE_TRANSPARENT_UNION (type))
981 type = TREE_TYPE (TYPE_FIELDS (type));
982
983 /* Decide where to pass this arg.
984
985 args[i].reg is nonzero if all or part is passed in registers.
986
987 args[i].partial is nonzero if part but not all is passed in registers,
988 and the exact value says how many words are passed in registers.
989
990 args[i].pass_on_stack is nonzero if the argument must at least be
991 computed on the stack. It may then be loaded back into registers
992 if args[i].reg is nonzero.
993
994 These decisions are driven by the FUNCTION_... macros and must agree
995 with those made by function.c. */
996
997 /* See if this argument should be passed by invisible reference. */
998 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
999 && contains_placeholder_p (TYPE_SIZE (type)))
1000 || TREE_ADDRESSABLE (type)
1001#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
959f3a06 1002 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
d7cdf113
JL
1003 type, argpos < n_named_args)
1004#endif
1005 )
1006 {
1007 /* If we're compiling a thunk, pass through invisible
1008 references instead of making a copy. */
1009 if (current_function_is_thunk
1010#ifdef FUNCTION_ARG_CALLEE_COPIES
959f3a06 1011 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
d7cdf113
JL
1012 type, argpos < n_named_args)
1013 /* If it's in a register, we must make a copy of it too. */
1014 /* ??? Is this a sufficient test? Is there a better one? */
1015 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1016 && REG_P (DECL_RTL (args[i].tree_value)))
1017 && ! TREE_ADDRESSABLE (type))
1018#endif
1019 )
1020 {
1021 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1022 new object from the argument. If we are passing by
1023 invisible reference, the callee will do that for us, so we
1024 can strip off the TARGET_EXPR. This is not always safe,
1025 but it is safe in the only case where this is a useful
1026 optimization; namely, when the argument is a plain object.
1027 In that case, the frontend is just asking the backend to
1028 make a bitwise copy of the argument. */
1029
1030 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1031 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
1032 (args[i].tree_value, 1)))
1033 == 'd')
1034 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1035 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1036
1037 args[i].tree_value = build1 (ADDR_EXPR,
1038 build_pointer_type (type),
1039 args[i].tree_value);
1040 type = build_pointer_type (type);
1041 }
1042 else
1043 {
1044 /* We make a copy of the object and pass the address to the
1045 function being called. */
1046 rtx copy;
1047
1048 if (TYPE_SIZE (type) == 0
1049 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1050 || (flag_stack_check && ! STACK_CHECK_BUILTIN
05bccae2
RK
1051 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1052 STACK_CHECK_MAX_VAR_SIZE))))
d7cdf113
JL
1053 {
1054 /* This is a variable-sized object. Make space on the stack
1055 for it. */
1056 rtx size_rtx = expr_size (TREE_VALUE (p));
1057
1058 if (*old_stack_level == 0)
1059 {
1060 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1061 *old_pending_adj = pending_stack_adjust;
1062 pending_stack_adjust = 0;
1063 }
1064
1065 copy = gen_rtx_MEM (BLKmode,
1066 allocate_dynamic_stack_space (size_rtx,
1067 NULL_RTX,
1068 TYPE_ALIGN (type)));
1069 }
1070 else
1071 {
1072 int size = int_size_in_bytes (type);
1073 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1074 }
1075
1076 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1077
1078 store_expr (args[i].tree_value, copy, 0);
1079 *is_const = 0;
1080
1081 args[i].tree_value = build1 (ADDR_EXPR,
1082 build_pointer_type (type),
1083 make_tree (type, copy));
1084 type = build_pointer_type (type);
1085 }
1086 }
1087
1088 mode = TYPE_MODE (type);
1089 unsignedp = TREE_UNSIGNED (type);
1090
1091#ifdef PROMOTE_FUNCTION_ARGS
1092 mode = promote_mode (type, mode, &unsignedp, 1);
1093#endif
1094
1095 args[i].unsignedp = unsignedp;
1096 args[i].mode = mode;
959f3a06 1097 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
d7cdf113
JL
1098 argpos < n_named_args);
1099#ifdef FUNCTION_ARG_PARTIAL_NREGS
1100 if (args[i].reg)
1101 args[i].partial
959f3a06 1102 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
d7cdf113
JL
1103 argpos < n_named_args);
1104#endif
1105
1106 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1107
1108 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1109 it means that we are to pass this arg in the register(s) designated
1110 by the PARALLEL, but also to pass it in the stack. */
1111 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1112 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1113 args[i].pass_on_stack = 1;
1114
1115 /* If this is an addressable type, we must preallocate the stack
1116 since we must evaluate the object into its final location.
1117
1118 If this is to be passed in both registers and the stack, it is simpler
1119 to preallocate. */
1120 if (TREE_ADDRESSABLE (type)
1121 || (args[i].pass_on_stack && args[i].reg != 0))
1122 *must_preallocate = 1;
1123
1124 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1125 we cannot consider this function call constant. */
1126 if (TREE_ADDRESSABLE (type))
1127 *is_const = 0;
1128
1129 /* Compute the stack-size of this argument. */
1130 if (args[i].reg == 0 || args[i].partial != 0
1131 || reg_parm_stack_space > 0
1132 || args[i].pass_on_stack)
1133 locate_and_pad_parm (mode, type,
1134#ifdef STACK_PARMS_IN_REG_PARM_AREA
1135 1,
1136#else
1137 args[i].reg != 0,
1138#endif
1139 fndecl, args_size, &args[i].offset,
4fc026cd 1140 &args[i].size, &alignment_pad);
d7cdf113
JL
1141
1142#ifndef ARGS_GROW_DOWNWARD
1143 args[i].slot_offset = *args_size;
1144#endif
1145
4fc026cd
CM
1146 args[i].alignment_pad = alignment_pad;
1147
d7cdf113
JL
1148 /* If a part of the arg was put into registers,
1149 don't include that part in the amount pushed. */
1150 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1151 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1152 / (PARM_BOUNDARY / BITS_PER_UNIT)
1153 * (PARM_BOUNDARY / BITS_PER_UNIT));
1154
1155 /* Update ARGS_SIZE, the total stack space for args so far. */
1156
1157 args_size->constant += args[i].size.constant;
1158 if (args[i].size.var)
1159 {
1160 ADD_PARM_SIZE (*args_size, args[i].size.var);
1161 }
1162
1163 /* Since the slot offset points to the bottom of the slot,
1164 we must record it after incrementing if the args grow down. */
1165#ifdef ARGS_GROW_DOWNWARD
1166 args[i].slot_offset = *args_size;
1167
1168 args[i].slot_offset.constant = -args_size->constant;
1169 if (args_size->var)
fed3cef0 1170 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
d7cdf113
JL
1171#endif
1172
1173 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1174 have been used, etc. */
1175
959f3a06 1176 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
d7cdf113
JL
1177 argpos < n_named_args);
1178 }
1179}
1180
599f37b6
JL
1181/* Update ARGS_SIZE to contain the total size for the argument block.
1182 Return the original constant component of the argument block's size.
1183
1184 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1185 for arguments passed in registers. */
1186
1187static int
c2f8b491
JH
1188compute_argument_block_size (reg_parm_stack_space, args_size,
1189 preferred_stack_boundary)
599f37b6
JL
1190 int reg_parm_stack_space;
1191 struct args_size *args_size;
c2f8b491 1192 int preferred_stack_boundary ATTRIBUTE_UNUSED;
599f37b6
JL
1193{
1194 int unadjusted_args_size = args_size->constant;
1195
1196 /* Compute the actual size of the argument block required. The variable
1197 and constant sizes must be combined, the size may have to be rounded,
1198 and there may be a minimum required size. */
1199
1200 if (args_size->var)
1201 {
1202 args_size->var = ARGS_SIZE_TREE (*args_size);
1203 args_size->constant = 0;
1204
1205#ifdef PREFERRED_STACK_BOUNDARY
c2f8b491
JH
1206 preferred_stack_boundary /= BITS_PER_UNIT;
1207 if (preferred_stack_boundary > 1)
1208 args_size->var = round_up (args_size->var, preferred_stack_boundary);
599f37b6
JL
1209#endif
1210
1211 if (reg_parm_stack_space > 0)
1212 {
1213 args_size->var
1214 = size_binop (MAX_EXPR, args_size->var,
fed3cef0 1215 ssize_int (reg_parm_stack_space));
599f37b6
JL
1216
1217#ifndef OUTGOING_REG_PARM_STACK_SPACE
1218 /* The area corresponding to register parameters is not to count in
1219 the size of the block we need. So make the adjustment. */
1220 args_size->var
1221 = size_binop (MINUS_EXPR, args_size->var,
fed3cef0 1222 ssize_int (reg_parm_stack_space));
599f37b6
JL
1223#endif
1224 }
1225 }
1226 else
1227 {
1228#ifdef PREFERRED_STACK_BOUNDARY
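      /* Round the constant size so that the total outgoing argument space
	 (this block plus arg_space_so_far plus any pending stack adjustment)
	 is a multiple of the preferred boundary, then subtract back the
	 amounts already accounted for.  */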
c2f8b491 1229 preferred_stack_boundary /= BITS_PER_UNIT;
fb5eebb9 1230 args_size->constant = (((args_size->constant
c2732da3 1231 + arg_space_so_far
fb5eebb9 1232 + pending_stack_adjust
c2f8b491
JH
1233 + preferred_stack_boundary - 1)
1234 / preferred_stack_boundary
1235 * preferred_stack_boundary)
c2732da3 1236 - arg_space_so_far
fb5eebb9 1237 - pending_stack_adjust);
599f37b6
JL
1238#endif
1239
1240 args_size->constant = MAX (args_size->constant,
1241 reg_parm_stack_space);
1242
1243#ifdef MAYBE_REG_PARM_STACK_SPACE
1244 if (reg_parm_stack_space == 0)
1245 args_size->constant = 0;
1246#endif
1247
1248#ifndef OUTGOING_REG_PARM_STACK_SPACE
1249 args_size->constant -= reg_parm_stack_space;
1250#endif
1251 }
1252 return unadjusted_args_size;
1253}
1254
19832c77 1255/* Precompute parameters as needed for a function call.
cc0b1adc
JL
1256
 1257 IS_CONST indicates the target function is a `const' function.
1258
1259 MUST_PREALLOCATE indicates that we must preallocate stack space for
1260 any stack arguments.
1261
1262 NUM_ACTUALS is the number of arguments.
1263
1264 ARGS is an array containing information for each argument; this routine
1265 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1266
1267 ARGS_SIZE contains information about the size of the arg list. */
1268
1269static void
1270precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1271 int is_const;
1272 int must_preallocate;
1273 int num_actuals;
1274 struct arg_data *args;
1275 struct args_size *args_size;
1276{
1277 int i;
1278
1279 /* If this function call is cse'able, precompute all the parameters.
1280 Note that if the parameter is constructed into a temporary, this will
1281 cause an additional copy because the parameter will be constructed
1282 into a temporary location and then copied into the outgoing arguments.
1283 If a parameter contains a call to alloca and this function uses the
1284 stack, precompute the parameter. */
1285
1286 /* If we preallocated the stack space, and some arguments must be passed
1287 on the stack, then we must precompute any parameter which contains a
1288 function call which will store arguments on the stack.
1289 Otherwise, evaluating the parameter may clobber previous parameters
1290 which have already been stored into the stack. */
1291
1292 for (i = 0; i < num_actuals; i++)
1293 if (is_const
1294 || ((args_size->var != 0 || args_size->constant != 0)
1295 && calls_function (args[i].tree_value, 1))
1296 || (must_preallocate
1297 && (args_size->var != 0 || args_size->constant != 0)
1298 && calls_function (args[i].tree_value, 0)))
1299 {
1300 /* If this is an addressable type, we cannot pre-evaluate it. */
1301 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1302 abort ();
1303
1304 push_temp_slots ();
1305
47841d1b 1306 args[i].value
cc0b1adc
JL
1307 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1308
1309 preserve_temp_slots (args[i].value);
1310 pop_temp_slots ();
1311
1312 /* ANSI doesn't require a sequence point here,
1313 but PCC has one, so this will avoid some problems. */
1314 emit_queue ();
1315
1316 args[i].initial_value = args[i].value
47841d1b 1317 = protect_from_queue (args[i].value, 0);
cc0b1adc
JL
1318
1319 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
47841d1b
JJ
1320 {
1321 args[i].value
1322 = convert_modes (args[i].mode,
1323 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1324 args[i].value, args[i].unsignedp);
1325#ifdef PROMOTE_FOR_CALL_ONLY
1326 /* CSE will replace this only if it contains args[i].value
1327 pseudo, so convert it down to the declared mode using
1328 a SUBREG. */
1329 if (GET_CODE (args[i].value) == REG
1330 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1331 {
1332 args[i].initial_value
1333 = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1334 args[i].value, 0);
1335 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1336 SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
1337 = args[i].unsignedp;
1338 }
1339#endif
1340 }
cc0b1adc
JL
1341 }
1342}
1343
0f9b3ea6
JL
1344/* Given the current state of MUST_PREALLOCATE and information about
1345 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1346 compute and return the final value for MUST_PREALLOCATE. */
1347
1348static int
1349finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1350 int must_preallocate;
1351 int num_actuals;
1352 struct arg_data *args;
1353 struct args_size *args_size;
1354{
1355 /* See if we have or want to preallocate stack space.
1356
1357 If we would have to push a partially-in-regs parm
1358 before other stack parms, preallocate stack space instead.
1359
1360 If the size of some parm is not a multiple of the required stack
1361 alignment, we must preallocate.
1362
1363 If the total size of arguments that would otherwise create a copy in
1364 a temporary (such as a CALL) is more than half the total argument list
1365 size, preallocation is faster.
1366
1367 Another reason to preallocate is if we have a machine (like the m88k)
1368 where stack alignment is required to be maintained between every
1369 pair of insns, not just when the call is made. However, we assume here
1370 that such machines either do not have push insns (and hence preallocation
1371 would occur anyway) or the problem is taken care of with
1372 PUSH_ROUNDING. */
1373
1374 if (! must_preallocate)
1375 {
1376 int partial_seen = 0;
1377 int copy_to_evaluate_size = 0;
1378 int i;
1379
1380 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1381 {
1382 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1383 partial_seen = 1;
1384 else if (partial_seen && args[i].reg == 0)
1385 must_preallocate = 1;
1386
1387 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1388 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1389 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1390 || TREE_CODE (args[i].tree_value) == COND_EXPR
1391 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1392 copy_to_evaluate_size
1393 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1394 }
1395
1396 if (copy_to_evaluate_size * 2 >= args_size->constant
1397 && args_size->constant > 0)
1398 must_preallocate = 1;
1399 }
1400 return must_preallocate;
1401}
599f37b6 1402
a45bdd02
JL
1403/* If we preallocated stack space, compute the address of each argument
1404 and store it into the ARGS array.
1405
1406 We need not ensure it is a valid memory address here; it will be
1407 validized when it is used.
1408
1409 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1410
1411static void
1412compute_argument_addresses (args, argblock, num_actuals)
1413 struct arg_data *args;
1414 rtx argblock;
1415 int num_actuals;
1416{
1417 if (argblock)
1418 {
1419 rtx arg_reg = argblock;
1420 int i, arg_offset = 0;
1421
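      /* ARGBLOCK may be a plain register or (plus reg const); split off any
	 constant part so it can be folded into each argument's offset.  */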
1422 if (GET_CODE (argblock) == PLUS)
1423 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1424
1425 for (i = 0; i < num_actuals; i++)
1426 {
1427 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1428 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1429 rtx addr;
1430
1431 /* Skip this parm if it will not be passed on the stack. */
1432 if (! args[i].pass_on_stack && args[i].reg != 0)
1433 continue;
1434
1435 if (GET_CODE (offset) == CONST_INT)
1436 addr = plus_constant (arg_reg, INTVAL (offset));
1437 else
1438 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1439
1440 addr = plus_constant (addr, arg_offset);
1441 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1442 MEM_SET_IN_STRUCT_P
1443 (args[i].stack,
1444 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1445
1446 if (GET_CODE (slot_offset) == CONST_INT)
1447 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1448 else
1449 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1450
1451 addr = plus_constant (addr, arg_offset);
1452 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1453 }
1454 }
1455}
1456
1457/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1458 in a call instruction.
1459
1460 FNDECL is the tree node for the target function. For an indirect call
1461 FNDECL will be NULL_TREE.
1462
1463 EXP is the CALL_EXPR for this call. */
1464
1465static rtx
1466rtx_for_function_call (fndecl, exp)
1467 tree fndecl;
1468 tree exp;
1469{
1470 rtx funexp;
1471
1472 /* Get the function to call, in the form of RTL. */
1473 if (fndecl)
1474 {
1475 /* If this is the first use of the function, see if we need to
1476 make an external definition for it. */
1477 if (! TREE_USED (fndecl))
1478 {
1479 assemble_external (fndecl);
1480 TREE_USED (fndecl) = 1;
1481 }
1482
1483 /* Get a SYMBOL_REF rtx for the function address. */
1484 funexp = XEXP (DECL_RTL (fndecl), 0);
1485 }
1486 else
1487 /* Generate an rtx (probably a pseudo-register) for the address. */
1488 {
91ab1046 1489 rtx funaddr;
a45bdd02 1490 push_temp_slots ();
91ab1046
DT
1491 funaddr = funexp =
1492 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
a45bdd02
JL
1493 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1494
 1495 /* Check that the function is executable. */
1496 if (current_function_check_memory_usage)
91ab1046
DT
1497 {
1498#ifdef POINTERS_EXTEND_UNSIGNED
1499 /* It might be OK to convert funexp in place, but there's
1500 a lot going on between here and when it happens naturally
1501 that this seems safer. */
1502 funaddr = convert_memory_address (Pmode, funexp);
1503#endif
1504 emit_library_call (chkr_check_exec_libfunc, 1,
1505 VOIDmode, 1,
1506 funaddr, Pmode);
1507 }
a45bdd02
JL
1508 emit_queue ();
1509 }
1510 return funexp;
1511}
1512
21a3b983
JL
1513/* Do the register loads required for any wholly-register parms or any
1514 parms which are passed both on the stack and in a register. Their
1515 expressions were already evaluated.
1516
1517 Mark all register-parms as living through the call, putting these USE
1518 insns in the CALL_INSN_FUNCTION_USAGE field. */
1519
1520static void
1521load_register_parameters (args, num_actuals, call_fusage)
1522 struct arg_data *args;
1523 int num_actuals;
1524 rtx *call_fusage;
1525{
1526 int i, j;
1527
1528#ifdef LOAD_ARGS_REVERSED
1529 for (i = num_actuals - 1; i >= 0; i--)
1530#else
1531 for (i = 0; i < num_actuals; i++)
1532#endif
1533 {
1534 rtx reg = args[i].reg;
1535 int partial = args[i].partial;
1536 int nregs;
1537
1538 if (reg)
1539 {
 1540 /* Set to non-negative if we must move a word at a time, even if just
 1541 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1542 we just use a normal move insn. This value can be zero if the
1543 argument is a zero size structure with no fields. */
1544 nregs = (partial ? partial
1545 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1546 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1547 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1548 : -1));
1549
1550 /* Handle calls that pass values in multiple non-contiguous
1551 locations. The Irix 6 ABI has examples of this. */
1552
1553 if (GET_CODE (reg) == PARALLEL)
1554 {
1555 emit_group_load (reg, args[i].value,
1556 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1557 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1558 / BITS_PER_UNIT));
1559 }
1560
1561 /* If simple case, just do move. If normal partial, store_one_arg
1562 has already loaded the register for us. In all other cases,
1563 load the register(s) from memory. */
1564
1565 else if (nregs == -1)
1566 emit_move_insn (reg, args[i].value);
1567
1568 /* If we have pre-computed the values to put in the registers in
1569 the case of non-aligned structures, copy them in now. */
1570
1571 else if (args[i].n_aligned_regs != 0)
1572 for (j = 0; j < args[i].n_aligned_regs; j++)
1573 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1574 args[i].aligned_regs[j]);
1575
1576 else if (partial == 0 || args[i].pass_on_stack)
1577 move_block_to_reg (REGNO (reg),
1578 validize_mem (args[i].value), nregs,
1579 args[i].mode);
1580
1581 /* Handle calls that pass values in multiple non-contiguous
1582 locations. The Irix 6 ABI has examples of this. */
1583 if (GET_CODE (reg) == PARALLEL)
1584 use_group_regs (call_fusage, reg);
1585 else if (nregs == -1)
1586 use_reg (call_fusage, reg);
1587 else
1588 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1589 }
1590 }
1591}
1592
51bbfa0c
RS
1593/* Generate all the code for a function call
1594 and return an rtx for its value.
1595 Store the value in TARGET (specified as an rtx) if convenient.
1596 If the value is stored in TARGET then TARGET is returned.
1597 If IGNORE is nonzero, then we ignore the value of the function call. */
1598
1599rtx
8129842c 1600expand_call (exp, target, ignore)
51bbfa0c
RS
1601 tree exp;
1602 rtx target;
1603 int ignore;
51bbfa0c
RS
1604{
1605 /* List of actual parameters. */
1606 tree actparms = TREE_OPERAND (exp, 1);
1607 /* RTX for the function to be called. */
1608 rtx funexp;
51bbfa0c
RS
1609 /* Data type of the function. */
1610 tree funtype;
1611 /* Declaration of the function being called,
1612 or 0 if the function is computed (not known by name). */
1613 tree fndecl = 0;
1614 char *name = 0;
c2939b57 1615 rtx before_call;
51bbfa0c
RS
1616
1617 /* Register in which non-BLKmode value will be returned,
1618 or 0 if no value or if value is BLKmode. */
1619 rtx valreg;
1620 /* Address where we should return a BLKmode value;
1621 0 if value not BLKmode. */
1622 rtx structure_value_addr = 0;
1623 /* Nonzero if that address is being passed by treating it as
1624 an extra, implicit first parameter. Otherwise,
1625 it is passed by being copied directly into struct_value_rtx. */
1626 int structure_value_addr_parm = 0;
1627 /* Size of aggregate value wanted, or zero if none wanted
1628 or if we are using the non-reentrant PCC calling convention
1629 or expecting the value in registers. */
e5e809f4 1630 HOST_WIDE_INT struct_value_size = 0;
51bbfa0c
RS
1631 /* Nonzero if called function returns an aggregate in memory PCC style,
1632 by returning the address of where to find it. */
1633 int pcc_struct_value = 0;
1634
1635 /* Number of actual parameters in this call, including struct value addr. */
1636 int num_actuals;
1637 /* Number of named args. Args after this are anonymous ones
1638 and they must all go on the stack. */
1639 int n_named_args;
51bbfa0c
RS
1640
1641 /* Vector of information about each argument.
1642 Arguments are numbered in the order they will be pushed,
1643 not the order they are written. */
1644 struct arg_data *args;
1645
1646 /* Total size in bytes of all the stack-parms scanned so far. */
1647 struct args_size args_size;
1648 /* Size of arguments before any adjustments (such as rounding). */
599f37b6 1649 int unadjusted_args_size;
51bbfa0c
RS
1650 /* Data on reg parms scanned so far. */
1651 CUMULATIVE_ARGS args_so_far;
1652 /* Nonzero if a reg parm has been scanned. */
1653 int reg_parm_seen;
efd65a8b 1654 /* Nonzero if this is an indirect function call. */
51bbfa0c
RS
1655
1656 /* Nonzero if we must avoid push-insns in the args for this call.
1657 If stack space is allocated for register parameters, but not by the
1658 caller, then it is preallocated in the fixed part of the stack frame.
1659 So the entire argument block must then be preallocated (i.e., we
1660 ignore PUSH_ROUNDING in that case). */
1661
51bbfa0c
RS
1662#ifdef PUSH_ROUNDING
1663 int must_preallocate = 0;
1664#else
1665 int must_preallocate = 1;
51bbfa0c
RS
1666#endif
1667
f72aed24 1668 /* Size of the stack reserved for parameter registers. */
6f90e075
JW
1669 int reg_parm_stack_space = 0;
1670
51bbfa0c
RS
1671 /* Address of space preallocated for stack parms
1672 (on machines that lack push insns), or 0 if space not preallocated. */
1673 rtx argblock = 0;
1674
1675 /* Nonzero if it is plausible that this is a call to alloca. */
1676 int may_be_alloca;
9ae8ffe7
JL
1677 /* Nonzero if this is a call to malloc or a related function. */
1678 int is_malloc;
51bbfa0c
RS
1679 /* Nonzero if this is a call to setjmp or a related function. */
1680 int returns_twice;
1681 /* Nonzero if this is a call to `longjmp'. */
1682 int is_longjmp;
fa76d9e0
JR
1683 /* Nonzero if this is a syscall that makes a new process in the image of
1684 the current one. */
1685 int fork_or_exec;
51bbfa0c
RS
1686 /* Nonzero if this is a call to an inline function. */
1687 int is_integrable = 0;
51bbfa0c
RS
1688 /* Nonzero if this is a call to a `const' function.
1689 Note that only explicitly named functions are handled as `const' here. */
1690 int is_const = 0;
1691 /* Nonzero if this is a call to a `volatile' function. */
1692 int is_volatile = 0;
12a22e76
JM
1693 /* Nonzero if this is a call to a function that won't throw an exception. */
1694 int nothrow = TREE_NOTHROW (exp);
51bbfa0c
RS
1695#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1696 /* Define the boundary of the register parm stack space that needs to be
1697 saved, if any. */
1698 int low_to_save = -1, high_to_save;
1699 rtx save_area = 0; /* Place that it is saved */
1700#endif
1701
1702#ifdef ACCUMULATE_OUTGOING_ARGS
1703 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1704 char *initial_stack_usage_map = stack_usage_map;
a544cfd2 1705 int old_stack_arg_under_construction = 0;
51bbfa0c
RS
1706#endif
1707
1708 rtx old_stack_level = 0;
79be3418 1709 int old_pending_adj = 0;
51bbfa0c 1710 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 1711 rtx call_fusage = 0;
51bbfa0c 1712 register tree p;
21a3b983 1713 register int i;
c2f8b491
JH
1714#ifdef PREFERRED_STACK_BOUNDARY
1715 int preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1716#else
1717 /* In this case the preferred_stack_boundary variable is meaningless.
1718 It is used only in order to keep ifdef noise down when calling
1719 compute_argument_block_size. */
1720 int preferred_stack_boundary = 0;
1721#endif
51bbfa0c 1722
7815214e
RK
1723 /* The value of the function call can be put in a hard register. But
1724 if -fcheck-memory-usage, code which invokes functions (and thus
1725 damages some hard registers) can be inserted before using the value.
1726 So, target is always a pseudo-register in that case. */
7d384cc0 1727 if (current_function_check_memory_usage)
7815214e
RK
1728 target = 0;
1729
51bbfa0c
RS
1730 /* See if we can find a DECL-node for the actual function.
1731 As a result, decide whether this is a call to an integrable function. */
1732
1733 p = TREE_OPERAND (exp, 0);
1734 if (TREE_CODE (p) == ADDR_EXPR)
1735 {
1736 fndecl = TREE_OPERAND (p, 0);
1737 if (TREE_CODE (fndecl) != FUNCTION_DECL)
fdff8c6d 1738 fndecl = 0;
51bbfa0c
RS
1739 else
1740 {
1741 if (!flag_no_inline
1742 && fndecl != current_function_decl
aa10adff 1743 && DECL_INLINE (fndecl)
1cf4f698 1744 && DECL_SAVED_INSNS (fndecl)
49ad7cfa 1745 && DECL_SAVED_INSNS (fndecl)->inlinable)
51bbfa0c
RS
1746 is_integrable = 1;
1747 else if (! TREE_ADDRESSABLE (fndecl))
1748 {
13d39dbc 1749 /* In case this function later becomes inlinable,
51bbfa0c
RS
1750 record that there was already a non-inline call to it.
1751
1752 Use abstraction instead of setting TREE_ADDRESSABLE
1753 directly. */
da8c1713
RK
1754 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1755 && optimize > 0)
1907795e
JM
1756 {
1757 warning_with_decl (fndecl, "can't inline call to `%s'");
1758 warning ("called from here");
1759 }
51bbfa0c
RS
1760 mark_addressable (fndecl);
1761 }
1762
d45cf215
RS
1763 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1764 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
51bbfa0c 1765 is_const = 1;
5e24110e
RS
1766
1767 if (TREE_THIS_VOLATILE (fndecl))
1768 is_volatile = 1;
12a22e76
JM
1769
1770 if (TREE_NOTHROW (fndecl))
1771 nothrow = 1;
51bbfa0c
RS
1772 }
1773 }
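/* For example, in a direct call such as `foo (x)' operand 0 of the
   CALL_EXPR is normally an ADDR_EXPR whose operand is the FUNCTION_DECL
   for `foo', so FNDECL is found here and the inline/const/volatile
   information above can be used. In a call through a pointer such as
   `(*fp) (x)', operand 0 is not an ADDR_EXPR of a FUNCTION_DECL, so
   FNDECL stays zero and only the function type is consulted below. */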
1774
fdff8c6d
RK
1775 /* If we don't have a specific function to call, see if we have a
1776 constant or `noreturn' function from the type. */
1777 if (fndecl == 0)
1778 {
1779 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1780 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1781 }
1782
6f90e075
JW
1783#ifdef REG_PARM_STACK_SPACE
1784#ifdef MAYBE_REG_PARM_STACK_SPACE
1785 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1786#else
1787 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1788#endif
1789#endif
1790
e5e809f4
JL
1791#if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1792 if (reg_parm_stack_space > 0)
1793 must_preallocate = 1;
1794#endif
1795
51bbfa0c
RS
1796 /* Warn if this value is an aggregate type,
1797 regardless of which calling convention we are using for it. */
05e3bdb9 1798 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
51bbfa0c
RS
1799 warning ("function call has aggregate value");
1800
1801 /* Set up a place to return a structure. */
1802
1803 /* Cater to broken compilers. */
1804 if (aggregate_value_p (exp))
1805 {
1806 /* This call returns a big structure. */
1807 is_const = 0;
1808
1809#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a
RS
1810 {
1811 pcc_struct_value = 1;
0dd532dc
JW
1812 /* Easier than making that case work right. */
1813 if (is_integrable)
1814 {
1815 /* In case this is a static function, note that it has been
1816 used. */
1817 if (! TREE_ADDRESSABLE (fndecl))
1818 mark_addressable (fndecl);
1819 is_integrable = 0;
1820 }
9e7b1d0a
RS
1821 }
1822#else /* not PCC_STATIC_STRUCT_RETURN */
1823 {
1824 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
51bbfa0c 1825
9e7b1d0a
RS
1826 if (target && GET_CODE (target) == MEM)
1827 structure_value_addr = XEXP (target, 0);
1828 else
1829 {
e9a25f70
JL
1830 /* Assign a temporary to hold the value. */
1831 tree d;
51bbfa0c 1832
9e7b1d0a
RS
1833 /* For variable-sized objects, we must be called with a target
1834 specified. If we were to allocate space on the stack here,
1835 we would have no way of knowing when to free it. */
51bbfa0c 1836
002bdd6c
RK
1837 if (struct_value_size < 0)
1838 abort ();
1839
e9a25f70
JL
1840 /* This DECL is just something to feed to mark_addressable;
1841 it doesn't get pushed. */
1842 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1843 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1844 mark_addressable (d);
14a774a9 1845 mark_temp_addr_taken (DECL_RTL (d));
e9a25f70 1846 structure_value_addr = XEXP (DECL_RTL (d), 0);
e5e809f4 1847 TREE_USED (d) = 1;
9e7b1d0a
RS
1848 target = 0;
1849 }
1850 }
1851#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
1852 }
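/* To illustrate the two conventions above: with PCC_STATIC_STRUCT_RETURN
   the callee returns the address of a buffer holding the aggregate and
   the caller copies the value out of it, so no return slot is allocated
   here; otherwise the caller provides the slot itself (TARGET if it is
   a suitable MEM, or the temporary assigned above) and hands its
   address to the callee. */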
1853
1854 /* If called function is inline, try to integrate it. */
1855
1856 if (is_integrable)
1857 {
1858 rtx temp;
c2939b57 1859
69d4ca36 1860#ifdef ACCUMULATE_OUTGOING_ARGS
c2939b57 1861 before_call = get_last_insn ();
69d4ca36 1862#endif
51bbfa0c
RS
1863
1864 temp = expand_inline_function (fndecl, actparms, target,
1865 ignore, TREE_TYPE (exp),
1866 structure_value_addr);
1867
1868 /* If inlining succeeded, return. */
2e0dd623 1869 if (temp != (rtx) (HOST_WIDE_INT) -1)
51bbfa0c 1870 {
d64f5a78 1871#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
1872 /* If the outgoing argument list must be preserved, push
1873 the stack before executing the inlined function if it
1874 makes any calls. */
1875
1876 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1877 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1878 break;
1879
1880 if (stack_arg_under_construction || i >= 0)
1881 {
a1917650
RK
1882 rtx first_insn
1883 = before_call ? NEXT_INSN (before_call) : get_insns ();
6a651371 1884 rtx insn = NULL_RTX, seq;
2f4aa534 1885
d64f5a78 1886 /* Look for a call in the inline function code.
49ad7cfa 1887 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
d64f5a78
RS
1888 nonzero then there is a call and it is not necessary
1889 to scan the insns. */
1890
49ad7cfa 1891 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
a1917650 1892 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
d64f5a78
RS
1893 if (GET_CODE (insn) == CALL_INSN)
1894 break;
2f4aa534
RS
1895
1896 if (insn)
1897 {
d64f5a78
RS
1898 /* Reserve enough stack space so that the largest
1899 argument list of any function call in the inline
1900 function does not overlap the argument list being
1901 evaluated. This is usually an overestimate because
1902 allocate_dynamic_stack_space reserves space for an
1903 outgoing argument list in addition to the requested
1904 space, but there is no way to ask for stack space such
1905 that an argument list of a certain length can be
e5e809f4 1906 safely constructed.
d64f5a78 1907
e5e809f4
JL
1908 Add the stack space reserved for register arguments, if
1909 any, in the inline function. What is really needed is the
d64f5a78
RS
1910 largest value of reg_parm_stack_space in the inline
1911 function, but that is not available. Using the current
1912 value of reg_parm_stack_space is wrong, but gives
1913 correct results on all supported machines. */
e5e809f4 1914
49ad7cfa 1915 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
e5e809f4
JL
1916 + reg_parm_stack_space);
1917
2f4aa534 1918 start_sequence ();
ccf5d244 1919 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
e5d70561
RK
1920 allocate_dynamic_stack_space (GEN_INT (adjust),
1921 NULL_RTX, BITS_PER_UNIT);
2f4aa534
RS
1922 seq = get_insns ();
1923 end_sequence ();
a1917650 1924 emit_insns_before (seq, first_insn);
e5d70561 1925 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2f4aa534
RS
1926 }
1927 }
d64f5a78 1928#endif
51bbfa0c
RS
1929
1930 /* If the result is equivalent to TARGET, return TARGET to simplify
1931 checks in store_expr. They can be equivalent but not equal in the
1932 case of a function that returns BLKmode. */
1933 if (temp != target && rtx_equal_p (temp, target))
1934 return target;
1935 return temp;
1936 }
1937
1938 /* If inlining failed, mark FNDECL as needing to be compiled
0481a55e
RK
1939 separately after all. If the function was declared inline,
1940 give a warning. */
1941 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
da8c1713 1942 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1907795e
JM
1943 {
1944 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1945 warning ("called from here");
1946 }
51bbfa0c
RS
1947 mark_addressable (fndecl);
1948 }
1949
51bbfa0c
RS
1950 function_call_count++;
1951
1952 if (fndecl && DECL_NAME (fndecl))
1953 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1954
c2f8b491
JH
1955 /* Ensure current function's preferred stack boundary is at least
1956 what we need. We don't have to increase alignment for recursive
1957 functions. */
1958 if (cfun->preferred_stack_boundary < preferred_stack_boundary
1959 && fndecl != current_function_decl)
1960 cfun->preferred_stack_boundary = preferred_stack_boundary;
1961
51bbfa0c 1962 /* See if this is a call to a function that can return more than once
20efdf74 1963 or a call to longjmp or malloc. */
fa76d9e0 1964 special_function_p (fndecl, &returns_twice, &is_longjmp, &fork_or_exec,
20efdf74 1965 &is_malloc, &may_be_alloca);
51bbfa0c 1966
51bbfa0c
RS
1967 if (may_be_alloca)
1968 current_function_calls_alloca = 1;
1969
39842893
JL
1970 /* Operand 0 is a pointer-to-function; get the type of the function. */
1971 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1972 if (! POINTER_TYPE_P (funtype))
1973 abort ();
1974 funtype = TREE_TYPE (funtype);
1975
1976 /* When calling a const function, we must pop the stack args right away,
1977 so that the pop is deleted or moved with the call. */
1978 if (is_const)
1979 NO_DEFER_POP;
1980
51bbfa0c
RS
1981 /* Don't let pending stack adjusts add up to too much.
1982 Also, do all pending adjustments now
1983 if there is any chance this might be a call to alloca. */
1984
1985 if (pending_stack_adjust >= 32
1986 || (pending_stack_adjust > 0 && may_be_alloca))
1987 do_pending_stack_adjust ();
1988
fa76d9e0
JR
1989 if (profile_arc_flag && fork_or_exec)
1990 {
1991 /* A fork duplicates the profile information, and an exec discards
1992 it. We can't rely on fork/exec to be paired. So write out the
1993 profile information we have gathered so far, and clear it. */
1994 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
1995 VOIDmode, 0);
1996
1997 /* ??? When __clone is called with CLONE_VM set, profiling is
1998 subject to race conditions, just as with multithreaded programs. */
1999 }
2000
cc79451b
RK
2001 /* Push the temporary stack slot level so that we can free any temporaries
2002 we make. */
51bbfa0c
RS
2003 push_temp_slots ();
2004
eecb6f50
JL
2005 /* Start updating where the next arg would go.
2006
2007 On some machines (such as the PA) indirect calls have a different
2008 calling convention than normal calls. The last argument in
2009 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2010 or not. */
2011 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
51bbfa0c
RS
2012
2013 /* If struct_value_rtx is 0, it means pass the address
2014 as if it were an extra parameter. */
2015 if (structure_value_addr && struct_value_rtx == 0)
2016 {
5582b006
RK
2017 /* If structure_value_addr is a REG other than
2018 virtual_outgoing_args_rtx, we can always use it. If it
2019 is not a REG, we must always copy it into a register.
2020 If it is virtual_outgoing_args_rtx, we must copy it to another
2021 register in some cases. */
2022 rtx temp = (GET_CODE (structure_value_addr) != REG
d64f5a78 2023#ifdef ACCUMULATE_OUTGOING_ARGS
5582b006
RK
2024 || (stack_arg_under_construction
2025 && structure_value_addr == virtual_outgoing_args_rtx)
d64f5a78 2026#endif
5582b006
RK
2027 ? copy_addr_to_reg (structure_value_addr)
2028 : structure_value_addr);
d64f5a78 2029
51bbfa0c
RS
2030 actparms
2031 = tree_cons (error_mark_node,
2032 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2f4aa534 2033 temp),
51bbfa0c
RS
2034 actparms);
2035 structure_value_addr_parm = 1;
2036 }
2037
2038 /* Count the arguments and set NUM_ACTUALS. */
2039 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
2040 num_actuals = i;
2041
2042 /* Compute number of named args.
2043 Normally, don't include the last named arg if anonymous args follow.
e5e809f4 2044 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
469225d8
JW
2045 (If no anonymous args follow, the result of list_length is actually
2046 one too large. This is harmless.)
51bbfa0c 2047
9ab70a9b
R
2048 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2049 zero, this machine will be able to place unnamed args that were passed in
469225d8
JW
2050 registers into the stack. So treat all args as named. This allows the
2051 insns emitting for a specific argument list to be independent of the
2052 function declaration.
51bbfa0c 2053
9ab70a9b 2054 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
51bbfa0c
RS
2055 way to pass unnamed args in registers, so we must force them into
2056 memory. */
e5e809f4
JL
2057
2058 if ((STRICT_ARGUMENT_NAMING
9ab70a9b 2059 || ! PRETEND_OUTGOING_VARARGS_NAMED)
e5e809f4 2060 && TYPE_ARG_TYPES (funtype) != 0)
51bbfa0c 2061 n_named_args
0ee902cb 2062 = (list_length (TYPE_ARG_TYPES (funtype))
0ee902cb 2063 /* Don't include the last named arg. */
d0f9021a 2064 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
0ee902cb
RM
2065 /* Count the struct value address, if it is passed as a parm. */
2066 + structure_value_addr_parm);
51bbfa0c 2067 else
51bbfa0c
RS
2068 /* If we know nothing, treat all args as named. */
2069 n_named_args = num_actuals;
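/* Worked example, assuming STRICT_ARGUMENT_NAMING is zero and no struct
   value parm: for `int f (int, int, ...)' the TYPE_ARG_TYPES list has
   two entries, so list_length returns 2 and N_NAMED_ARGS becomes 1;
   the last named arg is handled like the anonymous ones. For a
   fixed-argument prototype the list ends with a `void' entry, so the
   extra element that list_length reports is cancelled by the same
   subtraction. */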
2070
2071 /* Make a vector to hold all the information about each arg. */
2072 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
4c9a05bc 2073 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
51bbfa0c 2074
d7cdf113
JL
2075 /* Build up entries in the ARGS array, compute the size of the arguments
2076 into ARGS_SIZE, etc. */
2077 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
959f3a06 2078 actparms, fndecl, &args_so_far,
d7cdf113
JL
2079 reg_parm_stack_space, &old_stack_level,
2080 &old_pending_adj, &must_preallocate,
2081 &is_const);
51bbfa0c 2082
6f90e075
JW
2083#ifdef FINAL_REG_PARM_STACK_SPACE
2084 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2085 args_size.var);
2086#endif
2087
51bbfa0c
RS
2088 if (args_size.var)
2089 {
2090 /* If this function requires a variable-sized argument list, don't try to
2091 make a cse'able block for this call. We may be able to do this
2092 eventually, but it is too complicated to keep track of what insns go
2093 in the cse'able block and which don't. */
2094
2095 is_const = 0;
2096 must_preallocate = 1;
51bbfa0c 2097 }
e5e809f4 2098
599f37b6
JL
2099 /* Compute the actual size of the argument block required. The variable
2100 and constant sizes must be combined, the size may have to be rounded,
2101 and there may be a minimum required size. */
2102 unadjusted_args_size
c2f8b491
JH
2103 = compute_argument_block_size (reg_parm_stack_space, &args_size,
2104 preferred_stack_boundary);
51bbfa0c 2105
0f9b3ea6
JL
2106 /* Now make final decision about preallocating stack space. */
2107 must_preallocate = finalize_must_preallocate (must_preallocate,
2108 num_actuals, args, &args_size);
51bbfa0c
RS
2109
2110 /* If the structure value address will reference the stack pointer, we must
2111 stabilize it. We don't need to do this if we know that we are not going
2112 to adjust the stack pointer in processing this call. */
2113
2114 if (structure_value_addr
2115 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2116 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2117 && (args_size.var
2118#ifndef ACCUMULATE_OUTGOING_ARGS
2119 || args_size.constant
2120#endif
2121 ))
2122 structure_value_addr = copy_to_reg (structure_value_addr);
2123
cc0b1adc
JL
2124 /* Precompute any arguments as needed. */
2125 precompute_arguments (is_const, must_preallocate, num_actuals,
2126 args, &args_size);
51bbfa0c
RS
2127
2128 /* Now we are about to start emitting insns that can be deleted
2129 if a libcall is deleted. */
9ae8ffe7 2130 if (is_const || is_malloc)
51bbfa0c
RS
2131 start_sequence ();
2132
2133 /* If we have no actual push instructions, or shouldn't use them,
2134 make space for all args right now. */
2135
2136 if (args_size.var != 0)
2137 {
2138 if (old_stack_level == 0)
2139 {
e5d70561 2140 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
51bbfa0c
RS
2141 old_pending_adj = pending_stack_adjust;
2142 pending_stack_adjust = 0;
d64f5a78 2143#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2144 /* stack_arg_under_construction says whether a stack arg is
2145 being constructed at the old stack level. Pushing the stack
2146 gets a clean outgoing argument block. */
2147 old_stack_arg_under_construction = stack_arg_under_construction;
2148 stack_arg_under_construction = 0;
d64f5a78 2149#endif
51bbfa0c
RS
2150 }
2151 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2152 }
26a258fe 2153 else
51bbfa0c
RS
2154 {
2155 /* Note that we must go through the motions of allocating an argument
2156 block even if the size is zero because we may be storing args
2157 in the area reserved for register arguments, which may be part of
2158 the stack frame. */
26a258fe 2159
51bbfa0c
RS
2160 int needed = args_size.constant;
2161
0f41302f
MS
2162 /* Store the maximum argument space used. It will be pushed by
2163 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2164 checking). */
51bbfa0c
RS
2165
2166 if (needed > current_function_outgoing_args_size)
2167 current_function_outgoing_args_size = needed;
2168
26a258fe
PB
2169 if (must_preallocate)
2170 {
2171#ifdef ACCUMULATE_OUTGOING_ARGS
2172 /* Since the stack pointer will never be pushed, it is possible for
2173 the evaluation of a parm to clobber something we have already
2174 written to the stack. Since most function calls on RISC machines
2175 do not use the stack, this is uncommon, but must work correctly.
2176
2177 Therefore, we save any area of the stack that was already written
2178 and that we are using. Here we set up to do this by making a new
2179 stack usage map from the old one. The actual save will be done
2180 by store_one_arg.
2181
2182 Another approach might be to try to reorder the argument
2183 evaluations to avoid this conflicting stack usage. */
2184
e5e809f4 2185#ifndef OUTGOING_REG_PARM_STACK_SPACE
26a258fe
PB
2186 /* Since we will be writing into the entire argument area, the
2187 map must be allocated for its entire size, not just the part that
2188 is the responsibility of the caller. */
2189 needed += reg_parm_stack_space;
51bbfa0c
RS
2190#endif
2191
2192#ifdef ARGS_GROW_DOWNWARD
26a258fe
PB
2193 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2194 needed + 1);
51bbfa0c 2195#else
26a258fe
PB
2196 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2197 needed);
51bbfa0c 2198#endif
26a258fe 2199 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
51bbfa0c 2200
26a258fe
PB
2201 if (initial_highest_arg_in_use)
2202 bcopy (initial_stack_usage_map, stack_usage_map,
2203 initial_highest_arg_in_use);
51bbfa0c 2204
26a258fe
PB
2205 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2206 bzero (&stack_usage_map[initial_highest_arg_in_use],
2207 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2208 needed = 0;
2f4aa534 2209
26a258fe
PB
2210 /* The address of the outgoing argument list must not be copied to a
2211 register here, because argblock would be left pointing to the
2212 wrong place after the call to allocate_dynamic_stack_space below.
2213 */
2f4aa534 2214
26a258fe 2215 argblock = virtual_outgoing_args_rtx;
2f4aa534 2216
51bbfa0c 2217#else /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 2218 if (inhibit_defer_pop == 0)
51bbfa0c 2219 {
26a258fe
PB
2220 /* Try to reuse some or all of the pending_stack_adjust
2221 to get this space. Maybe we can avoid any pushing. */
2222 if (needed > pending_stack_adjust)
2223 {
2224 needed -= pending_stack_adjust;
2225 pending_stack_adjust = 0;
2226 }
2227 else
2228 {
2229 pending_stack_adjust -= needed;
2230 needed = 0;
2231 }
51bbfa0c 2232 }
26a258fe
PB
2233 /* Special case this because overhead of `push_block' in this
2234 case is non-trivial. */
2235 if (needed == 0)
2236 argblock = virtual_outgoing_args_rtx;
51bbfa0c 2237 else
26a258fe
PB
2238 argblock = push_block (GEN_INT (needed), 0, 0);
2239
2240 /* We only really need to call `copy_to_reg' in the case where push
2241 insns are going to be used to pass ARGBLOCK to a function
2242 call in ARGS. In that case, the stack pointer changes value
2243 from the allocation point to the call point, and hence
2244 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2245 But might as well always do it. */
2246 argblock = copy_to_reg (argblock);
51bbfa0c 2247#endif /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 2248 }
51bbfa0c
RS
2249 }
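/* Worked example for the pending_stack_adjust reuse above (push-insn
   case, illustrative numbers): if 16 bytes of arguments are needed
   while 24 bytes of pops are pending, the pending adjustment drops to
   8 and nothing is pushed; if 32 bytes were needed instead, the 24
   pending bytes are consumed and only the remaining 8 are allocated
   with push_block. */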
2250
bfbf933a
RS
2251#ifdef ACCUMULATE_OUTGOING_ARGS
2252 /* The save/restore code in store_one_arg handles all cases except one:
2253 a constructor call (including a C function returning a BLKmode struct)
2254 to initialize an argument. */
2255 if (stack_arg_under_construction)
2256 {
e5e809f4 2257#ifndef OUTGOING_REG_PARM_STACK_SPACE
e5d70561 2258 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
bfbf933a 2259#else
e5d70561 2260 rtx push_size = GEN_INT (args_size.constant);
bfbf933a
RS
2261#endif
2262 if (old_stack_level == 0)
2263 {
e5d70561 2264 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bfbf933a
RS
2265 old_pending_adj = pending_stack_adjust;
2266 pending_stack_adjust = 0;
2267 /* stack_arg_under_construction says whether a stack arg is
2268 being constructed at the old stack level. Pushing the stack
2269 gets a clean outgoing argument block. */
2270 old_stack_arg_under_construction = stack_arg_under_construction;
2271 stack_arg_under_construction = 0;
2272 /* Make a new map for the new argument list. */
2273 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2274 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2275 highest_outgoing_arg_in_use = 0;
2276 }
e5d70561 2277 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
bfbf933a
RS
2278 }
2279 /* If argument evaluation might modify the stack pointer, copy the
2280 address of the argument list to a register. */
2281 for (i = 0; i < num_actuals; i++)
2282 if (args[i].pass_on_stack)
2283 {
2284 argblock = copy_addr_to_reg (argblock);
2285 break;
2286 }
2287#endif
2288
a45bdd02 2289 compute_argument_addresses (args, argblock, num_actuals);
bfbf933a 2290
51bbfa0c 2291#ifdef PUSH_ARGS_REVERSED
c795bca9 2292#ifdef PREFERRED_STACK_BOUNDARY
51bbfa0c
RS
2293 /* If we push args individually in reverse order, perform stack alignment
2294 before the first push (the last arg). */
4e217aed
JH
2295 if (args_size.constant != unadjusted_args_size)
2296 {
2297 /* When the stack adjustment is pending,
2298 we get better code by combining the adjustments. */
42f602d8
JH
2299 if (pending_stack_adjust && !is_const
2300 && !inhibit_defer_pop)
4e217aed
JH
2301 {
2302 args_size.constant = (unadjusted_args_size
2303 + ((pending_stack_adjust + args_size.constant
c2732da3 2304 + arg_space_so_far
4e217aed
JH
2305 - unadjusted_args_size)
2306 % (preferred_stack_boundary / BITS_PER_UNIT)));
2307 pending_stack_adjust -= args_size.constant - unadjusted_args_size;
2308 do_pending_stack_adjust ();
2309 }
2310 else if (argblock == 0)
2311 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
c2732da3
JM
2312 arg_space_so_far += args_size.constant - unadjusted_args_size;
2313
2314 /* Now that the stack is properly aligned, pops can't safely
2315 be deferred during the evaluation of the arguments. */
2316 NO_DEFER_POP;
4e217aed 2317 }
51bbfa0c
RS
2318#endif
2319#endif
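/* Illustrative arithmetic for the combined adjustment above: with a
   16-byte preferred boundary, UNADJUSTED_ARGS_SIZE == 20, an aligned
   ARGS_SIZE.CONSTANT of 32, ARG_SPACE_SO_FAR == 0 and 12 bytes of
   pending pops, the new ARGS_SIZE.CONSTANT is 20 + ((12 + 32 + 0 - 20)
   % 16) == 28, and PENDING_STACK_ADJUST shrinks by the 8 bytes of
   padding now carried by the argument block. The numbers are examples
   only; the point is that padding and pending pops are merged into one
   adjustment. */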
2320
2321 /* Don't try to defer pops if preallocating, not even from the first arg,
2322 since ARGBLOCK probably refers to the SP. */
2323 if (argblock)
2324 NO_DEFER_POP;
2325
a45bdd02 2326 funexp = rtx_for_function_call (fndecl, exp);
51bbfa0c
RS
2327
2328 /* Figure out the register where the value, if any, will come back. */
2329 valreg = 0;
2330 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2331 && ! structure_value_addr)
2332 {
2333 if (pcc_struct_value)
2334 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
4dc07bd7 2335 fndecl, 0);
51bbfa0c 2336 else
4dc07bd7 2337 valreg = hard_function_value (TREE_TYPE (exp), fndecl, 0);
51bbfa0c
RS
2338 }
2339
2340 /* Precompute all register parameters. It isn't safe to compute anything
0f41302f 2341 once we have started filling any specific hard regs. */
20efdf74 2342 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
51bbfa0c
RS
2343
2344#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
e5e809f4 2345
20efdf74
JL
2346 /* Save the fixed argument area if it's part of the caller's frame and
2347 is clobbered by argument setup for this call. */
2348 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2349 &low_to_save, &high_to_save);
b94301c2 2350#endif
20efdf74 2351
51bbfa0c
RS
2352
2353 /* Now store (and compute if necessary) all non-register parms.
2354 These come before register parms, since they can require block-moves,
2355 which could clobber the registers used for register parms.
2356 Parms which have partial registers are not stored here,
2357 but we do preallocate space here if they want that. */
2358
2359 for (i = 0; i < num_actuals; i++)
2360 if (args[i].reg == 0 || args[i].pass_on_stack)
2361 store_one_arg (&args[i], argblock, may_be_alloca,
c84e2712 2362 args_size.var != 0, reg_parm_stack_space);
51bbfa0c 2363
4ab56118
RK
2364 /* If we have a parm that is passed in registers but not in memory
2365 and whose alignment does not permit a direct copy into registers,
2366 make a group of pseudos that correspond to each register that we
2367 will later fill. */
45d44c98 2368 if (STRICT_ALIGNMENT)
20efdf74 2369 store_unaligned_arguments_into_pseudos (args, num_actuals);
4ab56118 2370
51bbfa0c
RS
2371 /* Now store any partially-in-registers parm.
2372 This is the last place a block-move can happen. */
2373 if (reg_parm_seen)
2374 for (i = 0; i < num_actuals; i++)
2375 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2376 store_one_arg (&args[i], argblock, may_be_alloca,
c84e2712 2377 args_size.var != 0, reg_parm_stack_space);
51bbfa0c
RS
2378
2379#ifndef PUSH_ARGS_REVERSED
c795bca9 2380#ifdef PREFERRED_STACK_BOUNDARY
51bbfa0c
RS
2381 /* If we pushed args in forward order, perform stack alignment
2382 after pushing the last arg. */
2383 if (argblock == 0)
3c0fca12 2384 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
51bbfa0c
RS
2385#endif
2386#endif
2387
756e0e12
RS
2388 /* If register arguments require space on the stack and stack space
2389 was not preallocated, allocate stack space here for arguments
2390 passed in registers. */
6e716e89 2391#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
756e0e12 2392 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 2393 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
2394#endif
2395
51bbfa0c
RS
2396 /* Pass the function the address in which to return a structure value. */
2397 if (structure_value_addr && ! structure_value_addr_parm)
2398 {
2399 emit_move_insn (struct_value_rtx,
2400 force_reg (Pmode,
e5d70561
RK
2401 force_operand (structure_value_addr,
2402 NULL_RTX)));
7815214e
RK
2403
2404 /* Mark the memory for the aggregate as write-only. */
7d384cc0 2405 if (current_function_check_memory_usage)
7815214e
RK
2406 emit_library_call (chkr_set_right_libfunc, 1,
2407 VOIDmode, 3,
6a9c4aed 2408 structure_value_addr, Pmode,
7815214e 2409 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
956d6950
JL
2410 GEN_INT (MEMORY_USE_WO),
2411 TYPE_MODE (integer_type_node));
7815214e 2412
51bbfa0c 2413 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 2414 use_reg (&call_fusage, struct_value_rtx);
51bbfa0c
RS
2415 }
2416
77cac2f2 2417 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
8b0f9101 2418
21a3b983 2419 load_register_parameters (args, num_actuals, &call_fusage);
51bbfa0c
RS
2420
2421 /* Perform postincrements before actually calling the function. */
2422 emit_queue ();
2423
c2939b57
JW
2424 /* Save a pointer to the last insn before the call, so that we can
2425 later safely search backwards to find the CALL_INSN. */
2426 before_call = get_last_insn ();
2427
51bbfa0c
RS
2428 /* All arguments and registers used for the call must be set up by now! */
2429
51bbfa0c 2430 /* Generate the actual call instruction. */
fb5eebb9
RH
2431 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2432 args_size.constant, struct_value_size,
51bbfa0c 2433 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
12a22e76 2434 valreg, old_inhibit_defer_pop, call_fusage, is_const, nothrow);
51bbfa0c
RS
2435
2436 /* If call is cse'able, make appropriate pair of reg-notes around it.
2437 Test valreg so we don't crash; may safely ignore `const'
80a3ad45
JW
2438 if return type is void. Disable for PARALLEL return values, because
2439 we have no way to move such values into a pseudo register. */
2440 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
51bbfa0c
RS
2441 {
2442 rtx note = 0;
2443 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2444 rtx insns;
2445
9ae8ffe7
JL
2446 /* Mark the return value as a pointer if needed. */
2447 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2448 {
2449 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2450 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2451 }
2452
51bbfa0c
RS
2453 /* Construct an "equal form" for the value which mentions all the
2454 arguments in order as well as the function name. */
2455#ifdef PUSH_ARGS_REVERSED
2456 for (i = 0; i < num_actuals; i++)
38a448ca 2457 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c
RS
2458#else
2459 for (i = num_actuals - 1; i >= 0; i--)
38a448ca 2460 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c 2461#endif
38a448ca 2462 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
51bbfa0c
RS
2463
2464 insns = get_insns ();
2465 end_sequence ();
2466
2467 emit_libcall_block (insns, temp, valreg, note);
2468
2469 valreg = temp;
2470 }
4f48d56a
RK
2471 else if (is_const)
2472 {
2473 /* Otherwise, just write out the sequence without a note. */
2474 rtx insns = get_insns ();
2475
2476 end_sequence ();
2477 emit_insns (insns);
2478 }
9ae8ffe7
JL
2479 else if (is_malloc)
2480 {
2481 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2482 rtx last, insns;
2483
2484 /* The return value from a malloc-like function is a pointer. */
2485 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2486 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2487
2488 emit_move_insn (temp, valreg);
2489
2490 /* The return value from a malloc-like function cannot alias
2491 anything else. */
2492 last = get_last_insn ();
2493 REG_NOTES (last) =
38a448ca 2494 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
9ae8ffe7
JL
2495
2496 /* Write out the sequence. */
2497 insns = get_insns ();
2498 end_sequence ();
2499 emit_insns (insns);
2500 valreg = temp;
2501 }
51bbfa0c
RS
2502
2503 /* For calls to `setjmp', etc., inform flow.c it should complain
2504 if nonvolatile values are live. */
2505
2506 if (returns_twice)
2507 {
c2939b57
JW
2508 /* The NOTE_INSN_SETJMP note must be emitted immediately after the
2509 CALL_INSN. Some ports emit more than just a CALL_INSN above, so
2510 we must search for it here. */
2511 rtx last = get_last_insn ();
2512 while (GET_CODE (last) != CALL_INSN)
2513 {
2514 last = PREV_INSN (last);
2515 /* There was no CALL_INSN? */
2516 if (last == before_call)
2517 abort ();
2518 }
2519 emit_note_after (NOTE_INSN_SETJMP, last);
51bbfa0c
RS
2520 current_function_calls_setjmp = 1;
2521 }
2522
2523 if (is_longjmp)
2524 current_function_calls_longjmp = 1;
2525
2526 /* Notice functions that cannot return.
2527 If optimizing, insns emitted below will be dead.
2528 If not optimizing, they will exist, which is useful
2529 if the user uses the `return' command in the debugger. */
2530
2531 if (is_volatile || is_longjmp)
2532 emit_barrier ();
2533
51bbfa0c
RS
2534 /* If value type not void, return an rtx for the value. */
2535
e976b8b2
MS
2536 /* If there are cleanups to be called, don't use a hard reg as target.
2537 We need to double check this and see if it matters anymore. */
e9a25f70 2538 if (any_pending_cleanups (1)
51bbfa0c
RS
2539 && target && REG_P (target)
2540 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2541 target = 0;
2542
2543 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2544 || ignore)
2545 {
2546 target = const0_rtx;
2547 }
2548 else if (structure_value_addr)
2549 {
2550 if (target == 0 || GET_CODE (target) != MEM)
29008b51 2551 {
38a448ca
RH
2552 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2553 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2554 structure_value_addr));
c6df88cb
MM
2555 MEM_SET_IN_STRUCT_P (target,
2556 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
29008b51 2557 }
51bbfa0c
RS
2558 }
2559 else if (pcc_struct_value)
2560 {
f78b5ca1
JL
2561 /* This is the special C++ case where we need to
2562 know what the true target was. We take care to
2563 never use this value more than once in one expression. */
38a448ca
RH
2564 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2565 copy_to_reg (valreg));
c6df88cb 2566 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
51bbfa0c 2567 }
cacbd532
JW
2568 /* Handle calls that return values in multiple non-contiguous locations.
2569 The Irix 6 ABI has examples of this. */
2570 else if (GET_CODE (valreg) == PARALLEL)
2571 {
aac5cc16
RH
2572 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2573
cacbd532
JW
2574 if (target == 0)
2575 {
2b4092f2 2576 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
c6df88cb 2577 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
cacbd532
JW
2578 preserve_temp_slots (target);
2579 }
2580
c5c76735
JL
2581 if (! rtx_equal_p (target, valreg))
2582 emit_group_store (target, valreg, bytes,
2583 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
cacbd532 2584 }
059c3d84
JW
2585 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2586 && GET_MODE (target) == GET_MODE (valreg))
2587 /* TARGET and VALREG cannot be equal at this point because the latter
2588 would not have REG_FUNCTION_VALUE_P true, while the former would if
2589 it were referring to the same register.
2590
2591 If they refer to the same register, this move will be a no-op, except
2592 when function inlining is being done. */
2593 emit_move_insn (target, valreg);
766b19fb 2594 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
c36fce9a 2595 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
51bbfa0c
RS
2596 else
2597 target = copy_to_reg (valreg);
2598
84b55618 2599#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
2600 /* If we promoted this return value, make the proper SUBREG. TARGET
2601 might be const0_rtx here, so be careful. */
2602 if (GET_CODE (target) == REG
766b19fb 2603 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
5d2ac65e 2604 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 2605 {
321e0bba
RK
2606 tree type = TREE_TYPE (exp);
2607 int unsignedp = TREE_UNSIGNED (type);
84b55618 2608
321e0bba
RK
2609 /* If we don't promote as expected, something is wrong. */
2610 if (GET_MODE (target)
2611 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
5d2ac65e
RK
2612 abort ();
2613
38a448ca 2614 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
84b55618
RK
2615 SUBREG_PROMOTED_VAR_P (target) = 1;
2616 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2617 }
2618#endif
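/* Example of the promotion handled above: on a target that defines
   PROMOTE_FUNCTION_RETURN, a function declared to return `short' hands
   its result back in a full SImode register. TARGET then becomes
   (subreg:HI (reg:SI ...) 0) with SUBREG_PROMOTED_VAR_P set, so later
   code knows the upper bits already hold the extension recorded by
   SUBREG_PROMOTED_UNSIGNED_P. */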
2619
2f4aa534
RS
2620 /* If size of args is variable or this was a constructor call for a stack
2621 argument, restore saved stack-pointer value. */
51bbfa0c
RS
2622
2623 if (old_stack_level)
2624 {
e5d70561 2625 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 2626 pending_stack_adjust = old_pending_adj;
d64f5a78 2627#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2628 stack_arg_under_construction = old_stack_arg_under_construction;
2629 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2630 stack_usage_map = initial_stack_usage_map;
d64f5a78 2631#endif
51bbfa0c 2632 }
51bbfa0c
RS
2633#ifdef ACCUMULATE_OUTGOING_ARGS
2634 else
2635 {
2636#ifdef REG_PARM_STACK_SPACE
2637 if (save_area)
20efdf74
JL
2638 restore_fixed_argument_area (save_area, argblock,
2639 high_to_save, low_to_save);
b94301c2 2640#endif
51bbfa0c 2641
51bbfa0c
RS
2642 /* If we saved any argument areas, restore them. */
2643 for (i = 0; i < num_actuals; i++)
2644 if (args[i].save_area)
2645 {
2646 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2647 rtx stack_area
38a448ca
RH
2648 = gen_rtx_MEM (save_mode,
2649 memory_address (save_mode,
2650 XEXP (args[i].stack_slot, 0)));
51bbfa0c
RS
2651
2652 if (save_mode != BLKmode)
2653 emit_move_insn (stack_area, args[i].save_area);
2654 else
2655 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 2656 GEN_INT (args[i].size.constant),
51bbfa0c
RS
2657 PARM_BOUNDARY / BITS_PER_UNIT);
2658 }
2659
2660 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2661 stack_usage_map = initial_stack_usage_map;
2662 }
2663#endif
2664
59257ff7
RK
2665 /* If this was alloca, record the new stack level for nonlocal gotos.
2666 Check for the handler slots since we might not have a save area
0f41302f 2667 for non-local gotos. */
59257ff7 2668
ba716ac9 2669 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
e5d70561 2670 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
2671
2672 pop_temp_slots ();
2673
8e6a59fe
MM
2674 /* Free up storage we no longer need. */
2675 for (i = 0; i < num_actuals; ++i)
2676 if (args[i].aligned_regs)
2677 free (args[i].aligned_regs);
2678
51bbfa0c
RS
2679 return target;
2680}
2681\f
12a22e76
JM
2682/* Returns nonzero if FUN is the symbol for a library function which
2683 cannot throw. */
2684
2685static int
2686libfunc_nothrow (fun)
2687 rtx fun;
2688{
2689 if (fun == throw_libfunc
2690 || fun == rethrow_libfunc
2691 || fun == sjthrow_libfunc
2692 || fun == sjpopnthrow_libfunc)
2693 return 0;
2694
2695 return 1;
2696}
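/* A sketch of a typical emit_library_call invocation (interface
   documented below), modelled on the checker calls made earlier in this
   file; FUNADDR here stands for some Pmode rtx and is hypothetical at
   this point: */
#if 0
  emit_library_call (chkr_check_exec_libfunc, 1 /* no_queue: const call */,
		     VOIDmode, 1 /* one argument */,
		     funaddr, Pmode);
#endif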
322e3e34 2697
3c0fca12
RH
2698/* Output a library call to function FUN (a SYMBOL_REF rtx)
2699 (emitting the queue unless NO_QUEUE is nonzero),
2700 for a value of mode OUTMODE,
2701 with NARGS different arguments, passed as alternating rtx values
2702 and machine_modes to convert them to.
2703 The rtx values should have been passed through protect_from_queue already.
2704
2705 NO_QUEUE will be true if and only if the library call is a `const' call
2706 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2707 to the variable is_const in expand_call.
2708
2709 NO_QUEUE must be true for const calls, because if it isn't, then
2710 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2711 and will be lost if the libcall sequence is optimized away.
2712
2713 NO_QUEUE must be false for non-const calls, because if it isn't, the
2714 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2715 optimized. For instance, the instruction scheduler may incorrectly
2716 move memory references across the non-const call. */
2717
2718void
2719emit_library_call VPARAMS((rtx orgfun, int no_queue, enum machine_mode outmode,
2720 int nargs, ...))
322e3e34 2721{
3c0fca12
RH
2722#ifndef ANSI_PROTOTYPES
2723 rtx orgfun;
2724 int no_queue;
2725 enum machine_mode outmode;
2726 int nargs;
2727#endif
2728 va_list p;
322e3e34
RK
2729 /* Total size in bytes of all the stack-parms scanned so far. */
2730 struct args_size args_size;
2731 /* Size of arguments before any adjustments (such as rounding). */
2732 struct args_size original_args_size;
2733 register int argnum;
322e3e34 2734 rtx fun;
322e3e34
RK
2735 int inc;
2736 int count;
4fc026cd 2737 struct args_size alignment_pad;
322e3e34
RK
2738 rtx argblock = 0;
2739 CUMULATIVE_ARGS args_so_far;
2740 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 2741 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
2742 struct arg *argvec;
2743 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2744 rtx call_fusage = 0;
e5e809f4 2745 int reg_parm_stack_space = 0;
12a22e76 2746 int nothrow;
f046b3cc
JL
2747#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2748 /* Define the boundary of the register parm stack space that needs to be
2749 save, if any. */
6a651371 2750 int low_to_save = -1, high_to_save = 0;
f046b3cc
JL
2751 rtx save_area = 0; /* Place that it is saved */
2752#endif
2753
2754#ifdef ACCUMULATE_OUTGOING_ARGS
2755 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2756 char *initial_stack_usage_map = stack_usage_map;
3c0fca12 2757 int needed;
f046b3cc
JL
2758#endif
2759
2760#ifdef REG_PARM_STACK_SPACE
3c0fca12 2761 /* Size of the stack reserved for parameter registers. */
f046b3cc
JL
2762#ifdef MAYBE_REG_PARM_STACK_SPACE
2763 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2764#else
ab87f8c8 2765 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
f046b3cc
JL
2766#endif
2767#endif
322e3e34 2768
3c0fca12 2769 VA_START (p, nargs);
c2f8b491 2770
3c0fca12
RH
2771#ifndef ANSI_PROTOTYPES
2772 orgfun = va_arg (p, rtx);
2773 no_queue = va_arg (p, int);
2774 outmode = va_arg (p, enum machine_mode);
2775 nargs = va_arg (p, int);
fac0ad80 2776#endif
779c643a 2777
3c0fca12 2778 fun = orgfun;
322e3e34 2779
3c0fca12 2780 nothrow = libfunc_nothrow (fun);
322e3e34
RK
2781
2782 /* Copy all the libcall-arguments out of the varargs data
2783 and into a vector ARGVEC.
2784
2785 Compute how to pass each argument. We only support a very small subset
2786 of the full argument passing conventions to limit complexity here since
2787 library functions shouldn't have many args. */
2788
3c0fca12
RH
2789 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2790 bzero ((char *) argvec, nargs * sizeof (struct arg));
2791
322e3e34 2792
eecb6f50 2793 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2794
2795 args_size.constant = 0;
2796 args_size.var = 0;
2797
888aa7a9
RS
2798 push_temp_slots ();
2799
3c0fca12
RH
2800#ifdef PREFERRED_STACK_BOUNDARY
2801 /* Ensure current function's preferred stack boundary is at least
2802 what we need. */
2803 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
2804 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
322e3e34
RK
2805#endif
2806
3c0fca12 2807 for (count = 0; count < nargs; count++)
322e3e34
RK
2808 {
2809 rtx val = va_arg (p, rtx);
2810 enum machine_mode mode = va_arg (p, enum machine_mode);
2811
2812 /* We cannot convert the arg value to the mode the library wants here;
2813 must do it earlier where we know the signedness of the arg. */
2814 if (mode == BLKmode
2815 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2816 abort ();
2817
2818 /* On some machines, there's no way to pass a float to a library fcn.
2819 Pass it as a double instead. */
2820#ifdef LIBGCC_NEEDS_DOUBLE
2821 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2822 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2823#endif
2824
2825 /* There's no need to call protect_from_queue, because
2826 either emit_move_insn or emit_push_insn will do that. */
2827
2828 /* Make sure it is a reasonable operand for a move or push insn. */
2829 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2830 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2831 val = force_operand (val, NULL_RTX);
2832
322e3e34
RK
2833#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2834 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2835 {
a44492f0
RK
2836 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2837 be viewed as just an efficiency improvement. */
888aa7a9
RS
2838 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2839 emit_move_insn (slot, val);
43bc5f13 2840 val = force_operand (XEXP (slot, 0), NULL_RTX);
888aa7a9
RS
2841 mode = Pmode;
2842 }
322e3e34
RK
2843#endif
2844
888aa7a9
RS
2845 argvec[count].value = val;
2846 argvec[count].mode = mode;
2847
322e3e34 2848 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
bf44874e 2849
322e3e34
RK
2850#ifdef FUNCTION_ARG_PARTIAL_NREGS
2851 argvec[count].partial
2852 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2853#else
2854 argvec[count].partial = 0;
2855#endif
2856
2857 locate_and_pad_parm (mode, NULL_TREE,
2858 argvec[count].reg && argvec[count].partial == 0,
2859 NULL_TREE, &args_size, &argvec[count].offset,
4fc026cd 2860 &argvec[count].size, &alignment_pad);
322e3e34
RK
2861
2862 if (argvec[count].size.var)
2863 abort ();
2864
e5e809f4 2865 if (reg_parm_stack_space == 0 && argvec[count].partial)
322e3e34 2866 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
322e3e34
RK
2867
2868 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 2869 || reg_parm_stack_space > 0)
322e3e34
RK
2870 args_size.constant += argvec[count].size.constant;
2871
0f41302f 2872 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34 2873 }
3c0fca12 2874 va_end (p);
322e3e34 2875
f046b3cc
JL
2876#ifdef FINAL_REG_PARM_STACK_SPACE
2877 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2878 args_size.var);
2879#endif
3c0fca12 2880
322e3e34
RK
2881 /* If this machine requires an external definition for library
2882 functions, write one out. */
2883 assemble_external_libcall (fun);
2884
2885 original_args_size = args_size;
c795bca9 2886#ifdef PREFERRED_STACK_BOUNDARY
3c0fca12
RH
2887 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2888 / STACK_BYTES) * STACK_BYTES);
322e3e34
RK
2889#endif
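/* Illustrative numbers for the rounding above: with STACK_BYTES == 16,
   a 20-byte argument block becomes ((20 + 15) / 16) * 16 == 32 bytes,
   so the outgoing arguments always end on a multiple of the preferred
   boundary. */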
2890
322e3e34 2891 args_size.constant = MAX (args_size.constant,
f046b3cc 2892 reg_parm_stack_space);
e5e809f4 2893
322e3e34 2894#ifndef OUTGOING_REG_PARM_STACK_SPACE
fc990856 2895 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
2896#endif
2897
322e3e34
RK
2898 if (args_size.constant > current_function_outgoing_args_size)
2899 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2900
2901#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
2902 /* Since the stack pointer will never be pushed, it is possible for
2903 the evaluation of a parm to clobber something we have already
2904 written to the stack. Since most function calls on RISC machines
2905 do not use the stack, this is uncommon, but must work correctly.
2906
2907 Therefore, we save any area of the stack that was already written
2908 and that we are using. Here we set up to do this by making a new
2909 stack usage map from the old one.
2910
2911 Another approach might be to try to reorder the argument
2912 evaluations to avoid this conflicting stack usage. */
2913
2914 needed = args_size.constant;
e5e809f4
JL
2915
2916#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc
JL
2917 /* Since we will be writing into the entire argument area, the
2918 map must be allocated for its entire size, not just the part that
2919 is the responsibility of the caller. */
2920 needed += reg_parm_stack_space;
2921#endif
2922
2923#ifdef ARGS_GROW_DOWNWARD
2924 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2925 needed + 1);
2926#else
2927 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2928 needed);
322e3e34 2929#endif
f046b3cc
JL
2930 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2931
2932 if (initial_highest_arg_in_use)
2933 bcopy (initial_stack_usage_map, stack_usage_map,
2934 initial_highest_arg_in_use);
2935
2936 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2937 bzero (&stack_usage_map[initial_highest_arg_in_use],
2938 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2939 needed = 0;
322e3e34 2940
f046b3cc
JL
2941 /* The address of the outgoing argument list must not be copied to a
2942 register here, because argblock would be left pointing to the
2943 wrong place after the call to allocate_dynamic_stack_space below.
2944 */
2945
2946 argblock = virtual_outgoing_args_rtx;
2947#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
2948#ifndef PUSH_ROUNDING
2949 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2950#endif
f046b3cc 2951#endif
322e3e34
RK
2952
2953#ifdef PUSH_ARGS_REVERSED
c795bca9 2954#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
2955 /* If we push args individually in reverse order, perform stack alignment
2956 before the first push (the last arg). */
2957 if (argblock == 0)
3c0fca12
RH
2958 anti_adjust_stack (GEN_INT (args_size.constant
2959 - original_args_size.constant));
322e3e34
RK
2960#endif
2961#endif
2962
2963#ifdef PUSH_ARGS_REVERSED
2964 inc = -1;
2965 argnum = nargs - 1;
2966#else
2967 inc = 1;
2968 argnum = 0;
2969#endif
2970
f046b3cc
JL
2971#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2972 /* The argument list is the property of the called routine, which
2973 may clobber it. If the fixed area has been used for previous
2974 parameters, we must save and restore it.
2975
2976 Here we compute the boundary of the area that needs to be saved, if any. */
2977
2978#ifdef ARGS_GROW_DOWNWARD
2979 for (count = 0; count < reg_parm_stack_space + 1; count++)
2980#else
2981 for (count = 0; count < reg_parm_stack_space; count++)
2982#endif
2983 {
2984 if (count >= highest_outgoing_arg_in_use
2985 || stack_usage_map[count] == 0)
2986 continue;
2987
2988 if (low_to_save == -1)
2989 low_to_save = count;
2990
2991 high_to_save = count;
2992 }
2993
2994 if (low_to_save >= 0)
2995 {
2996 int num_to_save = high_to_save - low_to_save + 1;
2997 enum machine_mode save_mode
2998 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2999 rtx stack_area;
3000
3001 /* If we don't have the required alignment, must do this in BLKmode. */
3002 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3003 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3004 save_mode = BLKmode;
3005
ceb83206 3006#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
3007 stack_area = gen_rtx_MEM (save_mode,
3008 memory_address (save_mode,
38a448ca 3009 plus_constant (argblock,
ceb83206 3010 - high_to_save)));
f046b3cc 3011#else
ceb83206
JL
3012 stack_area = gen_rtx_MEM (save_mode,
3013 memory_address (save_mode,
38a448ca 3014 plus_constant (argblock,
ceb83206 3015 low_to_save)));
f046b3cc 3016#endif
f046b3cc
JL
3017 if (save_mode == BLKmode)
3018 {
3019 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
f046b3cc
JL
3020 emit_block_move (validize_mem (save_area), stack_area,
3021 GEN_INT (num_to_save),
3022 PARM_BOUNDARY / BITS_PER_UNIT);
3023 }
3024 else
3025 {
3026 save_area = gen_reg_rtx (save_mode);
3027 emit_move_insn (save_area, stack_area);
3028 }
3029 }
3030#endif
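/* Example of the save-mode choice above, assuming a typical 32-bit
   target: if bytes 8 through 11 of the fixed register-parm area are in
   use, NUM_TO_SAVE is 4 and mode_for_size yields SImode, so the old
   contents are preserved with a single register move; if the range
   were not suitably aligned, SAVE_MODE falls back to BLKmode and a
   block move into a stack temporary is used instead. */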
3031
322e3e34
RK
3032 /* Push the args that need to be pushed. */
3033
5e26979c
JL
3034 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3035 are to be pushed. */
322e3e34
RK
3036 for (count = 0; count < nargs; count++, argnum += inc)
3037 {
3038 register enum machine_mode mode = argvec[argnum].mode;
3039 register rtx val = argvec[argnum].value;
3040 rtx reg = argvec[argnum].reg;
3041 int partial = argvec[argnum].partial;
69d4ca36 3042#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 3043 int lower_bound, upper_bound, i;
69d4ca36 3044#endif
322e3e34
RK
3045
3046 if (! (reg != 0 && partial == 0))
f046b3cc
JL
3047 {
3048#ifdef ACCUMULATE_OUTGOING_ARGS
3049 /* If this is being stored into a pre-allocated, fixed-size stack
3050 area, save any previous data at that location. */
3051
3052#ifdef ARGS_GROW_DOWNWARD
3053 /* stack_slot is negative, but we want to index stack_usage_map
3054 with positive values. */
3055 upper_bound = -argvec[argnum].offset.constant + 1;
3056 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 3057#else
3058 lower_bound = argvec[argnum].offset.constant;
3059 upper_bound = lower_bound + argvec[argnum].size.constant;
3060#endif
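 /* Worked example (assumed values): when ARGS_GROW_DOWNWARD is defined
    and argvec[argnum].offset.constant is -8 for a 4-byte argument, the
    indices become upper_bound == 9 and lower_bound == 5, so the loop
    below inspects bytes 5..8 of stack_usage_map.  */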
3061
3062 for (i = lower_bound; i < upper_bound; i++)
3063 if (stack_usage_map[i]
3064 /* Don't store things in the fixed argument area at this point;
3065 it has already been saved. */
e5e809f4 3066 && i > reg_parm_stack_space)
3067 break;
3068
3069 if (i != upper_bound)
3070 {
e5e809f4 3071 /* We need to make a save area. See what mode we can make it. */
f046b3cc 3072 enum machine_mode save_mode
5e26979c 3073 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3074 MODE_INT, 1);
3075 rtx stack_area
3076 = gen_rtx_MEM
3077 (save_mode,
3078 memory_address
3079 (save_mode,
3080 plus_constant (argblock,
3081 argvec[argnum].offset.constant)));
c5c76735 3082
3c0fca12 3083 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5e26979c 3084 emit_move_insn (argvec[argnum].save_area, stack_area);
3085 }
3086#endif
3087 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
e5e809f4 3088 argblock, GEN_INT (argvec[argnum].offset.constant),
4fc026cd 3089 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3090
3091#ifdef ACCUMULATE_OUTGOING_ARGS
3092 /* Now mark the segment we just used. */
3093 for (i = lower_bound; i < upper_bound; i++)
3094 stack_usage_map[i] = 1;
3095#endif
3096
3097 NO_DEFER_POP;
3098 }
3099 }
3100
3101#ifndef PUSH_ARGS_REVERSED
c795bca9 3102#ifdef PREFERRED_STACK_BOUNDARY
3103 /* If we pushed args in forward order, perform stack alignment
3104 after pushing the last arg. */
3105 if (argblock == 0)
3106 anti_adjust_stack (GEN_INT (args_size.constant
3107 - original_args_size.constant));
3108#endif
3109#endif
3110
3111#ifdef PUSH_ARGS_REVERSED
3112 argnum = nargs - 1;
3113#else
3114 argnum = 0;
3115#endif
3116
77cac2f2 3117 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 3118
3119 /* Now load any reg parms into their regs. */
3120
3121 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3122 are to be pushed. */
3123 for (count = 0; count < nargs; count++, argnum += inc)
3124 {
3125 register rtx val = argvec[argnum].value;
3126 rtx reg = argvec[argnum].reg;
3127 int partial = argvec[argnum].partial;
3128
3129 /* Handle calls that pass values in multiple non-contiguous
3130 locations. The PA64 has examples of this for library calls. */
19e3f61a 3131 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3132 emit_group_load (reg, val,
3133 GET_MODE_SIZE (GET_MODE (val)),
3134 GET_MODE_ALIGNMENT (GET_MODE (val)));
3135 else if (reg != 0 && partial == 0)
322e3e34 3136 emit_move_insn (reg, val);
bf44874e 3137
3138 NO_DEFER_POP;
3139 }
3140
3141 /* For version 1.37, try deleting this entirely. */
3142 if (! no_queue)
3143 emit_queue ();
3144
3145 /* Any regs containing parms remain in use through the call. */
322e3e34 3146 for (count = 0; count < nargs; count++)
bf44874e 3147 {
19e3f61a 3148 rtx reg = argvec[count].reg;
3c0fca12 3149 if (reg != 0 && GET_CODE (argvec[count].reg) == PARALLEL)
3150 use_group_regs (&call_fusage, reg);
3151 else if (reg != 0)
3152 use_reg (&call_fusage, reg);
bf44874e 3153 }
322e3e34 3154
3155 /* Don't allow popping to be deferred, since then
3156 cse'ing of library calls could delete a call and leave the pop. */
3157 NO_DEFER_POP;
3158
3159 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3160 will set inhibit_defer_pop to that value. */
3c0fca12 3161
3162 /* The return type is needed to decide how many bytes the function pops.
3163 Signedness plays no role in that, so for simplicity, we pretend it's
3164 always signed. We also assume that the list of arguments passed has
3165 no impact, so we pretend it is unknown. */
322e3e34 3166
2c8da025 3167 emit_call_1 (fun,
3c0fca12 3168 get_identifier (XSTR (orgfun, 0)),
3169 build_function_type (outmode == VOIDmode ? void_type_node
3170 : type_for_mode (outmode, 0), NULL_TREE),
3c0fca12 3171 original_args_size.constant, args_size.constant, 0,
322e3e34 3172 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3173 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
3174 old_inhibit_defer_pop + 1, call_fusage, no_queue, nothrow);
322e3e34 3175
3176 pop_temp_slots ();
3177
3178 /* Now restore inhibit_defer_pop to its actual original value. */
3179 OK_DEFER_POP;
fac0ad80 3180
3181#ifdef ACCUMULATE_OUTGOING_ARGS
3182#ifdef REG_PARM_STACK_SPACE
3183 if (save_area)
3184 {
3185 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 3186#ifdef ARGS_GROW_DOWNWARD
e9a25f70 3187 rtx stack_area
3188 = gen_rtx_MEM (save_mode,
3189 memory_address (save_mode,
ceb83206 3190 plus_constant (argblock,
3c0fca12 3191 - high_to_save)));
f046b3cc 3192#else
3193 rtx stack_area
3194 = gen_rtx_MEM (save_mode,
3195 memory_address (save_mode,
3196 plus_constant (argblock, low_to_save)));
f046b3cc 3197#endif
3c0fca12 3198
3199 if (save_mode != BLKmode)
3200 emit_move_insn (stack_area, save_area);
3201 else
3202 emit_block_move (stack_area, validize_mem (save_area),
3203 GEN_INT (high_to_save - low_to_save + 1),
3c0fca12 3204 PARM_BOUNDARY / BITS_PER_UNIT);
e9a25f70 3205 }
3206#endif
3207
3208 /* If we saved any argument areas, restore them. */
3209 for (count = 0; count < nargs; count++)
3210 if (argvec[count].save_area)
3211 {
3212 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3213 rtx stack_area
38a448ca 3214 = gen_rtx_MEM (save_mode,
3215 memory_address
3216 (save_mode,
3217 plus_constant (argblock,
3218 argvec[count].offset.constant)));
3219
3220 emit_move_insn (stack_area, argvec[count].save_area);
3221 }
3222
3223 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3224 stack_usage_map = initial_stack_usage_map;
3225#endif
3226}
3227\f
3228/* Like emit_library_call except that an extra argument, VALUE,
3229 comes second and says where to store the result.
3230 (If VALUE is zero, this function chooses a convenient way
3231 to return the value.
3232
3233 This function returns an rtx for where the value is to be found.
3234 If VALUE is nonzero, VALUE is returned. */
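/* A minimal usage sketch (hypothetical caller; LIBFUNC, OP0 and OP1 are
   placeholder rtx values, not symbols defined in this file):

     rtx res = emit_library_call_value (LIBFUNC, NULL_RTX, 1, SImode, 2,
					OP0, SImode, OP1, SImode);

   This passes OP0 and OP1 in SImode, lets this function choose where the
   SImode result lives, and returns that location in RES.  */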
3235
3236rtx
3237emit_library_call_value VPARAMS((rtx orgfun, rtx value, int no_queue,
3238 enum machine_mode outmode, int nargs, ...))
3239{
3240#ifndef ANSI_PROTOTYPES
3241 rtx orgfun;
3242 rtx value;
3243 int no_queue;
3244 enum machine_mode outmode;
3245 int nargs;
3246#endif
3247 va_list p;
3248 /* Total size in bytes of all the stack-parms scanned so far. */
3249 struct args_size args_size;
3250 /* Size of arguments before any adjustments (such as rounding). */
3251 struct args_size original_args_size;
3252 register int argnum;
3253 rtx fun;
3254 int inc;
3255 int count;
3256 struct args_size alignment_pad;
3257 rtx argblock = 0;
3258 CUMULATIVE_ARGS args_so_far;
3259 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3260 struct args_size offset; struct args_size size; rtx save_area; };
3261 struct arg *argvec;
3262 int old_inhibit_defer_pop = inhibit_defer_pop;
3263 rtx call_fusage = 0;
3264 rtx mem_value = 0;
3265 int pcc_struct_value = 0;
3266 int struct_value_size = 0;
3267 int is_const;
3268 int reg_parm_stack_space = 0;
3269 int nothrow;
3270#ifdef ACCUMULATE_OUTGOING_ARGS
3271 int needed;
3272#endif
3273
3274#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3275 /* Define the boundary of the register parm stack space that needs to be
3276 saved, if any. */
3277 int low_to_save = -1, high_to_save = 0;
3278 rtx save_area = 0; /* Place that it is saved */
3279#endif
3280
3281#ifdef ACCUMULATE_OUTGOING_ARGS
3282 /* Size of the stack reserved for parameter registers. */
3283 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3284 char *initial_stack_usage_map = stack_usage_map;
3285#endif
3286
3287#ifdef REG_PARM_STACK_SPACE
3288#ifdef MAYBE_REG_PARM_STACK_SPACE
3289 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3290#else
3291 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3292#endif
3293#endif
3294
3295 VA_START (p, nargs);
3296
3297#ifndef ANSI_PROTOTYPES
3298 orgfun = va_arg (p, rtx);
3299 value = va_arg (p, rtx);
3300 no_queue = va_arg (p, int);
3301 outmode = va_arg (p, enum machine_mode);
3302 nargs = va_arg (p, int);
3303#endif
3304
3305 is_const = no_queue;
3306 fun = orgfun;
3307
3308 nothrow = libfunc_nothrow (fun);
3309
3310#ifdef PREFERRED_STACK_BOUNDARY
3311 /* Ensure current function's preferred stack boundary is at least
3312 what we need. */
3313 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3314 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3315#endif
3316
3317 /* If this kind of value comes back in memory,
3318 decide where in memory it should come back. */
3319 if (aggregate_value_p (type_for_mode (outmode, 0)))
3320 {
3321#ifdef PCC_STATIC_STRUCT_RETURN
3322 rtx pointer_reg
3323 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3324 0, 0);
3325 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3326 pcc_struct_value = 1;
3327 if (value == 0)
3328 value = gen_reg_rtx (outmode);
3329#else /* not PCC_STATIC_STRUCT_RETURN */
3330 struct_value_size = GET_MODE_SIZE (outmode);
3331 if (value != 0 && GET_CODE (value) == MEM)
3332 mem_value = value;
3333 else
3334 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3335#endif
3336
3337 /* This call returns a big structure. */
3338 is_const = 0;
3339 }
3340
3341 /* ??? Unfinished: must pass the memory address as an argument. */
3342
3343 /* Copy all the libcall-arguments out of the varargs data
3344 and into a vector ARGVEC.
3345
3346 Compute how to pass each argument. We only support a very small subset
3347 of the full argument passing conventions to limit complexity here since
3348 library functions shouldn't have many args. */
3349
3350 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3351 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3352
3353 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3354
3355 args_size.constant = 0;
3356 args_size.var = 0;
3357
3358 count = 0;
3359
3360 push_temp_slots ();
3361
3362 /* If there's a structure value address to be passed,
3363 either pass it in the special place, or pass it as an extra argument. */
3364 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3365 {
3366 rtx addr = XEXP (mem_value, 0);
3367 nargs++;
3368
3369 /* Make sure it is a reasonable operand for a move or push insn. */
3370 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3371 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3372 addr = force_operand (addr, NULL_RTX);
3373
3374 argvec[count].value = addr;
3375 argvec[count].mode = Pmode;
3376 argvec[count].partial = 0;
3377
3378 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3379#ifdef FUNCTION_ARG_PARTIAL_NREGS
3380 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3381 abort ();
3382#endif
3383
3384 locate_and_pad_parm (Pmode, NULL_TREE,
3385 argvec[count].reg && argvec[count].partial == 0,
3386 NULL_TREE, &args_size, &argvec[count].offset,
3387 &argvec[count].size, &alignment_pad);
3388
3389
3390 if (argvec[count].reg == 0 || argvec[count].partial != 0
3391 || reg_parm_stack_space > 0)
3392 args_size.constant += argvec[count].size.constant;
3393
3394 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3395
3396 count++;
3397 }
3398
3399 for (; count < nargs; count++)
3400 {
3401 rtx val = va_arg (p, rtx);
3402 enum machine_mode mode = va_arg (p, enum machine_mode);
3403
3404 /* We cannot convert the arg value to the mode the library wants here;
3405 must do it earlier where we know the signedness of the arg. */
3406 if (mode == BLKmode
3407 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3408 abort ();
3409
3410 /* On some machines, there's no way to pass a float to a library fcn.
3411 Pass it as a double instead. */
3412#ifdef LIBGCC_NEEDS_DOUBLE
3413 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3414 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3415#endif
3416
3417 /* There's no need to call protect_from_queue, because
3418 either emit_move_insn or emit_push_insn will do that. */
3419
3420 /* Make sure it is a reasonable operand for a move or push insn. */
3421 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3422 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3423 val = force_operand (val, NULL_RTX);
3424
3425#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3426 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3427 {
3428 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3429 be viewed as just an efficiency improvement. */
3430 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3431 emit_move_insn (slot, val);
3432 val = XEXP (slot, 0);
3433 mode = Pmode;
3434 }
3435#endif
3436
3437 argvec[count].value = val;
3438 argvec[count].mode = mode;
3439
3440 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3441
3442#ifdef FUNCTION_ARG_PARTIAL_NREGS
3443 argvec[count].partial
3444 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3445#else
3446 argvec[count].partial = 0;
3447#endif
3448
3449 locate_and_pad_parm (mode, NULL_TREE,
3450 argvec[count].reg && argvec[count].partial == 0,
3451 NULL_TREE, &args_size, &argvec[count].offset,
3452 &argvec[count].size, &alignment_pad);
3453
3454 if (argvec[count].size.var)
3455 abort ();
3456
3457 if (reg_parm_stack_space == 0 && argvec[count].partial)
3458 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3459
3460 if (argvec[count].reg == 0 || argvec[count].partial != 0
3461 || reg_parm_stack_space > 0)
3462 args_size.constant += argvec[count].size.constant;
3463
3464 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3465 }
3466 va_end (p);
3467
3468#ifdef FINAL_REG_PARM_STACK_SPACE
3469 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3470 args_size.var);
3471#endif
3472 /* If this machine requires an external definition for library
3473 functions, write one out. */
3474 assemble_external_libcall (fun);
3475
3476 original_args_size = args_size;
3477#ifdef PREFERRED_STACK_BOUNDARY
3478 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3479 / STACK_BYTES) * STACK_BYTES);
3480#endif
3481
3482 args_size.constant = MAX (args_size.constant,
3483 reg_parm_stack_space);
3484
3485#ifndef OUTGOING_REG_PARM_STACK_SPACE
3486 args_size.constant -= reg_parm_stack_space;
3487#endif
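/* A self-contained sketch of the padding arithmetic above (illustrative
   only; this helper does not exist in this file):  */
#if 0
static int
round_up_to_stack_bytes (size)
     int size;
{
  /* Round SIZE up to the next multiple of STACK_BYTES.  */
  return ((size + STACK_BYTES - 1) / STACK_BYTES) * STACK_BYTES;
}
/* E.g. with PREFERRED_STACK_BOUNDARY == 64, STACK_BYTES is 8, so a
   20-byte argument block is padded to 24 bytes before
   reg_parm_stack_space is folded in.  */
#endif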
3488
3489 if (args_size.constant > current_function_outgoing_args_size)
3490 current_function_outgoing_args_size = args_size.constant;
3491
3492#ifdef ACCUMULATE_OUTGOING_ARGS
3493 /* Since the stack pointer will never be pushed, it is possible for
3494 the evaluation of a parm to clobber something we have already
3495 written to the stack. Since most function calls on RISC machines
3496 do not use the stack, this is uncommon, but must work correctly.
3497
3498 Therefore, we save any area of the stack that was already written
3499 and that we are using. Here we set up to do this by making a new
3500 stack usage map from the old one.
3501
3502 Another approach might be to try to reorder the argument
3503 evaluations to avoid this conflicting stack usage. */
3504
3505 needed = args_size.constant;
3506
3507#ifndef OUTGOING_REG_PARM_STACK_SPACE
3508 /* Since we will be writing into the entire argument area, the
3509 map must be allocated for its entire size, not just the part that
3510 is the responsibility of the caller. */
3511 needed += reg_parm_stack_space;
3512#endif
3513
3514#ifdef ARGS_GROW_DOWNWARD
3515 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3516 needed + 1);
3517#else
3518 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3519 needed);
3520#endif
3521 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3522
3523 if (initial_highest_arg_in_use)
3524 bcopy (initial_stack_usage_map, stack_usage_map,
3525 initial_highest_arg_in_use);
3526
3527 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3528 bzero (&stack_usage_map[initial_highest_arg_in_use],
3529 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3530 needed = 0;
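 /* Illustrative numbers (assumed, ignoring the ARGS_GROW_DOWNWARD
    adjustment): if 16 bytes of outgoing-argument space were already in
    use and this call needs 24, the map is re-allocated at 24 bytes, the
    first 16 entries are copied from the old map, and entries 16..23 are
    cleared.  */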
3531
3532 /* The address of the outgoing argument list must not be copied to a
3533 register here, because argblock would be left pointing to the
3534 wrong place after the call to allocate_dynamic_stack_space below.
3535 */
3536
3537 argblock = virtual_outgoing_args_rtx;
3538#else /* not ACCUMULATE_OUTGOING_ARGS */
3539#ifndef PUSH_ROUNDING
3540 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3541#endif
3542#endif
3543
3544#ifdef PUSH_ARGS_REVERSED
3545#ifdef PREFERRED_STACK_BOUNDARY
3546 /* If we push args individually in reverse order, perform stack alignment
3547 before the first push (the last arg). */
3548 if (argblock == 0)
3549 anti_adjust_stack (GEN_INT (args_size.constant
3550 - original_args_size.constant));
3551#endif
3552#endif
3553
3554#ifdef PUSH_ARGS_REVERSED
3555 inc = -1;
3556 argnum = nargs - 1;
3557#else
3558 inc = 1;
3559 argnum = 0;
3560#endif
3561
3562#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3563 /* The argument list is the property of the called routine and it
3564 may clobber it. If the fixed area has been used for previous
3565 parameters, we must save and restore it.
3566
3567 Here we compute the boundary of the area that needs to be saved, if any. */
3568
3569#ifdef ARGS_GROW_DOWNWARD
3570 for (count = 0; count < reg_parm_stack_space + 1; count++)
3571#else
3572 for (count = 0; count < reg_parm_stack_space; count++)
3573#endif
3574 {
3575 if (count >= highest_outgoing_arg_in_use
3576 || stack_usage_map[count] == 0)
3577 continue;
3578
3579 if (low_to_save == -1)
3580 low_to_save = count;
3581
3582 high_to_save = count;
3583 }
3584
3585 if (low_to_save >= 0)
3586 {
3587 int num_to_save = high_to_save - low_to_save + 1;
3588 enum machine_mode save_mode
3589 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3590 rtx stack_area;
3591
3592 /* If we don't have the required alignment, must do this in BLKmode. */
3593 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3594 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3595 save_mode = BLKmode;
3596
3597#ifdef ARGS_GROW_DOWNWARD
3598 stack_area = gen_rtx_MEM (save_mode,
3599 memory_address (save_mode,
3600 plus_constant (argblock,
3601 - high_to_save)));
3602#else
3603 stack_area = gen_rtx_MEM (save_mode,
3604 memory_address (save_mode,
3605 plus_constant (argblock,
3606 low_to_save)));
3607#endif
3608 if (save_mode == BLKmode)
3609 {
3610 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3611 emit_block_move (validize_mem (save_area), stack_area,
3612 GEN_INT (num_to_save),
3613 PARM_BOUNDARY / BITS_PER_UNIT);
3614 }
3615 else
3616 {
3617 save_area = gen_reg_rtx (save_mode);
3618 emit_move_insn (save_area, stack_area);
3619 }
3620 }
3621#endif
3622
3623 /* Push the args that need to be pushed. */
3624
3625 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3626 are to be pushed. */
3627 for (count = 0; count < nargs; count++, argnum += inc)
3628 {
3629 register enum machine_mode mode = argvec[argnum].mode;
3630 register rtx val = argvec[argnum].value;
3631 rtx reg = argvec[argnum].reg;
3632 int partial = argvec[argnum].partial;
3633#ifdef ACCUMULATE_OUTGOING_ARGS
3634 int lower_bound, upper_bound, i;
3635#endif
3636
3637 if (! (reg != 0 && partial == 0))
3638 {
3639#ifdef ACCUMULATE_OUTGOING_ARGS
3640 /* If this is being stored into a pre-allocated, fixed-size, stack
3641 area, save any previous data at that location. */
3642
3643#ifdef ARGS_GROW_DOWNWARD
3644 /* stack_slot is negative, but we want to index stack_usage_map
3645 with positive values. */
3646 upper_bound = -argvec[argnum].offset.constant + 1;
3647 lower_bound = upper_bound - argvec[argnum].size.constant;
3648#else
3649 lower_bound = argvec[argnum].offset.constant;
3650 upper_bound = lower_bound + argvec[argnum].size.constant;
3651#endif
3652
3653 for (i = lower_bound; i < upper_bound; i++)
3654 if (stack_usage_map[i]
3655 /* Don't store things in the fixed argument area at this point;
3656 it has already been saved. */
3657 && i > reg_parm_stack_space)
3658 break;
3659
3660 if (i != upper_bound)
3661 {
3662 /* We need to make a save area. See what mode we can make it. */
3663 enum machine_mode save_mode
3664 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3665 MODE_INT, 1);
3666 rtx stack_area
3667 = gen_rtx_MEM
3668 (save_mode,
3669 memory_address
3670 (save_mode,
3671 plus_constant (argblock,
3672 argvec[argnum].offset.constant)));
3673 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3674
3675 emit_move_insn (argvec[argnum].save_area, stack_area);
3676 }
3677#endif
3678 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3679 argblock, GEN_INT (argvec[argnum].offset.constant),
3680 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3681
3682#ifdef ACCUMULATE_OUTGOING_ARGS
3683 /* Now mark the segment we just used. */
3684 for (i = lower_bound; i < upper_bound; i++)
3685 stack_usage_map[i] = 1;
3686#endif
3687
3688 NO_DEFER_POP;
3689 }
3690 }
3691
3692#ifndef PUSH_ARGS_REVERSED
3693#ifdef PREFERRED_STACK_BOUNDARY
3694 /* If we pushed args in forward order, perform stack alignment
3695 after pushing the last arg. */
3696 if (argblock == 0)
3697 anti_adjust_stack (GEN_INT (args_size.constant
3698 - original_args_size.constant));
3699#endif
3700#endif
3701
3702#ifdef PUSH_ARGS_REVERSED
3703 argnum = nargs - 1;
3704#else
3705 argnum = 0;
3706#endif
3707
3708 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3709
3710 /* Now load any reg parms into their regs. */
3711
3712 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3713 are to be pushed. */
3714 for (count = 0; count < nargs; count++, argnum += inc)
3715 {
3716 register rtx val = argvec[argnum].value;
3717 rtx reg = argvec[argnum].reg;
3718 int partial = argvec[argnum].partial;
3719
3720 /* Handle calls that pass values in multiple non-contiguous
3721 locations. The PA64 has examples of this for library calls. */
3722 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3723 emit_group_load (reg, val,
3724 GET_MODE_SIZE (GET_MODE (val)),
3725 GET_MODE_ALIGNMENT (GET_MODE (val)));
3726 else if (reg != 0 && partial == 0)
3727 emit_move_insn (reg, val);
3728
3729 NO_DEFER_POP;
3730 }
3731
3732#if 0
3733 /* For version 1.37, try deleting this entirely. */
3734 if (! no_queue)
3735 emit_queue ();
3736#endif
3737
3738 /* Any regs containing parms remain in use through the call. */
3739 for (count = 0; count < nargs; count++)
3740 {
3741 rtx reg = argvec[count].reg;
3742 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3743 use_group_regs (&call_fusage, reg);
3744 else if (reg != 0)
3745 use_reg (&call_fusage, reg);
3746 }
3747
3748 /* Pass the function the address in which to return a structure value. */
3749 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3750 {
3751 emit_move_insn (struct_value_rtx,
3752 force_reg (Pmode,
3753 force_operand (XEXP (mem_value, 0),
3754 NULL_RTX)));
3755 if (GET_CODE (struct_value_rtx) == REG)
3756 use_reg (&call_fusage, struct_value_rtx);
3757 }
3758
3759 /* Don't allow popping to be deferred, since then
3760 cse'ing of library calls could delete a call and leave the pop. */
3761 NO_DEFER_POP;
3762
3763 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3764 will set inhibit_defer_pop to that value. */
3765 /* See the comment in emit_library_call about the function type we build
3766 and pass here. */
3767
3768 emit_call_1 (fun,
3769 get_identifier (XSTR (orgfun, 0)),
3770 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3771 original_args_size.constant, args_size.constant,
3772 struct_value_size,
3773 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3774 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3775 old_inhibit_defer_pop + 1, call_fusage, is_const, nothrow);
3776
3777 /* Now restore inhibit_defer_pop to its actual original value. */
3778 OK_DEFER_POP;
3779
3780 pop_temp_slots ();
3781
3782 /* Copy the value to the right place. */
3783 if (outmode != VOIDmode)
3784 {
3785 if (mem_value)
3786 {
3787 if (value == 0)
3788 value = mem_value;
3789 if (value != mem_value)
3790 emit_move_insn (value, mem_value);
3791 }
3792 else if (value != 0)
3793 emit_move_insn (value, hard_libcall_value (outmode));
3794 else
3795 value = hard_libcall_value (outmode);
3796 }
3797
3798#ifdef ACCUMULATE_OUTGOING_ARGS
3799#ifdef REG_PARM_STACK_SPACE
3800 if (save_area)
3801 {
3802 enum machine_mode save_mode = GET_MODE (save_area);
3803#ifdef ARGS_GROW_DOWNWARD
3804 rtx stack_area
3805 = gen_rtx_MEM (save_mode,
3806 memory_address (save_mode,
3807 plus_constant (argblock,
3808 - high_to_save)));
3809#else
3810 rtx stack_area
3811 = gen_rtx_MEM (save_mode,
3812 memory_address (save_mode,
3813 plus_constant (argblock, low_to_save)));
3814#endif
3815 if (save_mode != BLKmode)
3816 emit_move_insn (stack_area, save_area);
3817 else
3818 emit_block_move (stack_area, validize_mem (save_area),
3819 GEN_INT (high_to_save - low_to_save + 1),
3820 PARM_BOUNDARY / BITS_PER_UNIT);
3821 }
3822#endif
3823
3824 /* If we saved any argument areas, restore them. */
3825 for (count = 0; count < nargs; count++)
3826 if (argvec[count].save_area)
3827 {
3828 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3829 rtx stack_area
3830 = gen_rtx_MEM (save_mode,
3831 memory_address
3832 (save_mode,
3833 plus_constant (argblock,
3834 argvec[count].offset.constant)));
3835
3836 emit_move_insn (stack_area, argvec[count].save_area);
3837 }
3838
3839 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3840 stack_usage_map = initial_stack_usage_map;
3841#endif
43bc5f13 3842
fac0ad80 3843 return value;
3844}
3845\f
3846#if 0
3847/* Return an rtx which represents a suitable home on the stack
3848 given TYPE, the type of the argument looking for a home.
3849 This is called only for BLKmode arguments.
3850
3851 SIZE is the size needed for this target.
3852 ARGS_ADDR is the address of the bottom of the argument block for this call.
3853 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3854 if this machine uses push insns. */
3855
3856static rtx
3857target_for_arg (type, size, args_addr, offset)
3858 tree type;
3859 rtx size;
3860 rtx args_addr;
3861 struct args_size offset;
3862{
3863 rtx target;
3864 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3865
3866 /* We do not call memory_address if possible,
3867 because we want to address as close to the stack
3868 as possible. For non-variable sized arguments,
3869 this will be stack-pointer relative addressing. */
3870 if (GET_CODE (offset_rtx) == CONST_INT)
3871 target = plus_constant (args_addr, INTVAL (offset_rtx));
3872 else
3873 {
3874 /* I have no idea how to guarantee that this
3875 will work in the presence of register parameters. */
38a448ca 3876 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3877 target = memory_address (QImode, target);
3878 }
3879
38a448ca 3880 return gen_rtx_MEM (BLKmode, target);
3881}
3882#endif
3883\f
3884/* Store a single argument for a function call
3885 into the register or memory area where it must be passed.
3886 *ARG describes the argument value and where to pass it.
3887
3888 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 3889 or 0 on a machine where arguments are pushed individually.
3890
3891 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3892 so must be careful about how the stack is used.
3893
3894 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3895 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3896 that we need not worry about saving and restoring the stack.
3897
3898 FNDECL is the declaration of the function we are calling. */
3899
3900static void
c84e2712 3901store_one_arg (arg, argblock, may_be_alloca, variable_size,
6f90e075 3902 reg_parm_stack_space)
3903 struct arg_data *arg;
3904 rtx argblock;
3905 int may_be_alloca;
0f9b3ea6 3906 int variable_size ATTRIBUTE_UNUSED;
6f90e075 3907 int reg_parm_stack_space;
3908{
3909 register tree pval = arg->tree_value;
3910 rtx reg = 0;
3911 int partial = 0;
3912 int used = 0;
69d4ca36 3913#ifdef ACCUMULATE_OUTGOING_ARGS
6a651371 3914 int i, lower_bound = 0, upper_bound = 0;
69d4ca36 3915#endif
3916
3917 if (TREE_CODE (pval) == ERROR_MARK)
3918 return;
3919
3920 /* Push a new temporary level for any temporaries we make for
3921 this argument. */
3922 push_temp_slots ();
3923
3924#ifdef ACCUMULATE_OUTGOING_ARGS
3925 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3926 save any previous data at that location. */
3927 if (argblock && ! variable_size && arg->stack)
3928 {
3929#ifdef ARGS_GROW_DOWNWARD
3930 /* stack_slot is negative, but we want to index stack_usage_map
3931 with positive values. */
3932 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3933 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3934 else
50eb43ca 3935 upper_bound = 0;
3936
3937 lower_bound = upper_bound - arg->size.constant;
3938#else
3939 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3940 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3941 else
3942 lower_bound = 0;
3943
3944 upper_bound = lower_bound + arg->size.constant;
3945#endif
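 /* Illustration (assumed RTX shape, ARGS_GROW_DOWNWARD case): if
    arg->stack_slot is (mem (plus virtual_outgoing_args_rtx
    (const_int -12))) and the argument occupies 8 bytes, upper_bound is
    13 and lower_bound is 5, so map entries 5..12 are checked here and
    marked in use further below.  */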
3946
3947 for (i = lower_bound; i < upper_bound; i++)
3948 if (stack_usage_map[i]
3949 /* Don't store things in the fixed argument area at this point;
3950 it has already been saved. */
e5e809f4 3951 && i > reg_parm_stack_space)
3952 break;
3953
3954 if (i != upper_bound)
3955 {
3956 /* We need to make a save area. See what mode we can make it. */
3957 enum machine_mode save_mode
3958 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3959 rtx stack_area
3960 = gen_rtx_MEM (save_mode,
3961 memory_address (save_mode,
3962 XEXP (arg->stack_slot, 0)));
3963
3964 if (save_mode == BLKmode)
3965 {
3966 arg->save_area = assign_stack_temp (BLKmode,
6fa51029 3967 arg->size.constant, 0);
3968 MEM_SET_IN_STRUCT_P (arg->save_area,
3969 AGGREGATE_TYPE_P (TREE_TYPE
3970 (arg->tree_value)));
cc79451b 3971 preserve_temp_slots (arg->save_area);
51bbfa0c 3972 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 3973 GEN_INT (arg->size.constant),
3974 PARM_BOUNDARY / BITS_PER_UNIT);
3975 }
3976 else
3977 {
3978 arg->save_area = gen_reg_rtx (save_mode);
3979 emit_move_insn (arg->save_area, stack_area);
3980 }
3981 }
3982 }
3983
3984 /* Now that we have saved any slots that will be overwritten by this
3985 store, mark all slots this store will use. We must do this before
3986 we actually expand the argument since the expansion itself may
3987 trigger library calls which might need to use the same stack slot. */
3988 if (argblock && ! variable_size && arg->stack)
3989 for (i = lower_bound; i < upper_bound; i++)
3990 stack_usage_map[i] = 1;
3991#endif
3992
3993 /* If this isn't going to be placed on both the stack and in registers,
3994 set up the register and number of words. */
3995 if (! arg->pass_on_stack)
3996 reg = arg->reg, partial = arg->partial;
3997
3998 if (reg != 0 && partial == 0)
3999 /* Being passed entirely in a register. We shouldn't be called in
4000 this case. */
4001 abort ();
4002
4003 /* If this arg needs special alignment, don't load the registers
4004 here. */
4005 if (arg->n_aligned_regs != 0)
4006 reg = 0;
4ab56118 4007
4ab56118 4008 /* If this is being passed partially in a register, we can't evaluate
4009 it directly into its stack slot. Otherwise, we can. */
4010 if (arg->value == 0)
4011 {
4012#ifdef ACCUMULATE_OUTGOING_ARGS
4013 /* stack_arg_under_construction is nonzero if a function argument is
4014 being evaluated directly into the outgoing argument list and
4015 expand_call must take special action to preserve the argument list
4016 if it is called recursively.
4017
4018 For scalar function arguments stack_usage_map is sufficient to
4019 determine which stack slots must be saved and restored. Scalar
4020 arguments in general have pass_on_stack == 0.
4021
4022 If this argument is initialized by a function which takes the
4023 address of the argument (a C++ constructor or a C function
4024 returning a BLKmode structure), then stack_usage_map is
4025 insufficient and expand_call must push the stack around the
4026 function call. Such arguments have pass_on_stack == 1.
4027
4028 Note that it is always safe to set stack_arg_under_construction,
4029 but this generates suboptimal code if set when not needed. */
4030
4031 if (arg->pass_on_stack)
4032 stack_arg_under_construction++;
4033#endif
4034 arg->value = expand_expr (pval,
4035 (partial
4036 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4037 ? NULL_RTX : arg->stack,
e5d70561 4038 VOIDmode, 0);
4039
4040 /* If we are promoting object (or for any other reason) the mode
4041 doesn't agree, convert the mode. */
4042
4043 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4044 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4045 arg->value, arg->unsignedp);
1efe6448 4046
4047#ifdef ACCUMULATE_OUTGOING_ARGS
4048 if (arg->pass_on_stack)
4049 stack_arg_under_construction--;
4050#endif
4051 }
4052
4053 /* Don't allow anything left on stack from computation
4054 of argument to alloca. */
4055 if (may_be_alloca)
4056 do_pending_stack_adjust ();
4057
4058 if (arg->value == arg->stack)
7815214e 4059 {
c5c76735 4060 /* If the value is already in the stack slot, we are done. */
7d384cc0 4061 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
7815214e 4062 {
7815214e 4063 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 4064 XEXP (arg->stack, 0), Pmode,
7d384cc0 4065 ARGS_SIZE_RTX (arg->size),
7815214e 4066 TYPE_MODE (sizetype),
4067 GEN_INT (MEMORY_USE_RW),
4068 TYPE_MODE (integer_type_node));
4069 }
4070 }
1efe6448 4071 else if (arg->mode != BLKmode)
4072 {
4073 register int size;
4074
4075 /* Argument is a scalar, not entirely passed in registers.
4076 (If part is passed in registers, arg->partial says how much
4077 and emit_push_insn will take care of putting it there.)
4078
4079 Push it, and if its size is less than the
4080 amount of space allocated to it,
4081 also bump stack pointer by the additional space.
4082 Note that in C the default argument promotions
4083 will prevent such mismatches. */
4084
1efe6448 4085 size = GET_MODE_SIZE (arg->mode);
4086 /* Compute how much space the push instruction will push.
4087 On many machines, pushing a byte will advance the stack
4088 pointer by a halfword. */
4089#ifdef PUSH_ROUNDING
4090 size = PUSH_ROUNDING (size);
4091#endif
4092 used = size;
4093
4094 /* Compute how much space the argument should get:
4095 round up to a multiple of the alignment for arguments. */
1efe6448 4096 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4097 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4098 / (PARM_BOUNDARY / BITS_PER_UNIT))
4099 * (PARM_BOUNDARY / BITS_PER_UNIT));
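 /* For instance (assumed target values): with PARM_BOUNDARY == 32, a
    1-byte push that PUSH_ROUNDING leaves at 1 byte gets used == 4 when
    FUNCTION_ARG_PADDING asks for padding, so the stack pointer is
    bumped by the 3 extra bytes; a 2-byte value that PUSH_ROUNDING
    widens to 4 already fills its slot exactly.  */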
4100
4101 /* This isn't already where we want it on the stack, so put it there.
4102 This can either be done with push or copy insns. */
4103 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
4104 partial, reg, used - size, argblock,
4105 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4106 ARGS_SIZE_RTX (arg->alignment_pad));
4107
c2732da3 4108 arg_space_so_far += used;
4109 }
4110 else
4111 {
4112 /* BLKmode, at least partly to be pushed. */
4113
4114 register int excess;
4115 rtx size_rtx;
4116
4117 /* Pushing a nonscalar.
4118 If part is passed in registers, PARTIAL says how much
4119 and emit_push_insn will take care of putting it there. */
4120
4121 /* Round its size up to a multiple
4122 of the allocation unit for arguments. */
4123
4124 if (arg->size.var != 0)
4125 {
4126 excess = 0;
4127 size_rtx = ARGS_SIZE_RTX (arg->size);
4128 }
4129 else
4130 {
4131 /* PUSH_ROUNDING has no effect on us, because
4132 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 4133 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 4134 + partial * UNITS_PER_WORD);
e4f93898 4135 size_rtx = expr_size (pval);
c2732da3 4136 arg_space_so_far += excess + INTVAL (size_rtx);
4137 }
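 /* Example with assumed numbers: if TREE_TYPE (pval) occupies 10 bytes,
    arg->size.constant was padded to 12, and no part is passed in
    registers, excess is 2; emit_push_insn pushes the 10 bytes of data
    and accounts for the 2 bytes of padding separately.  */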
4138
1efe6448 4139 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c 4140 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
e5e809f4 4141 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
4142 reg_parm_stack_space,
4143 ARGS_SIZE_RTX (arg->alignment_pad));
4144 }
4145
4146
4147 /* Unless this is a partially-in-register argument, the argument is now
4148 in the stack.
4149
4150 ??? Note that this can change arg->value from arg->stack to
4151 arg->stack_slot and it matters when they are not the same.
4152 It isn't totally clear that this is correct in all cases. */
4153 if (partial == 0)
3b917a55 4154 arg->value = arg->stack_slot;
4155
4156 /* Once we have pushed something, pops can't safely
4157 be deferred during the rest of the arguments. */
4158 NO_DEFER_POP;
4159
4160 /* ANSI doesn't require a sequence point here,
4161 but PCC has one, so this will avoid some problems. */
4162 emit_queue ();
4163
4164 /* Free any temporary slots made in processing this argument. Show
4165 that we might have taken the address of something and pushed that
4166 as an operand. */
4167 preserve_temp_slots (NULL_RTX);
51bbfa0c 4168 free_temp_slots ();
cc79451b 4169 pop_temp_slots ();
51bbfa0c 4170}