51bbfa0c 1/* Convert function calls to rtl insns, for GNU C compiler.
a544cfd2 2 Copyright (C) 1989, 92-99, 2000 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
20
21#include "config.h"
22#include "system.h"
23#include "rtl.h"
24#include "tree.h"
25#include "flags.h"
26#include "expr.h"
49ad7cfa 27#include "function.h"
670ee920 28#include "regs.h"
51bbfa0c 29#include "insn-flags.h"
5f6da302 30#include "toplev.h"
d6f4ec51 31#include "output.h"
b1474bb7 32#include "tm_p.h"
51bbfa0c 33
34#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
35#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
36#endif
37
51bbfa0c 38/* Decide whether a function's arguments should be processed
39 from first to last or from last to first.
40
41 They should if the stack and args grow in opposite directions, but
42 only if we have push insns. */
51bbfa0c 43
51bbfa0c 44#ifdef PUSH_ROUNDING
bbc8a071 45
40083ddf 46#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
47#define PUSH_ARGS_REVERSED /* If it's last to first */
48#endif
bbc8a071 49
50#endif
51
52/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
53#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
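/* Editor's note (illustrative, not in the original source): on a target
   where PREFERRED_STACK_BOUNDARY is 64 bits and BITS_PER_UNIT is 8,
   STACK_BYTES evaluates to 8, so argument-block sizes computed below are
   rounded to multiples of 8 bytes.  */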
54
55/* Data structure and subroutines used within expand_call. */
56
57struct arg_data
58{
59 /* Tree node for this argument. */
60 tree tree_value;
61 /* Mode for value; TYPE_MODE unless promoted. */
62 enum machine_mode mode;
63 /* Current RTL value for argument, or 0 if it isn't precomputed. */
64 rtx value;
65 /* Initially-computed RTL value for argument; only for const functions. */
66 rtx initial_value;
67 /* Register to pass this argument in, 0 if passed on stack, or a
cacbd532 68 PARALLEL if the arg is to be copied into multiple non-contiguous
69 registers. */
70 rtx reg;
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
73 int unsignedp;
74 /* Number of registers to use. 0 means put the whole arg in registers.
75 Also 0 if not passed in registers. */
76 int partial;
77 /* Non-zero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
81 int pass_on_stack;
82 /* Offset of this argument from beginning of stack-args. */
83 struct args_size offset;
84 /* Similar, but offset to the start of the stack slot. Different from
85 OFFSET if this arg pads downward. */
86 struct args_size slot_offset;
87 /* Size of this argument on the stack, rounded up for any padding it gets;
88 parts of the argument passed in registers do not count.
89 If REG_PARM_STACK_SPACE is defined, then register parms
90 are counted here as well. */
91 struct args_size size;
92 /* Location on the stack at which parameter should be stored. The store
93 has already been done if STACK == VALUE. */
94 rtx stack;
95 /* Location on the stack of the start of this argument slot. This can
96 differ from STACK if this arg pads downward. This location is known
97 to be aligned to FUNCTION_ARG_BOUNDARY. */
98 rtx stack_slot;
99#ifdef ACCUMULATE_OUTGOING_ARGS
100 /* Place that this stack area has been saved, if needed. */
101 rtx save_area;
102#endif
103 /* If an argument's alignment does not permit direct copying into registers,
104 copy in smaller-sized pieces into pseudos. These are stored in a
105 block pointed to by this field. The next field says how many
106 word-sized pseudos we made. */
107 rtx *aligned_regs;
108 int n_aligned_regs;
109 /* The amount that the stack pointer needs to be adjusted to
110 force alignment for the next argument. */
111 struct args_size alignment_pad;
112};
113
114#ifdef ACCUMULATE_OUTGOING_ARGS
b94301c2 115/* A vector of one char per byte of stack space. A byte is non-zero if
116 the corresponding stack location has been used.
117 This vector is used to prevent a function call within an argument from
118 clobbering any stack already set up. */
119static char *stack_usage_map;
120
121/* Size of STACK_USAGE_MAP. */
122static int highest_outgoing_arg_in_use;
123
124/* stack_arg_under_construction is nonzero when an argument may be
125 initialized with a constructor call (including a C function that
126 returns a BLKmode struct) and expand_call must take special action
127 to make sure the object being constructed does not overlap the
128 argument list for the constructor call. */
129int stack_arg_under_construction;
130#endif
131
132static int calls_function PARAMS ((tree, int));
133static int calls_function_1 PARAMS ((tree, int));
134static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
135 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
136 rtx, int, rtx, int));
137static void precompute_register_parameters PARAMS ((int,
138 struct arg_data *,
139 int *));
140static void store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
141 int));
142static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
143 int));
144static int finalize_must_preallocate PARAMS ((int, int,
145 struct arg_data *,
146 struct args_size *));
147static void precompute_arguments PARAMS ((int, int, int,
148 struct arg_data *,
149 struct args_size *));
150static int compute_argument_block_size PARAMS ((int,
151 struct args_size *));
152static void initialize_argument_information PARAMS ((int,
153 struct arg_data *,
154 struct args_size *,
155 int, tree, tree,
156 CUMULATIVE_ARGS *,
157 int, rtx *, int *,
158 int *, int *));
159static void compute_argument_addresses PARAMS ((struct arg_data *,
160 rtx, int));
161static rtx rtx_for_function_call PARAMS ((tree, tree));
162static void load_register_parameters PARAMS ((struct arg_data *,
163 int, rtx *));
21a3b983 164
20efdf74 165#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
166static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
167static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
20efdf74 168#endif
51bbfa0c 169\f
170/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
171 `alloca'.
172
173 If WHICH is 0, return 1 if EXP contains a call to any function.
174 Actually, we only need to return 1 if evaluating EXP would require pushing
175 arguments on the stack, but that is too difficult to compute, so we just
176 assume any function call might require the stack. */
51bbfa0c 177
178static tree calls_function_save_exprs;
179
51bbfa0c 180static int
1ce0cb53 181calls_function (exp, which)
51bbfa0c 182 tree exp;
1ce0cb53 183 int which;
184{
185 int val;
186 calls_function_save_exprs = 0;
187 val = calls_function_1 (exp, which);
188 calls_function_save_exprs = 0;
189 return val;
190}
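/* Editor's sketch (not in the original source): the typical queries made
   by precompute_arguments further down look like

     if (calls_function (args[i].tree_value, 1))
       ...   the argument might expand to a call to alloca
     if (calls_function (args[i].tree_value, 0))
       ...   the argument might call some function  */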
191
192static int
193calls_function_1 (exp, which)
194 tree exp;
195 int which;
196{
197 register int i;
198 enum tree_code code = TREE_CODE (exp);
199 int type = TREE_CODE_CLASS (code);
200 int length = tree_code_length[(int) code];
51bbfa0c 201
ddd5a7c1 202 /* If this code is language-specific, we don't know what it will do. */
203 if ((int) code >= NUM_TREE_CODES)
204 return 1;
51bbfa0c 205
0207efa2 206 /* Only expressions and references can contain calls. */
207 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
208 && type != 'b')
209 return 0;
210
0207efa2 211 switch (code)
212 {
213 case CALL_EXPR:
214 if (which == 0)
215 return 1;
216 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
217 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
218 == FUNCTION_DECL))
219 {
220 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
221
222 if ((DECL_BUILT_IN (fndecl)
95815af9 223 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
224 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
225 || (DECL_SAVED_INSNS (fndecl)
49ad7cfa 226 && DECL_SAVED_INSNS (fndecl)->calls_alloca))
227 return 1;
228 }
229
230 /* Third operand is RTL. */
231 length = 2;
232 break;
233
234 case SAVE_EXPR:
235 if (SAVE_EXPR_RTL (exp) != 0)
236 return 0;
237 if (value_member (exp, calls_function_save_exprs))
238 return 0;
239 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
240 calls_function_save_exprs);
241 return (TREE_OPERAND (exp, 0) != 0
242 && calls_function_1 (TREE_OPERAND (exp, 0), which));
243
244 case BLOCK:
245 {
246 register tree local;
247
248 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
1ce0cb53 249 if (DECL_INITIAL (local) != 0
1c8d7aef 250 && calls_function_1 (DECL_INITIAL (local), which))
251 return 1;
252 }
253 {
254 register tree subblock;
255
256 for (subblock = BLOCK_SUBBLOCKS (exp);
257 subblock;
258 subblock = TREE_CHAIN (subblock))
1c8d7aef 259 if (calls_function_1 (subblock, which))
260 return 1;
261 }
262 return 0;
263
264 case METHOD_CALL_EXPR:
265 length = 3;
266 break;
267
268 case WITH_CLEANUP_EXPR:
269 length = 1;
270 break;
271
272 case RTL_EXPR:
273 return 0;
274
275 default:
276 break;
277 }
278
279 for (i = 0; i < length; i++)
280 if (TREE_OPERAND (exp, i) != 0
1c8d7aef 281 && calls_function_1 (TREE_OPERAND (exp, i), which))
282 return 1;
283
284 return 0;
285}
286\f
287/* Force FUNEXP into a form suitable for the address of a CALL,
288 and return that as an rtx. Also load the static chain register
289 if FNDECL is a nested function.
290
291 CALL_FUSAGE points to a variable holding the prospective
292 CALL_INSN_FUNCTION_USAGE information. */
51bbfa0c 293
03dacb02 294rtx
77cac2f2 295prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
296 rtx funexp;
297 tree fndecl;
77cac2f2 298 rtx *call_fusage;
01368078 299 int reg_parm_seen;
300{
301 rtx static_chain_value = 0;
302
303 funexp = protect_from_queue (funexp, 0);
304
305 if (fndecl != 0)
0f41302f 306 /* Get possible static chain value for nested function in C. */
307 static_chain_value = lookup_static_chain (fndecl);
308
309 /* Make a valid memory address and copy constants thru pseudo-regs,
310 but not for a constant address if -fno-function-cse. */
311 if (GET_CODE (funexp) != SYMBOL_REF)
01368078 312 /* If we are using registers for parameters, force the
313 function address into a register now. */
314 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
315 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
316 : memory_address (FUNCTION_MODE, funexp));
317 else
318 {
319#ifndef NO_FUNCTION_CSE
320 if (optimize && ! flag_no_function_cse)
321#ifdef NO_RECURSIVE_FUNCTION_CSE
322 if (fndecl != current_function_decl)
323#endif
324 funexp = force_reg (Pmode, funexp);
325#endif
326 }
327
328 if (static_chain_value != 0)
329 {
330 emit_move_insn (static_chain_rtx, static_chain_value);
331
332 if (GET_CODE (static_chain_rtx) == REG)
333 use_reg (call_fusage, static_chain_rtx);
334 }
335
336 return funexp;
337}
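/* Editor's sketch (not in the original source, assuming the usual caller):
   expand_call is expected to invoke this roughly as

     funexp = prepare_call_address (funexp, fndecl, &call_fusage,
                                    reg_parm_seen);

   so that CALL_FUSAGE picks up a USE of the static chain register when
   FNDECL is a nested function.  */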
338
339/* Generate instructions to call function FUNEXP,
340 and optionally pop the results.
341 The CALL_INSN is the first insn generated.
342
607ea900 343 FNDECL is the declaration node of the function. This is given to the
344 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
345
346 FUNTYPE is the data type of the function. This is given to the macro
347 RETURN_POPS_ARGS to determine whether this function pops its own args.
348 We used to allow an identifier for library functions, but that doesn't
349 work when the return type is an aggregate type and the calling convention
350 says that the pointer to this aggregate is to be popped by the callee.
351
352 STACK_SIZE is the number of bytes of arguments on the stack,
c795bca9 353 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
354 This is both to put into the call insn and
355 to generate explicit popping code if necessary.
356
357 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
358 It is zero if this call doesn't want a structure value.
359
360 NEXT_ARG_REG is the rtx that results from executing
361 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
362 just after all the args have had their registers assigned.
363 This could be whatever you like, but normally it is the first
364 arg-register beyond those used for args in this call,
365 or 0 if all the arg-registers are used in this call.
366 It is passed on to `gen_call' so you can put this info in the call insn.
367
368 VALREG is a hard register in which a value is returned,
369 or 0 if the call does not return a value.
370
371 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
372 the args to this call were processed.
373 We restore `inhibit_defer_pop' to that value.
374
375 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
376 denote registers used by the called function.
377
378 IS_CONST is true if this is a `const' call. */
379
322e3e34 380static void
381emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
382 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
383 call_fusage, is_const)
51bbfa0c 384 rtx funexp;
385 tree fndecl ATTRIBUTE_UNUSED;
386 tree funtype ATTRIBUTE_UNUSED;
6a651371 387 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
fb5eebb9 388 HOST_WIDE_INT rounded_stack_size;
962f1324 389 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
390 rtx next_arg_reg;
391 rtx valreg;
392 int old_inhibit_defer_pop;
77cac2f2 393 rtx call_fusage;
394 int is_const;
395{
062e7fd8 396 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
57bed152 397#if defined (HAVE_call) && defined (HAVE_call_value)
e5d70561 398 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
57bed152 399#endif
51bbfa0c 400 rtx call_insn;
081f5e7e 401#ifndef ACCUMULATE_OUTGOING_ARGS
51bbfa0c 402 int already_popped = 0;
fb5eebb9 403 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
081f5e7e 404#endif
405
406 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
407 and we don't want to load it into a register as an optimization,
408 because prepare_call_address already did it if it should be done. */
409 if (GET_CODE (funexp) != SYMBOL_REF)
410 funexp = memory_address (FUNCTION_MODE, funexp);
411
412#ifndef ACCUMULATE_OUTGOING_ARGS
413#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
414/* If the target has "call" or "call_value" insns, then prefer them
415 if no arguments are actually popped. If the target does not have
416 "call" or "call_value" insns, then we must use the popping versions
417 even if the call has no arguments to pop. */
418#if defined (HAVE_call) && defined (HAVE_call_value)
419 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
420 && n_popped > 0)
421#else
422 if (HAVE_call_pop && HAVE_call_value_pop)
423#endif
51bbfa0c 424 {
fb5eebb9 425 rtx n_pop = GEN_INT (n_popped);
426 rtx pat;
427
428 /* If this subroutine pops its own args, record that in the call insn
429 if possible, for the sake of frame pointer elimination. */
2c8da025 430
431 if (valreg)
432 pat = gen_call_value_pop (valreg,
38a448ca 433 gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 434 rounded_stack_size_rtx, next_arg_reg, n_pop);
51bbfa0c 435 else
38a448ca 436 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 437 rounded_stack_size_rtx, next_arg_reg, n_pop);
438
439 emit_call_insn (pat);
440 already_popped = 1;
441 }
442 else
443#endif
444#endif
445
446#if defined (HAVE_call) && defined (HAVE_call_value)
447 if (HAVE_call && HAVE_call_value)
448 {
449 if (valreg)
450 emit_call_insn (gen_call_value (valreg,
38a448ca 451 gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 452 rounded_stack_size_rtx, next_arg_reg,
e992302c 453 NULL_RTX));
51bbfa0c 454 else
38a448ca 455 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 456 rounded_stack_size_rtx, next_arg_reg,
457 struct_value_size_rtx));
458 }
459 else
460#endif
461 abort ();
462
77cac2f2 463 /* Find the CALL insn we just emitted. */
464 for (call_insn = get_last_insn ();
465 call_insn && GET_CODE (call_insn) != CALL_INSN;
466 call_insn = PREV_INSN (call_insn))
467 ;
468
469 if (! call_insn)
470 abort ();
471
472 /* Put the register usage information on the CALL. If there is already
473 some usage information, put ours at the end. */
474 if (CALL_INSN_FUNCTION_USAGE (call_insn))
475 {
476 rtx link;
477
478 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
479 link = XEXP (link, 1))
480 ;
481
482 XEXP (link, 1) = call_fusage;
483 }
484 else
485 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
486
487 /* If this is a const call, then set the insn's unchanging bit. */
488 if (is_const)
489 CONST_CALL_P (call_insn) = 1;
490
491 /* Restore this now, so that we do defer pops for this call's args
492 if the context of the call as a whole permits. */
493 inhibit_defer_pop = old_inhibit_defer_pop;
494
495#ifndef ACCUMULATE_OUTGOING_ARGS
496 /* If returning from the subroutine does not automatically pop the args,
497 we need an instruction to pop them sooner or later.
498 Perhaps do it now; perhaps just record how much space to pop later.
499
500 If returning from the subroutine does pop the args, indicate that the
501 stack pointer will be changed. */
502
fb5eebb9 503 if (n_popped > 0)
504 {
505 if (!already_popped)
e3da301d 506 CALL_INSN_FUNCTION_USAGE (call_insn)
507 = gen_rtx_EXPR_LIST (VOIDmode,
508 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
509 CALL_INSN_FUNCTION_USAGE (call_insn));
fb5eebb9 510 rounded_stack_size -= n_popped;
062e7fd8 511 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
512 }
513
fb5eebb9 514 if (rounded_stack_size != 0)
51bbfa0c 515 {
70a73141 516 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
fb5eebb9 517 pending_stack_adjust += rounded_stack_size;
51bbfa0c 518 else
062e7fd8 519 adjust_stack (rounded_stack_size_rtx);
520 }
521#endif
522}
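/* Editor's worked example (not in the original source), for a
   hypothetical callee that pops 8 of its own argument bytes on a target
   providing the popping patterns: with ROUNDED_STACK_SIZE == 16 and
   RETURN_POPS_ARGS yielding 8, a call_pop/call_value_pop pattern is
   emitted, ROUNDED_STACK_SIZE drops to 8, and only those remaining
   8 bytes go to pending_stack_adjust or adjust_stack above.  */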
523
524/* Determine if the function identified by NAME and FNDECL is one with
525 special properties we wish to know about.
526
527 For example, if the function might return more than one time (setjmp), then
528 set RETURNS_TWICE to a nonzero value.
529
530 Similarly, set IS_LONGJMP if the function is in the longjmp family.
531
532 Set IS_MALLOC for any of the standard memory allocation functions which
533 allocate from the heap.
534
535 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
536 space from the stack, such as alloca. */
537
538void
539special_function_p (fndecl, returns_twice, is_longjmp,
20efdf74 540 is_malloc, may_be_alloca)
541 tree fndecl;
542 int *returns_twice;
543 int *is_longjmp;
544 int *is_malloc;
545 int *may_be_alloca;
546{
547 *returns_twice = 0;
548 *is_longjmp = 0;
549 *may_be_alloca = 0;
550
551 /* The function decl may have the `malloc' attribute. */
552 *is_malloc = fndecl && DECL_IS_MALLOC (fndecl);
553
554 if (! *is_malloc
555 && fndecl && DECL_NAME (fndecl)
140592a0 556 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
557 /* Exclude functions not at the file scope, or not `extern',
558 since they are not the magic functions we would otherwise
559 think they are. */
560 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
561 {
3a8c995b 562 char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
563 char *tname = name;
564
565 /* We assume that alloca will always be called by name. It
566 makes no sense to pass it as a pointer-to-function to
567 anything that does not understand its behavior. */
568 *may_be_alloca
569 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
570 && name[0] == 'a'
571 && ! strcmp (name, "alloca"))
572 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
573 && name[0] == '_'
574 && ! strcmp (name, "__builtin_alloca"))));
575
576 /* Disregard prefix _, __ or __x. */
577 if (name[0] == '_')
578 {
579 if (name[1] == '_' && name[2] == 'x')
580 tname += 3;
581 else if (name[1] == '_')
582 tname += 2;
583 else
584 tname += 1;
585 }
586
587 if (tname[0] == 's')
588 {
589 *returns_twice
590 = ((tname[1] == 'e'
591 && (! strcmp (tname, "setjmp")
592 || ! strcmp (tname, "setjmp_syscall")))
593 || (tname[1] == 'i'
594 && ! strcmp (tname, "sigsetjmp"))
595 || (tname[1] == 'a'
596 && ! strcmp (tname, "savectx")));
597 if (tname[1] == 'i'
598 && ! strcmp (tname, "siglongjmp"))
599 *is_longjmp = 1;
600 }
601 else if ((tname[0] == 'q' && tname[1] == 's'
602 && ! strcmp (tname, "qsetjmp"))
603 || (tname[0] == 'v' && tname[1] == 'f'
604 && ! strcmp (tname, "vfork")))
605 *returns_twice = 1;
606
607 else if (tname[0] == 'l' && tname[1] == 'o'
608 && ! strcmp (tname, "longjmp"))
609 *is_longjmp = 1;
140592a0 610 /* Do not add any more malloc-like functions to this list,
611 instead mark them as malloc functions using the malloc attribute.
612 Note, realloc is not suitable for attribute malloc since
613 it may return the same address across multiple calls. */
614 else if (! strcmp (tname, "malloc")
615 || ! strcmp (tname, "calloc")
82514696 616 || ! strcmp (tname, "strdup")
617 /* Note use of NAME rather than TNAME here. These functions
618 are only reserved when preceded with __. */
619 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
620 || ! strcmp (name, "__nw") /* mangled __builtin_new */
621 || ! strcmp (name, "__builtin_new")
622 || ! strcmp (name, "__builtin_vec_new"))
623 *is_malloc = 1;
624 }
625}
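/* Editor's sketch (not in the original source): a caller queries this
   routine along the lines of

     int returns_twice, is_longjmp, is_malloc, may_be_alloca;
     special_function_p (fndecl, &returns_twice, &is_longjmp,
                         &is_malloc, &may_be_alloca);

   and then treats the call specially (setjmp-style) when RETURNS_TWICE
   comes back nonzero, as expand_call does below.  */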
626
627/* Precompute all register parameters as described by ARGS, storing values
628 into fields within the ARGS array.
629
630 NUM_ACTUALS indicates the total number of elements in the ARGS array.
631
632 Set REG_PARM_SEEN if we encounter a register parameter. */
633
634static void
635precompute_register_parameters (num_actuals, args, reg_parm_seen)
636 int num_actuals;
637 struct arg_data *args;
638 int *reg_parm_seen;
639{
640 int i;
641
642 *reg_parm_seen = 0;
643
644 for (i = 0; i < num_actuals; i++)
645 if (args[i].reg != 0 && ! args[i].pass_on_stack)
646 {
647 *reg_parm_seen = 1;
648
649 if (args[i].value == 0)
650 {
651 push_temp_slots ();
652 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
653 VOIDmode, 0);
654 preserve_temp_slots (args[i].value);
655 pop_temp_slots ();
656
657 /* ANSI doesn't require a sequence point here,
658 but PCC has one, so this will avoid some problems. */
659 emit_queue ();
660 }
661
662 /* If we are to promote the function arg to a wider mode,
663 do it now. */
664
665 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
666 args[i].value
667 = convert_modes (args[i].mode,
668 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
669 args[i].value, args[i].unsignedp);
670
671 /* If the value is expensive, and we are inside an appropriately
672 short loop, put the value into a pseudo and then put the pseudo
673 into the hard reg.
674
675 For small register classes, also do this if this call uses
676 register parameters. This is to avoid reload conflicts while
677 loading the parameter registers. */
678
679 if ((! (GET_CODE (args[i].value) == REG
680 || (GET_CODE (args[i].value) == SUBREG
681 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
682 && args[i].mode != BLKmode
683 && rtx_cost (args[i].value, SET) > 2
684 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
685 || preserve_subexpressions_p ()))
686 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
687 }
688}
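/* Editor's note (illustrative, not in the original source): if
   PROMOTE_FUNCTION_ARGS widened a `short' argument so that args[i].mode
   is SImode while TYPE_MODE of its tree is HImode, the convert_modes
   call above produces the SImode value, extending according to
   args[i].unsignedp.  */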
689
690#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
691
692 /* The argument list is the property of the called routine and it
693 may clobber it. If the fixed area has been used for previous
694 parameters, we must save and restore it. */
695static rtx
696save_fixed_argument_area (reg_parm_stack_space, argblock,
697 low_to_save, high_to_save)
698 int reg_parm_stack_space;
699 rtx argblock;
700 int *low_to_save;
701 int *high_to_save;
702{
703 int i;
704 rtx save_area = NULL_RTX;
705
706 /* Compute the boundary of the area that needs to be saved, if any. */
707#ifdef ARGS_GROW_DOWNWARD
708 for (i = 0; i < reg_parm_stack_space + 1; i++)
709#else
710 for (i = 0; i < reg_parm_stack_space; i++)
711#endif
712 {
713 if (i >= highest_outgoing_arg_in_use
714 || stack_usage_map[i] == 0)
715 continue;
716
717 if (*low_to_save == -1)
718 *low_to_save = i;
719
720 *high_to_save = i;
721 }
722
723 if (*low_to_save >= 0)
724 {
725 int num_to_save = *high_to_save - *low_to_save + 1;
726 enum machine_mode save_mode
727 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
728 rtx stack_area;
729
730 /* If we don't have the required alignment, must do this in BLKmode. */
731 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
732 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
733 save_mode = BLKmode;
734
735#ifdef ARGS_GROW_DOWNWARD
736 stack_area = gen_rtx_MEM (save_mode,
737 memory_address (save_mode,
738 plus_constant (argblock,
739 - *high_to_save)));
740#else
741 stack_area = gen_rtx_MEM (save_mode,
742 memory_address (save_mode,
743 plus_constant (argblock,
744 *low_to_save)));
745#endif
746 if (save_mode == BLKmode)
747 {
748 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
749 /* Cannot use emit_block_move here because it can be implemented as a
750 library call which in turn would get back into this code, causing
751 infinite recursion. */
752 move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
753 PARM_BOUNDARY / BITS_PER_UNIT);
754 }
755 else
756 {
757 save_area = gen_reg_rtx (save_mode);
758 emit_move_insn (save_area, stack_area);
759 }
760 }
761 return save_area;
762}
763
764static void
765restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
766 rtx save_area;
767 rtx argblock;
768 int high_to_save;
769 int low_to_save;
770{
771 enum machine_mode save_mode = GET_MODE (save_area);
772#ifdef ARGS_GROW_DOWNWARD
773 rtx stack_area
774 = gen_rtx_MEM (save_mode,
775 memory_address (save_mode,
776 plus_constant (argblock,
777 - high_to_save)));
778#else
779 rtx stack_area
780 = gen_rtx_MEM (save_mode,
781 memory_address (save_mode,
782 plus_constant (argblock,
783 low_to_save)));
784#endif
785
786 if (save_mode != BLKmode)
787 emit_move_insn (stack_area, save_area);
788 else
789 /* Cannot use emit_block_move here because it can be implemented as a
790 library call which in turn would get back into this code, causing
791 infinite recursion. */
792 move_by_pieces (stack_area, validize_mem (save_area),
793 high_to_save - low_to_save + 1,
794 PARM_BOUNDARY / BITS_PER_UNIT);
795}
796#endif
797
798/* If any elements in ARGS refer to parameters that are to be passed in
799 registers, but not in memory, and whose alignment does not permit a
800 direct copy into registers, copy the values into a group of pseudos
801 which we will later copy into the appropriate hard registers.
802
803 Pseudos for each unaligned argument will be stored into the array
804 args[argnum].aligned_regs. The caller is responsible for deallocating
805 the aligned_regs array if it is nonzero. */
806
807static void
808store_unaligned_arguments_into_pseudos (args, num_actuals)
809 struct arg_data *args;
810 int num_actuals;
811{
812 int i, j;
813
814 for (i = 0; i < num_actuals; i++)
815 if (args[i].reg != 0 && ! args[i].pass_on_stack
816 && args[i].mode == BLKmode
817 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
818 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
819 {
820 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
821 int big_endian_correction = 0;
822
823 args[i].n_aligned_regs
824 = args[i].partial ? args[i].partial
825 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
826
827 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
828 * args[i].n_aligned_regs);
829
830 /* Structures smaller than a word are aligned to the least
831 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
832 this means we must skip the empty high order bytes when
833 calculating the bit offset. */
834 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
835 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
836
837 for (j = 0; j < args[i].n_aligned_regs; j++)
838 {
839 rtx reg = gen_reg_rtx (word_mode);
840 rtx word = operand_subword_force (args[i].value, j, BLKmode);
841 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
842 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
843
844 args[i].aligned_regs[j] = reg;
845
846 /* There is no need to restrict this code to loading items
847 in TYPE_ALIGN sized hunks. The bitfield instructions can
848 load up entire word sized registers efficiently.
849
850 ??? This may not be needed anymore.
851 We used to emit a clobber here but that doesn't let later
852 passes optimize the instructions we emit. By storing 0 into
853 the register, later passes know the first AND to zero out the
854 bitfield being set in the register is unnecessary. The store
855 of 0 will be deleted as will at least the first AND. */
856
857 emit_move_insn (reg, const0_rtx);
858
859 bytes -= bitsize / BITS_PER_UNIT;
860 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
861 extract_bit_field (word, bitsize, 0, 1,
862 NULL_RTX, word_mode,
863 word_mode,
864 bitalign / BITS_PER_UNIT,
865 BITS_PER_WORD),
866 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
867 }
868 }
869}
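/* Editor's worked example (not in the original source): for a 3-byte
   BLKmode argument on a BYTES_BIG_ENDIAN target with BITS_PER_WORD == 32,
   big_endian_correction is 32 - 3 * 8 = 8, so store_bit_field places the
   24 extracted bits at bit offset 8, skipping the empty high-order byte
   of the word as described in the comment above.  */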
870
871/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
872 ACTPARMS.
873
874 NUM_ACTUALS is the total number of parameters.
875
876 N_NAMED_ARGS is the total number of named arguments.
877
878 FNDECL is the tree node for the target of this call (if known).
879
880 ARGS_SO_FAR holds state needed by the target to know where to place
881 the next argument.
882
883 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
884 for arguments which are passed in registers.
885
886 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
887 and may be modified by this routine.
888
889 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
890 flags which may be modified by this routine. */
891
892static void
893initialize_argument_information (num_actuals, args, args_size, n_named_args,
894 actparms, fndecl, args_so_far,
895 reg_parm_stack_space, old_stack_level,
896 old_pending_adj, must_preallocate, is_const)
91813b28 897 int num_actuals ATTRIBUTE_UNUSED;
898 struct arg_data *args;
899 struct args_size *args_size;
91813b28 900 int n_named_args ATTRIBUTE_UNUSED;
901 tree actparms;
902 tree fndecl;
959f3a06 903 CUMULATIVE_ARGS *args_so_far;
904 int reg_parm_stack_space;
905 rtx *old_stack_level;
906 int *old_pending_adj;
907 int *must_preallocate;
908 int *is_const;
909{
910 /* 1 if scanning parms front to back, -1 if scanning back to front. */
911 int inc;
912
913 /* Count arg position in order args appear. */
914 int argpos;
915
4fc026cd 916 struct args_size alignment_pad;
917 int i;
918 tree p;
919
920 args_size->constant = 0;
921 args_size->var = 0;
922
923 /* In this loop, we consider args in the order they are written.
924 We fill up ARGS from the front or from the back if necessary
925 so that in any case the first arg to be pushed ends up at the front. */
926
927#ifdef PUSH_ARGS_REVERSED
928 i = num_actuals - 1, inc = -1;
929 /* In this case, must reverse order of args
930 so that we compute and push the last arg first. */
931#else
932 i = 0, inc = 1;
933#endif
934
935 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
936 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
937 {
938 tree type = TREE_TYPE (TREE_VALUE (p));
939 int unsignedp;
940 enum machine_mode mode;
941
942 args[i].tree_value = TREE_VALUE (p);
943
944 /* Replace erroneous argument with constant zero. */
945 if (type == error_mark_node || TYPE_SIZE (type) == 0)
946 args[i].tree_value = integer_zero_node, type = integer_type_node;
947
948 /* If TYPE is a transparent union, pass things the way we would
949 pass the first field of the union. We have already verified that
950 the modes are the same. */
951 if (TYPE_TRANSPARENT_UNION (type))
952 type = TREE_TYPE (TYPE_FIELDS (type));
953
954 /* Decide where to pass this arg.
955
956 args[i].reg is nonzero if all or part is passed in registers.
957
958 args[i].partial is nonzero if part but not all is passed in registers,
959 and the exact value says how many words are passed in registers.
960
961 args[i].pass_on_stack is nonzero if the argument must at least be
962 computed on the stack. It may then be loaded back into registers
963 if args[i].reg is nonzero.
964
965 These decisions are driven by the FUNCTION_... macros and must agree
966 with those made by function.c. */
967
968 /* See if this argument should be passed by invisible reference. */
969 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
970 && contains_placeholder_p (TYPE_SIZE (type)))
971 || TREE_ADDRESSABLE (type)
972#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
959f3a06 973 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
974 type, argpos < n_named_args)
975#endif
976 )
977 {
978 /* If we're compiling a thunk, pass through invisible
979 references instead of making a copy. */
980 if (current_function_is_thunk
981#ifdef FUNCTION_ARG_CALLEE_COPIES
959f3a06 982 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
983 type, argpos < n_named_args)
984 /* If it's in a register, we must make a copy of it too. */
985 /* ??? Is this a sufficient test? Is there a better one? */
986 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
987 && REG_P (DECL_RTL (args[i].tree_value)))
988 && ! TREE_ADDRESSABLE (type))
989#endif
990 )
991 {
992 /* C++ uses a TARGET_EXPR to indicate that we want to make a
993 new object from the argument. If we are passing by
994 invisible reference, the callee will do that for us, so we
995 can strip off the TARGET_EXPR. This is not always safe,
996 but it is safe in the only case where this is a useful
997 optimization; namely, when the argument is a plain object.
998 In that case, the frontend is just asking the backend to
999 make a bitwise copy of the argument. */
1000
1001 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1002 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
1003 (args[i].tree_value, 1)))
1004 == 'd')
1005 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1006 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1007
1008 args[i].tree_value = build1 (ADDR_EXPR,
1009 build_pointer_type (type),
1010 args[i].tree_value);
1011 type = build_pointer_type (type);
1012 }
1013 else
1014 {
1015 /* We make a copy of the object and pass the address to the
1016 function being called. */
1017 rtx copy;
1018
1019 if (TYPE_SIZE (type) == 0
1020 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1021 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1022 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1023 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1024 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1025 {
1026 /* This is a variable-sized object. Make space on the stack
1027 for it. */
1028 rtx size_rtx = expr_size (TREE_VALUE (p));
1029
1030 if (*old_stack_level == 0)
1031 {
1032 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1033 *old_pending_adj = pending_stack_adjust;
1034 pending_stack_adjust = 0;
1035 }
1036
1037 copy = gen_rtx_MEM (BLKmode,
1038 allocate_dynamic_stack_space (size_rtx,
1039 NULL_RTX,
1040 TYPE_ALIGN (type)));
1041 }
1042 else
1043 {
1044 int size = int_size_in_bytes (type);
1045 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1046 }
1047
1048 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1049
1050 store_expr (args[i].tree_value, copy, 0);
1051 *is_const = 0;
1052
1053 args[i].tree_value = build1 (ADDR_EXPR,
1054 build_pointer_type (type),
1055 make_tree (type, copy));
1056 type = build_pointer_type (type);
1057 }
1058 }
1059
1060 mode = TYPE_MODE (type);
1061 unsignedp = TREE_UNSIGNED (type);
1062
1063#ifdef PROMOTE_FUNCTION_ARGS
1064 mode = promote_mode (type, mode, &unsignedp, 1);
1065#endif
1066
1067 args[i].unsignedp = unsignedp;
1068 args[i].mode = mode;
959f3a06 1069 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1070 argpos < n_named_args);
1071#ifdef FUNCTION_ARG_PARTIAL_NREGS
1072 if (args[i].reg)
1073 args[i].partial
959f3a06 1074 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1075 argpos < n_named_args);
1076#endif
1077
1078 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1079
1080 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1081 it means that we are to pass this arg in the register(s) designated
1082 by the PARALLEL, but also to pass it in the stack. */
1083 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1084 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1085 args[i].pass_on_stack = 1;
1086
1087 /* If this is an addressable type, we must preallocate the stack
1088 since we must evaluate the object into its final location.
1089
1090 If this is to be passed in both registers and the stack, it is simpler
1091 to preallocate. */
1092 if (TREE_ADDRESSABLE (type)
1093 || (args[i].pass_on_stack && args[i].reg != 0))
1094 *must_preallocate = 1;
1095
1096 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1097 we cannot consider this function call constant. */
1098 if (TREE_ADDRESSABLE (type))
1099 *is_const = 0;
1100
1101 /* Compute the stack-size of this argument. */
1102 if (args[i].reg == 0 || args[i].partial != 0
1103 || reg_parm_stack_space > 0
1104 || args[i].pass_on_stack)
1105 locate_and_pad_parm (mode, type,
1106#ifdef STACK_PARMS_IN_REG_PARM_AREA
1107 1,
1108#else
1109 args[i].reg != 0,
1110#endif
1111 fndecl, args_size, &args[i].offset,
4fc026cd 1112 &args[i].size, &alignment_pad);
1113
1114#ifndef ARGS_GROW_DOWNWARD
1115 args[i].slot_offset = *args_size;
1116#endif
1117
1118 args[i].alignment_pad = alignment_pad;
1119
1120 /* If a part of the arg was put into registers,
1121 don't include that part in the amount pushed. */
1122 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1123 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1124 / (PARM_BOUNDARY / BITS_PER_UNIT)
1125 * (PARM_BOUNDARY / BITS_PER_UNIT));
1126
1127 /* Update ARGS_SIZE, the total stack space for args so far. */
1128
1129 args_size->constant += args[i].size.constant;
1130 if (args[i].size.var)
1131 {
1132 ADD_PARM_SIZE (*args_size, args[i].size.var);
1133 }
1134
1135 /* Since the slot offset points to the bottom of the slot,
1136 we must record it after incrementing if the args grow down. */
1137#ifdef ARGS_GROW_DOWNWARD
1138 args[i].slot_offset = *args_size;
1139
1140 args[i].slot_offset.constant = -args_size->constant;
1141 if (args_size->var)
1142 {
1143 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1144 }
1145#endif
1146
1147 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1148 have been used, etc. */
1149
959f3a06 1150 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1151 argpos < n_named_args);
1152 }
1153}
1154
1155/* Update ARGS_SIZE to contain the total size for the argument block.
1156 Return the original constant component of the argument block's size.
1157
1158 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1159 for arguments passed in registers. */
1160
1161static int
1162compute_argument_block_size (reg_parm_stack_space, args_size)
1163 int reg_parm_stack_space;
1164 struct args_size *args_size;
1165{
1166 int unadjusted_args_size = args_size->constant;
1167
1168 /* Compute the actual size of the argument block required. The variable
1169 and constant sizes must be combined, the size may have to be rounded,
1170 and there may be a minimum required size. */
1171
1172 if (args_size->var)
1173 {
1174 args_size->var = ARGS_SIZE_TREE (*args_size);
1175 args_size->constant = 0;
1176
1177#ifdef PREFERRED_STACK_BOUNDARY
1178 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1179 args_size->var = round_up (args_size->var, STACK_BYTES);
1180#endif
1181
1182 if (reg_parm_stack_space > 0)
1183 {
1184 args_size->var
1185 = size_binop (MAX_EXPR, args_size->var,
1186 size_int (reg_parm_stack_space));
1187
1188#ifndef OUTGOING_REG_PARM_STACK_SPACE
1189 /* The area corresponding to register parameters is not to count in
1190 the size of the block we need. So make the adjustment. */
1191 args_size->var
1192 = size_binop (MINUS_EXPR, args_size->var,
1193 size_int (reg_parm_stack_space));
1194#endif
1195 }
1196 }
1197 else
1198 {
1199#ifdef PREFERRED_STACK_BOUNDARY
1200 args_size->constant = (((args_size->constant
1201 + pending_stack_adjust
1202 + STACK_BYTES - 1)
1203 / STACK_BYTES * STACK_BYTES)
1204 - pending_stack_adjust);
1205#endif
1206
1207 args_size->constant = MAX (args_size->constant,
1208 reg_parm_stack_space);
1209
1210#ifdef MAYBE_REG_PARM_STACK_SPACE
1211 if (reg_parm_stack_space == 0)
1212 args_size->constant = 0;
1213#endif
1214
1215#ifndef OUTGOING_REG_PARM_STACK_SPACE
1216 args_size->constant -= reg_parm_stack_space;
1217#endif
1218 }
1219 return unadjusted_args_size;
1220}
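/* Editor's worked example (not in the original source), assuming the
   register-parm stack space is 0: with args_size->constant == 20,
   pending_stack_adjust == 0 and STACK_BYTES == 16, the constant case
   above rounds the block to (20 + 15) / 16 * 16 = 32 bytes, while the
   returned unadjusted_args_size remains 20.  */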
1221
19832c77 1222/* Precompute parameters as needed for a function call.
1223
1224 IS_CONST indicates the target function is a pure function.
1225
1226 MUST_PREALLOCATE indicates that we must preallocate stack space for
1227 any stack arguments.
1228
1229 NUM_ACTUALS is the number of arguments.
1230
1231 ARGS is an array containing information for each argument; this routine
1232 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1233
1234 ARGS_SIZE contains information about the size of the arg list. */
1235
1236static void
1237precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1238 int is_const;
1239 int must_preallocate;
1240 int num_actuals;
1241 struct arg_data *args;
1242 struct args_size *args_size;
1243{
1244 int i;
1245
1246 /* If this function call is cse'able, precompute all the parameters.
1247 Note that if the parameter is constructed into a temporary, this will
1248 cause an additional copy because the parameter will be constructed
1249 into a temporary location and then copied into the outgoing arguments.
1250 If a parameter contains a call to alloca and this function uses the
1251 stack, precompute the parameter. */
1252
1253 /* If we preallocated the stack space, and some arguments must be passed
1254 on the stack, then we must precompute any parameter which contains a
1255 function call which will store arguments on the stack.
1256 Otherwise, evaluating the parameter may clobber previous parameters
1257 which have already been stored into the stack. */
1258
1259 for (i = 0; i < num_actuals; i++)
1260 if (is_const
1261 || ((args_size->var != 0 || args_size->constant != 0)
1262 && calls_function (args[i].tree_value, 1))
1263 || (must_preallocate
1264 && (args_size->var != 0 || args_size->constant != 0)
1265 && calls_function (args[i].tree_value, 0)))
1266 {
1267 /* If this is an addressable type, we cannot pre-evaluate it. */
1268 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1269 abort ();
1270
1271 push_temp_slots ();
1272
47841d1b 1273 args[i].value
1274 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1275
1276 preserve_temp_slots (args[i].value);
1277 pop_temp_slots ();
1278
1279 /* ANSI doesn't require a sequence point here,
1280 but PCC has one, so this will avoid some problems. */
1281 emit_queue ();
1282
1283 args[i].initial_value = args[i].value
47841d1b 1284 = protect_from_queue (args[i].value, 0);
1285
1286 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1287 {
1288 args[i].value
1289 = convert_modes (args[i].mode,
1290 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1291 args[i].value, args[i].unsignedp);
1292#ifdef PROMOTE_FOR_CALL_ONLY
1293 /* CSE will replace this only if it contains args[i].value
1294 pseudo, so convert it down to the declared mode using
1295 a SUBREG. */
1296 if (GET_CODE (args[i].value) == REG
1297 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1298 {
1299 args[i].initial_value
1300 = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1301 args[i].value, 0);
1302 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1303 SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
1304 = args[i].unsignedp;
1305 }
1306#endif
1307 }
1308 }
1309}
1310
1311/* Given the current state of MUST_PREALLOCATE and information about
1312 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1313 compute and return the final value for MUST_PREALLOCATE. */
1314
1315static int
1316finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1317 int must_preallocate;
1318 int num_actuals;
1319 struct arg_data *args;
1320 struct args_size *args_size;
1321{
1322 /* See if we have or want to preallocate stack space.
1323
1324 If we would have to push a partially-in-regs parm
1325 before other stack parms, preallocate stack space instead.
1326
1327 If the size of some parm is not a multiple of the required stack
1328 alignment, we must preallocate.
1329
1330 If the total size of arguments that would otherwise create a copy in
1331 a temporary (such as a CALL) is more than half the total argument list
1332 size, preallocation is faster.
1333
1334 Another reason to preallocate is if we have a machine (like the m88k)
1335 where stack alignment is required to be maintained between every
1336 pair of insns, not just when the call is made. However, we assume here
1337 that such machines either do not have push insns (and hence preallocation
1338 would occur anyway) or the problem is taken care of with
1339 PUSH_ROUNDING. */
1340
1341 if (! must_preallocate)
1342 {
1343 int partial_seen = 0;
1344 int copy_to_evaluate_size = 0;
1345 int i;
1346
1347 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1348 {
1349 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1350 partial_seen = 1;
1351 else if (partial_seen && args[i].reg == 0)
1352 must_preallocate = 1;
1353
1354 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1355 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1356 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1357 || TREE_CODE (args[i].tree_value) == COND_EXPR
1358 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1359 copy_to_evaluate_size
1360 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1361 }
1362
1363 if (copy_to_evaluate_size * 2 >= args_size->constant
1364 && args_size->constant > 0)
1365 must_preallocate = 1;
1366 }
1367 return must_preallocate;
1368}
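/* Editor's worked example (not in the original source): if the argument
   block totals 64 bytes and the BLKmode values that would need a
   temporary (CALL_EXPRs, TARGET_EXPRs, ...) account for 40 of them,
   then 40 * 2 >= 64, so the heuristic above turns MUST_PREALLOCATE on.  */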
599f37b6 1369
1370/* If we preallocated stack space, compute the address of each argument
1371 and store it into the ARGS array.
1372
1373 We need not ensure it is a valid memory address here; it will be
1374 validized when it is used.
1375
1376 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1377
1378static void
1379compute_argument_addresses (args, argblock, num_actuals)
1380 struct arg_data *args;
1381 rtx argblock;
1382 int num_actuals;
1383{
1384 if (argblock)
1385 {
1386 rtx arg_reg = argblock;
1387 int i, arg_offset = 0;
1388
1389 if (GET_CODE (argblock) == PLUS)
1390 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1391
1392 for (i = 0; i < num_actuals; i++)
1393 {
1394 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1395 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1396 rtx addr;
1397
1398 /* Skip this parm if it will not be passed on the stack. */
1399 if (! args[i].pass_on_stack && args[i].reg != 0)
1400 continue;
1401
1402 if (GET_CODE (offset) == CONST_INT)
1403 addr = plus_constant (arg_reg, INTVAL (offset));
1404 else
1405 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1406
1407 addr = plus_constant (addr, arg_offset);
1408 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1409 MEM_SET_IN_STRUCT_P
1410 (args[i].stack,
1411 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1412
1413 if (GET_CODE (slot_offset) == CONST_INT)
1414 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1415 else
1416 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1417
1418 addr = plus_constant (addr, arg_offset);
1419 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1420 }
1421 }
1422}
1423
1424/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1425 in a call instruction.
1426
1427 FNDECL is the tree node for the target function. For an indirect call
1428 FNDECL will be NULL_TREE.
1429
1430 EXP is the CALL_EXPR for this call. */
1431
1432static rtx
1433rtx_for_function_call (fndecl, exp)
1434 tree fndecl;
1435 tree exp;
1436{
1437 rtx funexp;
1438
1439 /* Get the function to call, in the form of RTL. */
1440 if (fndecl)
1441 {
1442 /* If this is the first use of the function, see if we need to
1443 make an external definition for it. */
1444 if (! TREE_USED (fndecl))
1445 {
1446 assemble_external (fndecl);
1447 TREE_USED (fndecl) = 1;
1448 }
1449
1450 /* Get a SYMBOL_REF rtx for the function address. */
1451 funexp = XEXP (DECL_RTL (fndecl), 0);
1452 }
1453 else
1454 /* Generate an rtx (probably a pseudo-register) for the address. */
1455 {
91ab1046 1456 rtx funaddr;
a45bdd02 1457 push_temp_slots ();
1458 funaddr = funexp =
1459 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1460 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1461
1462 /* Check the function is executable. */
1463 if (current_function_check_memory_usage)
1464 {
1465#ifdef POINTERS_EXTEND_UNSIGNED
1466 /* It might be OK to convert funexp in place, but there's
1467 a lot going on between here and when it happens naturally
1468 that this seems safer. */
1469 funaddr = convert_memory_address (Pmode, funexp);
1470#endif
1471 emit_library_call (chkr_check_exec_libfunc, 1,
1472 VOIDmode, 1,
1473 funaddr, Pmode);
1474 }
1475 emit_queue ();
1476 }
1477 return funexp;
1478}
1479
1480/* Do the register loads required for any wholly-register parms or any
1481 parms which are passed both on the stack and in a register. Their
1482 expressions were already evaluated.
1483
1484 Mark all register-parms as living through the call, putting these USE
1485 insns in the CALL_INSN_FUNCTION_USAGE field. */
1486
1487static void
1488load_register_parameters (args, num_actuals, call_fusage)
1489 struct arg_data *args;
1490 int num_actuals;
1491 rtx *call_fusage;
1492{
1493 int i, j;
1494
1495#ifdef LOAD_ARGS_REVERSED
1496 for (i = num_actuals - 1; i >= 0; i--)
1497#else
1498 for (i = 0; i < num_actuals; i++)
1499#endif
1500 {
1501 rtx reg = args[i].reg;
1502 int partial = args[i].partial;
1503 int nregs;
1504
1505 if (reg)
1506 {
1507 /* Set to non-negative if we must move a word at a time, even if just
1508 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1509 we just use a normal move insn. This value can be zero if the
1510 argument is a zero size structure with no fields. */
1511 nregs = (partial ? partial
1512 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1513 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1514 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1515 : -1));
1516
1517 /* Handle calls that pass values in multiple non-contiguous
1518 locations. The Irix 6 ABI has examples of this. */
1519
1520 if (GET_CODE (reg) == PARALLEL)
1521 {
1522 emit_group_load (reg, args[i].value,
1523 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1524 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1525 / BITS_PER_UNIT));
1526 }
1527
1528 /* If simple case, just do move. If normal partial, store_one_arg
1529 has already loaded the register for us. In all other cases,
1530 load the register(s) from memory. */
1531
1532 else if (nregs == -1)
1533 emit_move_insn (reg, args[i].value);
1534
1535 /* If we have pre-computed the values to put in the registers in
1536 the case of non-aligned structures, copy them in now. */
1537
1538 else if (args[i].n_aligned_regs != 0)
1539 for (j = 0; j < args[i].n_aligned_regs; j++)
1540 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1541 args[i].aligned_regs[j]);
1542
1543 else if (partial == 0 || args[i].pass_on_stack)
1544 move_block_to_reg (REGNO (reg),
1545 validize_mem (args[i].value), nregs,
1546 args[i].mode);
1547
1548 /* Handle calls that pass values in multiple non-contiguous
1549 locations. The Irix 6 ABI has examples of this. */
1550 if (GET_CODE (reg) == PARALLEL)
1551 use_group_regs (call_fusage, reg);
1552 else if (nregs == -1)
1553 use_reg (call_fusage, reg);
1554 else
1555 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1556 }
1557 }
1558}
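/* Editor's worked example (not in the original source): a 10-byte
   BLKmode argument passed entirely in registers, with UNITS_PER_WORD == 4
   and partial == 0, gives nregs = (10 + 3) / 4 = 3; move_block_to_reg
   then copies three words and use_regs marks all three hard registers
   in CALL_FUSAGE.  */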
1559
1560/* Generate all the code for a function call
1561 and return an rtx for its value.
1562 Store the value in TARGET (specified as an rtx) if convenient.
1563 If the value is stored in TARGET then TARGET is returned.
1564 If IGNORE is nonzero, then we ignore the value of the function call. */
1565
1566rtx
8129842c 1567expand_call (exp, target, ignore)
1568 tree exp;
1569 rtx target;
1570 int ignore;
1571{
1572 /* List of actual parameters. */
1573 tree actparms = TREE_OPERAND (exp, 1);
1574 /* RTX for the function to be called. */
1575 rtx funexp;
1576 /* Data type of the function. */
1577 tree funtype;
1578 /* Declaration of the function being called,
1579 or 0 if the function is computed (not known by name). */
1580 tree fndecl = 0;
1581 char *name = 0;
1582
1583 /* Register in which non-BLKmode value will be returned,
1584 or 0 if no value or if value is BLKmode. */
1585 rtx valreg;
1586 /* Address where we should return a BLKmode value;
1587 0 if value not BLKmode. */
1588 rtx structure_value_addr = 0;
1589 /* Nonzero if that address is being passed by treating it as
1590 an extra, implicit first parameter. Otherwise,
1591 it is passed by being copied directly into struct_value_rtx. */
1592 int structure_value_addr_parm = 0;
1593 /* Size of aggregate value wanted, or zero if none wanted
1594 or if we are using the non-reentrant PCC calling convention
1595 or expecting the value in registers. */
e5e809f4 1596 HOST_WIDE_INT struct_value_size = 0;
1597 /* Nonzero if called function returns an aggregate in memory PCC style,
1598 by returning the address of where to find it. */
1599 int pcc_struct_value = 0;
1600
1601 /* Number of actual parameters in this call, including struct value addr. */
1602 int num_actuals;
1603 /* Number of named args. Args after this are anonymous ones
1604 and they must all go on the stack. */
1605 int n_named_args;
1606
1607 /* Vector of information about each argument.
1608 Arguments are numbered in the order they will be pushed,
1609 not the order they are written. */
1610 struct arg_data *args;
1611
1612 /* Total size in bytes of all the stack-parms scanned so far. */
1613 struct args_size args_size;
1614 /* Size of arguments before any adjustments (such as rounding). */
599f37b6 1615 int unadjusted_args_size;
51bbfa0c
RS
1616 /* Data on reg parms scanned so far. */
1617 CUMULATIVE_ARGS args_so_far;
1618 /* Nonzero if a reg parm has been scanned. */
1619 int reg_parm_seen;
efd65a8b 1620 /* Nonzero if this is an indirect function call. */
51bbfa0c
RS
1621
1622 /* Nonzero if we must avoid push-insns in the args for this call.
1623 If stack space is allocated for register parameters, but not by the
1624 caller, then it is preallocated in the fixed part of the stack frame.
1625 So the entire argument block must then be preallocated (i.e., we
1626 ignore PUSH_ROUNDING in that case). */
1627
51bbfa0c
RS
1628#ifdef PUSH_ROUNDING
1629 int must_preallocate = 0;
1630#else
1631 int must_preallocate = 1;
51bbfa0c
RS
1632#endif
1633
f72aed24 1634 /* Size of the stack reserved for parameter registers. */
6f90e075
JW
1635 int reg_parm_stack_space = 0;
1636
51bbfa0c
RS
1637 /* Address of space preallocated for stack parms
1638 (on machines that lack push insns), or 0 if space not preallocated. */
1639 rtx argblock = 0;
1640
1641 /* Nonzero if it is plausible that this is a call to alloca. */
1642 int may_be_alloca;
9ae8ffe7
JL
1643 /* Nonzero if this is a call to malloc or a related function. */
1644 int is_malloc;
51bbfa0c
RS
1645 /* Nonzero if this is a call to setjmp or a related function. */
1646 int returns_twice;
1647 /* Nonzero if this is a call to `longjmp'. */
1648 int is_longjmp;
1649 /* Nonzero if this is a call to an inline function. */
1650 int is_integrable = 0;
51bbfa0c
RS
1651 /* Nonzero if this is a call to a `const' function.
1652 Note that only explicitly named functions are handled as `const' here. */
1653 int is_const = 0;
1654 /* Nonzero if this is a call to a `volatile' function. */
1655 int is_volatile = 0;
1656#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1657 /* Define the boundary of the register parm stack space that needs to be
 1658 saved, if any. */
1659 int low_to_save = -1, high_to_save;
1660 rtx save_area = 0; /* Place that it is saved */
1661#endif
1662
1663#ifdef ACCUMULATE_OUTGOING_ARGS
1664 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1665 char *initial_stack_usage_map = stack_usage_map;
a544cfd2 1666 int old_stack_arg_under_construction = 0;
51bbfa0c
RS
1667#endif
1668
1669 rtx old_stack_level = 0;
79be3418 1670 int old_pending_adj = 0;
51bbfa0c 1671 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 1672 rtx call_fusage = 0;
51bbfa0c 1673 register tree p;
21a3b983 1674 register int i;
51bbfa0c 1675
7815214e
RK
1676 /* The value of the function call can be put in a hard register. But
1677 if -fcheck-memory-usage, code which invokes functions (and thus
1678 damages some hard registers) can be inserted before using the value.
1679 So, target is always a pseudo-register in that case. */
7d384cc0 1680 if (current_function_check_memory_usage)
7815214e
RK
1681 target = 0;
1682
51bbfa0c
RS
1683 /* See if we can find a DECL-node for the actual function.
1684 As a result, decide whether this is a call to an integrable function. */
1685
1686 p = TREE_OPERAND (exp, 0);
1687 if (TREE_CODE (p) == ADDR_EXPR)
1688 {
1689 fndecl = TREE_OPERAND (p, 0);
1690 if (TREE_CODE (fndecl) != FUNCTION_DECL)
fdff8c6d 1691 fndecl = 0;
51bbfa0c
RS
1692 else
1693 {
1694 if (!flag_no_inline
1695 && fndecl != current_function_decl
aa10adff 1696 && DECL_INLINE (fndecl)
1cf4f698 1697 && DECL_SAVED_INSNS (fndecl)
49ad7cfa 1698 && DECL_SAVED_INSNS (fndecl)->inlinable)
51bbfa0c
RS
1699 is_integrable = 1;
1700 else if (! TREE_ADDRESSABLE (fndecl))
1701 {
13d39dbc 1702 /* In case this function later becomes inlinable,
51bbfa0c
RS
1703 record that there was already a non-inline call to it.
1704
1705 Use abstraction instead of setting TREE_ADDRESSABLE
1706 directly. */
da8c1713
RK
1707 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1708 && optimize > 0)
1907795e
JM
1709 {
1710 warning_with_decl (fndecl, "can't inline call to `%s'");
1711 warning ("called from here");
1712 }
51bbfa0c
RS
1713 mark_addressable (fndecl);
1714 }
1715
d45cf215
RS
1716 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1717 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
51bbfa0c 1718 is_const = 1;
5e24110e
RS
1719
1720 if (TREE_THIS_VOLATILE (fndecl))
1721 is_volatile = 1;
51bbfa0c
RS
1722 }
1723 }
1724
fdff8c6d
RK
 1725 /* If we don't have a specific function to call, see if we have a
1726 constant or `noreturn' function from the type. */
1727 if (fndecl == 0)
1728 {
1729 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1730 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1731 }
1732
6f90e075
JW
1733#ifdef REG_PARM_STACK_SPACE
1734#ifdef MAYBE_REG_PARM_STACK_SPACE
1735 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1736#else
1737 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1738#endif
1739#endif
1740
e5e809f4
JL
1741#if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1742 if (reg_parm_stack_space > 0)
1743 must_preallocate = 1;
1744#endif
1745
51bbfa0c
RS
1746 /* Warn if this value is an aggregate type,
1747 regardless of which calling convention we are using for it. */
05e3bdb9 1748 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
51bbfa0c
RS
1749 warning ("function call has aggregate value");
1750
1751 /* Set up a place to return a structure. */
1752
1753 /* Cater to broken compilers. */
1754 if (aggregate_value_p (exp))
1755 {
1756 /* This call returns a big structure. */
1757 is_const = 0;
1758
1759#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a
RS
1760 {
1761 pcc_struct_value = 1;
0dd532dc
JW
1762 /* Easier than making that case work right. */
1763 if (is_integrable)
1764 {
1765 /* In case this is a static function, note that it has been
1766 used. */
1767 if (! TREE_ADDRESSABLE (fndecl))
1768 mark_addressable (fndecl);
1769 is_integrable = 0;
1770 }
9e7b1d0a
RS
1771 }
1772#else /* not PCC_STATIC_STRUCT_RETURN */
1773 {
1774 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
51bbfa0c 1775
9e7b1d0a
RS
1776 if (target && GET_CODE (target) == MEM)
1777 structure_value_addr = XEXP (target, 0);
1778 else
1779 {
e9a25f70
JL
1780 /* Assign a temporary to hold the value. */
1781 tree d;
51bbfa0c 1782
9e7b1d0a
RS
1783 /* For variable-sized objects, we must be called with a target
1784 specified. If we were to allocate space on the stack here,
1785 we would have no way of knowing when to free it. */
51bbfa0c 1786
002bdd6c
RK
1787 if (struct_value_size < 0)
1788 abort ();
1789
e9a25f70
JL
1790 /* This DECL is just something to feed to mark_addressable;
1791 it doesn't get pushed. */
1792 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1793 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1794 mark_addressable (d);
14a774a9 1795 mark_temp_addr_taken (DECL_RTL (d));
e9a25f70 1796 structure_value_addr = XEXP (DECL_RTL (d), 0);
e5e809f4 1797 TREE_USED (d) = 1;
9e7b1d0a
RS
1798 target = 0;
1799 }
1800 }
1801#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
1802 }
1803
1804 /* If called function is inline, try to integrate it. */
1805
1806 if (is_integrable)
1807 {
1808 rtx temp;
69d4ca36 1809#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534 1810 rtx before_call = get_last_insn ();
69d4ca36 1811#endif
51bbfa0c
RS
1812
1813 temp = expand_inline_function (fndecl, actparms, target,
1814 ignore, TREE_TYPE (exp),
1815 structure_value_addr);
1816
1817 /* If inlining succeeded, return. */
2e0dd623 1818 if (temp != (rtx) (HOST_WIDE_INT) -1)
51bbfa0c 1819 {
d64f5a78 1820#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
1821 /* If the outgoing argument list must be preserved, push
1822 the stack before executing the inlined function if it
1823 makes any calls. */
1824
1825 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1826 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1827 break;
1828
1829 if (stack_arg_under_construction || i >= 0)
1830 {
a1917650
RK
1831 rtx first_insn
1832 = before_call ? NEXT_INSN (before_call) : get_insns ();
6a651371 1833 rtx insn = NULL_RTX, seq;
2f4aa534 1834
d64f5a78 1835 /* Look for a call in the inline function code.
49ad7cfa 1836 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
d64f5a78
RS
1837 nonzero then there is a call and it is not necessary
1838 to scan the insns. */
1839
49ad7cfa 1840 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
a1917650 1841 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
d64f5a78
RS
1842 if (GET_CODE (insn) == CALL_INSN)
1843 break;
2f4aa534
RS
1844
1845 if (insn)
1846 {
d64f5a78
RS
1847 /* Reserve enough stack space so that the largest
1848 argument list of any function call in the inline
1849 function does not overlap the argument list being
1850 evaluated. This is usually an overestimate because
1851 allocate_dynamic_stack_space reserves space for an
1852 outgoing argument list in addition to the requested
1853 space, but there is no way to ask for stack space such
1854 that an argument list of a certain length can be
e5e809f4 1855 safely constructed.
d64f5a78 1856
e5e809f4
JL
1857 Add the stack space reserved for register arguments, if
1858 any, in the inline function. What is really needed is the
d64f5a78
RS
1859 largest value of reg_parm_stack_space in the inline
1860 function, but that is not available. Using the current
1861 value of reg_parm_stack_space is wrong, but gives
1862 correct results on all supported machines. */
e5e809f4 1863
49ad7cfa 1864 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
e5e809f4
JL
1865 + reg_parm_stack_space);
1866
2f4aa534 1867 start_sequence ();
ccf5d244 1868 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
e5d70561
RK
1869 allocate_dynamic_stack_space (GEN_INT (adjust),
1870 NULL_RTX, BITS_PER_UNIT);
2f4aa534
RS
1871 seq = get_insns ();
1872 end_sequence ();
a1917650 1873 emit_insns_before (seq, first_insn);
e5d70561 1874 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2f4aa534
RS
1875 }
1876 }
d64f5a78 1877#endif
51bbfa0c
RS
1878
1879 /* If the result is equivalent to TARGET, return TARGET to simplify
1880 checks in store_expr. They can be equivalent but not equal in the
1881 case of a function that returns BLKmode. */
1882 if (temp != target && rtx_equal_p (temp, target))
1883 return target;
1884 return temp;
1885 }
1886
1887 /* If inlining failed, mark FNDECL as needing to be compiled
0481a55e
RK
1888 separately after all. If function was declared inline,
1889 give a warning. */
1890 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
da8c1713 1891 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1907795e
JM
1892 {
1893 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1894 warning ("called from here");
1895 }
51bbfa0c
RS
1896 mark_addressable (fndecl);
1897 }
1898
51bbfa0c
RS
1899 function_call_count++;
1900
1901 if (fndecl && DECL_NAME (fndecl))
1902 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1903
51bbfa0c 1904 /* See if this is a call to a function that can return more than once
20efdf74 1905 or a call to longjmp or malloc. */
3a8c995b 1906 special_function_p (fndecl, &returns_twice, &is_longjmp,
20efdf74 1907 &is_malloc, &may_be_alloca);
51bbfa0c 1908
51bbfa0c
RS
1909 if (may_be_alloca)
1910 current_function_calls_alloca = 1;
1911
39842893
JL
1912 /* Operand 0 is a pointer-to-function; get the type of the function. */
1913 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1914 if (! POINTER_TYPE_P (funtype))
1915 abort ();
1916 funtype = TREE_TYPE (funtype);
1917
1918 /* When calling a const function, we must pop the stack args right away,
1919 so that the pop is deleted or moved with the call. */
1920 if (is_const)
1921 NO_DEFER_POP;
1922
51bbfa0c
RS
1923 /* Don't let pending stack adjusts add up to too much.
1924 Also, do all pending adjustments now
1925 if there is any chance this might be a call to alloca. */
1926
1927 if (pending_stack_adjust >= 32
1928 || (pending_stack_adjust > 0 && may_be_alloca))
1929 do_pending_stack_adjust ();
1930
cc79451b
RK
1931 /* Push the temporary stack slot level so that we can free any temporaries
1932 we make. */
51bbfa0c
RS
1933 push_temp_slots ();
1934
eecb6f50
JL
1935 /* Start updating where the next arg would go.
1936
1937 On some machines (such as the PA) indirect calls have a different
1938 calling convention than normal calls. The last argument in
1939 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1940 or not. */
1941 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
51bbfa0c
RS
1942
1943 /* If struct_value_rtx is 0, it means pass the address
1944 as if it were an extra parameter. */
1945 if (structure_value_addr && struct_value_rtx == 0)
1946 {
5582b006
RK
1947 /* If structure_value_addr is a REG other than
 1948 virtual_outgoing_args_rtx, we can always use it. If it
1949 is not a REG, we must always copy it into a register.
1950 If it is virtual_outgoing_args_rtx, we must copy it to another
1951 register in some cases. */
1952 rtx temp = (GET_CODE (structure_value_addr) != REG
d64f5a78 1953#ifdef ACCUMULATE_OUTGOING_ARGS
5582b006
RK
1954 || (stack_arg_under_construction
1955 && structure_value_addr == virtual_outgoing_args_rtx)
d64f5a78 1956#endif
5582b006
RK
1957 ? copy_addr_to_reg (structure_value_addr)
1958 : structure_value_addr);
d64f5a78 1959
51bbfa0c
RS
1960 actparms
1961 = tree_cons (error_mark_node,
1962 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2f4aa534 1963 temp),
51bbfa0c
RS
1964 actparms);
1965 structure_value_addr_parm = 1;
1966 }
1967
1968 /* Count the arguments and set NUM_ACTUALS. */
1969 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1970 num_actuals = i;
1971
1972 /* Compute number of named args.
1973 Normally, don't include the last named arg if anonymous args follow.
e5e809f4 1974 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
469225d8
JW
1975 (If no anonymous args follow, the result of list_length is actually
1976 one too large. This is harmless.)
51bbfa0c 1977
9ab70a9b
R
1978 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1979 zero, this machine will be able to place unnamed args that were passed in
469225d8
JW
1980 registers into the stack. So treat all args as named. This allows the
1981 insns emitting for a specific argument list to be independent of the
1982 function declaration.
51bbfa0c 1983
9ab70a9b 1984 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
51bbfa0c
RS
1985 way to pass unnamed args in registers, so we must force them into
1986 memory. */
e5e809f4
JL
1987
1988 if ((STRICT_ARGUMENT_NAMING
9ab70a9b 1989 || ! PRETEND_OUTGOING_VARARGS_NAMED)
e5e809f4 1990 && TYPE_ARG_TYPES (funtype) != 0)
51bbfa0c 1991 n_named_args
0ee902cb 1992 = (list_length (TYPE_ARG_TYPES (funtype))
0ee902cb 1993 /* Don't include the last named arg. */
d0f9021a 1994 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
0ee902cb
RM
1995 /* Count the struct value address, if it is passed as a parm. */
1996 + structure_value_addr_parm);
51bbfa0c 1997 else
51bbfa0c
RS
1998 /* If we know nothing, treat all args as named. */
1999 n_named_args = num_actuals;
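 /* Illustrative example (not from this file): for a fixed prototype
 such as `int f (int, int)' the TYPE_ARG_TYPES chain ends in
 void_type_node, so list_length yields 3 and both arguments count as
 named; for `int f (int, int, ...)' the chain has length 2 and,
 without STRICT_ARGUMENT_NAMING, only the first argument is named.
 With no prototype information we reach this point and treat every
 actual argument as named. */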
2000
2001 /* Make a vector to hold all the information about each arg. */
2002 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
4c9a05bc 2003 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
51bbfa0c 2004
d7cdf113
JL
 2005 /* Build up entries in the ARGS array, compute the size of the arguments
2006 into ARGS_SIZE, etc. */
2007 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
959f3a06 2008 actparms, fndecl, &args_so_far,
d7cdf113
JL
2009 reg_parm_stack_space, &old_stack_level,
2010 &old_pending_adj, &must_preallocate,
2011 &is_const);
51bbfa0c 2012
6f90e075
JW
2013#ifdef FINAL_REG_PARM_STACK_SPACE
2014 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2015 args_size.var);
2016#endif
2017
51bbfa0c
RS
2018 if (args_size.var)
2019 {
2020 /* If this function requires a variable-sized argument list, don't try to
2021 make a cse'able block for this call. We may be able to do this
2022 eventually, but it is too complicated to keep track of what insns go
2023 in the cse'able block and which don't. */
2024
2025 is_const = 0;
2026 must_preallocate = 1;
51bbfa0c 2027 }
e5e809f4 2028
599f37b6
JL
2029 /* Compute the actual size of the argument block required. The variable
2030 and constant sizes must be combined, the size may have to be rounded,
2031 and there may be a minimum required size. */
2032 unadjusted_args_size
2033 = compute_argument_block_size (reg_parm_stack_space, &args_size);
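 /* From here on args_size.constant is the block size after the rounding
 and minimum-size adjustments described above, while unadjusted_args_size
 keeps the size before them; their difference is what the
 anti_adjust_stack calls below push as alignment padding. */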
51bbfa0c 2034
0f9b3ea6
JL
2035 /* Now make final decision about preallocating stack space. */
2036 must_preallocate = finalize_must_preallocate (must_preallocate,
2037 num_actuals, args, &args_size);
51bbfa0c
RS
2038
2039 /* If the structure value address will reference the stack pointer, we must
2040 stabilize it. We don't need to do this if we know that we are not going
2041 to adjust the stack pointer in processing this call. */
2042
2043 if (structure_value_addr
2044 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2045 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2046 && (args_size.var
2047#ifndef ACCUMULATE_OUTGOING_ARGS
2048 || args_size.constant
2049#endif
2050 ))
2051 structure_value_addr = copy_to_reg (structure_value_addr);
2052
cc0b1adc
JL
2053 /* Precompute any arguments as needed. */
2054 precompute_arguments (is_const, must_preallocate, num_actuals,
2055 args, &args_size);
51bbfa0c
RS
2056
2057 /* Now we are about to start emitting insns that can be deleted
2058 if a libcall is deleted. */
9ae8ffe7 2059 if (is_const || is_malloc)
51bbfa0c
RS
2060 start_sequence ();
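 /* Everything emitted between here and the matching end_sequence after
 the call is collected so that, for a const call, it can be wrapped in
 a libcall block, or, for a malloc-like call, re-emitted with a
 REG_NOALIAS note on the result. */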
2061
2062 /* If we have no actual push instructions, or shouldn't use them,
2063 make space for all args right now. */
2064
2065 if (args_size.var != 0)
2066 {
2067 if (old_stack_level == 0)
2068 {
e5d70561 2069 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
51bbfa0c
RS
2070 old_pending_adj = pending_stack_adjust;
2071 pending_stack_adjust = 0;
d64f5a78 2072#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2073 /* stack_arg_under_construction says whether a stack arg is
2074 being constructed at the old stack level. Pushing the stack
2075 gets a clean outgoing argument block. */
2076 old_stack_arg_under_construction = stack_arg_under_construction;
2077 stack_arg_under_construction = 0;
d64f5a78 2078#endif
51bbfa0c
RS
2079 }
2080 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2081 }
26a258fe 2082 else
51bbfa0c
RS
2083 {
2084 /* Note that we must go through the motions of allocating an argument
2085 block even if the size is zero because we may be storing args
2086 in the area reserved for register arguments, which may be part of
2087 the stack frame. */
26a258fe 2088
51bbfa0c
RS
2089 int needed = args_size.constant;
2090
0f41302f
MS
2091 /* Store the maximum argument space used. It will be pushed by
2092 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2093 checking). */
51bbfa0c
RS
2094
2095 if (needed > current_function_outgoing_args_size)
2096 current_function_outgoing_args_size = needed;
2097
26a258fe
PB
2098 if (must_preallocate)
2099 {
2100#ifdef ACCUMULATE_OUTGOING_ARGS
2101 /* Since the stack pointer will never be pushed, it is possible for
2102 the evaluation of a parm to clobber something we have already
2103 written to the stack. Since most function calls on RISC machines
2104 do not use the stack, this is uncommon, but must work correctly.
2105
2106 Therefore, we save any area of the stack that was already written
2107 and that we are using. Here we set up to do this by making a new
2108 stack usage map from the old one. The actual save will be done
2109 by store_one_arg.
2110
2111 Another approach might be to try to reorder the argument
2112 evaluations to avoid this conflicting stack usage. */
2113
e5e809f4 2114#ifndef OUTGOING_REG_PARM_STACK_SPACE
26a258fe
PB
2115 /* Since we will be writing into the entire argument area, the
2116 map must be allocated for its entire size, not just the part that
2117 is the responsibility of the caller. */
2118 needed += reg_parm_stack_space;
51bbfa0c
RS
2119#endif
2120
2121#ifdef ARGS_GROW_DOWNWARD
26a258fe
PB
2122 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2123 needed + 1);
51bbfa0c 2124#else
26a258fe
PB
2125 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2126 needed);
51bbfa0c 2127#endif
26a258fe 2128 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
51bbfa0c 2129
26a258fe
PB
2130 if (initial_highest_arg_in_use)
2131 bcopy (initial_stack_usage_map, stack_usage_map,
2132 initial_highest_arg_in_use);
51bbfa0c 2133
26a258fe
PB
2134 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2135 bzero (&stack_usage_map[initial_highest_arg_in_use],
2136 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2137 needed = 0;
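 /* stack_usage_map is indexed by byte offset into the outgoing argument
 area; a nonzero entry means that byte has already been written and
 must be saved (by store_one_arg) before being overwritten, then
 restored after the call. */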
2f4aa534 2138
26a258fe
PB
2139 /* The address of the outgoing argument list must not be copied to a
2140 register here, because argblock would be left pointing to the
2141 wrong place after the call to allocate_dynamic_stack_space below.
2142 */
2f4aa534 2143
26a258fe 2144 argblock = virtual_outgoing_args_rtx;
2f4aa534 2145
51bbfa0c 2146#else /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 2147 if (inhibit_defer_pop == 0)
51bbfa0c 2148 {
26a258fe
PB
2149 /* Try to reuse some or all of the pending_stack_adjust
2150 to get this space. Maybe we can avoid any pushing. */
2151 if (needed > pending_stack_adjust)
2152 {
2153 needed -= pending_stack_adjust;
2154 pending_stack_adjust = 0;
2155 }
2156 else
2157 {
2158 pending_stack_adjust -= needed;
2159 needed = 0;
2160 }
51bbfa0c 2161 }
26a258fe
PB
2162 /* Special case this because overhead of `push_block' in this
2163 case is non-trivial. */
2164 if (needed == 0)
2165 argblock = virtual_outgoing_args_rtx;
51bbfa0c 2166 else
26a258fe
PB
2167 argblock = push_block (GEN_INT (needed), 0, 0);
2168
2169 /* We only really need to call `copy_to_reg' in the case where push
2170 insns are going to be used to pass ARGBLOCK to a function
2171 call in ARGS. In that case, the stack pointer changes value
2172 from the allocation point to the call point, and hence
2173 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2174 But might as well always do it. */
2175 argblock = copy_to_reg (argblock);
51bbfa0c 2176#endif /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 2177 }
51bbfa0c
RS
2178 }
2179
bfbf933a
RS
2180#ifdef ACCUMULATE_OUTGOING_ARGS
2181 /* The save/restore code in store_one_arg handles all cases except one:
2182 a constructor call (including a C function returning a BLKmode struct)
2183 to initialize an argument. */
2184 if (stack_arg_under_construction)
2185 {
e5e809f4 2186#ifndef OUTGOING_REG_PARM_STACK_SPACE
e5d70561 2187 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
bfbf933a 2188#else
e5d70561 2189 rtx push_size = GEN_INT (args_size.constant);
bfbf933a
RS
2190#endif
2191 if (old_stack_level == 0)
2192 {
e5d70561 2193 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bfbf933a
RS
2194 old_pending_adj = pending_stack_adjust;
2195 pending_stack_adjust = 0;
2196 /* stack_arg_under_construction says whether a stack arg is
2197 being constructed at the old stack level. Pushing the stack
2198 gets a clean outgoing argument block. */
2199 old_stack_arg_under_construction = stack_arg_under_construction;
2200 stack_arg_under_construction = 0;
2201 /* Make a new map for the new argument list. */
2202 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2203 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2204 highest_outgoing_arg_in_use = 0;
2205 }
e5d70561 2206 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
bfbf933a
RS
2207 }
2208 /* If argument evaluation might modify the stack pointer, copy the
2209 address of the argument list to a register. */
2210 for (i = 0; i < num_actuals; i++)
2211 if (args[i].pass_on_stack)
2212 {
2213 argblock = copy_addr_to_reg (argblock);
2214 break;
2215 }
2216#endif
2217
a45bdd02 2218 compute_argument_addresses (args, argblock, num_actuals);
bfbf933a 2219
51bbfa0c 2220#ifdef PUSH_ARGS_REVERSED
c795bca9 2221#ifdef PREFERRED_STACK_BOUNDARY
51bbfa0c
RS
2222 /* If we push args individually in reverse order, perform stack alignment
2223 before the first push (the last arg). */
2224 if (argblock == 0)
599f37b6 2225 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
51bbfa0c
RS
2226#endif
2227#endif
2228
2229 /* Don't try to defer pops if preallocating, not even from the first arg,
2230 since ARGBLOCK probably refers to the SP. */
2231 if (argblock)
2232 NO_DEFER_POP;
2233
a45bdd02 2234 funexp = rtx_for_function_call (fndecl, exp);
51bbfa0c
RS
2235
2236 /* Figure out the register where the value, if any, will come back. */
2237 valreg = 0;
2238 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2239 && ! structure_value_addr)
2240 {
2241 if (pcc_struct_value)
2242 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
4dc07bd7 2243 fndecl, 0);
51bbfa0c 2244 else
4dc07bd7 2245 valreg = hard_function_value (TREE_TYPE (exp), fndecl, 0);
51bbfa0c
RS
2246 }
2247
2248 /* Precompute all register parameters. It isn't safe to compute anything
0f41302f 2249 once we have started filling any specific hard regs. */
20efdf74 2250 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
51bbfa0c
RS
2251
2252#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
e5e809f4 2253
20efdf74
JL
2254 /* Save the fixed argument area if it's part of the caller's frame and
2255 is clobbered by argument setup for this call. */
2256 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2257 &low_to_save, &high_to_save);
b94301c2 2258#endif
20efdf74 2259
51bbfa0c
RS
2260
2261 /* Now store (and compute if necessary) all non-register parms.
2262 These come before register parms, since they can require block-moves,
2263 which could clobber the registers used for register parms.
2264 Parms which have partial registers are not stored here,
2265 but we do preallocate space here if they want that. */
2266
2267 for (i = 0; i < num_actuals; i++)
2268 if (args[i].reg == 0 || args[i].pass_on_stack)
2269 store_one_arg (&args[i], argblock, may_be_alloca,
c84e2712 2270 args_size.var != 0, reg_parm_stack_space);
51bbfa0c 2271
4ab56118
RK
2272 /* If we have a parm that is passed in registers but not in memory
2273 and whose alignment does not permit a direct copy into registers,
2274 make a group of pseudos that correspond to each register that we
2275 will later fill. */
45d44c98 2276 if (STRICT_ALIGNMENT)
20efdf74 2277 store_unaligned_arguments_into_pseudos (args, num_actuals);
4ab56118 2278
51bbfa0c
RS
2279 /* Now store any partially-in-registers parm.
2280 This is the last place a block-move can happen. */
2281 if (reg_parm_seen)
2282 for (i = 0; i < num_actuals; i++)
2283 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2284 store_one_arg (&args[i], argblock, may_be_alloca,
c84e2712 2285 args_size.var != 0, reg_parm_stack_space);
51bbfa0c
RS
2286
2287#ifndef PUSH_ARGS_REVERSED
c795bca9 2288#ifdef PREFERRED_STACK_BOUNDARY
51bbfa0c
RS
2289 /* If we pushed args in forward order, perform stack alignment
2290 after pushing the last arg. */
2291 if (argblock == 0)
599f37b6 2292 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
51bbfa0c
RS
2293#endif
2294#endif
2295
756e0e12
RS
2296 /* If register arguments require space on the stack and stack space
2297 was not preallocated, allocate stack space here for arguments
2298 passed in registers. */
6e716e89 2299#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
756e0e12 2300 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 2301 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
2302#endif
2303
51bbfa0c
RS
2304 /* Pass the function the address in which to return a structure value. */
2305 if (structure_value_addr && ! structure_value_addr_parm)
2306 {
2307 emit_move_insn (struct_value_rtx,
2308 force_reg (Pmode,
e5d70561
RK
2309 force_operand (structure_value_addr,
2310 NULL_RTX)));
7815214e
RK
2311
2312 /* Mark the memory for the aggregate as write-only. */
7d384cc0 2313 if (current_function_check_memory_usage)
7815214e
RK
2314 emit_library_call (chkr_set_right_libfunc, 1,
2315 VOIDmode, 3,
6a9c4aed 2316 structure_value_addr, Pmode,
7815214e 2317 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
956d6950
JL
2318 GEN_INT (MEMORY_USE_WO),
2319 TYPE_MODE (integer_type_node));
7815214e 2320
51bbfa0c 2321 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 2322 use_reg (&call_fusage, struct_value_rtx);
51bbfa0c
RS
2323 }
2324
77cac2f2 2325 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
8b0f9101 2326
21a3b983 2327 load_register_parameters (args, num_actuals, &call_fusage);
51bbfa0c
RS
2328
2329 /* Perform postincrements before actually calling the function. */
2330 emit_queue ();
2331
2332 /* All arguments and registers used for the call must be set up by now! */
2333
51bbfa0c 2334 /* Generate the actual call instruction. */
fb5eebb9
RH
2335 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2336 args_size.constant, struct_value_size,
51bbfa0c 2337 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
77cac2f2 2338 valreg, old_inhibit_defer_pop, call_fusage, is_const);
51bbfa0c
RS
2339
2340 /* If call is cse'able, make appropriate pair of reg-notes around it.
2341 Test valreg so we don't crash; may safely ignore `const'
80a3ad45
JW
2342 if return type is void. Disable for PARALLEL return values, because
2343 we have no way to move such values into a pseudo register. */
2344 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
51bbfa0c
RS
2345 {
2346 rtx note = 0;
2347 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2348 rtx insns;
2349
9ae8ffe7
JL
2350 /* Mark the return value as a pointer if needed. */
2351 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2352 {
2353 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2354 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2355 }
2356
51bbfa0c
RS
2357 /* Construct an "equal form" for the value which mentions all the
2358 arguments in order as well as the function name. */
2359#ifdef PUSH_ARGS_REVERSED
2360 for (i = 0; i < num_actuals; i++)
38a448ca 2361 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c
RS
2362#else
2363 for (i = num_actuals - 1; i >= 0; i--)
38a448ca 2364 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c 2365#endif
38a448ca 2366 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
51bbfa0c
RS
2367
2368 insns = get_insns ();
2369 end_sequence ();
2370
2371 emit_libcall_block (insns, temp, valreg, note);
2372
2373 valreg = temp;
2374 }
4f48d56a
RK
2375 else if (is_const)
2376 {
2377 /* Otherwise, just write out the sequence without a note. */
2378 rtx insns = get_insns ();
2379
2380 end_sequence ();
2381 emit_insns (insns);
2382 }
9ae8ffe7
JL
2383 else if (is_malloc)
2384 {
2385 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2386 rtx last, insns;
2387
2388 /* The return value from a malloc-like function is a pointer. */
2389 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2390 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2391
2392 emit_move_insn (temp, valreg);
2393
2394 /* The return value from a malloc-like function can not alias
2395 anything else. */
2396 last = get_last_insn ();
2397 REG_NOTES (last) =
38a448ca 2398 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
9ae8ffe7
JL
2399
2400 /* Write out the sequence. */
2401 insns = get_insns ();
2402 end_sequence ();
2403 emit_insns (insns);
2404 valreg = temp;
2405 }
51bbfa0c
RS
2406
2407 /* For calls to `setjmp', etc., inform flow.c it should complain
2408 if nonvolatile values are live. */
2409
2410 if (returns_twice)
2411 {
2412 emit_note (name, NOTE_INSN_SETJMP);
2413 current_function_calls_setjmp = 1;
2414 }
2415
2416 if (is_longjmp)
2417 current_function_calls_longjmp = 1;
2418
2419 /* Notice functions that cannot return.
2420 If optimizing, insns emitted below will be dead.
2421 If not optimizing, they will exist, which is useful
2422 if the user uses the `return' command in the debugger. */
2423
2424 if (is_volatile || is_longjmp)
2425 emit_barrier ();
2426
51bbfa0c
RS
2427 /* If value type not void, return an rtx for the value. */
2428
e976b8b2
MS
2429 /* If there are cleanups to be called, don't use a hard reg as target.
2430 We need to double check this and see if it matters anymore. */
e9a25f70 2431 if (any_pending_cleanups (1)
51bbfa0c
RS
2432 && target && REG_P (target)
2433 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2434 target = 0;
2435
2436 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2437 || ignore)
2438 {
2439 target = const0_rtx;
2440 }
2441 else if (structure_value_addr)
2442 {
2443 if (target == 0 || GET_CODE (target) != MEM)
29008b51 2444 {
38a448ca
RH
2445 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2446 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2447 structure_value_addr));
c6df88cb
MM
2448 MEM_SET_IN_STRUCT_P (target,
2449 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
29008b51 2450 }
51bbfa0c
RS
2451 }
2452 else if (pcc_struct_value)
2453 {
f78b5ca1
JL
2454 /* This is the special C++ case where we need to
2455 know what the true target was. We take care to
2456 never use this value more than once in one expression. */
38a448ca
RH
2457 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2458 copy_to_reg (valreg));
c6df88cb 2459 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
51bbfa0c 2460 }
cacbd532
JW
2461 /* Handle calls that return values in multiple non-contiguous locations.
2462 The Irix 6 ABI has examples of this. */
2463 else if (GET_CODE (valreg) == PARALLEL)
2464 {
aac5cc16
RH
2465 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2466
cacbd532
JW
2467 if (target == 0)
2468 {
2b4092f2 2469 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
c6df88cb 2470 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
cacbd532
JW
2471 preserve_temp_slots (target);
2472 }
2473
c5c76735
JL
2474 if (! rtx_equal_p (target, valreg))
2475 emit_group_store (target, valreg, bytes,
2476 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
cacbd532 2477 }
059c3d84
JW
2478 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2479 && GET_MODE (target) == GET_MODE (valreg))
2480 /* TARGET and VALREG cannot be equal at this point because the latter
2481 would not have REG_FUNCTION_VALUE_P true, while the former would if
2482 it were referring to the same register.
2483
2484 If they refer to the same register, this move will be a no-op, except
2485 when function inlining is being done. */
2486 emit_move_insn (target, valreg);
766b19fb 2487 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
c36fce9a 2488 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
51bbfa0c
RS
2489 else
2490 target = copy_to_reg (valreg);
2491
84b55618 2492#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
2493 /* If we promoted this return value, make the proper SUBREG. TARGET
2494 might be const0_rtx here, so be careful. */
2495 if (GET_CODE (target) == REG
766b19fb 2496 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
5d2ac65e 2497 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 2498 {
321e0bba
RK
2499 tree type = TREE_TYPE (exp);
2500 int unsignedp = TREE_UNSIGNED (type);
84b55618 2501
321e0bba
RK
2502 /* If we don't promote as expected, something is wrong. */
2503 if (GET_MODE (target)
2504 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
5d2ac65e
RK
2505 abort ();
2506
38a448ca 2507 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
84b55618
RK
2508 SUBREG_PROMOTED_VAR_P (target) = 1;
2509 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2510 }
2511#endif
2512
2f4aa534
RS
2513 /* If size of args is variable or this was a constructor call for a stack
2514 argument, restore saved stack-pointer value. */
51bbfa0c
RS
2515
2516 if (old_stack_level)
2517 {
e5d70561 2518 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 2519 pending_stack_adjust = old_pending_adj;
d64f5a78 2520#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2521 stack_arg_under_construction = old_stack_arg_under_construction;
2522 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2523 stack_usage_map = initial_stack_usage_map;
d64f5a78 2524#endif
51bbfa0c 2525 }
51bbfa0c
RS
2526#ifdef ACCUMULATE_OUTGOING_ARGS
2527 else
2528 {
2529#ifdef REG_PARM_STACK_SPACE
2530 if (save_area)
20efdf74
JL
2531 restore_fixed_argument_area (save_area, argblock,
2532 high_to_save, low_to_save);
b94301c2 2533#endif
51bbfa0c 2534
51bbfa0c
RS
2535 /* If we saved any argument areas, restore them. */
2536 for (i = 0; i < num_actuals; i++)
2537 if (args[i].save_area)
2538 {
2539 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2540 rtx stack_area
38a448ca
RH
2541 = gen_rtx_MEM (save_mode,
2542 memory_address (save_mode,
2543 XEXP (args[i].stack_slot, 0)));
51bbfa0c
RS
2544
2545 if (save_mode != BLKmode)
2546 emit_move_insn (stack_area, args[i].save_area);
2547 else
2548 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 2549 GEN_INT (args[i].size.constant),
51bbfa0c
RS
2550 PARM_BOUNDARY / BITS_PER_UNIT);
2551 }
2552
2553 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2554 stack_usage_map = initial_stack_usage_map;
2555 }
2556#endif
2557
59257ff7
RK
2558 /* If this was alloca, record the new stack level for nonlocal gotos.
2559 Check for the handler slots since we might not have a save area
0f41302f 2560 for non-local gotos. */
59257ff7 2561
ba716ac9 2562 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
e5d70561 2563 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
2564
2565 pop_temp_slots ();
2566
8e6a59fe
MM
2567 /* Free up storage we no longer need. */
2568 for (i = 0; i < num_actuals; ++i)
2569 if (args[i].aligned_regs)
2570 free (args[i].aligned_regs);
2571
51bbfa0c
RS
2572 return target;
2573}
2574\f
322e3e34
RK
2575/* Output a library call to function FUN (a SYMBOL_REF rtx)
2576 (emitting the queue unless NO_QUEUE is nonzero),
2577 for a value of mode OUTMODE,
2578 with NARGS different arguments, passed as alternating rtx values
2579 and machine_modes to convert them to.
2580 The rtx values should have been passed through protect_from_queue already.
2581
2582 NO_QUEUE will be true if and only if the library call is a `const' call
2583 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2584 to the variable is_const in expand_call.
2585
2586 NO_QUEUE must be true for const calls, because if it isn't, then
2587 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2588 and will be lost if the libcall sequence is optimized away.
2589
2590 NO_QUEUE must be false for non-const calls, because if it isn't, the
2591 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2592 optimized. For instance, the instruction scheduler may incorrectly
2593 move memory references across the non-const call. */
2594
2595void
3d994c6b
KG
2596emit_library_call VPARAMS((rtx orgfun, int no_queue, enum machine_mode outmode,
2597 int nargs, ...))
322e3e34 2598{
5148a72b 2599#ifndef ANSI_PROTOTYPES
4f90e4a0
RK
2600 rtx orgfun;
2601 int no_queue;
2602 enum machine_mode outmode;
2603 int nargs;
2604#endif
322e3e34
RK
2605 va_list p;
2606 /* Total size in bytes of all the stack-parms scanned so far. */
2607 struct args_size args_size;
2608 /* Size of arguments before any adjustments (such as rounding). */
2609 struct args_size original_args_size;
2610 register int argnum;
322e3e34 2611 rtx fun;
322e3e34
RK
2612 int inc;
2613 int count;
fbb57b2a 2614 struct args_size alignment_pad;
322e3e34
RK
2615 rtx argblock = 0;
2616 CUMULATIVE_ARGS args_so_far;
2617 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 2618 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
2619 struct arg *argvec;
2620 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2621 rtx call_fusage = 0;
e5e809f4 2622 int reg_parm_stack_space = 0;
f046b3cc
JL
2623#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2624 /* Define the boundary of the register parm stack space that needs to be
 2625 saved, if any. */
6a651371 2626 int low_to_save = -1, high_to_save = 0;
f046b3cc
JL
2627 rtx save_area = 0; /* Place that it is saved */
2628#endif
2629
2630#ifdef ACCUMULATE_OUTGOING_ARGS
2631 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2632 char *initial_stack_usage_map = stack_usage_map;
2633 int needed;
2634#endif
2635
2636#ifdef REG_PARM_STACK_SPACE
69d4ca36 2637 /* Size of the stack reserved for parameter registers. */
f046b3cc
JL
2638#ifdef MAYBE_REG_PARM_STACK_SPACE
2639 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2640#else
ab87f8c8 2641 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
f046b3cc
JL
2642#endif
2643#endif
322e3e34 2644
4f90e4a0
RK
2645 VA_START (p, nargs);
2646
5148a72b 2647#ifndef ANSI_PROTOTYPES
4f90e4a0 2648 orgfun = va_arg (p, rtx);
322e3e34
RK
2649 no_queue = va_arg (p, int);
2650 outmode = va_arg (p, enum machine_mode);
2651 nargs = va_arg (p, int);
4f90e4a0
RK
2652#endif
2653
2654 fun = orgfun;
322e3e34
RK
2655
2656 /* Copy all the libcall-arguments out of the varargs data
2657 and into a vector ARGVEC.
2658
2659 Compute how to pass each argument. We only support a very small subset
2660 of the full argument passing conventions to limit complexity here since
2661 library functions shouldn't have many args. */
2662
2663 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
f046b3cc
JL
2664 bzero ((char *) argvec, nargs * sizeof (struct arg));
2665
322e3e34 2666
eecb6f50 2667 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2668
2669 args_size.constant = 0;
2670 args_size.var = 0;
2671
888aa7a9
RS
2672 push_temp_slots ();
2673
322e3e34
RK
2674 for (count = 0; count < nargs; count++)
2675 {
2676 rtx val = va_arg (p, rtx);
2677 enum machine_mode mode = va_arg (p, enum machine_mode);
2678
2679 /* We cannot convert the arg value to the mode the library wants here;
2680 must do it earlier where we know the signedness of the arg. */
2681 if (mode == BLKmode
2682 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2683 abort ();
2684
2685 /* On some machines, there's no way to pass a float to a library fcn.
2686 Pass it as a double instead. */
2687#ifdef LIBGCC_NEEDS_DOUBLE
2688 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2689 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2690#endif
2691
2692 /* There's no need to call protect_from_queue, because
2693 either emit_move_insn or emit_push_insn will do that. */
2694
2695 /* Make sure it is a reasonable operand for a move or push insn. */
2696 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2697 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2698 val = force_operand (val, NULL_RTX);
2699
322e3e34
RK
2700#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2701 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2702 {
a44492f0
RK
2703 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2704 be viewed as just an efficiency improvement. */
888aa7a9
RS
2705 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2706 emit_move_insn (slot, val);
8301b6e2 2707 val = force_operand (XEXP (slot, 0), NULL_RTX);
a44492f0 2708 mode = Pmode;
888aa7a9 2709 }
322e3e34
RK
2710#endif
2711
888aa7a9
RS
2712 argvec[count].value = val;
2713 argvec[count].mode = mode;
2714
322e3e34 2715 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 2716 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
2717 abort ();
2718#ifdef FUNCTION_ARG_PARTIAL_NREGS
2719 argvec[count].partial
2720 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2721#else
2722 argvec[count].partial = 0;
2723#endif
2724
2725 locate_and_pad_parm (mode, NULL_TREE,
2726 argvec[count].reg && argvec[count].partial == 0,
2727 NULL_TREE, &args_size, &argvec[count].offset,
4fc026cd 2728 &argvec[count].size, &alignment_pad);
322e3e34
RK
2729
2730 if (argvec[count].size.var)
2731 abort ();
2732
e5e809f4 2733 if (reg_parm_stack_space == 0 && argvec[count].partial)
322e3e34 2734 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
322e3e34
RK
2735
2736 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 2737 || reg_parm_stack_space > 0)
322e3e34
RK
2738 args_size.constant += argvec[count].size.constant;
2739
0f41302f 2740 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
2741 }
2742 va_end (p);
2743
f046b3cc
JL
2744#ifdef FINAL_REG_PARM_STACK_SPACE
2745 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2746 args_size.var);
2747#endif
2748
322e3e34
RK
2749 /* If this machine requires an external definition for library
2750 functions, write one out. */
2751 assemble_external_libcall (fun);
2752
2753 original_args_size = args_size;
c795bca9 2754#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
2755 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2756 / STACK_BYTES) * STACK_BYTES);
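 /* Example: assuming STACK_BYTES is 16, a 20-byte argument block is
 rounded up here to ((20 + 15) / 16) * 16 == 32 bytes; the difference
 from original_args_size is later pushed as alignment padding. */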
2757#endif
2758
322e3e34 2759 args_size.constant = MAX (args_size.constant,
f046b3cc 2760 reg_parm_stack_space);
e5e809f4 2761
322e3e34 2762#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc 2763 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
2764#endif
2765
322e3e34
RK
2766 if (args_size.constant > current_function_outgoing_args_size)
2767 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2768
2769#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
2770 /* Since the stack pointer will never be pushed, it is possible for
2771 the evaluation of a parm to clobber something we have already
2772 written to the stack. Since most function calls on RISC machines
2773 do not use the stack, this is uncommon, but must work correctly.
2774
2775 Therefore, we save any area of the stack that was already written
2776 and that we are using. Here we set up to do this by making a new
2777 stack usage map from the old one.
2778
2779 Another approach might be to try to reorder the argument
2780 evaluations to avoid this conflicting stack usage. */
2781
2782 needed = args_size.constant;
e5e809f4
JL
2783
2784#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc
JL
2785 /* Since we will be writing into the entire argument area, the
2786 map must be allocated for its entire size, not just the part that
2787 is the responsibility of the caller. */
2788 needed += reg_parm_stack_space;
2789#endif
2790
2791#ifdef ARGS_GROW_DOWNWARD
2792 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2793 needed + 1);
2794#else
2795 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2796 needed);
322e3e34 2797#endif
f046b3cc
JL
2798 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2799
2800 if (initial_highest_arg_in_use)
2801 bcopy (initial_stack_usage_map, stack_usage_map,
2802 initial_highest_arg_in_use);
2803
2804 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2805 bzero (&stack_usage_map[initial_highest_arg_in_use],
2806 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2807 needed = 0;
322e3e34 2808
f046b3cc
JL
2809 /* The address of the outgoing argument list must not be copied to a
2810 register here, because argblock would be left pointing to the
2811 wrong place after the call to allocate_dynamic_stack_space below.
2812 */
2813
2814 argblock = virtual_outgoing_args_rtx;
2815#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
2816#ifndef PUSH_ROUNDING
2817 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2818#endif
f046b3cc 2819#endif
322e3e34
RK
2820
2821#ifdef PUSH_ARGS_REVERSED
c795bca9 2822#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
2823 /* If we push args individually in reverse order, perform stack alignment
2824 before the first push (the last arg). */
2825 if (argblock == 0)
2826 anti_adjust_stack (GEN_INT (args_size.constant
2827 - original_args_size.constant));
2828#endif
2829#endif
2830
2831#ifdef PUSH_ARGS_REVERSED
2832 inc = -1;
2833 argnum = nargs - 1;
2834#else
2835 inc = 1;
2836 argnum = 0;
2837#endif
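 /* With PUSH_ARGS_REVERSED the argument vector is walked from the last
 entry down to the first (inc == -1), so the pushes below are emitted
 in reverse order; otherwise it is walked forwards. */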
2838
f046b3cc
JL
2839#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2840 /* The argument list is the property of the called routine and it
2841 may clobber it. If the fixed area has been used for previous
2842 parameters, we must save and restore it.
2843
 2844 Here we compute the boundary of the area that needs to be saved, if any. */
2845
2846#ifdef ARGS_GROW_DOWNWARD
2847 for (count = 0; count < reg_parm_stack_space + 1; count++)
2848#else
2849 for (count = 0; count < reg_parm_stack_space; count++)
2850#endif
2851 {
2852 if (count >= highest_outgoing_arg_in_use
2853 || stack_usage_map[count] == 0)
2854 continue;
2855
2856 if (low_to_save == -1)
2857 low_to_save = count;
2858
2859 high_to_save = count;
2860 }
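 /* [low_to_save, high_to_save] now brackets the bytes of the register
 parm area that stack_usage_map shows as in use; low_to_save remains
 -1 when nothing there needs saving. */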
2861
2862 if (low_to_save >= 0)
2863 {
2864 int num_to_save = high_to_save - low_to_save + 1;
2865 enum machine_mode save_mode
2866 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2867 rtx stack_area;
2868
2869 /* If we don't have the required alignment, must do this in BLKmode. */
2870 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2871 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2872 save_mode = BLKmode;
2873
ceb83206 2874#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
2875 stack_area = gen_rtx_MEM (save_mode,
2876 memory_address (save_mode,
38a448ca 2877 plus_constant (argblock,
ceb83206 2878 - high_to_save)));
f046b3cc 2879#else
ceb83206
JL
2880 stack_area = gen_rtx_MEM (save_mode,
2881 memory_address (save_mode,
38a448ca 2882 plus_constant (argblock,
ceb83206 2883 low_to_save)));
f046b3cc 2884#endif
f046b3cc
JL
2885 if (save_mode == BLKmode)
2886 {
2887 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
f046b3cc
JL
2888 emit_block_move (validize_mem (save_area), stack_area,
2889 GEN_INT (num_to_save),
2890 PARM_BOUNDARY / BITS_PER_UNIT);
2891 }
2892 else
2893 {
2894 save_area = gen_reg_rtx (save_mode);
2895 emit_move_insn (save_area, stack_area);
2896 }
2897 }
2898#endif
2899
322e3e34
RK
2900 /* Push the args that need to be pushed. */
2901
5e26979c
JL
2902 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2903 are to be pushed. */
322e3e34
RK
2904 for (count = 0; count < nargs; count++, argnum += inc)
2905 {
2906 register enum machine_mode mode = argvec[argnum].mode;
2907 register rtx val = argvec[argnum].value;
2908 rtx reg = argvec[argnum].reg;
2909 int partial = argvec[argnum].partial;
69d4ca36 2910#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 2911 int lower_bound, upper_bound, i;
69d4ca36 2912#endif
322e3e34
RK
2913
2914 if (! (reg != 0 && partial == 0))
f046b3cc
JL
2915 {
2916#ifdef ACCUMULATE_OUTGOING_ARGS
2917 /* If this is being stored into a pre-allocated, fixed-size, stack
2918 area, save any previous data at that location. */
2919
2920#ifdef ARGS_GROW_DOWNWARD
2921 /* stack_slot is negative, but we want to index stack_usage_map
2922 with positive values. */
5e26979c
JL
2923 upper_bound = -argvec[argnum].offset.constant + 1;
2924 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 2925#else
5e26979c
JL
2926 lower_bound = argvec[argnum].offset.constant;
2927 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
2928#endif
2929
2930 for (i = lower_bound; i < upper_bound; i++)
2931 if (stack_usage_map[i]
f046b3cc
JL
2932 /* Don't store things in the fixed argument area at this point;
2933 it has already been saved. */
e5e809f4 2934 && i > reg_parm_stack_space)
f046b3cc
JL
2935 break;
2936
2937 if (i != upper_bound)
2938 {
e5e809f4 2939 /* We need to make a save area. See what mode we can make it. */
f046b3cc 2940 enum machine_mode save_mode
5e26979c 2941 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
2942 MODE_INT, 1);
2943 rtx stack_area
c5c76735
JL
2944 = gen_rtx_MEM
2945 (save_mode,
2946 memory_address
2947 (save_mode,
2948 plus_constant (argblock,
2949 argvec[argnum].offset.constant)));
2950
5e26979c
JL
2951 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2952 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
2953 }
2954#endif
2955 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
e5e809f4 2956 argblock, GEN_INT (argvec[argnum].offset.constant),
4fc026cd 2957 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
f046b3cc
JL
2958
2959#ifdef ACCUMULATE_OUTGOING_ARGS
2960 /* Now mark the segment we just used. */
2961 for (i = lower_bound; i < upper_bound; i++)
2962 stack_usage_map[i] = 1;
2963#endif
2964
2965 NO_DEFER_POP;
2966 }
322e3e34
RK
2967 }
2968
2969#ifndef PUSH_ARGS_REVERSED
c795bca9 2970#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
2971 /* If we pushed args in forward order, perform stack alignment
2972 after pushing the last arg. */
2973 if (argblock == 0)
2974 anti_adjust_stack (GEN_INT (args_size.constant
2975 - original_args_size.constant));
2976#endif
2977#endif
2978
2979#ifdef PUSH_ARGS_REVERSED
2980 argnum = nargs - 1;
2981#else
2982 argnum = 0;
2983#endif
2984
77cac2f2 2985 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 2986
322e3e34
RK
2987 /* Now load any reg parms into their regs. */
2988
5e26979c
JL
2989 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2990 are to be pushed. */
322e3e34
RK
2991 for (count = 0; count < nargs; count++, argnum += inc)
2992 {
322e3e34
RK
2993 register rtx val = argvec[argnum].value;
2994 rtx reg = argvec[argnum].reg;
2995 int partial = argvec[argnum].partial;
2996
2997 if (reg != 0 && partial == 0)
2998 emit_move_insn (reg, val);
2999 NO_DEFER_POP;
3000 }
3001
3002 /* For version 1.37, try deleting this entirely. */
3003 if (! no_queue)
3004 emit_queue ();
3005
3006 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
3007 for (count = 0; count < nargs; count++)
3008 if (argvec[count].reg != 0)
77cac2f2 3009 use_reg (&call_fusage, argvec[count].reg);
322e3e34 3010
322e3e34
RK
3011 /* Don't allow popping to be deferred, since then
3012 cse'ing of library calls could delete a call and leave the pop. */
3013 NO_DEFER_POP;
3014
3015 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3016 will set inhibit_defer_pop to that value. */
3017
334c4f0f
RK
3018 /* The return type is needed to decide how many bytes the function pops.
3019 Signedness plays no role in that, so for simplicity, we pretend it's
3020 always signed. We also assume that the list of arguments passed has
3021 no impact, so we pretend it is unknown. */
3022
2c8da025
RK
3023 emit_call_1 (fun,
3024 get_identifier (XSTR (orgfun, 0)),
b3776927
RK
3025 build_function_type (outmode == VOIDmode ? void_type_node
3026 : type_for_mode (outmode, 0), NULL_TREE),
fb5eebb9 3027 original_args_size.constant, args_size.constant, 0,
322e3e34
RK
3028 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3029 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 3030 old_inhibit_defer_pop + 1, call_fusage, no_queue);
322e3e34 3031
888aa7a9
RS
3032 pop_temp_slots ();
3033
322e3e34
RK
3034 /* Now restore inhibit_defer_pop to its actual original value. */
3035 OK_DEFER_POP;
f046b3cc
JL
3036
3037#ifdef ACCUMULATE_OUTGOING_ARGS
3038#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
3039 if (save_area)
3040 {
3041 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 3042#ifdef ARGS_GROW_DOWNWARD
e9a25f70 3043 rtx stack_area
38a448ca
RH
3044 = gen_rtx_MEM (save_mode,
3045 memory_address (save_mode,
ceb83206
JL
3046 plus_constant (argblock,
3047 - high_to_save)));
f046b3cc 3048#else
ceb83206
JL
3049 rtx stack_area
3050 = gen_rtx_MEM (save_mode,
3051 memory_address (save_mode,
3052 plus_constant (argblock, low_to_save)));
f046b3cc 3053#endif
f046b3cc 3054
e9a25f70
JL
3055 if (save_mode != BLKmode)
3056 emit_move_insn (stack_area, save_area);
3057 else
3058 emit_block_move (stack_area, validize_mem (save_area),
3059 GEN_INT (high_to_save - low_to_save + 1),
3060 PARM_BOUNDARY / BITS_PER_UNIT);
3061 }
f046b3cc
JL
3062#endif
3063
3064 /* If we saved any argument areas, restore them. */
3065 for (count = 0; count < nargs; count++)
3066 if (argvec[count].save_area)
3067 {
3068 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3069 rtx stack_area
38a448ca 3070 = gen_rtx_MEM (save_mode,
c5c76735
JL
3071 memory_address
3072 (save_mode,
3073 plus_constant (argblock,
3074 argvec[count].offset.constant)));
f046b3cc
JL
3075
3076 emit_move_insn (stack_area, argvec[count].save_area);
3077 }
3078
3079 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3080 stack_usage_map = initial_stack_usage_map;
3081#endif
322e3e34
RK
3082}
3083\f
3084/* Like emit_library_call except that an extra argument, VALUE,
3085 comes second and says where to store the result.
fac0ad80
RS
3086 (If VALUE is zero, this function chooses a convenient way
 3087 to return the value.)
322e3e34 3088
fac0ad80
RS
3089 This function returns an rtx for where the value is to be found.
3090 If VALUE is nonzero, VALUE is returned. */
3091
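/* A typical use (illustrative only; LIBFUNC, OP0 and OP1 are
   hypothetical rtx values supplied by the caller -- a SYMBOL_REF for
   the routine and two SImode operands):

	rtx result = emit_library_call_value (libfunc, NULL_RTX, 1, SImode,
					      2, op0, SImode, op1, SImode);

   Each argument after NARGS is an rtx followed by its machine mode,
   matching the (value, mode) pairs read back out of the va_list
   below.  */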
3092rtx
3d994c6b
KG
3093emit_library_call_value VPARAMS((rtx orgfun, rtx value, int no_queue,
3094 enum machine_mode outmode, int nargs, ...))
322e3e34 3095{
5148a72b 3096#ifndef ANSI_PROTOTYPES
4f90e4a0
RK
3097 rtx orgfun;
3098 rtx value;
3099 int no_queue;
3100 enum machine_mode outmode;
3101 int nargs;
3102#endif
322e3e34
RK
3103 va_list p;
3104 /* Total size in bytes of all the stack-parms scanned so far. */
3105 struct args_size args_size;
3106 /* Size of arguments before any adjustments (such as rounding). */
3107 struct args_size original_args_size;
3108 register int argnum;
322e3e34 3109 rtx fun;
322e3e34
RK
3110 int inc;
3111 int count;
4fc026cd 3112 struct args_size alignment_pad;
322e3e34
RK
3113 rtx argblock = 0;
3114 CUMULATIVE_ARGS args_so_far;
3115 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 3116 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
3117 struct arg *argvec;
3118 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 3119 rtx call_fusage = 0;
322e3e34 3120 rtx mem_value = 0;
fac0ad80 3121 int pcc_struct_value = 0;
4f389214 3122 int struct_value_size = 0;
d61bee95 3123 int is_const;
e5e809f4 3124 int reg_parm_stack_space = 0;
69d4ca36 3125#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 3126 int needed;
69d4ca36 3127#endif
f046b3cc
JL
3128
3129#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3130 /* Define the boundary of the register parm stack space that needs to be
 3131 saved, if any. */
6a651371 3132 int low_to_save = -1, high_to_save = 0;
f046b3cc
JL
3133 rtx save_area = 0; /* Place that it is saved */
3134#endif
3135
3136#ifdef ACCUMULATE_OUTGOING_ARGS
69d4ca36 3137 /* Size of the stack reserved for parameter registers. */
f046b3cc
JL
3138 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3139 char *initial_stack_usage_map = stack_usage_map;
3140#endif
3141
3142#ifdef REG_PARM_STACK_SPACE
3143#ifdef MAYBE_REG_PARM_STACK_SPACE
3144 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3145#else
ab87f8c8 3146 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
f046b3cc
JL
3147#endif
3148#endif
322e3e34 3149
4f90e4a0
RK
3150 VA_START (p, nargs);
3151
5148a72b 3152#ifndef ANSI_PROTOTYPES
4f90e4a0 3153 orgfun = va_arg (p, rtx);
322e3e34
RK
3154 value = va_arg (p, rtx);
3155 no_queue = va_arg (p, int);
3156 outmode = va_arg (p, enum machine_mode);
3157 nargs = va_arg (p, int);
4f90e4a0
RK
3158#endif
3159
d61bee95 3160 is_const = no_queue;
4f90e4a0 3161 fun = orgfun;
322e3e34
RK
3162
3163 /* If this kind of value comes back in memory,
3164 decide where in memory it should come back. */
fac0ad80 3165 if (aggregate_value_p (type_for_mode (outmode, 0)))
322e3e34 3166 {
fac0ad80
RS
3167#ifdef PCC_STATIC_STRUCT_RETURN
3168 rtx pointer_reg
3169 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
4dc07bd7 3170 0, 0);
38a448ca 3171 mem_value = gen_rtx_MEM (outmode, pointer_reg);
fac0ad80
RS
3172 pcc_struct_value = 1;
3173 if (value == 0)
3174 value = gen_reg_rtx (outmode);
3175#else /* not PCC_STATIC_STRUCT_RETURN */
4f389214 3176 struct_value_size = GET_MODE_SIZE (outmode);
fac0ad80 3177 if (value != 0 && GET_CODE (value) == MEM)
322e3e34
RK
3178 mem_value = value;
3179 else
3180 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
fac0ad80 3181#endif
779c643a
JW
3182
3183 /* This call returns a big structure. */
3184 is_const = 0;
322e3e34
RK
3185 }
3186
3187 /* ??? Unfinished: must pass the memory address as an argument. */
3188
3189 /* Copy all the libcall-arguments out of the varargs data
3190 and into a vector ARGVEC.
3191
3192 Compute how to pass each argument. We only support a very small subset
3193 of the full argument passing conventions to limit complexity here since
3194 library functions shouldn't have many args. */
3195
3196 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
d3c4e2ab 3197 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
322e3e34 3198
eecb6f50 3199 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
3200
3201 args_size.constant = 0;
3202 args_size.var = 0;
3203
3204 count = 0;
3205
888aa7a9
RS
3206 push_temp_slots ();
3207
322e3e34
RK
3208 /* If there's a structure value address to be passed,
3209 either pass it in the special place, or pass it as an extra argument. */
fac0ad80 3210 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
322e3e34
RK
3211 {
3212 rtx addr = XEXP (mem_value, 0);
fac0ad80 3213 nargs++;
322e3e34 3214
fac0ad80
RS
3215 /* Make sure it is a reasonable operand for a move or push insn. */
3216 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3217 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3218 addr = force_operand (addr, NULL_RTX);
322e3e34 3219
fac0ad80 3220 argvec[count].value = addr;
4fc3dcd5 3221 argvec[count].mode = Pmode;
fac0ad80 3222 argvec[count].partial = 0;
322e3e34 3223
4fc3dcd5 3224 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
322e3e34 3225#ifdef FUNCTION_ARG_PARTIAL_NREGS
4fc3dcd5 3226 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
fac0ad80 3227 abort ();
322e3e34
RK
3228#endif
3229
4fc3dcd5 3230 locate_and_pad_parm (Pmode, NULL_TREE,
fac0ad80
RS
3231 argvec[count].reg && argvec[count].partial == 0,
3232 NULL_TREE, &args_size, &argvec[count].offset,
4fc026cd 3233 &argvec[count].size, &alignment_pad);
322e3e34
RK
3234
3235
fac0ad80 3236 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 3237 || reg_parm_stack_space > 0)
fac0ad80 3238 args_size.constant += argvec[count].size.constant;
322e3e34 3239
0f41302f 3240 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
fac0ad80
RS
3241
3242 count++;
322e3e34
RK
3243 }
3244
3245 for (; count < nargs; count++)
3246 {
3247 rtx val = va_arg (p, rtx);
3248 enum machine_mode mode = va_arg (p, enum machine_mode);
3249
3250 /* We cannot convert the arg value to the mode the library wants here;
3251 must do it earlier where we know the signedness of the arg. */
3252 if (mode == BLKmode
3253 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3254 abort ();
3255
3256 /* On some machines, there's no way to pass a float to a library fcn.
3257 Pass it as a double instead. */
3258#ifdef LIBGCC_NEEDS_DOUBLE
3259 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 3260 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
3261#endif
3262
3263 /* There's no need to call protect_from_queue, because
3264 either emit_move_insn or emit_push_insn will do that. */
3265
3266 /* Make sure it is a reasonable operand for a move or push insn. */
3267 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3268 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3269 val = force_operand (val, NULL_RTX);
3270
322e3e34
RK
3271#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3272 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 3273 {
a44492f0
RK
3274 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3275 be viewed as just an efficiency improvement. */
888aa7a9
RS
3276 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3277 emit_move_insn (slot, val);
3278 val = XEXP (slot, 0);
3279 mode = Pmode;
3280 }
322e3e34
RK
3281#endif
3282
888aa7a9
RS
3283 argvec[count].value = val;
3284 argvec[count].mode = mode;
3285
322e3e34 3286 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 3287 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
3288 abort ();
3289#ifdef FUNCTION_ARG_PARTIAL_NREGS
3290 argvec[count].partial
3291 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3292#else
3293 argvec[count].partial = 0;
3294#endif
3295
3296 locate_and_pad_parm (mode, NULL_TREE,
3297 argvec[count].reg && argvec[count].partial == 0,
3298 NULL_TREE, &args_size, &argvec[count].offset,
4fc026cd 3299 &argvec[count].size, &alignment_pad);
322e3e34
RK
3300
3301 if (argvec[count].size.var)
3302 abort ();
3303
e5e809f4 3304 if (reg_parm_stack_space == 0 && argvec[count].partial)
322e3e34 3305 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
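 /* For example, if UNITS_PER_WORD is 4, the argument occupies 12 bytes
    and FUNCTION_ARG_PARTIAL_NREGS said two words go in registers, only
    the remaining 12 - 2*4 = 4 bytes are counted as stack space here.  */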
322e3e34
RK
3306
3307 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 3308 || reg_parm_stack_space > 0)
322e3e34
RK
3309 args_size.constant += argvec[count].size.constant;
3310
0f41302f 3311 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
3312 }
3313 va_end (p);
3314
f046b3cc
JL
3315#ifdef FINAL_REG_PARM_STACK_SPACE
3316 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3317 args_size.var);
3318#endif
322e3e34
RK
3319 /* If this machine requires an external definition for library
3320 functions, write one out. */
3321 assemble_external_libcall (fun);
3322
3323 original_args_size = args_size;
c795bca9 3324#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
3325 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3326 / STACK_BYTES) * STACK_BYTES);
3327#endif
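 /* E.g. with STACK_BYTES == 16 and 20 bytes of arguments, this rounds
    args_size.constant up to ((20 + 15) / 16) * 16 == 32.  */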
3328
322e3e34 3329 args_size.constant = MAX (args_size.constant,
f046b3cc 3330 reg_parm_stack_space);
e5e809f4 3331
322e3e34 3332#ifndef OUTGOING_REG_PARM_STACK_SPACE
fc990856 3333 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
3334#endif
3335
322e3e34
RK
3336 if (args_size.constant > current_function_outgoing_args_size)
3337 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
3338
3339#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
3340 /* Since the stack pointer will never be pushed, it is possible for
3341 the evaluation of a parm to clobber something we have already
3342 written to the stack. Since most function calls on RISC machines
3343 do not use the stack, this is uncommon, but must work correctly.
3344
3345 Therefore, we save any area of the stack that was already written
3346 and that we are using. Here we set up to do this by making a new
3347 stack usage map from the old one.
3348
3349 Another approach might be to try to reorder the argument
3350 evaluations to avoid this conflicting stack usage. */
3351
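 /* stack_usage_map is indexed by byte offset into the outgoing argument
    block; a nonzero entry means that byte already holds a parameter of
    the call being expanded.  For instance, if an enclosing expand_call
    has already stored an argument at bytes 8..11, those four entries are
    nonzero, and any slot this libcall wants in that range must be saved
    first (see the save_area code below).  */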
3352 needed = args_size.constant;
e5e809f4
JL
3353
3354#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc
JL
3355 /* Since we will be writing into the entire argument area, the
3356 map must be allocated for its entire size, not just the part that
3357 is the responsibility of the caller. */
3358 needed += reg_parm_stack_space;
3359#endif
3360
3361#ifdef ARGS_GROW_DOWNWARD
3362 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3363 needed + 1);
3364#else
3365 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3366 needed);
322e3e34 3367#endif
f046b3cc
JL
3368 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3369
3370 if (initial_highest_arg_in_use)
3371 bcopy (initial_stack_usage_map, stack_usage_map,
3372 initial_highest_arg_in_use);
3373
3374 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3375 bzero (&stack_usage_map[initial_highest_arg_in_use],
3376 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3377 needed = 0;
322e3e34 3378
f046b3cc
JL
3379 /* The address of the outgoing argument list must not be copied to a
3380 register here, because argblock would be left pointing to the
 3381 wrong place after the call to allocate_dynamic_stack_space below. */
3383
3384 argblock = virtual_outgoing_args_rtx;
3385#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
3386#ifndef PUSH_ROUNDING
3387 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3388#endif
f046b3cc 3389#endif
322e3e34
RK
3390
3391#ifdef PUSH_ARGS_REVERSED
c795bca9 3392#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
3393 /* If we push args individually in reverse order, perform stack alignment
3394 before the first push (the last arg). */
3395 if (argblock == 0)
3396 anti_adjust_stack (GEN_INT (args_size.constant
3397 - original_args_size.constant));
3398#endif
3399#endif
3400
3401#ifdef PUSH_ARGS_REVERSED
3402 inc = -1;
3403 argnum = nargs - 1;
3404#else
3405 inc = 1;
3406 argnum = 0;
3407#endif
3408
f046b3cc
JL
3409#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3410 /* The argument list is the property of the called routine and it
3411 may clobber it. If the fixed area has been used for previous
3412 parameters, we must save and restore it.
3413
 3414 Here we compute the boundary of the area that needs to be saved, if any. */
3415
3416#ifdef ARGS_GROW_DOWNWARD
3417 for (count = 0; count < reg_parm_stack_space + 1; count++)
3418#else
3419 for (count = 0; count < reg_parm_stack_space; count++)
3420#endif
3421 {
3422 if (count >= highest_outgoing_arg_in_use
3423 || stack_usage_map[count] == 0)
3424 continue;
3425
3426 if (low_to_save == -1)
3427 low_to_save = count;
3428
3429 high_to_save = count;
3430 }
3431
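 /* For example, with REG_PARM_STACK_SPACE of 16 bytes of which only
    bytes 4..11 are marked in stack_usage_map, the loop above leaves
    low_to_save == 4 and high_to_save == 11, so just those 8 bytes are
    copied into save_area below.  */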
3432 if (low_to_save >= 0)
3433 {
3434 int num_to_save = high_to_save - low_to_save + 1;
3435 enum machine_mode save_mode
3436 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3437 rtx stack_area;
3438
3439 /* If we don't have the required alignment, must do this in BLKmode. */
3440 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3441 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3442 save_mode = BLKmode;
3443
ceb83206 3444#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
3445 stack_area = gen_rtx_MEM (save_mode,
3446 memory_address (save_mode,
38a448ca 3447 plus_constant (argblock,
ceb83206 3448 - high_to_save)));
f046b3cc 3449#else
ceb83206
JL
3450 stack_area = gen_rtx_MEM (save_mode,
3451 memory_address (save_mode,
38a448ca 3452 plus_constant (argblock,
ceb83206 3453 low_to_save)));
f046b3cc 3454#endif
f046b3cc
JL
3455 if (save_mode == BLKmode)
3456 {
3457 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
f046b3cc
JL
3458 emit_block_move (validize_mem (save_area), stack_area,
3459 GEN_INT (num_to_save),
3460 PARM_BOUNDARY / BITS_PER_UNIT);
3461 }
3462 else
3463 {
3464 save_area = gen_reg_rtx (save_mode);
3465 emit_move_insn (save_area, stack_area);
3466 }
3467 }
3468#endif
3469
322e3e34
RK
3470 /* Push the args that need to be pushed. */
3471
5e26979c
JL
3472 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3473 are to be pushed. */
322e3e34
RK
3474 for (count = 0; count < nargs; count++, argnum += inc)
3475 {
3476 register enum machine_mode mode = argvec[argnum].mode;
3477 register rtx val = argvec[argnum].value;
3478 rtx reg = argvec[argnum].reg;
3479 int partial = argvec[argnum].partial;
69d4ca36 3480#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 3481 int lower_bound, upper_bound, i;
69d4ca36 3482#endif
322e3e34
RK
3483
3484 if (! (reg != 0 && partial == 0))
f046b3cc
JL
3485 {
3486#ifdef ACCUMULATE_OUTGOING_ARGS
 3487 /* If this is being stored into a pre-allocated, fixed-size stack
3488 area, save any previous data at that location. */
3489
3490#ifdef ARGS_GROW_DOWNWARD
3491 /* stack_slot is negative, but we want to index stack_usage_map
3492 with positive values. */
5e26979c
JL
3493 upper_bound = -argvec[argnum].offset.constant + 1;
3494 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 3495#else
5e26979c
JL
3496 lower_bound = argvec[argnum].offset.constant;
3497 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
3498#endif
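 /* For instance, when args grow downward and this slot sits at offset
    -8 with a 4-byte size, the bytes checked below are
    stack_usage_map[5..8]; when args grow upward with offset 8 and the
    same size, they are stack_usage_map[8..11].  */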
3499
3500 for (i = lower_bound; i < upper_bound; i++)
3501 if (stack_usage_map[i]
f046b3cc
JL
3502 /* Don't store things in the fixed argument area at this point;
3503 it has already been saved. */
e5e809f4 3504 && i > reg_parm_stack_space)
f046b3cc
JL
3505 break;
3506
3507 if (i != upper_bound)
3508 {
e5e809f4 3509 /* We need to make a save area. See what mode we can make it. */
f046b3cc 3510 enum machine_mode save_mode
5e26979c 3511 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
3512 MODE_INT, 1);
3513 rtx stack_area
c5c76735
JL
3514 = gen_rtx_MEM
3515 (save_mode,
3516 memory_address
3517 (save_mode,
3518 plus_constant (argblock,
3519 argvec[argnum].offset.constant)));
5e26979c 3520 argvec[argnum].save_area = gen_reg_rtx (save_mode);
c5c76735 3521
5e26979c 3522 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
3523 }
3524#endif
3525 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
e5e809f4 3526 argblock, GEN_INT (argvec[argnum].offset.constant),
4fc026cd 3527 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
f046b3cc
JL
3528
3529#ifdef ACCUMULATE_OUTGOING_ARGS
3530 /* Now mark the segment we just used. */
3531 for (i = lower_bound; i < upper_bound; i++)
3532 stack_usage_map[i] = 1;
3533#endif
3534
3535 NO_DEFER_POP;
3536 }
322e3e34
RK
3537 }
3538
3539#ifndef PUSH_ARGS_REVERSED
c795bca9 3540#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
3541 /* If we pushed args in forward order, perform stack alignment
3542 after pushing the last arg. */
3543 if (argblock == 0)
3544 anti_adjust_stack (GEN_INT (args_size.constant
3545 - original_args_size.constant));
3546#endif
3547#endif
3548
3549#ifdef PUSH_ARGS_REVERSED
3550 argnum = nargs - 1;
3551#else
3552 argnum = 0;
3553#endif
3554
77cac2f2 3555 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 3556
322e3e34
RK
3557 /* Now load any reg parms into their regs. */
3558
5e26979c
JL
3559 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3560 are to be pushed. */
322e3e34
RK
3561 for (count = 0; count < nargs; count++, argnum += inc)
3562 {
322e3e34
RK
3563 register rtx val = argvec[argnum].value;
3564 rtx reg = argvec[argnum].reg;
3565 int partial = argvec[argnum].partial;
3566
3567 if (reg != 0 && partial == 0)
3568 emit_move_insn (reg, val);
3569 NO_DEFER_POP;
3570 }
3571
3572#if 0
3573 /* For version 1.37, try deleting this entirely. */
3574 if (! no_queue)
3575 emit_queue ();
3576#endif
3577
3578 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
3579 for (count = 0; count < nargs; count++)
3580 if (argvec[count].reg != 0)
77cac2f2 3581 use_reg (&call_fusage, argvec[count].reg);
322e3e34 3582
fac0ad80
RS
3583 /* Pass the function the address in which to return a structure value. */
3584 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3585 {
3586 emit_move_insn (struct_value_rtx,
3587 force_reg (Pmode,
3588 force_operand (XEXP (mem_value, 0),
3589 NULL_RTX)));
3590 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 3591 use_reg (&call_fusage, struct_value_rtx);
fac0ad80
RS
3592 }
3593
322e3e34
RK
3594 /* Don't allow popping to be deferred, since then
3595 cse'ing of library calls could delete a call and leave the pop. */
3596 NO_DEFER_POP;
3597
3598 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3599 will set inhibit_defer_pop to that value. */
334c4f0f
RK
3600 /* See the comment in emit_library_call about the function type we build
3601 and pass here. */
322e3e34 3602
2c8da025
RK
3603 emit_call_1 (fun,
3604 get_identifier (XSTR (orgfun, 0)),
334c4f0f 3605 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
fb5eebb9
RH
3606 original_args_size.constant, args_size.constant,
3607 struct_value_size,
322e3e34 3608 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4d6a19ff 3609 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 3610 old_inhibit_defer_pop + 1, call_fusage, is_const);
322e3e34
RK
3611
3612 /* Now restore inhibit_defer_pop to its actual original value. */
3613 OK_DEFER_POP;
3614
888aa7a9
RS
3615 pop_temp_slots ();
3616
322e3e34
RK
3617 /* Copy the value to the right place. */
3618 if (outmode != VOIDmode)
3619 {
3620 if (mem_value)
3621 {
3622 if (value == 0)
fac0ad80 3623 value = mem_value;
322e3e34
RK
3624 if (value != mem_value)
3625 emit_move_insn (value, mem_value);
3626 }
3627 else if (value != 0)
3628 emit_move_insn (value, hard_libcall_value (outmode));
fac0ad80
RS
3629 else
3630 value = hard_libcall_value (outmode);
322e3e34 3631 }
fac0ad80 3632
f046b3cc
JL
3633#ifdef ACCUMULATE_OUTGOING_ARGS
3634#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
3635 if (save_area)
3636 {
3637 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 3638#ifdef ARGS_GROW_DOWNWARD
e9a25f70 3639 rtx stack_area
38a448ca
RH
3640 = gen_rtx_MEM (save_mode,
3641 memory_address (save_mode,
ceb83206
JL
3642 plus_constant (argblock,
3643 - high_to_save)));
f046b3cc 3644#else
ceb83206
JL
3645 rtx stack_area
3646 = gen_rtx_MEM (save_mode,
3647 memory_address (save_mode,
3648 plus_constant (argblock, low_to_save)));
f046b3cc 3649#endif
e9a25f70
JL
3650 if (save_mode != BLKmode)
3651 emit_move_insn (stack_area, save_area);
3652 else
3653 emit_block_move (stack_area, validize_mem (save_area),
3654 GEN_INT (high_to_save - low_to_save + 1),
f046b3cc 3655 PARM_BOUNDARY / BITS_PER_UNIT);
e9a25f70 3656 }
f046b3cc
JL
3657#endif
3658
3659 /* If we saved any argument areas, restore them. */
3660 for (count = 0; count < nargs; count++)
3661 if (argvec[count].save_area)
3662 {
3663 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3664 rtx stack_area
38a448ca 3665 = gen_rtx_MEM (save_mode,
c5c76735
JL
3666 memory_address
3667 (save_mode,
3668 plus_constant (argblock,
3669 argvec[count].offset.constant)));
f046b3cc
JL
3670
3671 emit_move_insn (stack_area, argvec[count].save_area);
3672 }
3673
3674 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3675 stack_usage_map = initial_stack_usage_map;
3676#endif
3677
fac0ad80 3678 return value;
322e3e34
RK
3679}
3680\f
51bbfa0c
RS
3681#if 0
3682/* Return an rtx which represents a suitable home on the stack
3683 given TYPE, the type of the argument looking for a home.
3684 This is called only for BLKmode arguments.
3685
3686 SIZE is the size needed for this target.
3687 ARGS_ADDR is the address of the bottom of the argument block for this call.
3688 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3689 if this machine uses push insns. */
3690
3691static rtx
3692target_for_arg (type, size, args_addr, offset)
3693 tree type;
3694 rtx size;
3695 rtx args_addr;
3696 struct args_size offset;
3697{
3698 rtx target;
3699 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3700
3701 /* We do not call memory_address if possible,
3702 because we want to address as close to the stack
3703 as possible. For non-variable sized arguments,
3704 this will be stack-pointer relative addressing. */
3705 if (GET_CODE (offset_rtx) == CONST_INT)
3706 target = plus_constant (args_addr, INTVAL (offset_rtx));
3707 else
3708 {
3709 /* I have no idea how to guarantee that this
3710 will work in the presence of register parameters. */
38a448ca 3711 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
51bbfa0c
RS
3712 target = memory_address (QImode, target);
3713 }
3714
38a448ca 3715 return gen_rtx_MEM (BLKmode, target);
51bbfa0c
RS
3716}
3717#endif
3718\f
3719/* Store a single argument for a function call
3720 into the register or memory area where it must be passed.
3721 *ARG describes the argument value and where to pass it.
3722
3723 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 3724 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
3725
3726 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3727 so must be careful about how the stack is used.
3728
3729 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
 3730 argument stack. This is used, if ACCUMULATE_OUTGOING_ARGS is defined, to indicate
3731 that we need not worry about saving and restoring the stack.
3732
 3733 REG_PARM_STACK_SPACE is the size of the stack area reserved for arguments
 passed in registers, if any. */
3734
3735static void
c84e2712 3736store_one_arg (arg, argblock, may_be_alloca, variable_size,
6f90e075 3737 reg_parm_stack_space)
51bbfa0c
RS
3738 struct arg_data *arg;
3739 rtx argblock;
3740 int may_be_alloca;
0f9b3ea6 3741 int variable_size ATTRIBUTE_UNUSED;
6f90e075 3742 int reg_parm_stack_space;
51bbfa0c
RS
3743{
3744 register tree pval = arg->tree_value;
3745 rtx reg = 0;
3746 int partial = 0;
3747 int used = 0;
69d4ca36 3748#ifdef ACCUMULATE_OUTGOING_ARGS
6a651371 3749 int i, lower_bound = 0, upper_bound = 0;
69d4ca36 3750#endif
51bbfa0c
RS
3751
3752 if (TREE_CODE (pval) == ERROR_MARK)
3753 return;
3754
cc79451b
RK
3755 /* Push a new temporary level for any temporaries we make for
3756 this argument. */
3757 push_temp_slots ();
3758
51bbfa0c
RS
3759#ifdef ACCUMULATE_OUTGOING_ARGS
 3760 /* If this is being stored into a pre-allocated, fixed-size stack area,
3761 save any previous data at that location. */
3762 if (argblock && ! variable_size && arg->stack)
3763 {
3764#ifdef ARGS_GROW_DOWNWARD
0f41302f
MS
3765 /* stack_slot is negative, but we want to index stack_usage_map
3766 with positive values. */
51bbfa0c
RS
3767 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3768 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3769 else
50eb43ca 3770 upper_bound = 0;
51bbfa0c
RS
3771
3772 lower_bound = upper_bound - arg->size.constant;
3773#else
3774 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3775 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3776 else
3777 lower_bound = 0;
3778
3779 upper_bound = lower_bound + arg->size.constant;
3780#endif
3781
3782 for (i = lower_bound; i < upper_bound; i++)
3783 if (stack_usage_map[i]
51bbfa0c
RS
3784 /* Don't store things in the fixed argument area at this point;
3785 it has already been saved. */
e5e809f4 3786 && i > reg_parm_stack_space)
51bbfa0c
RS
3787 break;
3788
3789 if (i != upper_bound)
3790 {
3791 /* We need to make a save area. See what mode we can make it. */
3792 enum machine_mode save_mode
3793 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
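 /* mode_for_size falls back to BLKmode when no integer mode of exactly
    this size is available (e.g. a 12-byte slot); that case is copied
    with emit_block_move below, while a word-sized slot is saved with a
    single move into a pseudo register.  */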
3794 rtx stack_area
38a448ca
RH
3795 = gen_rtx_MEM (save_mode,
3796 memory_address (save_mode,
3797 XEXP (arg->stack_slot, 0)));
51bbfa0c
RS
3798
3799 if (save_mode == BLKmode)
3800 {
3801 arg->save_area = assign_stack_temp (BLKmode,
6fa51029 3802 arg->size.constant, 0);
c6df88cb
MM
3803 MEM_SET_IN_STRUCT_P (arg->save_area,
3804 AGGREGATE_TYPE_P (TREE_TYPE
3805 (arg->tree_value)));
cc79451b 3806 preserve_temp_slots (arg->save_area);
51bbfa0c 3807 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 3808 GEN_INT (arg->size.constant),
51bbfa0c
RS
3809 PARM_BOUNDARY / BITS_PER_UNIT);
3810 }
3811 else
3812 {
3813 arg->save_area = gen_reg_rtx (save_mode);
3814 emit_move_insn (arg->save_area, stack_area);
3815 }
3816 }
3817 }
b564df06
JL
3818
3819 /* Now that we have saved any slots that will be overwritten by this
3820 store, mark all slots this store will use. We must do this before
3821 we actually expand the argument since the expansion itself may
3822 trigger library calls which might need to use the same stack slot. */
3823 if (argblock && ! variable_size && arg->stack)
3824 for (i = lower_bound; i < upper_bound; i++)
3825 stack_usage_map[i] = 1;
51bbfa0c
RS
3826#endif
3827
3828 /* If this isn't going to be placed on both the stack and in registers,
3829 set up the register and number of words. */
3830 if (! arg->pass_on_stack)
3831 reg = arg->reg, partial = arg->partial;
3832
3833 if (reg != 0 && partial == 0)
3834 /* Being passed entirely in a register. We shouldn't be called in
3835 this case. */
3836 abort ();
3837
4ab56118
RK
3838 /* If this arg needs special alignment, don't load the registers
3839 here. */
3840 if (arg->n_aligned_regs != 0)
3841 reg = 0;
4ab56118 3842
4ab56118 3843 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
3844 it directly into its stack slot. Otherwise, we can. */
3845 if (arg->value == 0)
d64f5a78
RS
3846 {
3847#ifdef ACCUMULATE_OUTGOING_ARGS
3848 /* stack_arg_under_construction is nonzero if a function argument is
3849 being evaluated directly into the outgoing argument list and
3850 expand_call must take special action to preserve the argument list
3851 if it is called recursively.
3852
3853 For scalar function arguments stack_usage_map is sufficient to
3854 determine which stack slots must be saved and restored. Scalar
3855 arguments in general have pass_on_stack == 0.
3856
3857 If this argument is initialized by a function which takes the
3858 address of the argument (a C++ constructor or a C function
3859 returning a BLKmode structure), then stack_usage_map is
3860 insufficient and expand_call must push the stack around the
3861 function call. Such arguments have pass_on_stack == 1.
3862
3863 Note that it is always safe to set stack_arg_under_construction,
3864 but this generates suboptimal code if set when not needed. */
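	 /* A concrete case (BUILD_STRUCT is just an illustrative name):
	    for an argument written as f (build_struct ()), where
	    build_struct returns a BLKmode structure, the value is
	    constructed directly in the outgoing argument area, so
	    pass_on_stack is 1 and the increment below tells expand_call
	    to protect that area around any nested call made while the
	    argument is being evaluated.  */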
3865
3866 if (arg->pass_on_stack)
3867 stack_arg_under_construction++;
3868#endif
3a08477a
RK
3869 arg->value = expand_expr (pval,
3870 (partial
3871 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3872 ? NULL_RTX : arg->stack,
e5d70561 3873 VOIDmode, 0);
1efe6448
RK
3874
3875 /* If we are promoting object (or for any other reason) the mode
3876 doesn't agree, convert the mode. */
3877
7373d92d
RK
3878 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3879 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3880 arg->value, arg->unsignedp);
1efe6448 3881
d64f5a78
RS
3882#ifdef ACCUMULATE_OUTGOING_ARGS
3883 if (arg->pass_on_stack)
3884 stack_arg_under_construction--;
3885#endif
3886 }
51bbfa0c
RS
3887
3888 /* Don't allow anything left on stack from computation
3889 of argument to alloca. */
3890 if (may_be_alloca)
3891 do_pending_stack_adjust ();
3892
3893 if (arg->value == arg->stack)
7815214e 3894 {
c5c76735 3895 /* If the value is already in the stack slot, we are done. */
7d384cc0 3896 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
7815214e 3897 {
7815214e 3898 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3899 XEXP (arg->stack, 0), Pmode,
7d384cc0 3900 ARGS_SIZE_RTX (arg->size),
7815214e 3901 TYPE_MODE (sizetype),
956d6950
JL
3902 GEN_INT (MEMORY_USE_RW),
3903 TYPE_MODE (integer_type_node));
7815214e
RK
3904 }
3905 }
1efe6448 3906 else if (arg->mode != BLKmode)
51bbfa0c
RS
3907 {
3908 register int size;
3909
3910 /* Argument is a scalar, not entirely passed in registers.
3911 (If part is passed in registers, arg->partial says how much
3912 and emit_push_insn will take care of putting it there.)
3913
3914 Push it, and if its size is less than the
3915 amount of space allocated to it,
3916 also bump stack pointer by the additional space.
3917 Note that in C the default argument promotions
3918 will prevent such mismatches. */
3919
1efe6448 3920 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
3921 /* Compute how much space the push instruction will push.
3922 On many machines, pushing a byte will advance the stack
3923 pointer by a halfword. */
3924#ifdef PUSH_ROUNDING
3925 size = PUSH_ROUNDING (size);
3926#endif
3927 used = size;
3928
3929 /* Compute how much space the argument should get:
3930 round up to a multiple of the alignment for arguments. */
1efe6448 3931 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
3932 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3933 / (PARM_BOUNDARY / BITS_PER_UNIT))
3934 * (PARM_BOUNDARY / BITS_PER_UNIT));
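 /* For instance, with PARM_BOUNDARY == 32 a 2-byte scalar that pushes
    as 2 bytes still gets used == ((2 + 3) / 4) * 4 == 4 bytes of
    argument space.  */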
3935
3936 /* This isn't already where we want it on the stack, so put it there.
3937 This can either be done with push or copy insns. */
e5e809f4
JL
3938 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3939 partial, reg, used - size, argblock,
4fc026cd
CM
3940 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
3941 ARGS_SIZE_RTX (arg->alignment_pad));
3942
51bbfa0c
RS
3943 }
3944 else
3945 {
3946 /* BLKmode, at least partly to be pushed. */
3947
3948 register int excess;
3949 rtx size_rtx;
3950
3951 /* Pushing a nonscalar.
3952 If part is passed in registers, PARTIAL says how much
3953 and emit_push_insn will take care of putting it there. */
3954
3955 /* Round its size up to a multiple
3956 of the allocation unit for arguments. */
3957
3958 if (arg->size.var != 0)
3959 {
3960 excess = 0;
3961 size_rtx = ARGS_SIZE_RTX (arg->size);
3962 }
3963 else
3964 {
51bbfa0c
RS
3965 /* PUSH_ROUNDING has no effect on us, because
3966 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 3967 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 3968 + partial * UNITS_PER_WORD);
e4f93898 3969 size_rtx = expr_size (pval);
51bbfa0c
RS
3970 }
3971
1efe6448 3972 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c 3973 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
e5e809f4 3974 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
4fc026cd
CM
3975 reg_parm_stack_space,
3976 ARGS_SIZE_RTX (arg->alignment_pad));
51bbfa0c
RS
3977 }
3978
3979
3980 /* Unless this is a partially-in-register argument, the argument is now
3981 in the stack.
3982
3983 ??? Note that this can change arg->value from arg->stack to
3984 arg->stack_slot and it matters when they are not the same.
3985 It isn't totally clear that this is correct in all cases. */
3986 if (partial == 0)
3b917a55 3987 arg->value = arg->stack_slot;
51bbfa0c
RS
3988
3989 /* Once we have pushed something, pops can't safely
3990 be deferred during the rest of the arguments. */
3991 NO_DEFER_POP;
3992
3993 /* ANSI doesn't require a sequence point here,
3994 but PCC has one, so this will avoid some problems. */
3995 emit_queue ();
3996
db907e7b
RK
3997 /* Free any temporary slots made in processing this argument. Show
3998 that we might have taken the address of something and pushed that
3999 as an operand. */
4000 preserve_temp_slots (NULL_RTX);
51bbfa0c 4001 free_temp_slots ();
cc79451b 4002 pop_temp_slots ();
51bbfa0c 4003}