/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "regs.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif
#include "insn-flags.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif
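
/* For example, on a target where the stack grows downward but argument
   addresses grow upward (a common arrangement), the condition above holds
   and arguments are pushed last to first, so the first argument ends up
   at the lowest address of the argument block.  */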

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
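
/* For instance, with STACK_BOUNDARY of 64 and BITS_PER_UNIT of 8,
   STACK_BYTES is 8, so constant argument-block sizes are rounded up to
   multiples of 8 bytes below.  */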

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Non-zero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets;
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
#endif
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

#ifdef ACCUMULATE_OUTGOING_ARGS
/* A vector of one char per byte of stack space.  A byte is non-zero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
#endif

static int calls_function	PROTO((tree, int));
static int calls_function_1	PROTO((tree, int));
static void emit_call_1		PROTO((rtx, tree, tree, int, int, rtx, rtx,
				       int, rtx, int));
static void store_one_arg	PROTO ((struct arg_data *, rtx, int, int,
					tree, int));

/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we only need return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

static tree calls_function_save_exprs;

static int
calls_function (exp, which)
     tree exp;
     int which;
{
  int val;
  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}

static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  register int i;
  enum tree_code code = TREE_CODE (exp);
  int type = TREE_CODE_CLASS (code);
  int length = tree_code_length[(int) code];

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  /* Only expressions and references can contain calls.  */
  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
      && type != 'b')
    return 0;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
	return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == FUNCTION_DECL))
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

	  if ((DECL_BUILT_IN (fndecl)
	       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
	      || (DECL_SAVED_INSNS (fndecl)
		  && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
		      & FUNCTION_FLAGS_CALLS_ALLOCA)))
	    return 1;
	}

      /* Third operand is RTL.  */
      length = 2;
      break;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return 0;
      if (value_member (exp, calls_function_save_exprs))
	return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
					     calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
	      && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
	register tree local;

	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
	  if (DECL_INITIAL (local) != 0
	      && calls_function_1 (DECL_INITIAL (local), which))
	    return 1;
      }
      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (exp);
	     subblock;
	     subblock = TREE_CHAIN (subblock))
	  if (calls_function_1 (subblock, which))
	    return 1;
      }
      return 0;

    case METHOD_CALL_EXPR:
      length = 3;
      break;

    case WITH_CLEANUP_EXPR:
      length = 1;
      break;

    case RTL_EXPR:
      return 0;

    default:
      break;
    }

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
	&& calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
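
/* Illustrative use (a sketch based on expand_call below): a caller asks
   whether evaluating an argument could require pushing stack args, e.g.

     if (must_preallocate && calls_function (args[i].tree_value, 0))
       ... precompute the argument before any stack slots are filled ...

   or passes WHICH == 1 to ask only about calls to `alloca'.  */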

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   rounded up to STACK_BOUNDARY; zero if the size is variable.
   This is both to put into the call insn and
   to generate explicit popping code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.

   IS_CONST is true if this is a `const' call.  */

static void
emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
	     next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
	     is_const)
     rtx funexp;
     tree fndecl;
     tree funtype;
     int stack_size;
     int struct_value_size;
     rtx next_arg_reg;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int is_const;
{
  rtx stack_size_rtx = GEN_INT (stack_size);
  rtx struct_value_size_rtx = GEN_INT (struct_value_size);
  rtx call_insn;
  int already_popped = 0;

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#ifndef ACCUMULATE_OUTGOING_ARGS
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  if (HAVE_call_pop && HAVE_call_value_pop
      && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
	  || stack_size == 0))
    {
      rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = gen_call_value_pop (valreg,
				  gen_rtx (MEM, FUNCTION_MODE, funexp),
				  stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, funexp),
			    stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (gen_call_value (valreg,
					gen_rtx (MEM, FUNCTION_MODE, funexp),
					stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp),
				  stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (is_const)
    CONST_CALL_P (call_insn) = 1;

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

#ifndef ACCUMULATE_OUTGOING_ARGS
  /* If returning from the subroutine does not automatically pop the args,
     we need an instruction to pop them sooner or later.
     Perhaps do it now; perhaps just record how much space to pop later.

     If returning from the subroutine does pop the args, indicate that the
     stack pointer will be changed.  */

  if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx (EXPR_LIST, VOIDmode,
		     gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx),
		     CALL_INSN_FUNCTION_USAGE (call_insn));
      stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
      stack_size_rtx = GEN_INT (stack_size);
    }

  if (stack_size != 0)
    {
      if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
	pending_stack_adjust += stack_size;
      else
	adjust_stack (stack_size_rtx);
    }
#endif
}
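
/* For example, if STACK_SIZE is 16 and RETURN_POPS_ARGS reports that the
   callee pops all 16 bytes, stack_size drops to 0 above and no explicit
   adjustment is emitted; if the callee pops nothing, the 16 bytes are
   either folded into pending_stack_adjust or popped at once with
   adjust_stack, depending on -fdefer-pop and inhibit_defer_pop.  */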

/* Generate all the code for a function call
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (exp, target, ignore)
     tree exp;
     rtx target;
     int ignore;
{
  /* List of actual parameters.  */
  tree actparms = TREE_OPERAND (exp, 1);
  /* RTX for the function to be called.  */
  rtx funexp;
  /* Tree node for the function to be called (not the address!).  */
  tree funtree;
  /* Data type of the function.  */
  tree funtype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  char *name = 0;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  int struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Count arg position in order args appear.  */
  int argpos;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
  int must_preallocate = 1;
#else
#ifdef PUSH_ROUNDING
  int must_preallocate = 0;
#else
  int must_preallocate = 1;
#endif
#endif

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;
  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Nonzero if it is plausible that this is a call to alloca.  */
  int may_be_alloca;
  /* Nonzero if this is a call to malloc or a related function.  */
  int is_malloc;
  /* Nonzero if this is a call to setjmp or a related function.  */
  int returns_twice;
  /* Nonzero if this is a call to `longjmp'.  */
  int is_longjmp;
  /* Nonzero if this is a call to an inline function.  */
  int is_integrable = 0;
  /* Nonzero if this is a call to a `const' function.
     Note that only explicitly named functions are handled as `const' here.  */
  int is_const = 0;
  /* Nonzero if this is a call to a `volatile' function.  */
  int is_volatile = 0;
#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = -1, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
#endif

  rtx old_stack_level = 0;
  int old_pending_adj = 0;
  int old_stack_arg_under_construction;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  register tree p;
  register int i, j;

  /* The value of the function call can be put in a hard register.  But
     if -fcheck-memory-usage, code which invokes functions (and thus
     damages some hard registers) can be inserted before using the value.
     So, target is always a pseudo-register in that case.  */
  if (flag_check_memory_usage)
    target = 0;

  /* See if we can find a DECL-node for the actual function.
     As a result, decide whether this is a call to an integrable function.  */

  p = TREE_OPERAND (exp, 0);
  if (TREE_CODE (p) == ADDR_EXPR)
    {
      fndecl = TREE_OPERAND (p, 0);
      if (TREE_CODE (fndecl) != FUNCTION_DECL)
	fndecl = 0;
      else
	{
	  if (!flag_no_inline
	      && fndecl != current_function_decl
	      && DECL_INLINE (fndecl)
	      && DECL_SAVED_INSNS (fndecl)
	      && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
	    is_integrable = 1;
	  else if (! TREE_ADDRESSABLE (fndecl))
	    {
	      /* In case this function later becomes inlinable,
		 record that there was already a non-inline call to it.

		 Use abstraction instead of setting TREE_ADDRESSABLE
		 directly.  */
	      if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
		  && optimize > 0)
		{
		  warning_with_decl (fndecl, "can't inline call to `%s'");
		  warning ("called from here");
		}
	      mark_addressable (fndecl);
	    }

	  if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
	      && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
	    is_const = 1;

	  if (TREE_THIS_VOLATILE (fndecl))
	    is_volatile = 1;
	}
    }

  /* If we don't have a specific function to call, see if we have a
     constant or `noreturn' function from the type.  */
  if (fndecl == 0)
    {
      is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
      is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
    }

#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
#endif

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    warning ("function call has aggregate value");

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp))
    {
      /* This call returns a big structure.  */
      is_const = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      {
	pcc_struct_value = 1;
	/* Easier than making that case work right.  */
	if (is_integrable)
	  {
	    /* In case this is a static function, note that it has been
	       used.  */
	    if (! TREE_ADDRESSABLE (fndecl))
	      mark_addressable (fndecl);
	    is_integrable = 0;
	  }
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
	struct_value_size = int_size_in_bytes (TREE_TYPE (exp));

	if (target && GET_CODE (target) == MEM)
	  structure_value_addr = XEXP (target, 0);
	else
	  {
	    /* Assign a temporary to hold the value.  */
	    tree d;

	    /* For variable-sized objects, we must be called with a target
	       specified.  If we were to allocate space on the stack here,
	       we would have no way of knowing when to free it.  */

	    if (struct_value_size < 0)
	      abort ();

	    /* This DECL is just something to feed to mark_addressable;
	       it doesn't get pushed.  */
	    d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
	    DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
	    mark_addressable (d);
	    structure_value_addr = XEXP (DECL_RTL (d), 0);
	    MEM_IN_STRUCT_P (structure_value_addr)
	      = AGGREGATE_TYPE_P (TREE_TYPE (exp));
	    target = 0;
	  }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* If called function is inline, try to integrate it.  */

  if (is_integrable)
    {
      rtx temp;
      rtx before_call = get_last_insn ();

      temp = expand_inline_function (fndecl, actparms, target,
				     ignore, TREE_TYPE (exp),
				     structure_value_addr);

      /* If inlining succeeded, return.  */
      if ((HOST_WIDE_INT) temp != -1)
	{
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* If the outgoing argument list must be preserved, push
	     the stack before executing the inlined function if it
	     makes any calls.  */

	  for (i = reg_parm_stack_space - 1; i >= 0; i--)
	    if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
	      break;

	  if (stack_arg_under_construction || i >= 0)
	    {
	      rtx first_insn
		= before_call ? NEXT_INSN (before_call) : get_insns ();
	      rtx insn, seq;

	      /* Look for a call in the inline function code.
		 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
		 nonzero then there is a call and it is not necessary
		 to scan the insns.  */

	      if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
		for (insn = first_insn; insn; insn = NEXT_INSN (insn))
		  if (GET_CODE (insn) == CALL_INSN)
		    break;

	      if (insn)
		{
		  /* Reserve enough stack space so that the largest
		     argument list of any function call in the inline
		     function does not overlap the argument list being
		     evaluated.  This is usually an overestimate because
		     allocate_dynamic_stack_space reserves space for an
		     outgoing argument list in addition to the requested
		     space, but there is no way to ask for stack space such
		     that an argument list of a certain length can be
		     safely constructed.  */

		  int adjust = OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl));
#ifdef REG_PARM_STACK_SPACE
		  /* Add the stack space reserved for register arguments
		     in the inline function.  What is really needed is the
		     largest value of reg_parm_stack_space in the inline
		     function, but that is not available.  Using the current
		     value of reg_parm_stack_space is wrong, but gives
		     correct results on all supported machines.  */
		  adjust += reg_parm_stack_space;
#endif
		  start_sequence ();
		  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		  allocate_dynamic_stack_space (GEN_INT (adjust),
						NULL_RTX, BITS_PER_UNIT);
		  seq = get_insns ();
		  end_sequence ();
		  emit_insns_before (seq, first_insn);
		  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
		}
	    }
#endif

	  /* If the result is equivalent to TARGET, return TARGET to simplify
	     checks in store_expr.  They can be equivalent but not equal in the
	     case of a function that returns BLKmode.  */
	  if (temp != target && rtx_equal_p (temp, target))
	    return target;
	  return temp;
	}

      /* If inlining failed, mark FNDECL as needing to be compiled
	 separately after all.  If function was declared inline,
	 give a warning.  */
      if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
	  && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
	{
	  warning_with_decl (fndecl, "inlining failed in call to `%s'");
	  warning ("called from here");
	}
      mark_addressable (fndecl);
    }

  /* When calling a const function, we must pop the stack args right away,
     so that the pop is deleted or moved with the call.  */
  if (is_const)
    NO_DEFER_POP;

  function_call_count++;

  if (fndecl && DECL_NAME (fndecl))
    name = IDENTIFIER_POINTER (DECL_NAME (fndecl));

#if 0
  /* Unless it's a call to a specific function that isn't alloca,
     if it has one argument, we must assume it might be alloca.  */

  may_be_alloca
    = (!(fndecl != 0 && strcmp (name, "alloca"))
       && actparms != 0
       && TREE_CHAIN (actparms) == 0);
#else
  /* We assume that alloca will always be called by name.  It
     makes no sense to pass it as a pointer-to-function to
     anything that does not understand its behavior.  */
  may_be_alloca
    = (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
		 && name[0] == 'a'
		 && ! strcmp (name, "alloca"))
		|| (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
		    && name[0] == '_'
		    && ! strcmp (name, "__builtin_alloca"))));
#endif

  /* See if this is a call to a function that can return more than once
     or a call to longjmp.  */

  returns_twice = 0;
  is_longjmp = 0;
  is_malloc = 0;

  if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15)
    {
      char *tname = name;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  returns_twice
	    = ((tname[1] == 'e'
		&& (! strcmp (tname, "setjmp")
		    || ! strcmp (tname, "setjmp_syscall")))
	       || (tname[1] == 'i'
		   && ! strcmp (tname, "sigsetjmp"))
	       || (tname[1] == 'a'
		   && ! strcmp (tname, "savectx")));
	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    is_longjmp = 1;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork")))
	returns_twice = 1;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	is_longjmp = 1;
      /* Only recognize malloc when alias analysis is enabled.  */
      else if (flag_alias_check
	       && ((tname[0] == 'm' && ! strcmp(tname + 1, "alloc"))
		   || (tname[0] == 'c' && ! strcmp(tname + 1, "alloc"))
		   || (tname[0] == 'r' && ! strcmp(tname + 1, "ealloc"))))
	is_malloc = 1;
    }

  if (may_be_alloca)
    current_function_calls_alloca = 1;

  /* Don't let pending stack adjusts add up to too much.
     Also, do all pending adjustments now
     if there is any chance this might be a call to alloca.  */

  if (pending_stack_adjust >= 32
      || (pending_stack_adjust > 0 && may_be_alloca))
    do_pending_stack_adjust ();

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
  if (TREE_CODE (funtype) != POINTER_TYPE)
    abort ();
  funtype = TREE_TYPE (funtype);

  /* Push the temporary stack slot level so that we can free any temporaries
     we make.  */
  push_temp_slots ();

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The last argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  */
  if (structure_value_addr && struct_value_rtx == 0)
    {
      /* If structure_value_addr is a REG other than
	 virtual_outgoing_args_rtx, we can always use it.  If it
	 is not a REG, we must always copy it into a register.
	 If it is virtual_outgoing_args_rtx, we must copy it to another
	 register in some cases.  */
      rtx temp = (GET_CODE (structure_value_addr) != REG
#ifdef ACCUMULATE_OUTGOING_ARGS
		  || (stack_arg_under_construction
		      && structure_value_addr == virtual_outgoing_args_rtx)
#endif
		  ? copy_addr_to_reg (structure_value_addr)
		  : structure_value_addr);

      actparms
	= tree_cons (error_mark_node,
		     make_tree (build_pointer_type (TREE_TYPE (funtype)),
				temp),
		     actparms);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
  num_actuals = i;

  /* Compute number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if STRICT_ARGUMENT_NAMING is defined.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If SETUP_INCOMING_VARARGS is defined and STRICT_ARGUMENT_NAMING is not,
     this machine will be able to place unnamed args that were passed in
     registers into the stack.  So treat all args as named.  This allows the
     insns emitting for a specific argument list to be independent of the
     function declaration.

     If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
     way to pass unnamed args in registers, so we must force them into
     memory.  */
#if !defined(SETUP_INCOMING_VARARGS) || defined(STRICT_ARGUMENT_NAMING)
  if (TYPE_ARG_TYPES (funtype) != 0)
    n_named_args
      = (list_length (TYPE_ARG_TYPES (funtype))
#ifndef STRICT_ARGUMENT_NAMING
	 /* Don't include the last named arg.  */
	 - 1
#endif
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else
#endif
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Make a vector to hold all the information about each arg.  */
  args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
  bzero ((char *) args, num_actuals * sizeof (struct arg_data));

  args_size.constant = 0;
  args_size.var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

#ifdef PUSH_ARGS_REVERSED
  i = num_actuals - 1, inc = -1;
  /* In this case, must reverse order of args
     so that we compute and push the last arg first.  */
#else
  i = 0, inc = 1;
#endif

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || TYPE_SIZE (type) == 0)
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (type)))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
	  /* If we're compiling a thunk, pass through invisible
	     references instead of making a copy.  */
	  if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
	      || (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type),
					      type, argpos < n_named_args)
		  /* If it's in a register, we must make a copy of it too.  */
		  /* ??? Is this a sufficient test?  Is there a better one? */
		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		       && REG_P (DECL_RTL (args[i].tree_value)))
		  && ! TREE_ADDRESSABLE (type))
#endif
	      )
	    {
	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (TYPE_SIZE (type) == 0
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
			  || (TREE_INT_CST_LOW (TYPE_SIZE (type))
			      > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  if (old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		      old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx (MEM, BLKmode,
				  allocate_dynamic_stack_space (size_rtx,
								NULL_RTX,
								TYPE_ALIGN (type)));
		}
	      else
		{
		  int size = int_size_in_bytes (type);
		  copy = assign_stack_temp (TYPE_MODE (type), size, 0);
		}

	      MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);

	      store_expr (args[i].tree_value, copy, 0);
	      is_const = 0;

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;
      args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
	args[i].partial
	  = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
					argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	is_const = 0;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
#ifdef REG_PARM_STACK_SPACE
	  || reg_parm_stack_space > 0
#endif
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     fndecl, &args_size, &args[i].offset,
			     &args[i].size);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = args_size;
#endif

#ifndef REG_PARM_STACK_SPACE
      /* If a part of the arg was put into registers,
	 don't include that part in the amount pushed.  */
      if (! args[i].pass_on_stack)
	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
				  / (PARM_BOUNDARY / BITS_PER_UNIT)
				  * (PARM_BOUNDARY / BITS_PER_UNIT));
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size.constant += args[i].size.constant;
      if (args[i].size.var)
	{
	  ADD_PARM_SIZE (args_size, args[i].size.var);
	}

      /* Since the slot offset points to the bottom of the slot,
	 we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = args_size;

      args[i].slot_offset.constant = -args_size.constant;
      if (args_size.var)
	{
	  SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
	}
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }

#ifdef FINAL_REG_PARM_STACK_SPACE
  reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
						     args_size.var);
#endif

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  original_args_size = args_size;
  if (args_size.var)
    {
      /* If this function requires a variable-sized argument list, don't try to
	 make a cse'able block for this call.  We may be able to do this
	 eventually, but it is too complicated to keep track of what insns go
	 in the cse'able block and which don't.  */

      is_const = 0;
      must_preallocate = 1;

      args_size.var = ARGS_SIZE_TREE (args_size);
      args_size.constant = 0;

#ifdef STACK_BOUNDARY
      if (STACK_BOUNDARY != BITS_PER_UNIT)
	args_size.var = round_up (args_size.var, STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
      if (reg_parm_stack_space > 0)
	{
	  args_size.var
	    = size_binop (MAX_EXPR, args_size.var,
			  size_int (REG_PARM_STACK_SPACE (fndecl)));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size.var
	    = size_binop (MINUS_EXPR, args_size.var,
			  size_int (reg_parm_stack_space));
#endif
	}
#endif
    }
  else
    {
#ifdef STACK_BOUNDARY
      args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
			     / STACK_BYTES) * STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
      args_size.constant = MAX (args_size.constant,
				reg_parm_stack_space);
#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
	args_size.constant = 0;
#endif
#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size.constant -= reg_parm_stack_space;
#endif
#endif
    }
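
  /* For example, with STACK_BYTES of 8 a 20-byte argument block is rounded
     up to 24 bytes above; and with REG_PARM_STACK_SPACE of 32 and no
     OUTGOING_REG_PARM_STACK_SPACE, the constant size then becomes
     MAX (24, 32) - 32 = 0, because the caller's register-parm area already
     provides the space.  */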

  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size.constant
	  && args_size.constant > 0)
	must_preallocate = 1;
    }

  /* If the structure value address will reference the stack pointer, we must
     stabilize it.  We don't need to do this if we know that we are not going
     to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
	  || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
      && (args_size.var
#ifndef ACCUMULATE_OUTGOING_ARGS
	  || args_size.constant
#endif
	  ))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  */

  for (i = 0; i < num_actuals; i++)
    if (is_const
	|| ((args_size.var != 0 || args_size.constant != 0)
	    && calls_function (args[i].tree_value, 1))
	|| (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
	    && calls_function (args[i].tree_value, 0)))
      {
	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	push_temp_slots ();

	args[i].initial_value = args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	preserve_temp_slots (args[i].value);
	pop_temp_slots ();

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].initial_value, 0);

	if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);
      }

  /* Now we are about to start emitting insns that can be deleted
     if a libcall is deleted.  */
  if (is_const || is_malloc)
    start_sequence ();

  /* If we have no actual push instructions, or shouldn't use them,
     make space for all args right now.  */

  if (args_size.var != 0)
    {
      if (old_stack_level == 0)
	{
	  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
	  old_pending_adj = pending_stack_adjust;
	  pending_stack_adjust = 0;
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* stack_arg_under_construction says whether a stack arg is
	     being constructed at the old stack level.  Pushing the stack
	     gets a clean outgoing argument block.  */
	  old_stack_arg_under_construction = stack_arg_under_construction;
	  stack_arg_under_construction = 0;
#endif
	}
      argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
    }
  else
    {
      /* Note that we must go through the motions of allocating an argument
	 block even if the size is zero because we may be storing args
	 in the area reserved for register arguments, which may be part of
	 the stack frame.  */

      int needed = args_size.constant;

      /* Store the maximum argument space used.  It will be pushed by
	 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
	 checking).  */

      if (needed > current_function_outgoing_args_size)
	current_function_outgoing_args_size = needed;

      if (must_preallocate)
	{
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* Since the stack pointer will never be pushed, it is possible for
	     the evaluation of a parm to clobber something we have already
	     written to the stack.  Since most function calls on RISC machines
	     do not use the stack, this is uncommon, but must work correctly.

	     Therefore, we save any area of the stack that was already written
	     and that we are using.  Here we set up to do this by making a new
	     stack usage map from the old one.  The actual save will be done
	     by store_one_arg.

	     Another approach might be to try to reorder the argument
	     evaluations to avoid this conflicting stack usage.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
	  /* Since we will be writing into the entire argument area, the
	     map must be allocated for its entire size, not just the part that
	     is the responsibility of the caller.  */
	  needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
	  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					     needed + 1);
#else
	  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					     needed);
#endif
	  stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);

	  if (initial_highest_arg_in_use)
	    bcopy (initial_stack_usage_map, stack_usage_map,
		   initial_highest_arg_in_use);

	  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
	    bzero (&stack_usage_map[initial_highest_arg_in_use],
		   highest_outgoing_arg_in_use - initial_highest_arg_in_use);
	  needed = 0;

	  /* The address of the outgoing argument list must not be copied to a
	     register here, because argblock would be left pointing to the
	     wrong place after the call to allocate_dynamic_stack_space below.  */

	  argblock = virtual_outgoing_args_rtx;

#else /* not ACCUMULATE_OUTGOING_ARGS */
	  if (inhibit_defer_pop == 0)
	    {
	      /* Try to reuse some or all of the pending_stack_adjust
		 to get this space.  Maybe we can avoid any pushing.  */
	      if (needed > pending_stack_adjust)
		{
		  needed -= pending_stack_adjust;
		  pending_stack_adjust = 0;
		}
	      else
		{
		  pending_stack_adjust -= needed;
		  needed = 0;
		}
	    }
	  /* Special case this because overhead of `push_block' in this
	     case is non-trivial.  */
	  if (needed == 0)
	    argblock = virtual_outgoing_args_rtx;
	  else
	    argblock = push_block (GEN_INT (needed), 0, 0);

	  /* We only really need to call `copy_to_reg' in the case where push
	     insns are going to be used to pass ARGBLOCK to a function
	     call in ARGS.  In that case, the stack pointer changes value
	     from the allocation point to the call point, and hence
	     the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
	     But might as well always do it.  */
	  argblock = copy_to_reg (argblock);
#endif /* not ACCUMULATE_OUTGOING_ARGS */
	}
    }

#ifdef ACCUMULATE_OUTGOING_ARGS
  /* The save/restore code in store_one_arg handles all cases except one:
     a constructor call (including a C function returning a BLKmode struct)
     to initialize an argument.  */
  if (stack_arg_under_construction)
    {
#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
      rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
#else
      rtx push_size = GEN_INT (args_size.constant);
#endif
      if (old_stack_level == 0)
	{
	  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
	  old_pending_adj = pending_stack_adjust;
	  pending_stack_adjust = 0;
	  /* stack_arg_under_construction says whether a stack arg is
	     being constructed at the old stack level.  Pushing the stack
	     gets a clean outgoing argument block.  */
	  old_stack_arg_under_construction = stack_arg_under_construction;
	  stack_arg_under_construction = 0;
	  /* Make a new map for the new argument list.  */
	  stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
	  bzero (stack_usage_map, highest_outgoing_arg_in_use);
	  highest_outgoing_arg_in_use = 0;
	}
      allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
    }
  /* If argument evaluation might modify the stack pointer, copy the
     address of the argument list to a register.  */
  for (i = 0; i < num_actuals; i++)
    if (args[i].pass_on_stack)
      {
	argblock = copy_addr_to_reg (argblock);
	break;
      }
#endif

  /* If we preallocated stack space, compute the address of each argument.
     We need not ensure it is a valid memory address here; it will be
     validized when it is used.  */
  if (argblock)
    {
      rtx arg_reg = argblock;
      int arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
	  rtx addr;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack && args[i].reg != 0)
	    continue;

	  if (GET_CODE (offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx (PLUS, Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack = gen_rtx (MEM, args[i].mode, addr);
	  MEM_IN_STRUCT_P (args[i].stack)
	    = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));

	  if (GET_CODE (slot_offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx (PLUS, Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack_slot = gen_rtx (MEM, args[i].mode, addr);
	}
    }

#ifdef PUSH_ARGS_REVERSED
#ifdef STACK_BOUNDARY
  /* If we push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0)
    anti_adjust_stack (GEN_INT (args_size.constant
				- original_args_size.constant));
#endif
#endif

  /* Don't try to defer pops if preallocating, not even from the first arg,
     since ARGBLOCK probably refers to the SP.  */
  if (argblock)
    NO_DEFER_POP;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
	 make an external definition for it.  */
      if (! TREE_USED (fndecl))
	{
	  assemble_external (fndecl);
	  TREE_USED (fndecl) = 1;
	}

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode */

      /* Check that the function is executable.  */
      if (flag_check_memory_usage)
	emit_library_call (chkr_check_exec_libfunc, 1,
			   VOIDmode, 1,
			   funexp, ptr_mode);
      emit_queue ();
    }

  /* Figure out the register where the value, if any, will come back.  */
  valreg = 0;
  if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
      && ! structure_value_addr)
    {
      if (pcc_struct_value)
	valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
				      fndecl);
      else
	valreg = hard_function_value (TREE_TYPE (exp), fndecl);
    }

  /* Precompute all register parameters.  It isn't safe to compute anything
0f41302f 1642 once we have started filling any specific hard regs. */
51bbfa0c
RS
1643 reg_parm_seen = 0;
1644 for (i = 0; i < num_actuals; i++)
1645 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1646 {
1647 reg_parm_seen = 1;
1648
1649 if (args[i].value == 0)
1650 {
cc79451b 1651 push_temp_slots ();
e5d70561
RK
1652 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1653 VOIDmode, 0);
51bbfa0c 1654 preserve_temp_slots (args[i].value);
cc79451b 1655 pop_temp_slots ();
51bbfa0c
RS
1656
1657 /* ANSI doesn't require a sequence point here,
1658 but PCC has one, so this will avoid some problems. */
1659 emit_queue ();
1660 }
84b55618
RK
1661
1662 /* If we are to promote the function arg to a wider mode,
1663 do it now. */
84b55618 1664
843fec55
RK
1665 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
1666 args[i].value
1667 = convert_modes (args[i].mode,
1668 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1669 args[i].value, args[i].unsignedp);
ebef2728
RK
1670
1671 /* If the value is expensive, and we are inside an appropriately
1672 short loop, put the value into a pseudo and then put the pseudo
01368078
RK
1673 into the hard reg.
1674
1675 For small register classes, also do this if this call uses
1676 register parameters. This is to avoid reload conflicts while
 1677 loading the parameter registers. */
ebef2728
RK
1678
1679 if ((! (GET_CODE (args[i].value) == REG
1680 || (GET_CODE (args[i].value) == SUBREG
1681 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
1682 && args[i].mode != BLKmode
1683 && rtx_cost (args[i].value, SET) > 2
f95182a4 1684 && ((SMALL_REGISTER_CLASSES && reg_parm_seen)
e9a25f70 1685 || preserve_subexpressions_p ()))
ebef2728 1686 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
51bbfa0c
RS
1687 }
1688
1689#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1690 /* The argument list is the property of the called routine and it
1691 may clobber it. If the fixed area has been used for previous
1692 parameters, we must save and restore it.
1693
 1694 Here we compute the boundary of the area that needs to be saved, if any. */
1695
b94301c2
RS
1696#ifdef ARGS_GROW_DOWNWARD
1697 for (i = 0; i < reg_parm_stack_space + 1; i++)
1698#else
6f90e075 1699 for (i = 0; i < reg_parm_stack_space; i++)
b94301c2 1700#endif
51bbfa0c
RS
1701 {
1702 if (i >= highest_outgoing_arg_in_use
1703 || stack_usage_map[i] == 0)
1704 continue;
1705
1706 if (low_to_save == -1)
1707 low_to_save = i;
1708
1709 high_to_save = i;
1710 }
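  /* Illustrative values: if only bytes 4 through 11 of the fixed
     register-parm area are marked in stack_usage_map, the loop above leaves
     low_to_save == 4 and high_to_save == 11, and the code below saves those
     8 bytes.  */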
1711
1712 if (low_to_save >= 0)
1713 {
1714 int num_to_save = high_to_save - low_to_save + 1;
1715 enum machine_mode save_mode
1716 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1717 rtx stack_area;
1718
1719 /* If we don't have the required alignment, must do this in BLKmode. */
1720 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1721 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1722 save_mode = BLKmode;
1723
1724 stack_area = gen_rtx (MEM, save_mode,
1725 memory_address (save_mode,
b94301c2
RS
1726
1727#ifdef ARGS_GROW_DOWNWARD
1728 plus_constant (argblock,
1729 - high_to_save)
1730#else
51bbfa0c 1731 plus_constant (argblock,
b94301c2
RS
1732 low_to_save)
1733#endif
1734 ));
51bbfa0c
RS
1735 if (save_mode == BLKmode)
1736 {
6fa51029 1737 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3668e76e 1738 MEM_IN_STRUCT_P (save_area) = 0;
51bbfa0c 1739 emit_block_move (validize_mem (save_area), stack_area,
e5d70561 1740 GEN_INT (num_to_save),
51bbfa0c
RS
1741 PARM_BOUNDARY / BITS_PER_UNIT);
1742 }
1743 else
1744 {
1745 save_area = gen_reg_rtx (save_mode);
1746 emit_move_insn (save_area, stack_area);
1747 }
1748 }
1749#endif
1750
1751
1752 /* Now store (and compute if necessary) all non-register parms.
1753 These come before register parms, since they can require block-moves,
1754 which could clobber the registers used for register parms.
1755 Parms which have partial registers are not stored here,
1756 but we do preallocate space here if they want that. */
1757
1758 for (i = 0; i < num_actuals; i++)
1759 if (args[i].reg == 0 || args[i].pass_on_stack)
1760 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1761 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c 1762
4ab56118
RK
1763 /* If we have a parm that is passed in registers but not in memory
1764 and whose alignment does not permit a direct copy into registers,
1765 make a group of pseudos that correspond to each register that we
1766 will later fill. */
1767
45d44c98
RK
1768 if (STRICT_ALIGNMENT)
1769 for (i = 0; i < num_actuals; i++)
1770 if (args[i].reg != 0 && ! args[i].pass_on_stack
4ab56118 1771 && args[i].mode == BLKmode
45d44c98
RK
1772 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1773 < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1774 {
1775 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1776 int big_endian_correction = 0;
4ab56118 1777
45d44c98
RK
1778 args[i].n_aligned_regs
1779 = args[i].partial ? args[i].partial
1780 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
4ab56118 1781
45d44c98
RK
1782 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1783 * args[i].n_aligned_regs);
4ab56118 1784
45d44c98
RK
1785 /* Structures smaller than a word are aligned to the least
1786 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1787 this means we must skip the empty high order bytes when
1788 calculating the bit offset. */
1789 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1790 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
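	    /* Illustrative arithmetic: with 32-bit words, a 3-byte structure
	       gives big_endian_correction = 32 - 24 = 8, so the stores into
	       the destination register below begin at bit 8.  */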
8498efd0 1791
45d44c98
RK
1792 for (j = 0; j < args[i].n_aligned_regs; j++)
1793 {
1794 rtx reg = gen_reg_rtx (word_mode);
1795 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1796 int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1797 int bitpos;
1798
1799 args[i].aligned_regs[j] = reg;
1800
1801 /* Clobber REG and move each partword into it. Ensure we don't
1802 go past the end of the structure. Note that the loop below
1803 works because we've already verified that padding
a22ad972 1804 and endianness are compatible.
45d44c98 1805
a22ad972
DE
 1806 We used to emit a clobber here, but that doesn't let later
 1807 passes optimize the instructions we emit. By storing 0 into
 1808 the register, later passes know that the first AND to zero out the
 1809 bitfield being set in the register is unnecessary. The store
 1810 of 0 will be deleted, as will at least the first AND. */
1811
1812 emit_move_insn (reg, const0_rtx);
45d44c98
RK
1813
1814 for (bitpos = 0;
1815 bitpos < BITS_PER_WORD && bytes > 0;
1816 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
1817 {
1818 int xbitpos = bitpos + big_endian_correction;
1819
1820 store_bit_field (reg, bitsize, xbitpos, word_mode,
1821 extract_bit_field (word, bitsize, bitpos, 1,
1822 NULL_RTX, word_mode,
1823 word_mode,
1824 bitsize / BITS_PER_UNIT,
1825 BITS_PER_WORD),
1826 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
1827 }
1828 }
1829 }
4ab56118 1830
51bbfa0c
RS
1831 /* Now store any partially-in-registers parm.
1832 This is the last place a block-move can happen. */
1833 if (reg_parm_seen)
1834 for (i = 0; i < num_actuals; i++)
1835 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1836 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1837 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c
RS
1838
1839#ifndef PUSH_ARGS_REVERSED
1840#ifdef STACK_BOUNDARY
1841 /* If we pushed args in forward order, perform stack alignment
1842 after pushing the last arg. */
1843 if (argblock == 0)
e5d70561
RK
1844 anti_adjust_stack (GEN_INT (args_size.constant
1845 - original_args_size.constant));
51bbfa0c
RS
1846#endif
1847#endif
1848
756e0e12
RS
1849 /* If register arguments require space on the stack and stack space
1850 was not preallocated, allocate stack space here for arguments
1851 passed in registers. */
6e716e89 1852#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
756e0e12 1853 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 1854 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
1855#endif
1856
51bbfa0c
RS
1857 /* Pass the function the address in which to return a structure value. */
1858 if (structure_value_addr && ! structure_value_addr_parm)
1859 {
1860 emit_move_insn (struct_value_rtx,
1861 force_reg (Pmode,
e5d70561
RK
1862 force_operand (structure_value_addr,
1863 NULL_RTX)));
7815214e
RK
1864
1865 /* Mark the memory for the aggregate as write-only. */
1866 if (flag_check_memory_usage)
1867 emit_library_call (chkr_set_right_libfunc, 1,
1868 VOIDmode, 3,
1869 structure_value_addr, ptr_mode,
1870 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
956d6950
JL
1871 GEN_INT (MEMORY_USE_WO),
1872 TYPE_MODE (integer_type_node));
7815214e 1873
51bbfa0c 1874 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 1875 use_reg (&call_fusage, struct_value_rtx);
51bbfa0c
RS
1876 }
1877
77cac2f2 1878 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
8b0f9101 1879
51bbfa0c
RS
1880 /* Now do the register loads required for any wholly-register parms or any
1881 parms which are passed both on the stack and in a register. Their
1882 expressions were already evaluated.
1883
1884 Mark all register-parms as living through the call, putting these USE
77cac2f2 1885 insns in the CALL_INSN_FUNCTION_USAGE field. */
51bbfa0c
RS
1886
1887 for (i = 0; i < num_actuals; i++)
1888 {
cacbd532 1889 rtx reg = args[i].reg;
51bbfa0c 1890 int partial = args[i].partial;
cacbd532 1891 int nregs;
51bbfa0c 1892
cacbd532 1893 if (reg)
51bbfa0c 1894 {
6b972c4f
JW
 1895 /* Set to non-negative if we must move a word at a time, even if just
 1896 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1897 we just use a normal move insn. This value can be zero if the
1898 argument is a zero size structure with no fields. */
51bbfa0c
RS
1899 nregs = (partial ? partial
1900 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
6b972c4f
JW
1901 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1902 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1903 : -1));
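	  /* Illustrative arithmetic: a 10-byte BLKmode argument with 4-byte
	     words gives nregs = (10 + 3) / 4 = 3, so the value is moved a
	     word at a time below; -1 means a single ordinary move insn
	     suffices.  */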
51bbfa0c 1904
cacbd532
JW
1905 /* Handle calls that pass values in multiple non-contiguous
1906 locations. The Irix 6 ABI has examples of this. */
1907
1908 if (GET_CODE (reg) == PARALLEL)
1909 emit_group_load (reg, args[i].value);
1910
51bbfa0c
RS
1911 /* If simple case, just do move. If normal partial, store_one_arg
1912 has already loaded the register for us. In all other cases,
1913 load the register(s) from memory. */
1914
cacbd532 1915 else if (nregs == -1)
51bbfa0c 1916 emit_move_insn (reg, args[i].value);
4ab56118 1917
4ab56118
RK
1918 /* If we have pre-computed the values to put in the registers in
1919 the case of non-aligned structures, copy them in now. */
1920
1921 else if (args[i].n_aligned_regs != 0)
1922 for (j = 0; j < args[i].n_aligned_regs; j++)
1923 emit_move_insn (gen_rtx (REG, word_mode, REGNO (reg) + j),
1924 args[i].aligned_regs[j]);
4ab56118 1925
cacbd532 1926 else if (partial == 0 || args[i].pass_on_stack)
6b972c4f
JW
1927 move_block_to_reg (REGNO (reg),
1928 validize_mem (args[i].value), nregs,
1929 args[i].mode);
0304dfbb 1930
cacbd532
JW
1931 /* Handle calls that pass values in multiple non-contiguous
1932 locations. The Irix 6 ABI has examples of this. */
1933 if (GET_CODE (reg) == PARALLEL)
1934 use_group_regs (&call_fusage, reg);
1935 else if (nregs == -1)
0304dfbb
DE
1936 use_reg (&call_fusage, reg);
1937 else
1938 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
51bbfa0c
RS
1939 }
1940 }
1941
1942 /* Perform postincrements before actually calling the function. */
1943 emit_queue ();
1944
1945 /* All arguments and registers used for the call must be set up by now! */
1946
51bbfa0c 1947 /* Generate the actual call instruction. */
2c8da025 1948 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
51bbfa0c 1949 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
77cac2f2 1950 valreg, old_inhibit_defer_pop, call_fusage, is_const);
51bbfa0c
RS
1951
1952 /* If call is cse'able, make appropriate pair of reg-notes around it.
1953 Test valreg so we don't crash; may safely ignore `const'
80a3ad45
JW
1954 if return type is void. Disable for PARALLEL return values, because
1955 we have no way to move such values into a pseudo register. */
1956 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
51bbfa0c
RS
1957 {
1958 rtx note = 0;
1959 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1960 rtx insns;
1961
9ae8ffe7
JL
1962 /* Mark the return value as a pointer if needed. */
1963 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
1964 {
1965 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
1966 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
1967 }
1968
51bbfa0c
RS
1969 /* Construct an "equal form" for the value which mentions all the
1970 arguments in order as well as the function name. */
1971#ifdef PUSH_ARGS_REVERSED
1972 for (i = 0; i < num_actuals; i++)
1973 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1974#else
1975 for (i = num_actuals - 1; i >= 0; i--)
1976 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1977#endif
1978 note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note);
1979
1980 insns = get_insns ();
1981 end_sequence ();
1982
1983 emit_libcall_block (insns, temp, valreg, note);
1984
1985 valreg = temp;
1986 }
4f48d56a
RK
1987 else if (is_const)
1988 {
1989 /* Otherwise, just write out the sequence without a note. */
1990 rtx insns = get_insns ();
1991
1992 end_sequence ();
1993 emit_insns (insns);
1994 }
9ae8ffe7
JL
1995 else if (is_malloc)
1996 {
1997 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1998 rtx last, insns;
1999
2000 /* The return value from a malloc-like function is a pointer. */
2001 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2002 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2003
2004 emit_move_insn (temp, valreg);
2005
 2006 /* The return value from a malloc-like function cannot alias
2007 anything else. */
2008 last = get_last_insn ();
2009 REG_NOTES (last) =
2010 gen_rtx (EXPR_LIST, REG_NOALIAS, temp, REG_NOTES (last));
2011
2012 /* Write out the sequence. */
2013 insns = get_insns ();
2014 end_sequence ();
2015 emit_insns (insns);
2016 valreg = temp;
2017 }
51bbfa0c
RS
2018
2019 /* For calls to `setjmp', etc., inform flow.c it should complain
2020 if nonvolatile values are live. */
2021
2022 if (returns_twice)
2023 {
2024 emit_note (name, NOTE_INSN_SETJMP);
2025 current_function_calls_setjmp = 1;
2026 }
2027
2028 if (is_longjmp)
2029 current_function_calls_longjmp = 1;
2030
2031 /* Notice functions that cannot return.
2032 If optimizing, insns emitted below will be dead.
2033 If not optimizing, they will exist, which is useful
2034 if the user uses the `return' command in the debugger. */
2035
2036 if (is_volatile || is_longjmp)
2037 emit_barrier ();
2038
51bbfa0c
RS
2039 /* If value type not void, return an rtx for the value. */
2040
e976b8b2
MS
2041 /* If there are cleanups to be called, don't use a hard reg as target.
2042 We need to double check this and see if it matters anymore. */
e9a25f70 2043 if (any_pending_cleanups (1)
51bbfa0c
RS
2044 && target && REG_P (target)
2045 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2046 target = 0;
2047
2048 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2049 || ignore)
2050 {
2051 target = const0_rtx;
2052 }
2053 else if (structure_value_addr)
2054 {
2055 if (target == 0 || GET_CODE (target) != MEM)
29008b51
JW
2056 {
2057 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
2058 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2059 structure_value_addr));
05e3bdb9 2060 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
29008b51 2061 }
51bbfa0c
RS
2062 }
2063 else if (pcc_struct_value)
2064 {
f78b5ca1
JL
2065 /* This is the special C++ case where we need to
2066 know what the true target was. We take care to
2067 never use this value more than once in one expression. */
2068 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
2069 copy_to_reg (valreg));
2070 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
51bbfa0c 2071 }
cacbd532
JW
2072 /* Handle calls that return values in multiple non-contiguous locations.
2073 The Irix 6 ABI has examples of this. */
2074 else if (GET_CODE (valreg) == PARALLEL)
2075 {
2076 if (target == 0)
2077 {
2078 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2b4092f2 2079 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
cacbd532
JW
2080 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2081 preserve_temp_slots (target);
2082 }
2083
2084 emit_group_store (target, valreg);
2085 }
059c3d84
JW
2086 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2087 && GET_MODE (target) == GET_MODE (valreg))
2088 /* TARGET and VALREG cannot be equal at this point because the latter
2089 would not have REG_FUNCTION_VALUE_P true, while the former would if
2090 it were referring to the same register.
2091
2092 If they refer to the same register, this move will be a no-op, except
2093 when function inlining is being done. */
2094 emit_move_insn (target, valreg);
766b19fb
JL
2095 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2096 {
2097 /* Some machines (the PA for example) want to return all small
2098 structures in registers regardless of the structure's alignment.
2099
2100 Deal with them explicitly by copying from the return registers
2101 into the target MEM locations. */
2102 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2103 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2104 int i;
2105 enum machine_mode tmpmode;
1b5c5873
RK
2106 rtx src, dst;
2107 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (exp)), BITS_PER_WORD);
2108 int bitpos, xbitpos, big_endian_correction = 0;
766b19fb
JL
2109
2110 if (target == 0)
822e3422
RK
2111 {
2112 target = assign_stack_temp (BLKmode, bytes, 0);
2113 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2114 preserve_temp_slots (target);
2115 }
766b19fb 2116
e934eef9
RK
2117 /* This code assumes valreg is at least a full word. If it isn't,
2118 copy it into a new pseudo which is a full word. */
2119 if (GET_MODE (valreg) != BLKmode
2120 && GET_MODE_SIZE (GET_MODE (valreg)) < UNITS_PER_WORD)
144a3150 2121 valreg = convert_to_mode (word_mode, valreg,
e934eef9
RK
2122 TREE_UNSIGNED (TREE_TYPE (exp)));
2123
1b5c5873
RK
2124 /* Structures whose size is not a multiple of a word are aligned
2125 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2126 machine, this means we must skip the empty high order bytes when
2127 calculating the bit offset. */
2128 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2129 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2130 * BITS_PER_UNIT));
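      /* Illustrative arithmetic: with 32-bit words, a 6-byte structure has
	 6 % 4 == 2 leftover bytes, so big_endian_correction = 32 - 16 = 16.  */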
2131
 2132 /* Copy the structure BITSIZE bits at a time.
2133
2134 We could probably emit more efficient code for machines
766b19fb
JL
2135 which do not use strict alignment, but it doesn't seem
2136 worth the effort at the current time. */
1b5c5873
RK
2137 for (bitpos = 0, xbitpos = big_endian_correction;
2138 bitpos < bytes * BITS_PER_UNIT;
2139 bitpos += bitsize, xbitpos += bitsize)
766b19fb 2140 {
1b5c5873
RK
2141
2142 /* We need a new source operand each time xbitpos is on a
2143 word boundary and when xbitpos == big_endian_correction
2144 (the first time through). */
2145 if (xbitpos % BITS_PER_WORD == 0
2146 || xbitpos == big_endian_correction)
2147 src = operand_subword_force (valreg,
2148 xbitpos / BITS_PER_WORD,
2149 BLKmode);
2150
2151 /* We need a new destination operand each time bitpos is on
2152 a word boundary. */
2153 if (bitpos % BITS_PER_WORD == 0)
2154 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, BLKmode);
766b19fb 2155
1b5c5873
RK
2156 /* Use xbitpos for the source extraction (right justified) and
 2157 bitpos for the destination store (left justified). */
2158 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2159 extract_bit_field (src, bitsize,
2160 xbitpos % BITS_PER_WORD, 1,
2161 NULL_RTX, word_mode,
2162 word_mode,
2163 bitsize / BITS_PER_UNIT,
2164 BITS_PER_WORD),
2165 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
766b19fb
JL
2166 }
2167 }
51bbfa0c
RS
2168 else
2169 target = copy_to_reg (valreg);
2170
84b55618 2171#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
2172 /* If we promoted this return value, make the proper SUBREG. TARGET
2173 might be const0_rtx here, so be careful. */
2174 if (GET_CODE (target) == REG
766b19fb 2175 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
5d2ac65e 2176 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 2177 {
321e0bba
RK
2178 tree type = TREE_TYPE (exp);
2179 int unsignedp = TREE_UNSIGNED (type);
84b55618 2180
321e0bba
RK
2181 /* If we don't promote as expected, something is wrong. */
2182 if (GET_MODE (target)
2183 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
5d2ac65e
RK
2184 abort ();
2185
321e0bba 2186 target = gen_rtx (SUBREG, TYPE_MODE (type), target, 0);
84b55618
RK
2187 SUBREG_PROMOTED_VAR_P (target) = 1;
2188 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2189 }
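  /* Illustrative case (hypothetical target): a `short int' value returned
     promoted in SImode leaves TARGET as (subreg:HI (reg:SI ...) 0) here,
     with SUBREG_PROMOTED_VAR_P recording that the full register already
     holds the extended value.  */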
2190#endif
2191
2f4aa534
RS
2192 /* If size of args is variable or this was a constructor call for a stack
2193 argument, restore saved stack-pointer value. */
51bbfa0c
RS
2194
2195 if (old_stack_level)
2196 {
e5d70561 2197 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 2198 pending_stack_adjust = old_pending_adj;
d64f5a78 2199#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2200 stack_arg_under_construction = old_stack_arg_under_construction;
2201 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2202 stack_usage_map = initial_stack_usage_map;
d64f5a78 2203#endif
51bbfa0c 2204 }
51bbfa0c
RS
2205#ifdef ACCUMULATE_OUTGOING_ARGS
2206 else
2207 {
2208#ifdef REG_PARM_STACK_SPACE
2209 if (save_area)
2210 {
2211 enum machine_mode save_mode = GET_MODE (save_area);
2212 rtx stack_area
2213 = gen_rtx (MEM, save_mode,
2214 memory_address (save_mode,
b94301c2
RS
2215#ifdef ARGS_GROW_DOWNWARD
2216 plus_constant (argblock, - high_to_save)
2217#else
2218 plus_constant (argblock, low_to_save)
2219#endif
2220 ));
51bbfa0c
RS
2221
2222 if (save_mode != BLKmode)
2223 emit_move_insn (stack_area, save_area);
2224 else
2225 emit_block_move (stack_area, validize_mem (save_area),
e5d70561
RK
2226 GEN_INT (high_to_save - low_to_save + 1),
2227 PARM_BOUNDARY / BITS_PER_UNIT);
51bbfa0c
RS
2228 }
2229#endif
2230
2231 /* If we saved any argument areas, restore them. */
2232 for (i = 0; i < num_actuals; i++)
2233 if (args[i].save_area)
2234 {
2235 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2236 rtx stack_area
2237 = gen_rtx (MEM, save_mode,
2238 memory_address (save_mode,
2239 XEXP (args[i].stack_slot, 0)));
2240
2241 if (save_mode != BLKmode)
2242 emit_move_insn (stack_area, args[i].save_area);
2243 else
2244 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 2245 GEN_INT (args[i].size.constant),
51bbfa0c
RS
2246 PARM_BOUNDARY / BITS_PER_UNIT);
2247 }
2248
2249 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2250 stack_usage_map = initial_stack_usage_map;
2251 }
2252#endif
2253
59257ff7
RK
2254 /* If this was alloca, record the new stack level for nonlocal gotos.
2255 Check for the handler slots since we might not have a save area
0f41302f 2256 for non-local gotos. */
59257ff7
RK
2257
2258 if (may_be_alloca && nonlocal_goto_handler_slot != 0)
e5d70561 2259 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
2260
2261 pop_temp_slots ();
2262
2263 return target;
2264}
2265\f
322e3e34
RK
2266/* Output a library call to function FUN (a SYMBOL_REF rtx)
2267 (emitting the queue unless NO_QUEUE is nonzero),
2268 for a value of mode OUTMODE,
2269 with NARGS different arguments, passed as alternating rtx values
2270 and machine_modes to convert them to.
2271 The rtx values should have been passed through protect_from_queue already.
2272
2273 NO_QUEUE will be true if and only if the library call is a `const' call
2274 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2275 to the variable is_const in expand_call.
2276
2277 NO_QUEUE must be true for const calls, because if it isn't, then
2278 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2279 and will be lost if the libcall sequence is optimized away.
2280
2281 NO_QUEUE must be false for non-const calls, because if it isn't, the
2282 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2283 optimized. For instance, the instruction scheduler may incorrectly
2284 move memory references across the non-const call. */
2285
2286void
4f90e4a0
RK
2287emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2288 int nargs, ...))
322e3e34 2289{
4f90e4a0
RK
2290#ifndef __STDC__
2291 rtx orgfun;
2292 int no_queue;
2293 enum machine_mode outmode;
2294 int nargs;
2295#endif
322e3e34
RK
2296 va_list p;
2297 /* Total size in bytes of all the stack-parms scanned so far. */
2298 struct args_size args_size;
2299 /* Size of arguments before any adjustments (such as rounding). */
2300 struct args_size original_args_size;
2301 register int argnum;
322e3e34 2302 rtx fun;
322e3e34
RK
2303 int inc;
2304 int count;
2305 rtx argblock = 0;
2306 CUMULATIVE_ARGS args_so_far;
2307 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 2308 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
2309 struct arg *argvec;
2310 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2311 rtx call_fusage = 0;
f046b3cc
JL
2312 /* Size of the stack reserved for parameter registers. */
2313 int reg_parm_stack_space = 0;
2314#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2315 /* Define the boundary of the register parm stack space that needs to be
 2316 saved, if any. */
2317 int low_to_save = -1, high_to_save;
 2318 rtx save_area = 0; /* Place where it is saved */
2319#endif
2320
2321#ifdef ACCUMULATE_OUTGOING_ARGS
2322 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2323 char *initial_stack_usage_map = stack_usage_map;
2324 int needed;
2325#endif
2326
2327#ifdef REG_PARM_STACK_SPACE
2328#ifdef MAYBE_REG_PARM_STACK_SPACE
2329 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2330#else
2331 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2332#endif
2333#endif
322e3e34 2334
4f90e4a0
RK
2335 VA_START (p, nargs);
2336
2337#ifndef __STDC__
2338 orgfun = va_arg (p, rtx);
322e3e34
RK
2339 no_queue = va_arg (p, int);
2340 outmode = va_arg (p, enum machine_mode);
2341 nargs = va_arg (p, int);
4f90e4a0
RK
2342#endif
2343
2344 fun = orgfun;
322e3e34
RK
2345
2346 /* Copy all the libcall-arguments out of the varargs data
2347 and into a vector ARGVEC.
2348
2349 Compute how to pass each argument. We only support a very small subset
2350 of the full argument passing conventions to limit complexity here since
2351 library functions shouldn't have many args. */
2352
2353 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
f046b3cc
JL
2354 bzero ((char *) argvec, nargs * sizeof (struct arg));
2355
322e3e34 2356
eecb6f50 2357 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2358
2359 args_size.constant = 0;
2360 args_size.var = 0;
2361
888aa7a9
RS
2362 push_temp_slots ();
2363
322e3e34
RK
2364 for (count = 0; count < nargs; count++)
2365 {
2366 rtx val = va_arg (p, rtx);
2367 enum machine_mode mode = va_arg (p, enum machine_mode);
2368
2369 /* We cannot convert the arg value to the mode the library wants here;
2370 must do it earlier where we know the signedness of the arg. */
2371 if (mode == BLKmode
2372 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2373 abort ();
2374
2375 /* On some machines, there's no way to pass a float to a library fcn.
2376 Pass it as a double instead. */
2377#ifdef LIBGCC_NEEDS_DOUBLE
2378 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2379 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2380#endif
2381
2382 /* There's no need to call protect_from_queue, because
2383 either emit_move_insn or emit_push_insn will do that. */
2384
2385 /* Make sure it is a reasonable operand for a move or push insn. */
2386 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2387 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2388 val = force_operand (val, NULL_RTX);
2389
322e3e34
RK
2390#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2391 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2392 {
a44492f0
RK
2393 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2394 be viewed as just an efficiency improvement. */
888aa7a9
RS
2395 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2396 emit_move_insn (slot, val);
8301b6e2 2397 val = force_operand (XEXP (slot, 0), NULL_RTX);
a44492f0 2398 mode = Pmode;
888aa7a9 2399 }
322e3e34
RK
2400#endif
2401
888aa7a9
RS
2402 argvec[count].value = val;
2403 argvec[count].mode = mode;
2404
322e3e34 2405 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 2406 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
2407 abort ();
2408#ifdef FUNCTION_ARG_PARTIAL_NREGS
2409 argvec[count].partial
2410 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2411#else
2412 argvec[count].partial = 0;
2413#endif
2414
2415 locate_and_pad_parm (mode, NULL_TREE,
2416 argvec[count].reg && argvec[count].partial == 0,
2417 NULL_TREE, &args_size, &argvec[count].offset,
2418 &argvec[count].size);
2419
2420 if (argvec[count].size.var)
2421 abort ();
2422
2423#ifndef REG_PARM_STACK_SPACE
2424 if (argvec[count].partial)
2425 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2426#endif
2427
2428 if (argvec[count].reg == 0 || argvec[count].partial != 0
2429#ifdef REG_PARM_STACK_SPACE
2430 || 1
2431#endif
2432 )
2433 args_size.constant += argvec[count].size.constant;
2434
0f41302f 2435 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
2436 }
2437 va_end (p);
2438
f046b3cc
JL
2439#ifdef FINAL_REG_PARM_STACK_SPACE
2440 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2441 args_size.var);
2442#endif
2443
322e3e34
RK
2444 /* If this machine requires an external definition for library
2445 functions, write one out. */
2446 assemble_external_libcall (fun);
2447
2448 original_args_size = args_size;
2449#ifdef STACK_BOUNDARY
2450 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2451 / STACK_BYTES) * STACK_BYTES);
2452#endif
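 /* Illustrative arithmetic: with STACK_BYTES == 8, an args_size.constant of
    20 is rounded up to 24 by the computation above.  */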
2453
2454#ifdef REG_PARM_STACK_SPACE
2455 args_size.constant = MAX (args_size.constant,
f046b3cc 2456 reg_parm_stack_space);
322e3e34 2457#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc 2458 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
2459#endif
2460#endif
2461
322e3e34
RK
2462 if (args_size.constant > current_function_outgoing_args_size)
2463 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2464
2465#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
2466 /* Since the stack pointer will never be pushed, it is possible for
2467 the evaluation of a parm to clobber something we have already
2468 written to the stack. Since most function calls on RISC machines
2469 do not use the stack, this is uncommon, but must work correctly.
2470
2471 Therefore, we save any area of the stack that was already written
2472 and that we are using. Here we set up to do this by making a new
2473 stack usage map from the old one.
2474
2475 Another approach might be to try to reorder the argument
2476 evaluations to avoid this conflicting stack usage. */
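 /* stack_usage_map is indexed by byte offset into the outgoing argument
    block: a nonzero entry means that byte has already been written, and the
    push loop below marks every byte it stores.  */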
2477
2478 needed = args_size.constant;
2479#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2480 /* Since we will be writing into the entire argument area, the
2481 map must be allocated for its entire size, not just the part that
2482 is the responsibility of the caller. */
2483 needed += reg_parm_stack_space;
2484#endif
2485
2486#ifdef ARGS_GROW_DOWNWARD
2487 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2488 needed + 1);
2489#else
2490 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2491 needed);
322e3e34 2492#endif
f046b3cc
JL
2493 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2494
2495 if (initial_highest_arg_in_use)
2496 bcopy (initial_stack_usage_map, stack_usage_map,
2497 initial_highest_arg_in_use);
2498
2499 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2500 bzero (&stack_usage_map[initial_highest_arg_in_use],
2501 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2502 needed = 0;
322e3e34 2503
f046b3cc
JL
2504 /* The address of the outgoing argument list must not be copied to a
2505 register here, because argblock would be left pointing to the
2506 wrong place after the call to allocate_dynamic_stack_space below.
2507 */
2508
2509 argblock = virtual_outgoing_args_rtx;
2510#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
2511#ifndef PUSH_ROUNDING
2512 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2513#endif
f046b3cc 2514#endif
322e3e34
RK
2515
2516#ifdef PUSH_ARGS_REVERSED
2517#ifdef STACK_BOUNDARY
2518 /* If we push args individually in reverse order, perform stack alignment
2519 before the first push (the last arg). */
2520 if (argblock == 0)
2521 anti_adjust_stack (GEN_INT (args_size.constant
2522 - original_args_size.constant));
2523#endif
2524#endif
2525
2526#ifdef PUSH_ARGS_REVERSED
2527 inc = -1;
2528 argnum = nargs - 1;
2529#else
2530 inc = 1;
2531 argnum = 0;
2532#endif
2533
f046b3cc
JL
2534#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2535 /* The argument list is the property of the called routine and it
2536 may clobber it. If the fixed area has been used for previous
2537 parameters, we must save and restore it.
2538
 2539 Here we compute the boundary of the area that needs to be saved, if any. */
2540
2541#ifdef ARGS_GROW_DOWNWARD
2542 for (count = 0; count < reg_parm_stack_space + 1; count++)
2543#else
2544 for (count = 0; count < reg_parm_stack_space; count++)
2545#endif
2546 {
2547 if (count >= highest_outgoing_arg_in_use
2548 || stack_usage_map[count] == 0)
2549 continue;
2550
2551 if (low_to_save == -1)
2552 low_to_save = count;
2553
2554 high_to_save = count;
2555 }
2556
2557 if (low_to_save >= 0)
2558 {
2559 int num_to_save = high_to_save - low_to_save + 1;
2560 enum machine_mode save_mode
2561 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2562 rtx stack_area;
2563
2564 /* If we don't have the required alignment, must do this in BLKmode. */
2565 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2566 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2567 save_mode = BLKmode;
2568
2569 stack_area = gen_rtx (MEM, save_mode,
2570 memory_address (save_mode,
2571
2572#ifdef ARGS_GROW_DOWNWARD
2573 plus_constant (argblock,
2574 - high_to_save)
2575#else
2576 plus_constant (argblock,
2577 low_to_save)
2578#endif
2579 ));
2580 if (save_mode == BLKmode)
2581 {
2582 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2583 MEM_IN_STRUCT_P (save_area) = 0;
2584 emit_block_move (validize_mem (save_area), stack_area,
2585 GEN_INT (num_to_save),
2586 PARM_BOUNDARY / BITS_PER_UNIT);
2587 }
2588 else
2589 {
2590 save_area = gen_reg_rtx (save_mode);
2591 emit_move_insn (save_area, stack_area);
2592 }
2593 }
2594#endif
2595
322e3e34
RK
2596 /* Push the args that need to be pushed. */
2597
5e26979c
JL
2598 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2599 are to be pushed. */
322e3e34
RK
2600 for (count = 0; count < nargs; count++, argnum += inc)
2601 {
2602 register enum machine_mode mode = argvec[argnum].mode;
2603 register rtx val = argvec[argnum].value;
2604 rtx reg = argvec[argnum].reg;
2605 int partial = argvec[argnum].partial;
f046b3cc 2606 int lower_bound, upper_bound, i;
322e3e34
RK
2607
2608 if (! (reg != 0 && partial == 0))
f046b3cc
JL
2609 {
2610#ifdef ACCUMULATE_OUTGOING_ARGS
2611 /* If this is being stored into a pre-allocated, fixed-size, stack
2612 area, save any previous data at that location. */
2613
2614#ifdef ARGS_GROW_DOWNWARD
2615 /* stack_slot is negative, but we want to index stack_usage_map
2616 with positive values. */
5e26979c
JL
2617 upper_bound = -argvec[argnum].offset.constant + 1;
2618 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 2619#else
5e26979c
JL
2620 lower_bound = argvec[argnum].offset.constant;
2621 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
2622#endif
2623
2624 for (i = lower_bound; i < upper_bound; i++)
2625 if (stack_usage_map[i]
2626#ifdef REG_PARM_STACK_SPACE
2627 /* Don't store things in the fixed argument area at this point;
2628 it has already been saved. */
2629 && i > reg_parm_stack_space
2630#endif
2631 )
2632 break;
2633
2634 if (i != upper_bound)
2635 {
2636 /* We need to make a save area. See what mode we can make it. */
2637 enum machine_mode save_mode
5e26979c 2638 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
2639 MODE_INT, 1);
2640 rtx stack_area
2641 = gen_rtx (MEM, save_mode,
2642 memory_address (save_mode, plus_constant (argblock,
5e26979c
JL
2643 argvec[argnum].offset.constant)));
2644 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2645 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
2646 }
2647#endif
2648 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
5e26979c 2649 argblock, GEN_INT (argvec[argnum].offset.constant));
f046b3cc
JL
2650
2651#ifdef ACCUMULATE_OUTGOING_ARGS
2652 /* Now mark the segment we just used. */
2653 for (i = lower_bound; i < upper_bound; i++)
2654 stack_usage_map[i] = 1;
2655#endif
2656
2657 NO_DEFER_POP;
2658 }
322e3e34
RK
2659 }
2660
2661#ifndef PUSH_ARGS_REVERSED
2662#ifdef STACK_BOUNDARY
2663 /* If we pushed args in forward order, perform stack alignment
2664 after pushing the last arg. */
2665 if (argblock == 0)
2666 anti_adjust_stack (GEN_INT (args_size.constant
2667 - original_args_size.constant));
2668#endif
2669#endif
2670
2671#ifdef PUSH_ARGS_REVERSED
2672 argnum = nargs - 1;
2673#else
2674 argnum = 0;
2675#endif
2676
77cac2f2 2677 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 2678
322e3e34
RK
2679 /* Now load any reg parms into their regs. */
2680
5e26979c
JL
2681 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2682 are to be pushed. */
322e3e34
RK
2683 for (count = 0; count < nargs; count++, argnum += inc)
2684 {
2685 register enum machine_mode mode = argvec[argnum].mode;
2686 register rtx val = argvec[argnum].value;
2687 rtx reg = argvec[argnum].reg;
2688 int partial = argvec[argnum].partial;
2689
2690 if (reg != 0 && partial == 0)
2691 emit_move_insn (reg, val);
2692 NO_DEFER_POP;
2693 }
2694
2695 /* For version 1.37, try deleting this entirely. */
2696 if (! no_queue)
2697 emit_queue ();
2698
2699 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
2700 for (count = 0; count < nargs; count++)
2701 if (argvec[count].reg != 0)
77cac2f2 2702 use_reg (&call_fusage, argvec[count].reg);
322e3e34 2703
322e3e34
RK
2704 /* Don't allow popping to be deferred, since then
2705 cse'ing of library calls could delete a call and leave the pop. */
2706 NO_DEFER_POP;
2707
2708 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2709 will set inhibit_defer_pop to that value. */
2710
334c4f0f
RK
2711 /* The return type is needed to decide how many bytes the function pops.
2712 Signedness plays no role in that, so for simplicity, we pretend it's
2713 always signed. We also assume that the list of arguments passed has
2714 no impact, so we pretend it is unknown. */
2715
2c8da025
RK
2716 emit_call_1 (fun,
2717 get_identifier (XSTR (orgfun, 0)),
b3776927
RK
2718 build_function_type (outmode == VOIDmode ? void_type_node
2719 : type_for_mode (outmode, 0), NULL_TREE),
334c4f0f 2720 args_size.constant, 0,
322e3e34
RK
2721 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2722 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 2723 old_inhibit_defer_pop + 1, call_fusage, no_queue);
322e3e34 2724
888aa7a9
RS
2725 pop_temp_slots ();
2726
322e3e34
RK
2727 /* Now restore inhibit_defer_pop to its actual original value. */
2728 OK_DEFER_POP;
f046b3cc
JL
2729
2730#ifdef ACCUMULATE_OUTGOING_ARGS
2731#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
2732 if (save_area)
2733 {
2734 enum machine_mode save_mode = GET_MODE (save_area);
2735 rtx stack_area
2736 = gen_rtx (MEM, save_mode,
2737 memory_address (save_mode,
f046b3cc 2738#ifdef ARGS_GROW_DOWNWARD
e9a25f70 2739 plus_constant (argblock, - high_to_save)
f046b3cc 2740#else
e9a25f70 2741 plus_constant (argblock, low_to_save)
f046b3cc 2742#endif
e9a25f70 2743 ));
f046b3cc 2744
e9a25f70
JL
2745 if (save_mode != BLKmode)
2746 emit_move_insn (stack_area, save_area);
2747 else
2748 emit_block_move (stack_area, validize_mem (save_area),
2749 GEN_INT (high_to_save - low_to_save + 1),
2750 PARM_BOUNDARY / BITS_PER_UNIT);
2751 }
f046b3cc
JL
2752#endif
2753
2754 /* If we saved any argument areas, restore them. */
2755 for (count = 0; count < nargs; count++)
2756 if (argvec[count].save_area)
2757 {
2758 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
2759 rtx stack_area
2760 = gen_rtx (MEM, save_mode,
2761 memory_address (save_mode, plus_constant (argblock,
2762 argvec[count].offset.constant)));
2763
2764 emit_move_insn (stack_area, argvec[count].save_area);
2765 }
2766
2767 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2768 stack_usage_map = initial_stack_usage_map;
2769#endif
322e3e34
RK
2770}
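/* Illustrative use of emit_library_call, mirroring a call made from
   expand_call above:

	emit_library_call (chkr_check_exec_libfunc, 1, VOIDmode, 1,
			   funexp, ptr_mode);

   Here NO_QUEUE is 1 (a `const' call), OUTMODE is VOIDmode because no
   return value is used, and a single argument FUNEXP is passed in
   ptr_mode.  */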
2771\f
2772/* Like emit_library_call except that an extra argument, VALUE,
2773 comes second and says where to store the result.
fac0ad80
RS
2774 (If VALUE is zero, this function chooses a convenient way
 2775 to return the value.)
322e3e34 2776
fac0ad80
RS
2777 This function returns an rtx for where the value is to be found.
2778 If VALUE is nonzero, VALUE is returned. */
2779
2780rtx
4f90e4a0
RK
2781emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2782 enum machine_mode outmode, int nargs, ...))
322e3e34 2783{
4f90e4a0
RK
2784#ifndef __STDC__
2785 rtx orgfun;
2786 rtx value;
2787 int no_queue;
2788 enum machine_mode outmode;
2789 int nargs;
2790#endif
322e3e34
RK
2791 va_list p;
2792 /* Total size in bytes of all the stack-parms scanned so far. */
2793 struct args_size args_size;
2794 /* Size of arguments before any adjustments (such as rounding). */
2795 struct args_size original_args_size;
2796 register int argnum;
322e3e34 2797 rtx fun;
322e3e34
RK
2798 int inc;
2799 int count;
2800 rtx argblock = 0;
2801 CUMULATIVE_ARGS args_so_far;
2802 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 2803 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
2804 struct arg *argvec;
2805 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2806 rtx call_fusage = 0;
f046b3cc
JL
2807 /* Size of the stack reserved for parameter registers. */
2808 int reg_parm_stack_space = 0;
322e3e34 2809 rtx mem_value = 0;
fac0ad80 2810 int pcc_struct_value = 0;
4f389214 2811 int struct_value_size = 0;
d61bee95 2812 int is_const;
f046b3cc
JL
2813 int needed;
2814
2815#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2816 /* Define the boundary of the register parm stack space that needs to be
 2817 saved, if any. */
2818 int low_to_save = -1, high_to_save;
 2819 rtx save_area = 0; /* Place where it is saved */
2820#endif
2821
2822#ifdef ACCUMULATE_OUTGOING_ARGS
2823 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2824 char *initial_stack_usage_map = stack_usage_map;
2825#endif
2826
2827#ifdef REG_PARM_STACK_SPACE
2828#ifdef MAYBE_REG_PARM_STACK_SPACE
2829 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2830#else
2831 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2832#endif
2833#endif
322e3e34 2834
4f90e4a0
RK
2835 VA_START (p, nargs);
2836
2837#ifndef __STDC__
2838 orgfun = va_arg (p, rtx);
322e3e34
RK
2839 value = va_arg (p, rtx);
2840 no_queue = va_arg (p, int);
2841 outmode = va_arg (p, enum machine_mode);
2842 nargs = va_arg (p, int);
4f90e4a0
RK
2843#endif
2844
d61bee95 2845 is_const = no_queue;
4f90e4a0 2846 fun = orgfun;
322e3e34
RK
2847
2848 /* If this kind of value comes back in memory,
2849 decide where in memory it should come back. */
fac0ad80 2850 if (aggregate_value_p (type_for_mode (outmode, 0)))
322e3e34 2851 {
fac0ad80
RS
2852#ifdef PCC_STATIC_STRUCT_RETURN
2853 rtx pointer_reg
2854 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2855 0);
2856 mem_value = gen_rtx (MEM, outmode, pointer_reg);
2857 pcc_struct_value = 1;
2858 if (value == 0)
2859 value = gen_reg_rtx (outmode);
2860#else /* not PCC_STATIC_STRUCT_RETURN */
4f389214 2861 struct_value_size = GET_MODE_SIZE (outmode);
fac0ad80 2862 if (value != 0 && GET_CODE (value) == MEM)
322e3e34
RK
2863 mem_value = value;
2864 else
2865 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
fac0ad80 2866#endif
779c643a
JW
2867
2868 /* This call returns a big structure. */
2869 is_const = 0;
322e3e34
RK
2870 }
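 /* At this point MEM_VALUE, when nonzero, is the MEM in which the result
    will be found; the code below arranges for its address to reach the
    callee, either through STRUCT_VALUE_RTX or as an extra leading
    argument.  */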
2871
2872 /* ??? Unfinished: must pass the memory address as an argument. */
2873
2874 /* Copy all the libcall-arguments out of the varargs data
2875 and into a vector ARGVEC.
2876
2877 Compute how to pass each argument. We only support a very small subset
2878 of the full argument passing conventions to limit complexity here since
2879 library functions shouldn't have many args. */
2880
2881 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
d3c4e2ab 2882 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
322e3e34 2883
eecb6f50 2884 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2885
2886 args_size.constant = 0;
2887 args_size.var = 0;
2888
2889 count = 0;
2890
888aa7a9
RS
2891 push_temp_slots ();
2892
322e3e34
RK
2893 /* If there's a structure value address to be passed,
2894 either pass it in the special place, or pass it as an extra argument. */
fac0ad80 2895 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
322e3e34
RK
2896 {
2897 rtx addr = XEXP (mem_value, 0);
fac0ad80 2898 nargs++;
322e3e34 2899
fac0ad80
RS
2900 /* Make sure it is a reasonable operand for a move or push insn. */
2901 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2902 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2903 addr = force_operand (addr, NULL_RTX);
322e3e34 2904
fac0ad80 2905 argvec[count].value = addr;
4fc3dcd5 2906 argvec[count].mode = Pmode;
fac0ad80 2907 argvec[count].partial = 0;
322e3e34 2908
4fc3dcd5 2909 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
322e3e34 2910#ifdef FUNCTION_ARG_PARTIAL_NREGS
4fc3dcd5 2911 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
fac0ad80 2912 abort ();
322e3e34
RK
2913#endif
2914
4fc3dcd5 2915 locate_and_pad_parm (Pmode, NULL_TREE,
fac0ad80
RS
2916 argvec[count].reg && argvec[count].partial == 0,
2917 NULL_TREE, &args_size, &argvec[count].offset,
2918 &argvec[count].size);
322e3e34
RK
2919
2920
fac0ad80 2921 if (argvec[count].reg == 0 || argvec[count].partial != 0
322e3e34 2922#ifdef REG_PARM_STACK_SPACE
fac0ad80 2923 || 1
322e3e34 2924#endif
fac0ad80
RS
2925 )
2926 args_size.constant += argvec[count].size.constant;
322e3e34 2927
0f41302f 2928 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
fac0ad80
RS
2929
2930 count++;
322e3e34
RK
2931 }
2932
2933 for (; count < nargs; count++)
2934 {
2935 rtx val = va_arg (p, rtx);
2936 enum machine_mode mode = va_arg (p, enum machine_mode);
2937
2938 /* We cannot convert the arg value to the mode the library wants here;
2939 must do it earlier where we know the signedness of the arg. */
2940 if (mode == BLKmode
2941 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2942 abort ();
2943
2944 /* On some machines, there's no way to pass a float to a library fcn.
2945 Pass it as a double instead. */
2946#ifdef LIBGCC_NEEDS_DOUBLE
2947 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2948 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2949#endif
2950
2951 /* There's no need to call protect_from_queue, because
2952 either emit_move_insn or emit_push_insn will do that. */
2953
2954 /* Make sure it is a reasonable operand for a move or push insn. */
2955 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2956 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2957 val = force_operand (val, NULL_RTX);
2958
322e3e34
RK
2959#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2960 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2961 {
a44492f0
RK
2962 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2963 be viewed as just an efficiency improvement. */
888aa7a9
RS
2964 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2965 emit_move_insn (slot, val);
2966 val = XEXP (slot, 0);
2967 mode = Pmode;
2968 }
322e3e34
RK
2969#endif
2970
888aa7a9
RS
2971 argvec[count].value = val;
2972 argvec[count].mode = mode;
2973
322e3e34 2974 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 2975 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
2976 abort ();
2977#ifdef FUNCTION_ARG_PARTIAL_NREGS
2978 argvec[count].partial
2979 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2980#else
2981 argvec[count].partial = 0;
2982#endif
2983
2984 locate_and_pad_parm (mode, NULL_TREE,
2985 argvec[count].reg && argvec[count].partial == 0,
2986 NULL_TREE, &args_size, &argvec[count].offset,
2987 &argvec[count].size);
2988
2989 if (argvec[count].size.var)
2990 abort ();
2991
2992#ifndef REG_PARM_STACK_SPACE
2993 if (argvec[count].partial)
2994 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2995#endif
2996
2997 if (argvec[count].reg == 0 || argvec[count].partial != 0
2998#ifdef REG_PARM_STACK_SPACE
2999 || 1
3000#endif
3001 )
3002 args_size.constant += argvec[count].size.constant;
3003
0f41302f 3004 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
3005 }
3006 va_end (p);
3007
f046b3cc
JL
3008#ifdef FINAL_REG_PARM_STACK_SPACE
3009 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3010 args_size.var);
3011#endif
322e3e34
RK
3012 /* If this machine requires an external definition for library
3013 functions, write one out. */
3014 assemble_external_libcall (fun);
3015
3016 original_args_size = args_size;
3017#ifdef STACK_BOUNDARY
3018 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3019 / STACK_BYTES) * STACK_BYTES);
3020#endif
3021
3022#ifdef REG_PARM_STACK_SPACE
3023 args_size.constant = MAX (args_size.constant,
f046b3cc 3024 reg_parm_stack_space);
322e3e34 3025#ifndef OUTGOING_REG_PARM_STACK_SPACE
fc990856 3026 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
3027#endif
3028#endif
3029
322e3e34
RK
3030 if (args_size.constant > current_function_outgoing_args_size)
3031 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
3032
3033#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
3034 /* Since the stack pointer will never be pushed, it is possible for
3035 the evaluation of a parm to clobber something we have already
3036 written to the stack. Since most function calls on RISC machines
3037 do not use the stack, this is uncommon, but must work correctly.
3038
3039 Therefore, we save any area of the stack that was already written
3040 and that we are using. Here we set up to do this by making a new
3041 stack usage map from the old one.
3042
3043 Another approach might be to try to reorder the argument
3044 evaluations to avoid this conflicting stack usage. */
3045
3046 needed = args_size.constant;
3047#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
3048 /* Since we will be writing into the entire argument area, the
3049 map must be allocated for its entire size, not just the part that
3050 is the responsibility of the caller. */
3051 needed += reg_parm_stack_space;
3052#endif
3053
3054#ifdef ARGS_GROW_DOWNWARD
3055 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3056 needed + 1);
3057#else
3058 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3059 needed);
322e3e34 3060#endif
f046b3cc
JL
3061 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3062
3063 if (initial_highest_arg_in_use)
3064 bcopy (initial_stack_usage_map, stack_usage_map,
3065 initial_highest_arg_in_use);
3066
3067 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3068 bzero (&stack_usage_map[initial_highest_arg_in_use],
3069 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3070 needed = 0;
322e3e34 3071
f046b3cc
JL
3072 /* The address of the outgoing argument list must not be copied to a
3073 register here, because argblock would be left pointing to the
3074 wrong place after the call to allocate_dynamic_stack_space below.
3075 */
3076
3077 argblock = virtual_outgoing_args_rtx;
3078#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
3079#ifndef PUSH_ROUNDING
3080 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3081#endif
f046b3cc 3082#endif
322e3e34
RK
3083
3084#ifdef PUSH_ARGS_REVERSED
3085#ifdef STACK_BOUNDARY
3086 /* If we push args individually in reverse order, perform stack alignment
3087 before the first push (the last arg). */
3088 if (argblock == 0)
3089 anti_adjust_stack (GEN_INT (args_size.constant
3090 - original_args_size.constant));
3091#endif
3092#endif
3093
3094#ifdef PUSH_ARGS_REVERSED
3095 inc = -1;
3096 argnum = nargs - 1;
3097#else
3098 inc = 1;
3099 argnum = 0;
3100#endif
3101
f046b3cc
JL
3102#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3103     The argument list is the property of the called routine, which may
3104     clobber it.  If the fixed area has been used for previous
3105 parameters, we must save and restore it.
3106
3107     Here we compute the boundary of the area that needs to be saved, if any.  */
3108
3109#ifdef ARGS_GROW_DOWNWARD
3110 for (count = 0; count < reg_parm_stack_space + 1; count++)
3111#else
3112 for (count = 0; count < reg_parm_stack_space; count++)
3113#endif
3114 {
3115 if (count >= highest_outgoing_arg_in_use
3116 || stack_usage_map[count] == 0)
3117 continue;
3118
3119 if (low_to_save == -1)
3120 low_to_save = count;
3121
3122 high_to_save = count;
3123 }
3124
3125 if (low_to_save >= 0)
3126 {
3127 int num_to_save = high_to_save - low_to_save + 1;
3128 enum machine_mode save_mode
3129 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3130 rtx stack_area;
3131
3132       /* If we don't have the required alignment, we must do this in BLKmode.  */
3133 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3134 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3135 save_mode = BLKmode;
3136
3137 stack_area = gen_rtx (MEM, save_mode,
3138 memory_address (save_mode,
3139
3140#ifdef ARGS_GROW_DOWNWARD
3141 plus_constant (argblock,
3142 - high_to_save)
3143#else
3144 plus_constant (argblock,
3145 low_to_save)
3146#endif
3147 ));
3148 if (save_mode == BLKmode)
3149 {
3150 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3151 MEM_IN_STRUCT_P (save_area) = 0;
3152 emit_block_move (validize_mem (save_area), stack_area,
3153 GEN_INT (num_to_save),
3154 PARM_BOUNDARY / BITS_PER_UNIT);
3155 }
3156 else
3157 {
3158 save_area = gen_reg_rtx (save_mode);
3159 emit_move_insn (save_area, stack_area);
3160 }
3161 }
3162#endif
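  /* About the save just made: mode_for_size asks for an integer mode wide
     enough for NUM_TO_SAVE bytes, so on a typical 32-bit target a four-byte
     region is saved with a single move into a pseudo, while a region that is
     too large or not suitably aligned falls back to BLKmode and is copied
     through a stack temporary instead.  */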
3163
322e3e34
RK
3164 /* Push the args that need to be pushed. */
3165
5e26979c
JL
3166 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3167 are to be pushed. */
322e3e34
RK
3168 for (count = 0; count < nargs; count++, argnum += inc)
3169 {
3170 register enum machine_mode mode = argvec[argnum].mode;
3171 register rtx val = argvec[argnum].value;
3172 rtx reg = argvec[argnum].reg;
3173 int partial = argvec[argnum].partial;
f046b3cc 3174 int lower_bound, upper_bound, i;
322e3e34
RK
3175
3176 if (! (reg != 0 && partial == 0))
f046b3cc
JL
3177 {
3178#ifdef ACCUMULATE_OUTGOING_ARGS
3179	  /* If this is being stored into a pre-allocated, fixed-size stack
3180 area, save any previous data at that location. */
3181
3182#ifdef ARGS_GROW_DOWNWARD
3183 /* stack_slot is negative, but we want to index stack_usage_map
3184 with positive values. */
5e26979c
JL
3185 upper_bound = -argvec[argnum].offset.constant + 1;
3186 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 3187#else
5e26979c
JL
3188 lower_bound = argvec[argnum].offset.constant;
3189 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
3190#endif
3191
3192 for (i = lower_bound; i < upper_bound; i++)
3193 if (stack_usage_map[i]
3194#ifdef REG_PARM_STACK_SPACE
3195 /* Don't store things in the fixed argument area at this point;
3196 it has already been saved. */
3197 && i > reg_parm_stack_space
3198#endif
3199 )
3200 break;
3201
3202 if (i != upper_bound)
3203 {
3204 /* We need to make a save area. See what mode we can make it. */
3205 enum machine_mode save_mode
5e26979c 3206 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
3207 MODE_INT, 1);
3208 rtx stack_area
3209 = gen_rtx (MEM, save_mode,
3210 memory_address (save_mode, plus_constant (argblock,
5e26979c
JL
3211 argvec[argnum].offset.constant)));
3212 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3213 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
3214 }
3215#endif
3216 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
5e26979c 3217 argblock, GEN_INT (argvec[argnum].offset.constant));
f046b3cc
JL
3218
3219#ifdef ACCUMULATE_OUTGOING_ARGS
3220 /* Now mark the segment we just used. */
3221 for (i = lower_bound; i < upper_bound; i++)
3222 stack_usage_map[i] = 1;
3223#endif
3224
3225 NO_DEFER_POP;
3226 }
322e3e34
RK
3227 }
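  /* To illustrate the bounds computed above when ARGS_GROW_DOWNWARD is
     defined, take hypothetical values offset.constant == -8 and
     size.constant == 4: upper_bound is -(-8) + 1 == 9 and lower_bound is
     9 - 4 == 5, so bytes 5 through 8 of stack_usage_map cover this
     argument's slot.  */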
3228
3229#ifndef PUSH_ARGS_REVERSED
3230#ifdef STACK_BOUNDARY
3231 /* If we pushed args in forward order, perform stack alignment
3232 after pushing the last arg. */
3233 if (argblock == 0)
3234 anti_adjust_stack (GEN_INT (args_size.constant
3235 - original_args_size.constant));
3236#endif
3237#endif
3238
3239#ifdef PUSH_ARGS_REVERSED
3240 argnum = nargs - 1;
3241#else
3242 argnum = 0;
3243#endif
3244
77cac2f2 3245 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 3246
322e3e34
RK
3247 /* Now load any reg parms into their regs. */
3248
5e26979c
JL
3249 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3250 are to be pushed. */
322e3e34
RK
3251 for (count = 0; count < nargs; count++, argnum += inc)
3252 {
3253 register enum machine_mode mode = argvec[argnum].mode;
3254 register rtx val = argvec[argnum].value;
3255 rtx reg = argvec[argnum].reg;
3256 int partial = argvec[argnum].partial;
3257
3258 if (reg != 0 && partial == 0)
3259 emit_move_insn (reg, val);
3260 NO_DEFER_POP;
3261 }
3262
3263#if 0
3264 /* For version 1.37, try deleting this entirely. */
3265 if (! no_queue)
3266 emit_queue ();
3267#endif
3268
3269 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
3270 for (count = 0; count < nargs; count++)
3271 if (argvec[count].reg != 0)
77cac2f2 3272 use_reg (&call_fusage, argvec[count].reg);
322e3e34 3273
fac0ad80
RS
3274 /* Pass the function the address in which to return a structure value. */
3275 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3276 {
3277 emit_move_insn (struct_value_rtx,
3278 force_reg (Pmode,
3279 force_operand (XEXP (mem_value, 0),
3280 NULL_RTX)));
3281 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 3282 use_reg (&call_fusage, struct_value_rtx);
fac0ad80
RS
3283 }
3284
322e3e34
RK
3285 /* Don't allow popping to be deferred, since then
3286 cse'ing of library calls could delete a call and leave the pop. */
3287 NO_DEFER_POP;
3288
3289 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3290 will set inhibit_defer_pop to that value. */
334c4f0f
RK
3291 /* See the comment in emit_library_call about the function type we build
3292 and pass here. */
322e3e34 3293
2c8da025
RK
3294 emit_call_1 (fun,
3295 get_identifier (XSTR (orgfun, 0)),
334c4f0f
RK
3296 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3297 args_size.constant, struct_value_size,
322e3e34 3298 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4d6a19ff 3299 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 3300 old_inhibit_defer_pop + 1, call_fusage, is_const);
322e3e34
RK
3301
3302 /* Now restore inhibit_defer_pop to its actual original value. */
3303 OK_DEFER_POP;
3304
888aa7a9
RS
3305 pop_temp_slots ();
3306
322e3e34
RK
3307 /* Copy the value to the right place. */
3308 if (outmode != VOIDmode)
3309 {
3310 if (mem_value)
3311 {
3312 if (value == 0)
fac0ad80 3313 value = mem_value;
322e3e34
RK
3314 if (value != mem_value)
3315 emit_move_insn (value, mem_value);
3316 }
3317 else if (value != 0)
3318 emit_move_insn (value, hard_libcall_value (outmode));
fac0ad80
RS
3319 else
3320 value = hard_libcall_value (outmode);
322e3e34 3321 }
fac0ad80 3322
f046b3cc
JL
3323#ifdef ACCUMULATE_OUTGOING_ARGS
3324#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
3325 if (save_area)
3326 {
3327 enum machine_mode save_mode = GET_MODE (save_area);
3328 rtx stack_area
3329 = gen_rtx (MEM, save_mode,
3330 memory_address (save_mode,
f046b3cc 3331#ifdef ARGS_GROW_DOWNWARD
e9a25f70 3332 plus_constant (argblock, - high_to_save)
f046b3cc 3333#else
e9a25f70 3334 plus_constant (argblock, low_to_save)
f046b3cc 3335#endif
e9a25f70 3336 ));
f046b3cc 3337
e9a25f70
JL
3338 if (save_mode != BLKmode)
3339 emit_move_insn (stack_area, save_area);
3340 else
3341 emit_block_move (stack_area, validize_mem (save_area),
3342 GEN_INT (high_to_save - low_to_save + 1),
f046b3cc 3343 PARM_BOUNDARY / BITS_PER_UNIT);
e9a25f70 3344 }
f046b3cc
JL
3345#endif
3346
3347 /* If we saved any argument areas, restore them. */
3348 for (count = 0; count < nargs; count++)
3349 if (argvec[count].save_area)
3350 {
3351 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3352 rtx stack_area
3353 = gen_rtx (MEM, save_mode,
3354 memory_address (save_mode, plus_constant (argblock,
3355 argvec[count].offset.constant)));
3356
3357 emit_move_insn (stack_area, argvec[count].save_area);
3358 }
3359
3360 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3361 stack_usage_map = initial_stack_usage_map;
3362#endif
3363
fac0ad80 3364 return value;
322e3e34
RK
3365}
3366\f
51bbfa0c
RS
3367#if 0
3368/* Return an rtx which represents a suitable home on the stack
3369 given TYPE, the type of the argument looking for a home.
3370 This is called only for BLKmode arguments.
3371
3372 SIZE is the size needed for this target.
3373 ARGS_ADDR is the address of the bottom of the argument block for this call.
3374 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3375 if this machine uses push insns. */
3376
3377static rtx
3378target_for_arg (type, size, args_addr, offset)
3379 tree type;
3380 rtx size;
3381 rtx args_addr;
3382 struct args_size offset;
3383{
3384 rtx target;
3385 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3386
3387   /* When possible, we do not call memory_address,
3388 because we want to address as close to the stack
3389 as possible. For non-variable sized arguments,
3390 this will be stack-pointer relative addressing. */
3391 if (GET_CODE (offset_rtx) == CONST_INT)
3392 target = plus_constant (args_addr, INTVAL (offset_rtx));
3393 else
3394 {
3395 /* I have no idea how to guarantee that this
3396 will work in the presence of register parameters. */
3397 target = gen_rtx (PLUS, Pmode, args_addr, offset_rtx);
3398 target = memory_address (QImode, target);
3399 }
3400
3401 return gen_rtx (MEM, BLKmode, target);
3402}
3403#endif
3404\f
3405/* Store a single argument for a function call
3406 into the register or memory area where it must be passed.
3407 *ARG describes the argument value and where to pass it.
3408
3409 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 3410 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
3411
3412 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3413   so we must be careful about how the stack is used.
3414
3415 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3416   argument stack.  This is used when ACCUMULATE_OUTGOING_ARGS is defined
3417   to indicate that we need not worry about saving and restoring the stack.
3418
3419 FNDECL is the declaration of the function we are calling. */
3420
3421static void
6f90e075
JW
3422store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl,
3423 reg_parm_stack_space)
51bbfa0c
RS
3424 struct arg_data *arg;
3425 rtx argblock;
3426 int may_be_alloca;
3427 int variable_size;
3428 tree fndecl;
6f90e075 3429 int reg_parm_stack_space;
51bbfa0c
RS
3430{
3431 register tree pval = arg->tree_value;
3432 rtx reg = 0;
3433 int partial = 0;
3434 int used = 0;
3435 int i, lower_bound, upper_bound;
3436
3437 if (TREE_CODE (pval) == ERROR_MARK)
3438 return;
3439
cc79451b
RK
3440 /* Push a new temporary level for any temporaries we make for
3441 this argument. */
3442 push_temp_slots ();
3443
51bbfa0c
RS
3444#ifdef ACCUMULATE_OUTGOING_ARGS
3445   /* If this is being stored into a pre-allocated, fixed-size stack area,
3446 save any previous data at that location. */
3447 if (argblock && ! variable_size && arg->stack)
3448 {
3449#ifdef ARGS_GROW_DOWNWARD
0f41302f
MS
3450 /* stack_slot is negative, but we want to index stack_usage_map
3451 with positive values. */
51bbfa0c
RS
3452 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3453 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3454 else
50eb43ca 3455 upper_bound = 0;
51bbfa0c
RS
3456
3457 lower_bound = upper_bound - arg->size.constant;
3458#else
3459 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3460 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3461 else
3462 lower_bound = 0;
3463
3464 upper_bound = lower_bound + arg->size.constant;
3465#endif
3466
3467 for (i = lower_bound; i < upper_bound; i++)
3468 if (stack_usage_map[i]
3469#ifdef REG_PARM_STACK_SPACE
3470 /* Don't store things in the fixed argument area at this point;
3471 it has already been saved. */
6f90e075 3472 && i > reg_parm_stack_space
51bbfa0c
RS
3473#endif
3474 )
3475 break;
3476
3477 if (i != upper_bound)
3478 {
3479 /* We need to make a save area. See what mode we can make it. */
3480 enum machine_mode save_mode
3481 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3482 rtx stack_area
3483 = gen_rtx (MEM, save_mode,
3484 memory_address (save_mode, XEXP (arg->stack_slot, 0)));
3485
3486 if (save_mode == BLKmode)
3487 {
3488 arg->save_area = assign_stack_temp (BLKmode,
6fa51029 3489 arg->size.constant, 0);
3668e76e
JL
3490 MEM_IN_STRUCT_P (arg->save_area)
3491 = AGGREGATE_TYPE_P (TREE_TYPE (arg->tree_value));
cc79451b 3492 preserve_temp_slots (arg->save_area);
51bbfa0c 3493 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 3494 GEN_INT (arg->size.constant),
51bbfa0c
RS
3495 PARM_BOUNDARY / BITS_PER_UNIT);
3496 }
3497 else
3498 {
3499 arg->save_area = gen_reg_rtx (save_mode);
3500 emit_move_insn (arg->save_area, stack_area);
3501 }
3502 }
3503 }
3504#endif
3505
3506 /* If this isn't going to be placed on both the stack and in registers,
3507 set up the register and number of words. */
3508 if (! arg->pass_on_stack)
3509 reg = arg->reg, partial = arg->partial;
3510
3511 if (reg != 0 && partial == 0)
3512 /* Being passed entirely in a register. We shouldn't be called in
3513 this case. */
3514 abort ();
3515
4ab56118
RK
3516 /* If this arg needs special alignment, don't load the registers
3517 here. */
3518 if (arg->n_aligned_regs != 0)
3519 reg = 0;
4ab56118 3520
4ab56118 3521 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
3522 it directly into its stack slot. Otherwise, we can. */
3523 if (arg->value == 0)
d64f5a78
RS
3524 {
3525#ifdef ACCUMULATE_OUTGOING_ARGS
3526 /* stack_arg_under_construction is nonzero if a function argument is
3527 being evaluated directly into the outgoing argument list and
3528 expand_call must take special action to preserve the argument list
3529 if it is called recursively.
3530
3531 For scalar function arguments stack_usage_map is sufficient to
3532 determine which stack slots must be saved and restored. Scalar
3533 arguments in general have pass_on_stack == 0.
3534
3535 If this argument is initialized by a function which takes the
3536 address of the argument (a C++ constructor or a C function
3537 returning a BLKmode structure), then stack_usage_map is
3538 insufficient and expand_call must push the stack around the
3539 function call. Such arguments have pass_on_stack == 1.
3540
3541 Note that it is always safe to set stack_arg_under_construction,
3542 but this generates suboptimal code if set when not needed. */
3543
3544 if (arg->pass_on_stack)
3545 stack_arg_under_construction++;
3546#endif
3a08477a
RK
3547 arg->value = expand_expr (pval,
3548 (partial
3549 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3550 ? NULL_RTX : arg->stack,
e5d70561 3551 VOIDmode, 0);
1efe6448
RK
3552
3553	  /* If we are promoting the object (or if for any other reason the
3554	     mode doesn't agree), convert the mode.  */
3555
7373d92d
RK
3556 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3557 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3558 arg->value, arg->unsignedp);
1efe6448 3559
d64f5a78
RS
3560#ifdef ACCUMULATE_OUTGOING_ARGS
3561 if (arg->pass_on_stack)
3562 stack_arg_under_construction--;
3563#endif
3564 }
51bbfa0c
RS
3565
3566 /* Don't allow anything left on stack from computation
3567 of argument to alloca. */
3568 if (may_be_alloca)
3569 do_pending_stack_adjust ();
3570
3571 if (arg->value == arg->stack)
7815214e
RK
3572 {
3573 /* If the value is already in the stack slot, we are done. */
3574 if (flag_check_memory_usage && GET_CODE (arg->stack) == MEM)
3575 {
3576 if (arg->mode == BLKmode)
3577 abort ();
3578
3579 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3580 XEXP (arg->stack, 0), ptr_mode,
3581 GEN_INT (GET_MODE_SIZE (arg->mode)),
3582 TYPE_MODE (sizetype),
956d6950
JL
3583 GEN_INT (MEMORY_USE_RW),
3584 TYPE_MODE (integer_type_node));
7815214e
RK
3585 }
3586 }
1efe6448 3587 else if (arg->mode != BLKmode)
51bbfa0c
RS
3588 {
3589 register int size;
3590
3591 /* Argument is a scalar, not entirely passed in registers.
3592 (If part is passed in registers, arg->partial says how much
3593 and emit_push_insn will take care of putting it there.)
3594
3595 Push it, and if its size is less than the
3596 amount of space allocated to it,
3597 also bump stack pointer by the additional space.
3598 Note that in C the default argument promotions
3599 will prevent such mismatches. */
3600
1efe6448 3601 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
3602 /* Compute how much space the push instruction will push.
3603 On many machines, pushing a byte will advance the stack
3604 pointer by a halfword. */
3605#ifdef PUSH_ROUNDING
3606 size = PUSH_ROUNDING (size);
3607#endif
3608 used = size;
3609
3610 /* Compute how much space the argument should get:
3611 round up to a multiple of the alignment for arguments. */
1efe6448 3612 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
3613 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3614 / (PARM_BOUNDARY / BITS_PER_UNIT))
3615 * (PARM_BOUNDARY / BITS_PER_UNIT));
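      /* Concrete example, assuming PARM_BOUNDARY == 32 and 8-bit units:
	 pushing a 2-byte scalar that gets padding yields size == 2 but
	 used == 4, and the difference (used - size) is handed to
	 emit_push_insn below as the amount of extra space to skip so the
	 next argument stays aligned.  */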
3616
3617 /* This isn't already where we want it on the stack, so put it there.
3618 This can either be done with push or copy insns. */
ccf5d244
RK
3619 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
3620 0, partial, reg, used - size,
3621 argblock, ARGS_SIZE_RTX (arg->offset));
51bbfa0c
RS
3622 }
3623 else
3624 {
3625 /* BLKmode, at least partly to be pushed. */
3626
3627 register int excess;
3628 rtx size_rtx;
3629
3630 /* Pushing a nonscalar.
3631 If part is passed in registers, PARTIAL says how much
3632 and emit_push_insn will take care of putting it there. */
3633
3634 /* Round its size up to a multiple
3635 of the allocation unit for arguments. */
3636
3637 if (arg->size.var != 0)
3638 {
3639 excess = 0;
3640 size_rtx = ARGS_SIZE_RTX (arg->size);
3641 }
3642 else
3643 {
51bbfa0c
RS
3644 /* PUSH_ROUNDING has no effect on us, because
3645 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 3646 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 3647 + partial * UNITS_PER_WORD);
e4f93898 3648 size_rtx = expr_size (pval);
51bbfa0c
RS
3649 }
3650
1efe6448 3651 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c
RS
3652 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3653 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset));
3654 }
3655
3656
3657 /* Unless this is a partially-in-register argument, the argument is now
3658 in the stack.
3659
3660 ??? Note that this can change arg->value from arg->stack to
3661 arg->stack_slot and it matters when they are not the same.
3662 It isn't totally clear that this is correct in all cases. */
3663 if (partial == 0)
3664 arg->value = arg->stack_slot;
3665
3666 /* Once we have pushed something, pops can't safely
3667 be deferred during the rest of the arguments. */
3668 NO_DEFER_POP;
3669
3670 /* ANSI doesn't require a sequence point here,
3671 but PCC has one, so this will avoid some problems. */
3672 emit_queue ();
3673
db907e7b
RK
3674 /* Free any temporary slots made in processing this argument. Show
3675 that we might have taken the address of something and pushed that
3676 as an operand. */
3677 preserve_temp_slots (NULL_RTX);
51bbfa0c 3678 free_temp_slots ();
cc79451b 3679 pop_temp_slots ();
51bbfa0c
RS
3680
3681#ifdef ACCUMULATE_OUTGOING_ARGS
3682 /* Now mark the segment we just used. */
3683 if (argblock && ! variable_size && arg->stack)
3684 for (i = lower_bound; i < upper_bound; i++)
3685 stack_usage_map[i] = 1;
3686#endif
3687}