/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "regs.h"
#include "insn-flags.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED  /* If it's last to first */
#endif

#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
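/* Illustrative figures only (both macros are target-dependent): with a
   STACK_BOUNDARY of 64 bits and BITS_PER_UNIT of 8, STACK_BYTES is 8,
   and the constant argument-block sizes computed in expand_call below
   are rounded up to multiples of 8 bytes.  */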

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Non-zero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets;
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
#endif
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
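/* A rough illustration of how these fields combine (not an exhaustive
   enumeration): an argument passed entirely in a register has REG set,
   PARTIAL == 0 and STACK left 0; a stack-only argument has REG == 0 and
   its slot described by OFFSET, SLOT_OFFSET and SIZE; an argument split
   between registers and stack has REG set and PARTIAL giving the number
   of words that go in registers.  */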

#ifdef ACCUMULATE_OUTGOING_ARGS
/* A vector of one char per byte of stack space.  A byte is non-zero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
#endif

static int calls_function	PROTO((tree, int));
static int calls_function_1	PROTO((tree, int));
static void emit_call_1		PROTO((rtx, tree, tree, int, int, rtx, rtx,
				       int, rtx, int));
static void store_one_arg	PROTO ((struct arg_data *, rtx, int, int,
					tree, int));

/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we only need return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */
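/* (For reference, expand_call below uses calls_function (x, 1) to ask
   whether an argument might call alloca, and calls_function (x, 0) to
   ask whether evaluating it could involve any call at all.)  */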

static tree calls_function_save_exprs;

static int
calls_function (exp, which)
     tree exp;
     int which;
{
  int val;
  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}

static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  register int i;
  enum tree_code code = TREE_CODE (exp);
  int type = TREE_CODE_CLASS (code);
  int length = tree_code_length[(int) code];

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  /* Only expressions and references can contain calls.  */
  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
      && type != 'b')
    return 0;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
	return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == FUNCTION_DECL))
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

	  if ((DECL_BUILT_IN (fndecl)
	       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
	      || (DECL_SAVED_INSNS (fndecl)
		  && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
		      & FUNCTION_FLAGS_CALLS_ALLOCA)))
	    return 1;
	}

      /* Third operand is RTL.  */
      length = 2;
      break;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return 0;
      if (value_member (exp, calls_function_save_exprs))
	return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
					     calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
	      && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
	register tree local;

	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
	  if (DECL_INITIAL (local) != 0
	      && calls_function_1 (DECL_INITIAL (local), which))
	    return 1;
      }
      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (exp);
	     subblock;
	     subblock = TREE_CHAIN (subblock))
	  if (calls_function_1 (subblock, which))
	    return 1;
      }
      return 0;

    case METHOD_CALL_EXPR:
      length = 3;
      break;

    case WITH_CLEANUP_EXPR:
      length = 1;
      break;

    case RTL_EXPR:
      return 0;

    default:
      break;
    }

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
	&& calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   rounded up to STACK_BOUNDARY; zero if the size is variable.
   This is both to put into the call insn and
   to generate explicit popping code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.

   IS_CONST is true if this is a `const' call.  */

static void
emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
	     next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
	     is_const)
     rtx funexp;
     tree fndecl;
     tree funtype;
     int stack_size;
     int struct_value_size;
     rtx next_arg_reg;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int is_const;
{
  rtx stack_size_rtx = GEN_INT (stack_size);
  rtx struct_value_size_rtx = GEN_INT (struct_value_size);
  rtx call_insn;
#ifndef ACCUMULATE_OUTGOING_ARGS
  int already_popped = 0;
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#ifndef ACCUMULATE_OUTGOING_ARGS
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  if (HAVE_call_pop && HAVE_call_value_pop
      && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
	  || stack_size == 0))
    {
      rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = gen_call_value_pop (valreg,
				  gen_rtx_MEM (FUNCTION_MODE, funexp),
				  stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
			    stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (gen_call_value (valreg,
					gen_rtx_MEM (FUNCTION_MODE, funexp),
					stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
				  stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (is_const)
    CONST_CALL_P (call_insn) = 1;

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

#ifndef ACCUMULATE_OUTGOING_ARGS
  /* If returning from the subroutine does not automatically pop the args,
     we need an instruction to pop them sooner or later.
     Perhaps do it now; perhaps just record how much space to pop later.

     If returning from the subroutine does pop the args, indicate that the
     stack pointer will be changed.  */

  if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
      stack_size_rtx = GEN_INT (stack_size);
    }

  if (stack_size != 0)
    {
      if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
	pending_stack_adjust += stack_size;
      else
	adjust_stack (stack_size_rtx);
    }
#endif
}

/* Generate all the code for a function call
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (exp, target, ignore)
     tree exp;
     rtx target;
     int ignore;
{
  /* List of actual parameters.  */
  tree actparms = TREE_OPERAND (exp, 1);
  /* RTX for the function to be called.  */
  rtx funexp;
  /* Data type of the function.  */
  tree funtype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  char *name = 0;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  int struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Count arg position in order args appear.  */
  int argpos;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
  int must_preallocate = 1;
#else
#ifdef PUSH_ROUNDING
  int must_preallocate = 0;
#else
  int must_preallocate = 1;
#endif
#endif

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;
  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Nonzero if it is plausible that this is a call to alloca.  */
  int may_be_alloca;
  /* Nonzero if this is a call to malloc or a related function.  */
  int is_malloc;
  /* Nonzero if this is a call to setjmp or a related function.  */
  int returns_twice;
  /* Nonzero if this is a call to `longjmp'.  */
  int is_longjmp;
  /* Nonzero if this is a call to an inline function.  */
  int is_integrable = 0;
  /* Nonzero if this is a call to a `const' function.
     Note that only explicitly named functions are handled as `const' here.  */
  int is_const = 0;
  /* Nonzero if this is a call to a `volatile' function.  */
  int is_volatile = 0;
#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = -1, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  int old_stack_arg_under_construction;
#endif

  rtx old_stack_level = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  register tree p;
  register int i, j;

  /* The value of the function call can be put in a hard register.  But
     if -fcheck-memory-usage, code which invokes functions (and thus
     damages some hard registers) can be inserted before using the value.
     So, target is always a pseudo-register in that case.  */
  if (flag_check_memory_usage)
    target = 0;

  /* See if we can find a DECL-node for the actual function.
     As a result, decide whether this is a call to an integrable function.  */

  p = TREE_OPERAND (exp, 0);
  if (TREE_CODE (p) == ADDR_EXPR)
    {
      fndecl = TREE_OPERAND (p, 0);
      if (TREE_CODE (fndecl) != FUNCTION_DECL)
	fndecl = 0;
      else
	{
	  if (!flag_no_inline
	      && fndecl != current_function_decl
	      && DECL_INLINE (fndecl)
	      && DECL_SAVED_INSNS (fndecl)
	      && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
	    is_integrable = 1;
	  else if (! TREE_ADDRESSABLE (fndecl))
	    {
	      /* In case this function later becomes inlinable,
		 record that there was already a non-inline call to it.

		 Use abstraction instead of setting TREE_ADDRESSABLE
		 directly.  */
	      if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
		  && optimize > 0)
		{
		  warning_with_decl (fndecl, "can't inline call to `%s'");
		  warning ("called from here");
		}
	      mark_addressable (fndecl);
	    }

	  if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
	      && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
	    is_const = 1;

	  if (TREE_THIS_VOLATILE (fndecl))
	    is_volatile = 1;
	}
    }

  /* If we don't have a specific function to call, see if we have a
     constant or `noreturn' function from the type.  */
  if (fndecl == 0)
    {
      is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
      is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
    }

#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
#endif

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    warning ("function call has aggregate value");

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp))
    {
      /* This call returns a big structure.  */
      is_const = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      {
	pcc_struct_value = 1;
	/* Easier than making that case work right.  */
	if (is_integrable)
	  {
	    /* In case this is a static function, note that it has been
	       used.  */
	    if (! TREE_ADDRESSABLE (fndecl))
	      mark_addressable (fndecl);
	    is_integrable = 0;
	  }
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
	struct_value_size = int_size_in_bytes (TREE_TYPE (exp));

	if (target && GET_CODE (target) == MEM)
	  structure_value_addr = XEXP (target, 0);
	else
	  {
	    /* Assign a temporary to hold the value.  */
	    tree d;

	    /* For variable-sized objects, we must be called with a target
	       specified.  If we were to allocate space on the stack here,
	       we would have no way of knowing when to free it.  */

	    if (struct_value_size < 0)
	      abort ();

	    /* This DECL is just something to feed to mark_addressable;
	       it doesn't get pushed.  */
	    d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
	    DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
	    mark_addressable (d);
	    structure_value_addr = XEXP (DECL_RTL (d), 0);
	    MEM_IN_STRUCT_P (structure_value_addr)
	      = AGGREGATE_TYPE_P (TREE_TYPE (exp));
	    target = 0;
	  }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* If called function is inline, try to integrate it.  */

  if (is_integrable)
    {
      rtx temp;
#ifdef ACCUMULATE_OUTGOING_ARGS
      rtx before_call = get_last_insn ();
#endif

      temp = expand_inline_function (fndecl, actparms, target,
				     ignore, TREE_TYPE (exp),
				     structure_value_addr);

      /* If inlining succeeded, return.  */
      if (temp != (rtx) (HOST_WIDE_INT) -1)
	{
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* If the outgoing argument list must be preserved, push
	     the stack before executing the inlined function if it
	     makes any calls.  */

	  for (i = reg_parm_stack_space - 1; i >= 0; i--)
	    if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
	      break;

	  if (stack_arg_under_construction || i >= 0)
	    {
	      rtx first_insn
		= before_call ? NEXT_INSN (before_call) : get_insns ();
	      rtx insn, seq;

	      /* Look for a call in the inline function code.
		 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
		 nonzero then there is a call and it is not necessary
		 to scan the insns.  */

	      if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
		for (insn = first_insn; insn; insn = NEXT_INSN (insn))
		  if (GET_CODE (insn) == CALL_INSN)
		    break;

	      if (insn)
		{
		  /* Reserve enough stack space so that the largest
		     argument list of any function call in the inline
		     function does not overlap the argument list being
		     evaluated.  This is usually an overestimate because
		     allocate_dynamic_stack_space reserves space for an
		     outgoing argument list in addition to the requested
		     space, but there is no way to ask for stack space such
		     that an argument list of a certain length can be
		     safely constructed.  */

		  int adjust = OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl));
#ifdef REG_PARM_STACK_SPACE
		  /* Add the stack space reserved for register arguments
		     in the inline function.  What is really needed is the
		     largest value of reg_parm_stack_space in the inline
		     function, but that is not available.  Using the current
		     value of reg_parm_stack_space is wrong, but gives
		     correct results on all supported machines.  */
		  adjust += reg_parm_stack_space;
#endif
		  start_sequence ();
		  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		  allocate_dynamic_stack_space (GEN_INT (adjust),
						NULL_RTX, BITS_PER_UNIT);
		  seq = get_insns ();
		  end_sequence ();
		  emit_insns_before (seq, first_insn);
		  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
		}
	    }
#endif

	  /* If the result is equivalent to TARGET, return TARGET to simplify
	     checks in store_expr.  They can be equivalent but not equal in the
	     case of a function that returns BLKmode.  */
	  if (temp != target && rtx_equal_p (temp, target))
	    return target;
	  return temp;
	}

      /* If inlining failed, mark FNDECL as needing to be compiled
	 separately after all.  If function was declared inline,
	 give a warning.  */
      if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
	  && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
	{
	  warning_with_decl (fndecl, "inlining failed in call to `%s'");
	  warning ("called from here");
	}
      mark_addressable (fndecl);
    }

  /* When calling a const function, we must pop the stack args right away,
     so that the pop is deleted or moved with the call.  */
  if (is_const)
    NO_DEFER_POP;

  function_call_count++;

  if (fndecl && DECL_NAME (fndecl))
    name = IDENTIFIER_POINTER (DECL_NAME (fndecl));

#if 0
  /* Unless it's a call to a specific function that isn't alloca,
     if it has one argument, we must assume it might be alloca.  */

  may_be_alloca
    = (!(fndecl != 0 && strcmp (name, "alloca"))
       && actparms != 0
       && TREE_CHAIN (actparms) == 0);
#else
  /* We assume that alloca will always be called by name.  It
     makes no sense to pass it as a pointer-to-function to
     anything that does not understand its behavior.  */
  may_be_alloca
    = (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
		 && name[0] == 'a'
		 && ! strcmp (name, "alloca"))
		|| (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
		    && name[0] == '_'
		    && ! strcmp (name, "__builtin_alloca"))));
#endif

  /* See if this is a call to a function that can return more than once
     or a call to longjmp.  */

  returns_twice = 0;
  is_longjmp = 0;
  is_malloc = 0;

  if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.  */
      && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
    {
      char *tname = name;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  returns_twice
	    = ((tname[1] == 'e'
		&& (! strcmp (tname, "setjmp")
		    || ! strcmp (tname, "setjmp_syscall")))
	       || (tname[1] == 'i'
		   && ! strcmp (tname, "sigsetjmp"))
	       || (tname[1] == 'a'
		   && ! strcmp (tname, "savectx")));
	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    is_longjmp = 1;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork")))
	returns_twice = 1;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	is_longjmp = 1;
      /* XXX should have "malloc" attribute on functions instead
	 of recognizing them by name.  */
      else if (! strcmp (tname, "malloc")
	       || ! strcmp (tname, "calloc")
	       || ! strcmp (tname, "realloc")
	       || ! strcmp (tname, "__builtin_new")
	       || ! strcmp (tname, "__builtin_vec_new"))
	is_malloc = 1;
    }

  if (may_be_alloca)
    current_function_calls_alloca = 1;

  /* Don't let pending stack adjusts add up to too much.
     Also, do all pending adjustments now
     if there is any chance this might be a call to alloca.  */

  if (pending_stack_adjust >= 32
      || (pending_stack_adjust > 0 && may_be_alloca))
    do_pending_stack_adjust ();

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
  if (TREE_CODE (funtype) != POINTER_TYPE)
    abort ();
  funtype = TREE_TYPE (funtype);

  /* Push the temporary stack slot level so that we can free any temporaries
     we make.  */
  push_temp_slots ();

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The last argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  */
  if (structure_value_addr && struct_value_rtx == 0)
    {
      /* If structure_value_addr is a REG other than
	 virtual_outgoing_args_rtx, we can always use it.  If it
	 is not a REG, we must always copy it into a register.
	 If it is virtual_outgoing_args_rtx, we must copy it to another
	 register in some cases.  */
      rtx temp = (GET_CODE (structure_value_addr) != REG
#ifdef ACCUMULATE_OUTGOING_ARGS
		  || (stack_arg_under_construction
		      && structure_value_addr == virtual_outgoing_args_rtx)
#endif
		  ? copy_addr_to_reg (structure_value_addr)
		  : structure_value_addr);

      actparms
	= tree_cons (error_mark_node,
		     make_tree (build_pointer_type (TREE_TYPE (funtype)),
				temp),
		     actparms);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
  num_actuals = i;

  /* Compute number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if STRICT_ARGUMENT_NAMING is defined.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If SETUP_INCOMING_VARARGS is defined and STRICT_ARGUMENT_NAMING is not,
     this machine will be able to place unnamed args that were passed in
     registers into the stack.  So treat all args as named.  This allows the
     insns emitted for a specific argument list to be independent of the
     function declaration.

     If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
     way to pass unnamed args in registers, so we must force them into
     memory.  */
#if !defined(SETUP_INCOMING_VARARGS) || defined(STRICT_ARGUMENT_NAMING)
  if (TYPE_ARG_TYPES (funtype) != 0)
    n_named_args
      = (list_length (TYPE_ARG_TYPES (funtype))
#ifndef STRICT_ARGUMENT_NAMING
	 /* Don't include the last named arg.  */
	 - 1
#endif
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else
#endif
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Make a vector to hold all the information about each arg.  */
  args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
  bzero ((char *) args, num_actuals * sizeof (struct arg_data));

  args_size.constant = 0;
  args_size.var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

#ifdef PUSH_ARGS_REVERSED
  i = num_actuals - 1, inc = -1;
  /* In this case, must reverse order of args
     so that we compute and push the last arg first.  */
#else
  i = 0, inc = 1;
#endif

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || TYPE_SIZE (type) == 0)
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (type)))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
	  /* If we're compiling a thunk, pass through invisible
	     references instead of making a copy.  */
	  if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
	      || (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type),
					      type, argpos < n_named_args)
		  /* If it's in a register, we must make a copy of it too.  */
		  /* ??? Is this a sufficient test?  Is there a better one? */
		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		       && REG_P (DECL_RTL (args[i].tree_value)))
		  && ! TREE_ADDRESSABLE (type))
#endif
	      )
	    {
	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (TYPE_SIZE (type) == 0
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
			  || (TREE_INT_CST_LOW (TYPE_SIZE (type))
			      > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  if (old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		      old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx_MEM (BLKmode,
				      allocate_dynamic_stack_space (size_rtx,
								    NULL_RTX,
								    TYPE_ALIGN (type)));
		}
	      else
		{
		  int size = int_size_in_bytes (type);
		  copy = assign_stack_temp (TYPE_MODE (type), size, 0);
		}

	      MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);

	      store_expr (args[i].tree_value, copy, 0);
	      is_const = 0;

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;
      args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
	args[i].partial
	  = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
					argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	is_const = 0;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
#ifdef REG_PARM_STACK_SPACE
	  || reg_parm_stack_space > 0
#endif
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     fndecl, &args_size, &args[i].offset,
			     &args[i].size);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = args_size;
#endif

#ifndef REG_PARM_STACK_SPACE
      /* If a part of the arg was put into registers,
	 don't include that part in the amount pushed.  */
      if (! args[i].pass_on_stack)
	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
				  / (PARM_BOUNDARY / BITS_PER_UNIT)
				  * (PARM_BOUNDARY / BITS_PER_UNIT));
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size.constant += args[i].size.constant;
      if (args[i].size.var)
	{
	  ADD_PARM_SIZE (args_size, args[i].size.var);
	}

      /* Since the slot offset points to the bottom of the slot,
	 we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = args_size;

      args[i].slot_offset.constant = -args_size.constant;
      if (args_size.var)
	{
	  SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
	}
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }

#ifdef FINAL_REG_PARM_STACK_SPACE
  reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
						     args_size.var);
#endif

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  original_args_size = args_size;
  if (args_size.var)
    {
      /* If this function requires a variable-sized argument list, don't try to
	 make a cse'able block for this call.  We may be able to do this
	 eventually, but it is too complicated to keep track of what insns go
	 in the cse'able block and which don't.  */

      is_const = 0;
      must_preallocate = 1;

      args_size.var = ARGS_SIZE_TREE (args_size);
      args_size.constant = 0;

#ifdef STACK_BOUNDARY
      if (STACK_BOUNDARY != BITS_PER_UNIT)
	args_size.var = round_up (args_size.var, STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
      if (reg_parm_stack_space > 0)
	{
	  args_size.var
	    = size_binop (MAX_EXPR, args_size.var,
			  size_int (REG_PARM_STACK_SPACE (fndecl)));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size.var
	    = size_binop (MINUS_EXPR, args_size.var,
			  size_int (reg_parm_stack_space));
#endif
	}
#endif
    }
  else
    {
#ifdef STACK_BOUNDARY
      args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
			     / STACK_BYTES) * STACK_BYTES);
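      /* The expression above rounds the constant size up to the next
	 multiple of STACK_BYTES; e.g. with STACK_BYTES == 8, a 13-byte
	 argument block becomes 16 bytes (illustrative values only).  */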
#endif

#ifdef REG_PARM_STACK_SPACE
      args_size.constant = MAX (args_size.constant,
				reg_parm_stack_space);
#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
	args_size.constant = 0;
#endif
#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size.constant -= reg_parm_stack_space;
#endif
#endif
    }

  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size.constant
	  && args_size.constant > 0)
	must_preallocate = 1;
    }

  /* If the structure value address will reference the stack pointer, we must
     stabilize it.  We don't need to do this if we know that we are not going
     to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
	  || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
      && (args_size.var
#ifndef ACCUMULATE_OUTGOING_ARGS
	  || args_size.constant
#endif
	  ))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  */

  for (i = 0; i < num_actuals; i++)
    if (is_const
	|| ((args_size.var != 0 || args_size.constant != 0)
	    && calls_function (args[i].tree_value, 1))
	|| (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
	    && calls_function (args[i].tree_value, 0)))
      {
	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	push_temp_slots ();

	args[i].initial_value = args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	preserve_temp_slots (args[i].value);
	pop_temp_slots ();

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].initial_value, 0);

	if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);
      }

  /* Now we are about to start emitting insns that can be deleted
     if a libcall is deleted.  */
  if (is_const || is_malloc)
    start_sequence ();

  /* If we have no actual push instructions, or shouldn't use them,
     make space for all args right now.  */

  if (args_size.var != 0)
    {
      if (old_stack_level == 0)
	{
	  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
	  old_pending_adj = pending_stack_adjust;
	  pending_stack_adjust = 0;
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* stack_arg_under_construction says whether a stack arg is
	     being constructed at the old stack level.  Pushing the stack
	     gets a clean outgoing argument block.  */
	  old_stack_arg_under_construction = stack_arg_under_construction;
	  stack_arg_under_construction = 0;
#endif
	}
      argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
    }
  else
    {
      /* Note that we must go through the motions of allocating an argument
	 block even if the size is zero because we may be storing args
	 in the area reserved for register arguments, which may be part of
	 the stack frame.  */

      int needed = args_size.constant;

      /* Store the maximum argument space used.  It will be pushed by
	 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
	 checking).  */

      if (needed > current_function_outgoing_args_size)
	current_function_outgoing_args_size = needed;

      if (must_preallocate)
	{
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* Since the stack pointer will never be pushed, it is possible for
	     the evaluation of a parm to clobber something we have already
	     written to the stack.  Since most function calls on RISC machines
	     do not use the stack, this is uncommon, but must work correctly.

	     Therefore, we save any area of the stack that was already written
	     and that we are using.  Here we set up to do this by making a new
	     stack usage map from the old one.  The actual save will be done
	     by store_one_arg.

	     Another approach might be to try to reorder the argument
	     evaluations to avoid this conflicting stack usage.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
	  /* Since we will be writing into the entire argument area, the
	     map must be allocated for its entire size, not just the part that
	     is the responsibility of the caller.  */
	  needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
	  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					     needed + 1);
#else
	  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					     needed);
#endif
	  stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);

	  if (initial_highest_arg_in_use)
	    bcopy (initial_stack_usage_map, stack_usage_map,
		   initial_highest_arg_in_use);

	  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
	    bzero (&stack_usage_map[initial_highest_arg_in_use],
		   highest_outgoing_arg_in_use - initial_highest_arg_in_use);
	  needed = 0;

	  /* The address of the outgoing argument list must not be copied to a
	     register here, because argblock would be left pointing to the
	     wrong place after the call to allocate_dynamic_stack_space below.  */

	  argblock = virtual_outgoing_args_rtx;

#else /* not ACCUMULATE_OUTGOING_ARGS */
	  if (inhibit_defer_pop == 0)
	    {
	      /* Try to reuse some or all of the pending_stack_adjust
		 to get this space.  Maybe we can avoid any pushing.  */
	      if (needed > pending_stack_adjust)
		{
		  needed -= pending_stack_adjust;
		  pending_stack_adjust = 0;
		}
	      else
		{
		  pending_stack_adjust -= needed;
		  needed = 0;
		}
	    }
	  /* Special case this because overhead of `push_block' in this
	     case is non-trivial.  */
	  if (needed == 0)
	    argblock = virtual_outgoing_args_rtx;
	  else
	    argblock = push_block (GEN_INT (needed), 0, 0);

	  /* We only really need to call `copy_to_reg' in the case where push
	     insns are going to be used to pass ARGBLOCK to a function
	     call in ARGS.  In that case, the stack pointer changes value
	     from the allocation point to the call point, and hence
	     the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
	     But might as well always do it.  */
	  argblock = copy_to_reg (argblock);
#endif /* not ACCUMULATE_OUTGOING_ARGS */
	}
    }

#ifdef ACCUMULATE_OUTGOING_ARGS
  /* The save/restore code in store_one_arg handles all cases except one:
     a constructor call (including a C function returning a BLKmode struct)
     to initialize an argument.  */
  if (stack_arg_under_construction)
    {
#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
      rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
#else
      rtx push_size = GEN_INT (args_size.constant);
#endif
      if (old_stack_level == 0)
	{
	  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
	  old_pending_adj = pending_stack_adjust;
	  pending_stack_adjust = 0;
	  /* stack_arg_under_construction says whether a stack arg is
	     being constructed at the old stack level.  Pushing the stack
	     gets a clean outgoing argument block.  */
	  old_stack_arg_under_construction = stack_arg_under_construction;
	  stack_arg_under_construction = 0;
	  /* Make a new map for the new argument list.  */
	  stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
	  bzero (stack_usage_map, highest_outgoing_arg_in_use);
	  highest_outgoing_arg_in_use = 0;
	}
      allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
    }
  /* If argument evaluation might modify the stack pointer, copy the
     address of the argument list to a register.  */
  for (i = 0; i < num_actuals; i++)
    if (args[i].pass_on_stack)
      {
	argblock = copy_addr_to_reg (argblock);
	break;
      }
#endif


  /* If we preallocated stack space, compute the address of each argument.
     We need not ensure it is a valid memory address here; it will be
     validized when it is used.  */
  if (argblock)
    {
      rtx arg_reg = argblock;
      int arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
	  rtx addr;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack && args[i].reg != 0)
	    continue;

	  if (GET_CODE (offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	  MEM_IN_STRUCT_P (args[i].stack)
	    = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));

	  if (GET_CODE (slot_offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	}
    }

1593#ifdef PUSH_ARGS_REVERSED
1594#ifdef STACK_BOUNDARY
1595 /* If we push args individually in reverse order, perform stack alignment
1596 before the first push (the last arg). */
1597 if (argblock == 0)
e5d70561
RK
1598 anti_adjust_stack (GEN_INT (args_size.constant
1599 - original_args_size.constant));
51bbfa0c
RS
1600#endif
1601#endif
1602
1603 /* Don't try to defer pops if preallocating, not even from the first arg,
1604 since ARGBLOCK probably refers to the SP. */
1605 if (argblock)
1606 NO_DEFER_POP;
1607
1608 /* Get the function to call, in the form of RTL. */
1609 if (fndecl)
ef5d30c9
RK
1610 {
1611 /* If this is the first use of the function, see if we need to
1612 make an external definition for it. */
1613 if (! TREE_USED (fndecl))
1614 {
1615 assemble_external (fndecl);
1616 TREE_USED (fndecl) = 1;
1617 }
1618
1619 /* Get a SYMBOL_REF rtx for the function address. */
1620 funexp = XEXP (DECL_RTL (fndecl), 0);
1621 }
51bbfa0c
RS
1622 else
1623 /* Generate an rtx (probably a pseudo-register) for the address. */
1624 {
cc79451b 1625 push_temp_slots ();
e5d70561 1626 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
cc79451b 1627 pop_temp_slots (); /* FUNEXP can't be BLKmode */
7815214e
RK
1628
 1629	      /* Check that the function is executable.  */
1630 if (flag_check_memory_usage)
1631 emit_library_call (chkr_check_exec_libfunc, 1,
1632 VOIDmode, 1,
1633 funexp, ptr_mode);
51bbfa0c
RS
1634 emit_queue ();
1635 }
1636
1637 /* Figure out the register where the value, if any, will come back. */
1638 valreg = 0;
1639 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1640 && ! structure_value_addr)
1641 {
1642 if (pcc_struct_value)
1643 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1644 fndecl);
1645 else
1646 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1647 }
1648
1649 /* Precompute all register parameters. It isn't safe to compute anything
0f41302f 1650 once we have started filling any specific hard regs. */
51bbfa0c
RS
1651 reg_parm_seen = 0;
1652 for (i = 0; i < num_actuals; i++)
1653 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1654 {
1655 reg_parm_seen = 1;
1656
1657 if (args[i].value == 0)
1658 {
cc79451b 1659 push_temp_slots ();
e5d70561
RK
1660 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1661 VOIDmode, 0);
51bbfa0c 1662 preserve_temp_slots (args[i].value);
cc79451b 1663 pop_temp_slots ();
51bbfa0c
RS
1664
1665 /* ANSI doesn't require a sequence point here,
1666 but PCC has one, so this will avoid some problems. */
1667 emit_queue ();
1668 }
84b55618
RK
1669
1670 /* If we are to promote the function arg to a wider mode,
1671 do it now. */
84b55618 1672
843fec55
RK
1673 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
1674 args[i].value
1675 = convert_modes (args[i].mode,
1676 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1677 args[i].value, args[i].unsignedp);
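	  /* Editor's note, a hedged example of the conversion above: if the
	     argument expression has QImode but FUNCTION_ARG promoted the
	     parameter so that args[i].mode is SImode, convert_modes widens
	     the precomputed value here, zero- or sign-extending according to
	     args[i].unsignedp.  The particular modes are only illustrative.  */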
ebef2728
RK
1678
1679 /* If the value is expensive, and we are inside an appropriately
1680 short loop, put the value into a pseudo and then put the pseudo
01368078
RK
1681 into the hard reg.
1682
1683 For small register classes, also do this if this call uses
1684 register parameters. This is to avoid reload conflicts while
 1685	     loading the parameter registers.  */
ebef2728
RK
1686
1687 if ((! (GET_CODE (args[i].value) == REG
1688 || (GET_CODE (args[i].value) == SUBREG
1689 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
1690 && args[i].mode != BLKmode
1691 && rtx_cost (args[i].value, SET) > 2
f95182a4 1692 && ((SMALL_REGISTER_CLASSES && reg_parm_seen)
e9a25f70 1693 || preserve_subexpressions_p ()))
ebef2728 1694 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
51bbfa0c
RS
1695 }
1696
1697#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1698 /* The argument list is the property of the called routine and it
1699 may clobber it. If the fixed area has been used for previous
1700 parameters, we must save and restore it.
1701
 1702     Here we compute the boundary of the area that needs to be saved, if any.  */
1703
b94301c2
RS
1704#ifdef ARGS_GROW_DOWNWARD
1705 for (i = 0; i < reg_parm_stack_space + 1; i++)
1706#else
6f90e075 1707 for (i = 0; i < reg_parm_stack_space; i++)
b94301c2 1708#endif
51bbfa0c
RS
1709 {
1710 if (i >= highest_outgoing_arg_in_use
1711 || stack_usage_map[i] == 0)
1712 continue;
1713
1714 if (low_to_save == -1)
1715 low_to_save = i;
1716
1717 high_to_save = i;
1718 }
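  /* Editor's illustration (not part of the original source): if
     reg_parm_stack_space is 16 and only bytes 4 through 7 of
     stack_usage_map are nonzero, the scan above leaves low_to_save == 4
     and high_to_save == 7, so the code below saves those 4 bytes --
     typically in a single 32-bit integer mode, falling back to BLKmode
     when the alignment test fails.  */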
1719
1720 if (low_to_save >= 0)
1721 {
1722 int num_to_save = high_to_save - low_to_save + 1;
1723 enum machine_mode save_mode
1724 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1725 rtx stack_area;
1726
1727 /* If we don't have the required alignment, must do this in BLKmode. */
1728 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1729 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1730 save_mode = BLKmode;
1731
ceb83206 1732#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
1733 stack_area = gen_rtx_MEM (save_mode,
1734 memory_address (save_mode,
38a448ca 1735 plus_constant (argblock,
ceb83206 1736 - high_to_save)));
b94301c2 1737#else
ceb83206
JL
1738 stack_area = gen_rtx_MEM (save_mode,
1739 memory_address (save_mode,
38a448ca 1740 plus_constant (argblock,
ceb83206 1741 low_to_save)));
b94301c2 1742#endif
51bbfa0c
RS
1743 if (save_mode == BLKmode)
1744 {
6fa51029 1745 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3668e76e 1746 MEM_IN_STRUCT_P (save_area) = 0;
51bbfa0c 1747 emit_block_move (validize_mem (save_area), stack_area,
e5d70561 1748 GEN_INT (num_to_save),
51bbfa0c
RS
1749 PARM_BOUNDARY / BITS_PER_UNIT);
1750 }
1751 else
1752 {
1753 save_area = gen_reg_rtx (save_mode);
1754 emit_move_insn (save_area, stack_area);
1755 }
1756 }
1757#endif
1758
1759
1760 /* Now store (and compute if necessary) all non-register parms.
1761 These come before register parms, since they can require block-moves,
1762 which could clobber the registers used for register parms.
1763 Parms which have partial registers are not stored here,
1764 but we do preallocate space here if they want that. */
1765
1766 for (i = 0; i < num_actuals; i++)
1767 if (args[i].reg == 0 || args[i].pass_on_stack)
1768 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1769 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c 1770
4ab56118
RK
1771 /* If we have a parm that is passed in registers but not in memory
1772 and whose alignment does not permit a direct copy into registers,
1773 make a group of pseudos that correspond to each register that we
1774 will later fill. */
1775
45d44c98
RK
1776 if (STRICT_ALIGNMENT)
1777 for (i = 0; i < num_actuals; i++)
1778 if (args[i].reg != 0 && ! args[i].pass_on_stack
4ab56118 1779 && args[i].mode == BLKmode
45d44c98
RK
1780 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1781 < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1782 {
1783 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1784 int big_endian_correction = 0;
4ab56118 1785
45d44c98
RK
1786 args[i].n_aligned_regs
1787 = args[i].partial ? args[i].partial
1788 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
4ab56118 1789
45d44c98
RK
1790 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1791 * args[i].n_aligned_regs);
4ab56118 1792
45d44c98
RK
1793 /* Structures smaller than a word are aligned to the least
1794 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1795 this means we must skip the empty high order bytes when
1796 calculating the bit offset. */
1797 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1798 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
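	  /* Editor's illustration (not part of the original source): with
	     BITS_PER_WORD == 32 and a 3-byte structure, the correction is
	     32 - 3 * BITS_PER_UNIT == 8, i.e. the width of the unused
	     high-order padding that must be skipped on a big-endian target
	     when positioning the 24 significant bits in the register.  */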
8498efd0 1799
45d44c98
RK
1800 for (j = 0; j < args[i].n_aligned_regs; j++)
1801 {
1802 rtx reg = gen_reg_rtx (word_mode);
1803 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1804 int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1805 int bitpos;
1806
1807 args[i].aligned_regs[j] = reg;
1808
1809 /* Clobber REG and move each partword into it. Ensure we don't
1810 go past the end of the structure. Note that the loop below
1811 works because we've already verified that padding
a22ad972 1812 and endianness are compatible.
45d44c98 1813
a22ad972
DE
 1814		     We used to emit a clobber here but that doesn't let later
 1815		     passes optimize the instructions we emit.  By storing 0 into
 1816		     the register, later passes know that the first AND to zero out the
 1817		     bitfield being set in the register is unnecessary.  The store
 1818		     of 0 will be deleted, as will at least the first AND.  */
1819
1820 emit_move_insn (reg, const0_rtx);
45d44c98
RK
1821
1822 for (bitpos = 0;
1823 bitpos < BITS_PER_WORD && bytes > 0;
1824 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
1825 {
1826 int xbitpos = bitpos + big_endian_correction;
1827
1828 store_bit_field (reg, bitsize, xbitpos, word_mode,
1829 extract_bit_field (word, bitsize, bitpos, 1,
1830 NULL_RTX, word_mode,
1831 word_mode,
1832 bitsize / BITS_PER_UNIT,
1833 BITS_PER_WORD),
1834 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
1835 }
1836 }
1837 }
4ab56118 1838
51bbfa0c
RS
1839 /* Now store any partially-in-registers parm.
1840 This is the last place a block-move can happen. */
1841 if (reg_parm_seen)
1842 for (i = 0; i < num_actuals; i++)
1843 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1844 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1845 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c
RS
1846
1847#ifndef PUSH_ARGS_REVERSED
1848#ifdef STACK_BOUNDARY
1849 /* If we pushed args in forward order, perform stack alignment
1850 after pushing the last arg. */
1851 if (argblock == 0)
e5d70561
RK
1852 anti_adjust_stack (GEN_INT (args_size.constant
1853 - original_args_size.constant));
51bbfa0c
RS
1854#endif
1855#endif
1856
756e0e12
RS
1857 /* If register arguments require space on the stack and stack space
1858 was not preallocated, allocate stack space here for arguments
1859 passed in registers. */
6e716e89 1860#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
756e0e12 1861 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 1862 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
1863#endif
1864
51bbfa0c
RS
1865 /* Pass the function the address in which to return a structure value. */
1866 if (structure_value_addr && ! structure_value_addr_parm)
1867 {
1868 emit_move_insn (struct_value_rtx,
1869 force_reg (Pmode,
e5d70561
RK
1870 force_operand (structure_value_addr,
1871 NULL_RTX)));
7815214e
RK
1872
1873 /* Mark the memory for the aggregate as write-only. */
1874 if (flag_check_memory_usage)
1875 emit_library_call (chkr_set_right_libfunc, 1,
1876 VOIDmode, 3,
1877 structure_value_addr, ptr_mode,
1878 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
956d6950
JL
1879 GEN_INT (MEMORY_USE_WO),
1880 TYPE_MODE (integer_type_node));
7815214e 1881
51bbfa0c 1882 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 1883 use_reg (&call_fusage, struct_value_rtx);
51bbfa0c
RS
1884 }
1885
77cac2f2 1886 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
8b0f9101 1887
51bbfa0c
RS
1888 /* Now do the register loads required for any wholly-register parms or any
1889 parms which are passed both on the stack and in a register. Their
1890 expressions were already evaluated.
1891
1892 Mark all register-parms as living through the call, putting these USE
77cac2f2 1893 insns in the CALL_INSN_FUNCTION_USAGE field. */
51bbfa0c 1894
bb1b857a
GK
1895#ifdef LOAD_ARGS_REVERSED
1896 for (i = num_actuals - 1; i >= 0; i--)
1897#else
51bbfa0c 1898 for (i = 0; i < num_actuals; i++)
bb1b857a 1899#endif
51bbfa0c 1900 {
cacbd532 1901 rtx reg = args[i].reg;
51bbfa0c 1902 int partial = args[i].partial;
cacbd532 1903 int nregs;
51bbfa0c 1904
cacbd532 1905 if (reg)
51bbfa0c 1906 {
6b972c4f
JW
 1907	  /* Set to non-negative if we must move a word at a time, even if just
 1908	     one word (e.g., partial == 1 && mode == DFmode).  Set to -1 if
1909 we just use a normal move insn. This value can be zero if the
1910 argument is a zero size structure with no fields. */
51bbfa0c
RS
1911 nregs = (partial ? partial
1912 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
6b972c4f
JW
1913 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1914 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1915 : -1));
51bbfa0c 1916
cacbd532
JW
1917 /* Handle calls that pass values in multiple non-contiguous
1918 locations. The Irix 6 ABI has examples of this. */
1919
1920 if (GET_CODE (reg) == PARALLEL)
1921 emit_group_load (reg, args[i].value);
1922
51bbfa0c
RS
1923 /* If simple case, just do move. If normal partial, store_one_arg
1924 has already loaded the register for us. In all other cases,
1925 load the register(s) from memory. */
1926
cacbd532 1927 else if (nregs == -1)
51bbfa0c 1928 emit_move_insn (reg, args[i].value);
4ab56118 1929
4ab56118
RK
1930 /* If we have pre-computed the values to put in the registers in
1931 the case of non-aligned structures, copy them in now. */
1932
1933 else if (args[i].n_aligned_regs != 0)
1934 for (j = 0; j < args[i].n_aligned_regs; j++)
38a448ca 1935 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
4ab56118 1936 args[i].aligned_regs[j]);
4ab56118 1937
cacbd532 1938 else if (partial == 0 || args[i].pass_on_stack)
6b972c4f
JW
1939 move_block_to_reg (REGNO (reg),
1940 validize_mem (args[i].value), nregs,
1941 args[i].mode);
0304dfbb 1942
cacbd532
JW
1943 /* Handle calls that pass values in multiple non-contiguous
1944 locations. The Irix 6 ABI has examples of this. */
1945 if (GET_CODE (reg) == PARALLEL)
1946 use_group_regs (&call_fusage, reg);
1947 else if (nregs == -1)
0304dfbb
DE
1948 use_reg (&call_fusage, reg);
1949 else
1950 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
51bbfa0c
RS
1951 }
1952 }
1953
1954 /* Perform postincrements before actually calling the function. */
1955 emit_queue ();
1956
1957 /* All arguments and registers used for the call must be set up by now! */
1958
51bbfa0c 1959 /* Generate the actual call instruction. */
2c8da025 1960 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
51bbfa0c 1961 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
77cac2f2 1962 valreg, old_inhibit_defer_pop, call_fusage, is_const);
51bbfa0c
RS
1963
1964 /* If call is cse'able, make appropriate pair of reg-notes around it.
1965 Test valreg so we don't crash; may safely ignore `const'
80a3ad45
JW
1966 if return type is void. Disable for PARALLEL return values, because
1967 we have no way to move such values into a pseudo register. */
1968 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
51bbfa0c
RS
1969 {
1970 rtx note = 0;
1971 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1972 rtx insns;
1973
9ae8ffe7
JL
1974 /* Mark the return value as a pointer if needed. */
1975 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
1976 {
1977 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
1978 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
1979 }
1980
51bbfa0c
RS
1981 /* Construct an "equal form" for the value which mentions all the
1982 arguments in order as well as the function name. */
1983#ifdef PUSH_ARGS_REVERSED
1984 for (i = 0; i < num_actuals; i++)
38a448ca 1985 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c
RS
1986#else
1987 for (i = num_actuals - 1; i >= 0; i--)
38a448ca 1988 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c 1989#endif
38a448ca 1990 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
51bbfa0c
RS
1991
1992 insns = get_insns ();
1993 end_sequence ();
1994
1995 emit_libcall_block (insns, temp, valreg, note);
1996
1997 valreg = temp;
1998 }
4f48d56a
RK
1999 else if (is_const)
2000 {
2001 /* Otherwise, just write out the sequence without a note. */
2002 rtx insns = get_insns ();
2003
2004 end_sequence ();
2005 emit_insns (insns);
2006 }
9ae8ffe7
JL
2007 else if (is_malloc)
2008 {
2009 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2010 rtx last, insns;
2011
2012 /* The return value from a malloc-like function is a pointer. */
2013 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2014 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2015
2016 emit_move_insn (temp, valreg);
2017
 2018      /* The return value from a malloc-like function cannot alias
2019 anything else. */
2020 last = get_last_insn ();
2021 REG_NOTES (last) =
38a448ca 2022 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
9ae8ffe7
JL
2023
2024 /* Write out the sequence. */
2025 insns = get_insns ();
2026 end_sequence ();
2027 emit_insns (insns);
2028 valreg = temp;
2029 }
51bbfa0c
RS
2030
2031 /* For calls to `setjmp', etc., inform flow.c it should complain
2032 if nonvolatile values are live. */
2033
2034 if (returns_twice)
2035 {
2036 emit_note (name, NOTE_INSN_SETJMP);
2037 current_function_calls_setjmp = 1;
2038 }
2039
2040 if (is_longjmp)
2041 current_function_calls_longjmp = 1;
2042
2043 /* Notice functions that cannot return.
2044 If optimizing, insns emitted below will be dead.
2045 If not optimizing, they will exist, which is useful
2046 if the user uses the `return' command in the debugger. */
2047
2048 if (is_volatile || is_longjmp)
2049 emit_barrier ();
2050
51bbfa0c
RS
2051 /* If value type not void, return an rtx for the value. */
2052
e976b8b2
MS
2053 /* If there are cleanups to be called, don't use a hard reg as target.
2054 We need to double check this and see if it matters anymore. */
e9a25f70 2055 if (any_pending_cleanups (1)
51bbfa0c
RS
2056 && target && REG_P (target)
2057 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2058 target = 0;
2059
2060 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2061 || ignore)
2062 {
2063 target = const0_rtx;
2064 }
2065 else if (structure_value_addr)
2066 {
2067 if (target == 0 || GET_CODE (target) != MEM)
29008b51 2068 {
38a448ca
RH
2069 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2070 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2071 structure_value_addr));
05e3bdb9 2072 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
29008b51 2073 }
51bbfa0c
RS
2074 }
2075 else if (pcc_struct_value)
2076 {
f78b5ca1
JL
2077 /* This is the special C++ case where we need to
2078 know what the true target was. We take care to
2079 never use this value more than once in one expression. */
38a448ca
RH
2080 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2081 copy_to_reg (valreg));
f78b5ca1 2082 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
51bbfa0c 2083 }
cacbd532
JW
2084 /* Handle calls that return values in multiple non-contiguous locations.
2085 The Irix 6 ABI has examples of this. */
2086 else if (GET_CODE (valreg) == PARALLEL)
2087 {
2088 if (target == 0)
2089 {
2090 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2b4092f2 2091 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
cacbd532
JW
2092 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2093 preserve_temp_slots (target);
2094 }
2095
2096 emit_group_store (target, valreg);
2097 }
059c3d84
JW
2098 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2099 && GET_MODE (target) == GET_MODE (valreg))
2100 /* TARGET and VALREG cannot be equal at this point because the latter
2101 would not have REG_FUNCTION_VALUE_P true, while the former would if
2102 it were referring to the same register.
2103
2104 If they refer to the same register, this move will be a no-op, except
2105 when function inlining is being done. */
2106 emit_move_insn (target, valreg);
766b19fb
JL
2107 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2108 {
2109 /* Some machines (the PA for example) want to return all small
2110 structures in registers regardless of the structure's alignment.
2111
2112 Deal with them explicitly by copying from the return registers
2113 into the target MEM locations. */
2114 int bytes = int_size_in_bytes (TREE_TYPE (exp));
1b5c5873
RK
2115 rtx src, dst;
2116 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (exp)), BITS_PER_WORD);
2117 int bitpos, xbitpos, big_endian_correction = 0;
766b19fb
JL
2118
2119 if (target == 0)
822e3422
RK
2120 {
2121 target = assign_stack_temp (BLKmode, bytes, 0);
2122 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2123 preserve_temp_slots (target);
2124 }
766b19fb 2125
e934eef9
RK
2126 /* This code assumes valreg is at least a full word. If it isn't,
2127 copy it into a new pseudo which is a full word. */
2128 if (GET_MODE (valreg) != BLKmode
2129 && GET_MODE_SIZE (GET_MODE (valreg)) < UNITS_PER_WORD)
144a3150 2130 valreg = convert_to_mode (word_mode, valreg,
e934eef9
RK
2131 TREE_UNSIGNED (TREE_TYPE (exp)));
2132
1b5c5873
RK
2133 /* Structures whose size is not a multiple of a word are aligned
2134 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2135 machine, this means we must skip the empty high order bytes when
2136 calculating the bit offset. */
2137 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2138 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2139 * BITS_PER_UNIT));
2140
 2141      /* Copy the structure BITSIZE bits at a time.
2142
2143 We could probably emit more efficient code for machines
766b19fb
JL
2144 which do not use strict alignment, but it doesn't seem
2145 worth the effort at the current time. */
1b5c5873
RK
2146 for (bitpos = 0, xbitpos = big_endian_correction;
2147 bitpos < bytes * BITS_PER_UNIT;
2148 bitpos += bitsize, xbitpos += bitsize)
766b19fb 2149 {
1b5c5873
RK
2150
2151 /* We need a new source operand each time xbitpos is on a
2152 word boundary and when xbitpos == big_endian_correction
2153 (the first time through). */
2154 if (xbitpos % BITS_PER_WORD == 0
2155 || xbitpos == big_endian_correction)
2156 src = operand_subword_force (valreg,
2157 xbitpos / BITS_PER_WORD,
2158 BLKmode);
2159
2160 /* We need a new destination operand each time bitpos is on
2161 a word boundary. */
2162 if (bitpos % BITS_PER_WORD == 0)
2163 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, BLKmode);
766b19fb 2164
1b5c5873
RK
2165 /* Use xbitpos for the source extraction (right justified) and
 2166	     bitpos for the destination store (left justified).  */
2167 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2168 extract_bit_field (src, bitsize,
2169 xbitpos % BITS_PER_WORD, 1,
2170 NULL_RTX, word_mode,
2171 word_mode,
2172 bitsize / BITS_PER_UNIT,
2173 BITS_PER_WORD),
2174 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
766b19fb
JL
2175 }
2176 }
51bbfa0c
RS
2177 else
2178 target = copy_to_reg (valreg);
2179
84b55618 2180#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
2181 /* If we promoted this return value, make the proper SUBREG. TARGET
2182 might be const0_rtx here, so be careful. */
2183 if (GET_CODE (target) == REG
766b19fb 2184 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
5d2ac65e 2185 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 2186 {
321e0bba
RK
2187 tree type = TREE_TYPE (exp);
2188 int unsignedp = TREE_UNSIGNED (type);
84b55618 2189
321e0bba
RK
2190 /* If we don't promote as expected, something is wrong. */
2191 if (GET_MODE (target)
2192 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
5d2ac65e
RK
2193 abort ();
2194
38a448ca 2195 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
84b55618
RK
2196 SUBREG_PROMOTED_VAR_P (target) = 1;
2197 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2198 }
2199#endif
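/* Editor's note, a hedged example of the SUBREG construction above: on a
   target that promotes function return values, a function returning a
   16-bit short may hand its result back in an SImode register; TARGET is
   then the SImode copy made above, and it is rewrapped here as
   (subreg:HI (reg:SI ...) 0) with SUBREG_PROMOTED_VAR_P and
   SUBREG_PROMOTED_UNSIGNED_P set so later passes know the upper bits are
   already extended correctly.  The modes are only illustrative.  */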
2200
2f4aa534
RS
2201 /* If size of args is variable or this was a constructor call for a stack
2202 argument, restore saved stack-pointer value. */
51bbfa0c
RS
2203
2204 if (old_stack_level)
2205 {
e5d70561 2206 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 2207 pending_stack_adjust = old_pending_adj;
d64f5a78 2208#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2209 stack_arg_under_construction = old_stack_arg_under_construction;
2210 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2211 stack_usage_map = initial_stack_usage_map;
d64f5a78 2212#endif
51bbfa0c 2213 }
51bbfa0c
RS
2214#ifdef ACCUMULATE_OUTGOING_ARGS
2215 else
2216 {
2217#ifdef REG_PARM_STACK_SPACE
2218 if (save_area)
2219 {
2220 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 2221#ifdef ARGS_GROW_DOWNWARD
51bbfa0c 2222 rtx stack_area
38a448ca
RH
2223 = gen_rtx_MEM (save_mode,
2224 memory_address (save_mode,
38a448ca 2225 plus_constant (argblock,
ceb83206 2226 - high_to_save)));
b94301c2 2227#else
ceb83206
JL
2228 rtx stack_area
2229 = gen_rtx_MEM (save_mode,
2230 memory_address (save_mode,
38a448ca 2231 plus_constant (argblock,
ceb83206 2232 low_to_save)));
b94301c2 2233#endif
51bbfa0c
RS
2234
2235 if (save_mode != BLKmode)
2236 emit_move_insn (stack_area, save_area);
2237 else
2238 emit_block_move (stack_area, validize_mem (save_area),
e5d70561
RK
2239 GEN_INT (high_to_save - low_to_save + 1),
2240 PARM_BOUNDARY / BITS_PER_UNIT);
51bbfa0c
RS
2241 }
2242#endif
2243
2244 /* If we saved any argument areas, restore them. */
2245 for (i = 0; i < num_actuals; i++)
2246 if (args[i].save_area)
2247 {
2248 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2249 rtx stack_area
38a448ca
RH
2250 = gen_rtx_MEM (save_mode,
2251 memory_address (save_mode,
2252 XEXP (args[i].stack_slot, 0)));
51bbfa0c
RS
2253
2254 if (save_mode != BLKmode)
2255 emit_move_insn (stack_area, args[i].save_area);
2256 else
2257 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 2258 GEN_INT (args[i].size.constant),
51bbfa0c
RS
2259 PARM_BOUNDARY / BITS_PER_UNIT);
2260 }
2261
2262 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2263 stack_usage_map = initial_stack_usage_map;
2264 }
2265#endif
2266
59257ff7
RK
2267 /* If this was alloca, record the new stack level for nonlocal gotos.
2268 Check for the handler slots since we might not have a save area
0f41302f 2269 for non-local gotos. */
59257ff7
RK
2270
2271 if (may_be_alloca && nonlocal_goto_handler_slot != 0)
e5d70561 2272 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
2273
2274 pop_temp_slots ();
2275
2276 return target;
2277}
2278\f
322e3e34
RK
2279/* Output a library call to function FUN (a SYMBOL_REF rtx)
2280 (emitting the queue unless NO_QUEUE is nonzero),
2281 for a value of mode OUTMODE,
2282 with NARGS different arguments, passed as alternating rtx values
2283 and machine_modes to convert them to.
2284 The rtx values should have been passed through protect_from_queue already.
2285
2286 NO_QUEUE will be true if and only if the library call is a `const' call
2287 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2288 to the variable is_const in expand_call.
2289
2290 NO_QUEUE must be true for const calls, because if it isn't, then
2291 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2292 and will be lost if the libcall sequence is optimized away.
2293
2294 NO_QUEUE must be false for non-const calls, because if it isn't, the
2295 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2296 optimized. For instance, the instruction scheduler may incorrectly
2297 move memory references across the non-const call. */
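/* Editor's sketch (not part of the original source): a minimal call
   following the convention documented above, with arguments passed as
   alternating rtx value / machine mode pairs.  `my_hook_libfunc', `addr'
   and `len' are hypothetical; real callers pass an existing SYMBOL_REF,
   as the chkr_check_exec_libfunc call in expand_call does.

	emit_library_call (my_hook_libfunc, 1, VOIDmode, 2,
			   addr, Pmode, len, SImode);

   Passing 1 for NO_QUEUE marks this as a `const' call enclosed in
   REG_LIBCALL/REG_RETVAL notes, per the comment above.  */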
2298
2299void
4f90e4a0
RK
2300emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2301 int nargs, ...))
322e3e34 2302{
4f90e4a0
RK
2303#ifndef __STDC__
2304 rtx orgfun;
2305 int no_queue;
2306 enum machine_mode outmode;
2307 int nargs;
2308#endif
322e3e34
RK
2309 va_list p;
2310 /* Total size in bytes of all the stack-parms scanned so far. */
2311 struct args_size args_size;
2312 /* Size of arguments before any adjustments (such as rounding). */
2313 struct args_size original_args_size;
2314 register int argnum;
322e3e34 2315 rtx fun;
322e3e34
RK
2316 int inc;
2317 int count;
2318 rtx argblock = 0;
2319 CUMULATIVE_ARGS args_so_far;
2320 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 2321 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
2322 struct arg *argvec;
2323 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2324 rtx call_fusage = 0;
f046b3cc
JL
2325#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2326 /* Define the boundary of the register parm stack space that needs to be
 2327     saved, if any.  */
2328 int low_to_save = -1, high_to_save;
2329 rtx save_area = 0; /* Place that it is saved */
2330#endif
2331
2332#ifdef ACCUMULATE_OUTGOING_ARGS
2333 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2334 char *initial_stack_usage_map = stack_usage_map;
2335 int needed;
2336#endif
2337
2338#ifdef REG_PARM_STACK_SPACE
69d4ca36
RL
2339 /* Size of the stack reserved for parameter registers. */
2340 int reg_parm_stack_space = 0;
f046b3cc
JL
2341#ifdef MAYBE_REG_PARM_STACK_SPACE
2342 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2343#else
2344 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2345#endif
2346#endif
322e3e34 2347
4f90e4a0
RK
2348 VA_START (p, nargs);
2349
2350#ifndef __STDC__
2351 orgfun = va_arg (p, rtx);
322e3e34
RK
2352 no_queue = va_arg (p, int);
2353 outmode = va_arg (p, enum machine_mode);
2354 nargs = va_arg (p, int);
4f90e4a0
RK
2355#endif
2356
2357 fun = orgfun;
322e3e34
RK
2358
2359 /* Copy all the libcall-arguments out of the varargs data
2360 and into a vector ARGVEC.
2361
2362 Compute how to pass each argument. We only support a very small subset
2363 of the full argument passing conventions to limit complexity here since
2364 library functions shouldn't have many args. */
2365
2366 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
f046b3cc
JL
2367 bzero ((char *) argvec, nargs * sizeof (struct arg));
2368
322e3e34 2369
eecb6f50 2370 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2371
2372 args_size.constant = 0;
2373 args_size.var = 0;
2374
888aa7a9
RS
2375 push_temp_slots ();
2376
322e3e34
RK
2377 for (count = 0; count < nargs; count++)
2378 {
2379 rtx val = va_arg (p, rtx);
2380 enum machine_mode mode = va_arg (p, enum machine_mode);
2381
2382 /* We cannot convert the arg value to the mode the library wants here;
2383 must do it earlier where we know the signedness of the arg. */
2384 if (mode == BLKmode
2385 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2386 abort ();
2387
2388 /* On some machines, there's no way to pass a float to a library fcn.
2389 Pass it as a double instead. */
2390#ifdef LIBGCC_NEEDS_DOUBLE
2391 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2392 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2393#endif
2394
2395 /* There's no need to call protect_from_queue, because
2396 either emit_move_insn or emit_push_insn will do that. */
2397
2398 /* Make sure it is a reasonable operand for a move or push insn. */
2399 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2400 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2401 val = force_operand (val, NULL_RTX);
2402
322e3e34
RK
2403#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2404 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2405 {
a44492f0
RK
2406 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2407 be viewed as just an efficiency improvement. */
888aa7a9
RS
2408 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2409 emit_move_insn (slot, val);
8301b6e2 2410 val = force_operand (XEXP (slot, 0), NULL_RTX);
a44492f0 2411 mode = Pmode;
888aa7a9 2412 }
322e3e34
RK
2413#endif
2414
888aa7a9
RS
2415 argvec[count].value = val;
2416 argvec[count].mode = mode;
2417
322e3e34 2418 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 2419 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
2420 abort ();
2421#ifdef FUNCTION_ARG_PARTIAL_NREGS
2422 argvec[count].partial
2423 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2424#else
2425 argvec[count].partial = 0;
2426#endif
2427
2428 locate_and_pad_parm (mode, NULL_TREE,
2429 argvec[count].reg && argvec[count].partial == 0,
2430 NULL_TREE, &args_size, &argvec[count].offset,
2431 &argvec[count].size);
2432
2433 if (argvec[count].size.var)
2434 abort ();
2435
2436#ifndef REG_PARM_STACK_SPACE
2437 if (argvec[count].partial)
2438 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2439#endif
2440
2441 if (argvec[count].reg == 0 || argvec[count].partial != 0
2442#ifdef REG_PARM_STACK_SPACE
2443 || 1
2444#endif
2445 )
2446 args_size.constant += argvec[count].size.constant;
2447
0f41302f 2448 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
2449 }
2450 va_end (p);
2451
f046b3cc
JL
2452#ifdef FINAL_REG_PARM_STACK_SPACE
2453 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2454 args_size.var);
2455#endif
2456
322e3e34
RK
2457 /* If this machine requires an external definition for library
2458 functions, write one out. */
2459 assemble_external_libcall (fun);
2460
2461 original_args_size = args_size;
2462#ifdef STACK_BOUNDARY
2463 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2464 / STACK_BYTES) * STACK_BYTES);
2465#endif
2466
2467#ifdef REG_PARM_STACK_SPACE
2468 args_size.constant = MAX (args_size.constant,
f046b3cc 2469 reg_parm_stack_space);
322e3e34 2470#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc 2471 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
2472#endif
2473#endif
2474
322e3e34
RK
2475 if (args_size.constant > current_function_outgoing_args_size)
2476 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2477
2478#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
2479 /* Since the stack pointer will never be pushed, it is possible for
2480 the evaluation of a parm to clobber something we have already
2481 written to the stack. Since most function calls on RISC machines
2482 do not use the stack, this is uncommon, but must work correctly.
2483
2484 Therefore, we save any area of the stack that was already written
2485 and that we are using. Here we set up to do this by making a new
2486 stack usage map from the old one.
2487
2488 Another approach might be to try to reorder the argument
2489 evaluations to avoid this conflicting stack usage. */
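/* Editor's illustration (not part of the original source): if
   args_size.constant is 16 and reg_parm_stack_space is 8 (with
   OUTGOING_REG_PARM_STACK_SPACE undefined), NEEDED becomes 24 below.  If
   an enclosing call in progress had already marked 12 bytes,
   highest_outgoing_arg_in_use becomes 24, the first 12 bytes of the new
   map are copied from the old one, and bytes 12 through 23 are cleared.  */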
2490
2491 needed = args_size.constant;
2492#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2493 /* Since we will be writing into the entire argument area, the
2494 map must be allocated for its entire size, not just the part that
2495 is the responsibility of the caller. */
2496 needed += reg_parm_stack_space;
2497#endif
2498
2499#ifdef ARGS_GROW_DOWNWARD
2500 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2501 needed + 1);
2502#else
2503 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2504 needed);
322e3e34 2505#endif
f046b3cc
JL
2506 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2507
2508 if (initial_highest_arg_in_use)
2509 bcopy (initial_stack_usage_map, stack_usage_map,
2510 initial_highest_arg_in_use);
2511
2512 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2513 bzero (&stack_usage_map[initial_highest_arg_in_use],
2514 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2515 needed = 0;
322e3e34 2516
f046b3cc
JL
2517 /* The address of the outgoing argument list must not be copied to a
2518 register here, because argblock would be left pointing to the
2519 wrong place after the call to allocate_dynamic_stack_space below.
2520 */
2521
2522 argblock = virtual_outgoing_args_rtx;
2523#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
2524#ifndef PUSH_ROUNDING
2525 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2526#endif
f046b3cc 2527#endif
322e3e34
RK
2528
2529#ifdef PUSH_ARGS_REVERSED
2530#ifdef STACK_BOUNDARY
2531 /* If we push args individually in reverse order, perform stack alignment
2532 before the first push (the last arg). */
2533 if (argblock == 0)
2534 anti_adjust_stack (GEN_INT (args_size.constant
2535 - original_args_size.constant));
2536#endif
2537#endif
2538
2539#ifdef PUSH_ARGS_REVERSED
2540 inc = -1;
2541 argnum = nargs - 1;
2542#else
2543 inc = 1;
2544 argnum = 0;
2545#endif
2546
f046b3cc
JL
2547#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2548 /* The argument list is the property of the called routine and it
2549 may clobber it. If the fixed area has been used for previous
2550 parameters, we must save and restore it.
2551
 2552     Here we compute the boundary of the area that needs to be saved, if any.  */
2553
2554#ifdef ARGS_GROW_DOWNWARD
2555 for (count = 0; count < reg_parm_stack_space + 1; count++)
2556#else
2557 for (count = 0; count < reg_parm_stack_space; count++)
2558#endif
2559 {
2560 if (count >= highest_outgoing_arg_in_use
2561 || stack_usage_map[count] == 0)
2562 continue;
2563
2564 if (low_to_save == -1)
2565 low_to_save = count;
2566
2567 high_to_save = count;
2568 }
2569
2570 if (low_to_save >= 0)
2571 {
2572 int num_to_save = high_to_save - low_to_save + 1;
2573 enum machine_mode save_mode
2574 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2575 rtx stack_area;
2576
2577 /* If we don't have the required alignment, must do this in BLKmode. */
2578 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2579 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2580 save_mode = BLKmode;
2581
ceb83206 2582#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
2583 stack_area = gen_rtx_MEM (save_mode,
2584 memory_address (save_mode,
38a448ca 2585 plus_constant (argblock,
ceb83206 2586 - high_to_save)));
f046b3cc 2587#else
ceb83206
JL
2588 stack_area = gen_rtx_MEM (save_mode,
2589 memory_address (save_mode,
38a448ca 2590 plus_constant (argblock,
ceb83206 2591 low_to_save)));
f046b3cc 2592#endif
f046b3cc
JL
2593 if (save_mode == BLKmode)
2594 {
2595 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2596 MEM_IN_STRUCT_P (save_area) = 0;
2597 emit_block_move (validize_mem (save_area), stack_area,
2598 GEN_INT (num_to_save),
2599 PARM_BOUNDARY / BITS_PER_UNIT);
2600 }
2601 else
2602 {
2603 save_area = gen_reg_rtx (save_mode);
2604 emit_move_insn (save_area, stack_area);
2605 }
2606 }
2607#endif
2608
322e3e34
RK
2609 /* Push the args that need to be pushed. */
2610
5e26979c
JL
2611 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2612 are to be pushed. */
322e3e34
RK
2613 for (count = 0; count < nargs; count++, argnum += inc)
2614 {
2615 register enum machine_mode mode = argvec[argnum].mode;
2616 register rtx val = argvec[argnum].value;
2617 rtx reg = argvec[argnum].reg;
2618 int partial = argvec[argnum].partial;
69d4ca36 2619#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 2620 int lower_bound, upper_bound, i;
69d4ca36 2621#endif
322e3e34
RK
2622
2623 if (! (reg != 0 && partial == 0))
f046b3cc
JL
2624 {
2625#ifdef ACCUMULATE_OUTGOING_ARGS
2626 /* If this is being stored into a pre-allocated, fixed-size, stack
2627 area, save any previous data at that location. */
2628
2629#ifdef ARGS_GROW_DOWNWARD
2630 /* stack_slot is negative, but we want to index stack_usage_map
2631 with positive values. */
5e26979c
JL
2632 upper_bound = -argvec[argnum].offset.constant + 1;
2633 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 2634#else
5e26979c
JL
2635 lower_bound = argvec[argnum].offset.constant;
2636 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
2637#endif
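	  /* Editor's illustration (not part of the original source): under
	     ARGS_GROW_DOWNWARD, an argument at offset.constant == -8 with
	     size.constant == 4 gives upper_bound == 9 and lower_bound == 5,
	     so map bytes 5 through 8 are checked and later marked; without
	     ARGS_GROW_DOWNWARD the same argument at offset 8 would use
	     bytes 8 through 11.  */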
2638
2639 for (i = lower_bound; i < upper_bound; i++)
2640 if (stack_usage_map[i]
2641#ifdef REG_PARM_STACK_SPACE
2642 /* Don't store things in the fixed argument area at this point;
2643 it has already been saved. */
2644 && i > reg_parm_stack_space
2645#endif
2646 )
2647 break;
2648
2649 if (i != upper_bound)
2650 {
2651 /* We need to make a save area. See what mode we can make it. */
2652 enum machine_mode save_mode
5e26979c 2653 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
2654 MODE_INT, 1);
2655 rtx stack_area
38a448ca
RH
2656 = gen_rtx_MEM (save_mode,
2657 memory_address (save_mode,
2658 plus_constant (argblock, argvec[argnum].offset.constant)));
5e26979c
JL
2659 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2660 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
2661 }
2662#endif
2663 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
5e26979c 2664 argblock, GEN_INT (argvec[argnum].offset.constant));
f046b3cc
JL
2665
2666#ifdef ACCUMULATE_OUTGOING_ARGS
2667 /* Now mark the segment we just used. */
2668 for (i = lower_bound; i < upper_bound; i++)
2669 stack_usage_map[i] = 1;
2670#endif
2671
2672 NO_DEFER_POP;
2673 }
322e3e34
RK
2674 }
2675
2676#ifndef PUSH_ARGS_REVERSED
2677#ifdef STACK_BOUNDARY
2678 /* If we pushed args in forward order, perform stack alignment
2679 after pushing the last arg. */
2680 if (argblock == 0)
2681 anti_adjust_stack (GEN_INT (args_size.constant
2682 - original_args_size.constant));
2683#endif
2684#endif
2685
2686#ifdef PUSH_ARGS_REVERSED
2687 argnum = nargs - 1;
2688#else
2689 argnum = 0;
2690#endif
2691
77cac2f2 2692 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 2693
322e3e34
RK
2694 /* Now load any reg parms into their regs. */
2695
5e26979c
JL
2696 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2697 are to be pushed. */
322e3e34
RK
2698 for (count = 0; count < nargs; count++, argnum += inc)
2699 {
322e3e34
RK
2700 register rtx val = argvec[argnum].value;
2701 rtx reg = argvec[argnum].reg;
2702 int partial = argvec[argnum].partial;
2703
2704 if (reg != 0 && partial == 0)
2705 emit_move_insn (reg, val);
2706 NO_DEFER_POP;
2707 }
2708
2709 /* For version 1.37, try deleting this entirely. */
2710 if (! no_queue)
2711 emit_queue ();
2712
2713 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
2714 for (count = 0; count < nargs; count++)
2715 if (argvec[count].reg != 0)
77cac2f2 2716 use_reg (&call_fusage, argvec[count].reg);
322e3e34 2717
322e3e34
RK
2718 /* Don't allow popping to be deferred, since then
2719 cse'ing of library calls could delete a call and leave the pop. */
2720 NO_DEFER_POP;
2721
2722 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2723 will set inhibit_defer_pop to that value. */
2724
334c4f0f
RK
2725 /* The return type is needed to decide how many bytes the function pops.
2726 Signedness plays no role in that, so for simplicity, we pretend it's
2727 always signed. We also assume that the list of arguments passed has
2728 no impact, so we pretend it is unknown. */
2729
2c8da025
RK
2730 emit_call_1 (fun,
2731 get_identifier (XSTR (orgfun, 0)),
b3776927
RK
2732 build_function_type (outmode == VOIDmode ? void_type_node
2733 : type_for_mode (outmode, 0), NULL_TREE),
334c4f0f 2734 args_size.constant, 0,
322e3e34
RK
2735 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2736 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 2737 old_inhibit_defer_pop + 1, call_fusage, no_queue);
322e3e34 2738
888aa7a9
RS
2739 pop_temp_slots ();
2740
322e3e34
RK
2741 /* Now restore inhibit_defer_pop to its actual original value. */
2742 OK_DEFER_POP;
f046b3cc
JL
2743
2744#ifdef ACCUMULATE_OUTGOING_ARGS
2745#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
2746 if (save_area)
2747 {
2748 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 2749#ifdef ARGS_GROW_DOWNWARD
e9a25f70 2750 rtx stack_area
38a448ca
RH
2751 = gen_rtx_MEM (save_mode,
2752 memory_address (save_mode,
ceb83206
JL
2753 plus_constant (argblock,
2754 - high_to_save)));
f046b3cc 2755#else
ceb83206
JL
2756 rtx stack_area
2757 = gen_rtx_MEM (save_mode,
2758 memory_address (save_mode,
2759 plus_constant (argblock, low_to_save)));
f046b3cc 2760#endif
f046b3cc 2761
e9a25f70
JL
2762 if (save_mode != BLKmode)
2763 emit_move_insn (stack_area, save_area);
2764 else
2765 emit_block_move (stack_area, validize_mem (save_area),
2766 GEN_INT (high_to_save - low_to_save + 1),
2767 PARM_BOUNDARY / BITS_PER_UNIT);
2768 }
f046b3cc
JL
2769#endif
2770
2771 /* If we saved any argument areas, restore them. */
2772 for (count = 0; count < nargs; count++)
2773 if (argvec[count].save_area)
2774 {
2775 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
2776 rtx stack_area
38a448ca
RH
2777 = gen_rtx_MEM (save_mode,
2778 memory_address (save_mode,
2779 plus_constant (argblock, argvec[count].offset.constant)));
f046b3cc
JL
2780
2781 emit_move_insn (stack_area, argvec[count].save_area);
2782 }
2783
2784 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2785 stack_usage_map = initial_stack_usage_map;
2786#endif
322e3e34
RK
2787}
2788\f
2789/* Like emit_library_call except that an extra argument, VALUE,
2790 comes second and says where to store the result.
fac0ad80
RS
2791 (If VALUE is zero, this function chooses a convenient way
 2792   to return the value.)
322e3e34 2793
fac0ad80
RS
2794 This function returns an rtx for where the value is to be found.
2795 If VALUE is nonzero, VALUE is returned. */
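/* Editor's sketch (not part of the original source): the value-returning
   variant takes VALUE right after ORGFUN.  `my_cmp_libfunc', `x' and `y'
   are hypothetical:

	rtx result = emit_library_call_value (my_cmp_libfunc, NULL_RTX, 1,
					      SImode, 2, x, Pmode, y, Pmode);

   With VALUE == 0 the function chooses where the SImode result lives and
   returns that rtx; with a nonzero VALUE the result is stored there and
   VALUE itself is returned, as documented above.  */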
2796
2797rtx
4f90e4a0
RK
2798emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2799 enum machine_mode outmode, int nargs, ...))
322e3e34 2800{
4f90e4a0
RK
2801#ifndef __STDC__
2802 rtx orgfun;
2803 rtx value;
2804 int no_queue;
2805 enum machine_mode outmode;
2806 int nargs;
2807#endif
322e3e34
RK
2808 va_list p;
2809 /* Total size in bytes of all the stack-parms scanned so far. */
2810 struct args_size args_size;
2811 /* Size of arguments before any adjustments (such as rounding). */
2812 struct args_size original_args_size;
2813 register int argnum;
322e3e34 2814 rtx fun;
322e3e34
RK
2815 int inc;
2816 int count;
2817 rtx argblock = 0;
2818 CUMULATIVE_ARGS args_so_far;
2819 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 2820 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
2821 struct arg *argvec;
2822 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2823 rtx call_fusage = 0;
322e3e34 2824 rtx mem_value = 0;
fac0ad80 2825 int pcc_struct_value = 0;
4f389214 2826 int struct_value_size = 0;
d61bee95 2827 int is_const;
69d4ca36 2828#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 2829 int needed;
69d4ca36 2830#endif
f046b3cc
JL
2831
2832#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2833 /* Define the boundary of the register parm stack space that needs to be
 2834     saved, if any.  */
2835 int low_to_save = -1, high_to_save;
2836 rtx save_area = 0; /* Place that it is saved */
2837#endif
2838
2839#ifdef ACCUMULATE_OUTGOING_ARGS
69d4ca36
RL
2840 /* Size of the stack reserved for parameter registers. */
2841 int reg_parm_stack_space = 0;
f046b3cc
JL
2842 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2843 char *initial_stack_usage_map = stack_usage_map;
2844#endif
2845
2846#ifdef REG_PARM_STACK_SPACE
2847#ifdef MAYBE_REG_PARM_STACK_SPACE
2848 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2849#else
2850 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2851#endif
2852#endif
322e3e34 2853
4f90e4a0
RK
2854 VA_START (p, nargs);
2855
2856#ifndef __STDC__
2857 orgfun = va_arg (p, rtx);
322e3e34
RK
2858 value = va_arg (p, rtx);
2859 no_queue = va_arg (p, int);
2860 outmode = va_arg (p, enum machine_mode);
2861 nargs = va_arg (p, int);
4f90e4a0
RK
2862#endif
2863
d61bee95 2864 is_const = no_queue;
4f90e4a0 2865 fun = orgfun;
322e3e34
RK
2866
2867 /* If this kind of value comes back in memory,
2868 decide where in memory it should come back. */
fac0ad80 2869 if (aggregate_value_p (type_for_mode (outmode, 0)))
322e3e34 2870 {
fac0ad80
RS
2871#ifdef PCC_STATIC_STRUCT_RETURN
2872 rtx pointer_reg
2873 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2874 0);
38a448ca 2875 mem_value = gen_rtx_MEM (outmode, pointer_reg);
fac0ad80
RS
2876 pcc_struct_value = 1;
2877 if (value == 0)
2878 value = gen_reg_rtx (outmode);
2879#else /* not PCC_STATIC_STRUCT_RETURN */
4f389214 2880 struct_value_size = GET_MODE_SIZE (outmode);
fac0ad80 2881 if (value != 0 && GET_CODE (value) == MEM)
322e3e34
RK
2882 mem_value = value;
2883 else
2884 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
fac0ad80 2885#endif
779c643a
JW
2886
2887 /* This call returns a big structure. */
2888 is_const = 0;
322e3e34
RK
2889 }
2890
2891 /* ??? Unfinished: must pass the memory address as an argument. */
2892
2893 /* Copy all the libcall-arguments out of the varargs data
2894 and into a vector ARGVEC.
2895
2896 Compute how to pass each argument. We only support a very small subset
2897 of the full argument passing conventions to limit complexity here since
2898 library functions shouldn't have many args. */
2899
2900 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
d3c4e2ab 2901 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
322e3e34 2902
eecb6f50 2903 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2904
2905 args_size.constant = 0;
2906 args_size.var = 0;
2907
2908 count = 0;
2909
888aa7a9
RS
2910 push_temp_slots ();
2911
322e3e34
RK
2912 /* If there's a structure value address to be passed,
2913 either pass it in the special place, or pass it as an extra argument. */
fac0ad80 2914 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
322e3e34
RK
2915 {
2916 rtx addr = XEXP (mem_value, 0);
fac0ad80 2917 nargs++;
322e3e34 2918
fac0ad80
RS
2919 /* Make sure it is a reasonable operand for a move or push insn. */
2920 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2921 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2922 addr = force_operand (addr, NULL_RTX);
322e3e34 2923
fac0ad80 2924 argvec[count].value = addr;
4fc3dcd5 2925 argvec[count].mode = Pmode;
fac0ad80 2926 argvec[count].partial = 0;
322e3e34 2927
4fc3dcd5 2928 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
322e3e34 2929#ifdef FUNCTION_ARG_PARTIAL_NREGS
4fc3dcd5 2930 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
fac0ad80 2931 abort ();
322e3e34
RK
2932#endif
2933
4fc3dcd5 2934 locate_and_pad_parm (Pmode, NULL_TREE,
fac0ad80
RS
2935 argvec[count].reg && argvec[count].partial == 0,
2936 NULL_TREE, &args_size, &argvec[count].offset,
2937 &argvec[count].size);
322e3e34
RK
2938
2939
fac0ad80 2940 if (argvec[count].reg == 0 || argvec[count].partial != 0
322e3e34 2941#ifdef REG_PARM_STACK_SPACE
fac0ad80 2942 || 1
322e3e34 2943#endif
fac0ad80
RS
2944 )
2945 args_size.constant += argvec[count].size.constant;
322e3e34 2946
0f41302f 2947 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
fac0ad80
RS
2948
2949 count++;
322e3e34
RK
2950 }
2951
2952 for (; count < nargs; count++)
2953 {
2954 rtx val = va_arg (p, rtx);
2955 enum machine_mode mode = va_arg (p, enum machine_mode);
2956
2957 /* We cannot convert the arg value to the mode the library wants here;
2958 must do it earlier where we know the signedness of the arg. */
2959 if (mode == BLKmode
2960 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2961 abort ();
2962
2963 /* On some machines, there's no way to pass a float to a library fcn.
2964 Pass it as a double instead. */
2965#ifdef LIBGCC_NEEDS_DOUBLE
2966 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2967 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2968#endif
2969
2970 /* There's no need to call protect_from_queue, because
2971 either emit_move_insn or emit_push_insn will do that. */
2972
2973 /* Make sure it is a reasonable operand for a move or push insn. */
2974 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2975 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2976 val = force_operand (val, NULL_RTX);
2977
322e3e34
RK
2978#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2979 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2980 {
a44492f0
RK
2981 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2982 be viewed as just an efficiency improvement. */
888aa7a9
RS
2983 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2984 emit_move_insn (slot, val);
2985 val = XEXP (slot, 0);
2986 mode = Pmode;
2987 }
322e3e34
RK
2988#endif
2989
888aa7a9
RS
2990 argvec[count].value = val;
2991 argvec[count].mode = mode;
2992
322e3e34 2993 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 2994 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
2995 abort ();
2996#ifdef FUNCTION_ARG_PARTIAL_NREGS
2997 argvec[count].partial
2998 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2999#else
3000 argvec[count].partial = 0;
3001#endif
3002
3003 locate_and_pad_parm (mode, NULL_TREE,
3004 argvec[count].reg && argvec[count].partial == 0,
3005 NULL_TREE, &args_size, &argvec[count].offset,
3006 &argvec[count].size);
3007
3008 if (argvec[count].size.var)
3009 abort ();
3010
3011#ifndef REG_PARM_STACK_SPACE
3012 if (argvec[count].partial)
3013 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3014#endif
3015
3016 if (argvec[count].reg == 0 || argvec[count].partial != 0
3017#ifdef REG_PARM_STACK_SPACE
3018 || 1
3019#endif
3020 )
3021 args_size.constant += argvec[count].size.constant;
3022
0f41302f 3023 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
3024 }
3025 va_end (p);
3026
f046b3cc
JL
3027#ifdef FINAL_REG_PARM_STACK_SPACE
3028 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3029 args_size.var);
3030#endif
322e3e34
RK
3031 /* If this machine requires an external definition for library
3032 functions, write one out. */
3033 assemble_external_libcall (fun);
3034
3035 original_args_size = args_size;
3036#ifdef STACK_BOUNDARY
3037 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3038 / STACK_BYTES) * STACK_BYTES);
3039#endif
3040
3041#ifdef REG_PARM_STACK_SPACE
3042 args_size.constant = MAX (args_size.constant,
f046b3cc 3043 reg_parm_stack_space);
322e3e34 3044#ifndef OUTGOING_REG_PARM_STACK_SPACE
fc990856 3045 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
3046#endif
3047#endif
3048
322e3e34
RK
3049 if (args_size.constant > current_function_outgoing_args_size)
3050 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
3051
3052#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
3053 /* Since the stack pointer will never be pushed, it is possible for
3054 the evaluation of a parm to clobber something we have already
3055 written to the stack. Since most function calls on RISC machines
3056 do not use the stack, this is uncommon, but must work correctly.
3057
3058 Therefore, we save any area of the stack that was already written
3059 and that we are using. Here we set up to do this by making a new
3060 stack usage map from the old one.
3061
3062 Another approach might be to try to reorder the argument
3063 evaluations to avoid this conflicting stack usage. */
3064
3065 needed = args_size.constant;
3066#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
3067 /* Since we will be writing into the entire argument area, the
3068 map must be allocated for its entire size, not just the part that
3069 is the responsibility of the caller. */
3070 needed += reg_parm_stack_space;
3071#endif
3072
3073#ifdef ARGS_GROW_DOWNWARD
3074 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3075 needed + 1);
3076#else
3077 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3078 needed);
322e3e34 3079#endif
f046b3cc
JL
3080 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3081
3082 if (initial_highest_arg_in_use)
3083 bcopy (initial_stack_usage_map, stack_usage_map,
3084 initial_highest_arg_in_use);
3085
3086 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3087 bzero (&stack_usage_map[initial_highest_arg_in_use],
3088 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3089 needed = 0;
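/* Illustrative sketch, not original code: each byte of outgoing argument
   space has one entry in stack_usage_map; a nonzero entry means that byte
   is already occupied by an argument of this call.  Deciding whether a
   slot must be saved before being reused therefore reduces to a scan:  */
#if 0
  {
    int i, must_save = 0;
    for (i = lower_bound; i < upper_bound; i++)
      if (stack_usage_map[i])
	must_save = 1;	/* live data here; copy it aside before pushing */
  }
#endif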
322e3e34 3090
f046b3cc
JL
3091 /* The address of the outgoing argument list must not be copied to a
3092 register here, because argblock would be left pointing to the
3093 wrong place after the call to allocate_dynamic_stack_space below.
3094 */
3095
3096 argblock = virtual_outgoing_args_rtx;
3097#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
3098#ifndef PUSH_ROUNDING
3099 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3100#endif
f046b3cc 3101#endif
322e3e34
RK
3102
3103#ifdef PUSH_ARGS_REVERSED
3104#ifdef STACK_BOUNDARY
3105 /* If we push args individually in reverse order, perform stack alignment
3106 before the first push (the last arg). */
3107 if (argblock == 0)
3108 anti_adjust_stack (GEN_INT (args_size.constant
3109 - original_args_size.constant));
3110#endif
3111#endif
3112
3113#ifdef PUSH_ARGS_REVERSED
3114 inc = -1;
3115 argnum = nargs - 1;
3116#else
3117 inc = 1;
3118 argnum = 0;
3119#endif
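/* Note, for illustration: with PUSH_ARGS_REVERSED the loops below walk
   ARGVEC from nargs - 1 down to 0 (inc == -1), so the last argument is
   pushed first; otherwise they walk 0 .. nargs - 1 with inc == 1.  */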
3120
f046b3cc
JL
3121#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
 3122	  /* The argument list is the property of the called routine, which
 3123	     may clobber it.  If the fixed area has been used for previous
3124 parameters, we must save and restore it.
3125
 3126	     Here we compute the boundary of the area that needs to be saved, if any.  */
3127
3128#ifdef ARGS_GROW_DOWNWARD
3129 for (count = 0; count < reg_parm_stack_space + 1; count++)
3130#else
3131 for (count = 0; count < reg_parm_stack_space; count++)
3132#endif
3133 {
3134 if (count >= highest_outgoing_arg_in_use
3135 || stack_usage_map[count] == 0)
3136 continue;
3137
3138 if (low_to_save == -1)
3139 low_to_save = count;
3140
3141 high_to_save = count;
3142 }
3143
3144 if (low_to_save >= 0)
3145 {
3146 int num_to_save = high_to_save - low_to_save + 1;
3147 enum machine_mode save_mode
3148 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3149 rtx stack_area;
3150
3151 /* If we don't have the required alignment, must do this in BLKmode. */
3152 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3153 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3154 save_mode = BLKmode;
3155
ceb83206 3156#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
3157 stack_area = gen_rtx_MEM (save_mode,
3158 memory_address (save_mode,
38a448ca 3159 plus_constant (argblock,
ceb83206 3160 - high_to_save)));
f046b3cc 3161#else
ceb83206
JL
3162 stack_area = gen_rtx_MEM (save_mode,
3163 memory_address (save_mode,
38a448ca 3164 plus_constant (argblock,
ceb83206 3165 low_to_save)));
f046b3cc 3166#endif
f046b3cc
JL
3167 if (save_mode == BLKmode)
3168 {
3169 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3170 MEM_IN_STRUCT_P (save_area) = 0;
3171 emit_block_move (validize_mem (save_area), stack_area,
3172 GEN_INT (num_to_save),
3173 PARM_BOUNDARY / BITS_PER_UNIT);
3174 }
3175 else
3176 {
3177 save_area = gen_reg_rtx (save_mode);
3178 emit_move_insn (save_area, stack_area);
3179 }
3180 }
3181#endif
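/* Illustration with assumed numbers, not from this file: suppose the
   target defines REG_PARM_STACK_SPACE (FNDECL) as 16 and bytes 8..15 of
   that area are already marked in stack_usage_map.  The loop above then
   yields low_to_save == 8 and high_to_save == 15; the 8-byte region is
   suitably aligned, so save_mode becomes DImode and the area is saved
   into a pseudo with a single emit_move_insn and restored the same way
   after the call.  */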
3182
322e3e34
RK
3183 /* Push the args that need to be pushed. */
3184
5e26979c
JL
3185 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3186 are to be pushed. */
322e3e34
RK
3187 for (count = 0; count < nargs; count++, argnum += inc)
3188 {
3189 register enum machine_mode mode = argvec[argnum].mode;
3190 register rtx val = argvec[argnum].value;
3191 rtx reg = argvec[argnum].reg;
3192 int partial = argvec[argnum].partial;
69d4ca36 3193#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 3194 int lower_bound, upper_bound, i;
69d4ca36 3195#endif
322e3e34
RK
3196
3197 if (! (reg != 0 && partial == 0))
f046b3cc
JL
3198 {
3199#ifdef ACCUMULATE_OUTGOING_ARGS
3200 /* If this is being stored into a pre-allocated, fixed-size, stack
3201 area, save any previous data at that location. */
3202
3203#ifdef ARGS_GROW_DOWNWARD
 3204	  /* The argument's stack offset is negative here, but we want to index
 3205	     stack_usage_map with positive values.  */
5e26979c
JL
3206 upper_bound = -argvec[argnum].offset.constant + 1;
3207 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 3208#else
5e26979c
JL
3209 lower_bound = argvec[argnum].offset.constant;
3210 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
3211#endif
3212
3213 for (i = lower_bound; i < upper_bound; i++)
3214 if (stack_usage_map[i]
3215#ifdef REG_PARM_STACK_SPACE
3216 /* Don't store things in the fixed argument area at this point;
3217 it has already been saved. */
3218 && i > reg_parm_stack_space
3219#endif
3220 )
3221 break;
3222
3223 if (i != upper_bound)
3224 {
3225 /* We need to make a save area. See what mode we can make it. */
3226 enum machine_mode save_mode
5e26979c 3227 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
3228 MODE_INT, 1);
3229 rtx stack_area
38a448ca
RH
3230 = gen_rtx_MEM (save_mode,
3231 memory_address (save_mode,
3232 plus_constant (argblock,
3233 argvec[argnum].offset.constant)));
5e26979c
JL
3234 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3235 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
3236 }
3237#endif
3238 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
5e26979c 3239 argblock, GEN_INT (argvec[argnum].offset.constant));
f046b3cc
JL
3240
3241#ifdef ACCUMULATE_OUTGOING_ARGS
3242 /* Now mark the segment we just used. */
3243 for (i = lower_bound; i < upper_bound; i++)
3244 stack_usage_map[i] = 1;
3245#endif
3246
3247 NO_DEFER_POP;
3248 }
322e3e34
RK
3249 }
3250
3251#ifndef PUSH_ARGS_REVERSED
3252#ifdef STACK_BOUNDARY
3253 /* If we pushed args in forward order, perform stack alignment
3254 after pushing the last arg. */
3255 if (argblock == 0)
3256 anti_adjust_stack (GEN_INT (args_size.constant
3257 - original_args_size.constant));
3258#endif
3259#endif
3260
3261#ifdef PUSH_ARGS_REVERSED
3262 argnum = nargs - 1;
3263#else
3264 argnum = 0;
3265#endif
3266
77cac2f2 3267 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 3268
322e3e34
RK
3269 /* Now load any reg parms into their regs. */
3270
5e26979c
JL
3271 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3272 are to be pushed. */
322e3e34
RK
3273 for (count = 0; count < nargs; count++, argnum += inc)
3274 {
322e3e34
RK
3275 register rtx val = argvec[argnum].value;
3276 rtx reg = argvec[argnum].reg;
3277 int partial = argvec[argnum].partial;
3278
3279 if (reg != 0 && partial == 0)
3280 emit_move_insn (reg, val);
3281 NO_DEFER_POP;
3282 }
3283
3284#if 0
3285 /* For version 1.37, try deleting this entirely. */
3286 if (! no_queue)
3287 emit_queue ();
3288#endif
3289
3290 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
3291 for (count = 0; count < nargs; count++)
3292 if (argvec[count].reg != 0)
77cac2f2 3293 use_reg (&call_fusage, argvec[count].reg);
322e3e34 3294
fac0ad80
RS
3295 /* Pass the function the address in which to return a structure value. */
3296 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3297 {
3298 emit_move_insn (struct_value_rtx,
3299 force_reg (Pmode,
3300 force_operand (XEXP (mem_value, 0),
3301 NULL_RTX)));
3302 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 3303 use_reg (&call_fusage, struct_value_rtx);
fac0ad80
RS
3304 }
3305
322e3e34
RK
3306 /* Don't allow popping to be deferred, since then
3307 cse'ing of library calls could delete a call and leave the pop. */
3308 NO_DEFER_POP;
3309
3310 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3311 will set inhibit_defer_pop to that value. */
334c4f0f
RK
3312 /* See the comment in emit_library_call about the function type we build
3313 and pass here. */
322e3e34 3314
2c8da025
RK
3315 emit_call_1 (fun,
3316 get_identifier (XSTR (orgfun, 0)),
334c4f0f
RK
3317 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3318 args_size.constant, struct_value_size,
322e3e34 3319 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4d6a19ff 3320 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 3321 old_inhibit_defer_pop + 1, call_fusage, is_const);
322e3e34
RK
3322
3323 /* Now restore inhibit_defer_pop to its actual original value. */
3324 OK_DEFER_POP;
3325
888aa7a9
RS
3326 pop_temp_slots ();
3327
322e3e34
RK
3328 /* Copy the value to the right place. */
3329 if (outmode != VOIDmode)
3330 {
3331 if (mem_value)
3332 {
3333 if (value == 0)
fac0ad80 3334 value = mem_value;
322e3e34
RK
3335 if (value != mem_value)
3336 emit_move_insn (value, mem_value);
3337 }
3338 else if (value != 0)
3339 emit_move_insn (value, hard_libcall_value (outmode));
fac0ad80
RS
3340 else
3341 value = hard_libcall_value (outmode);
322e3e34 3342 }
fac0ad80 3343
f046b3cc
JL
3344#ifdef ACCUMULATE_OUTGOING_ARGS
3345#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
3346 if (save_area)
3347 {
3348 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 3349#ifdef ARGS_GROW_DOWNWARD
e9a25f70 3350 rtx stack_area
38a448ca
RH
3351 = gen_rtx_MEM (save_mode,
3352 memory_address (save_mode,
ceb83206
JL
3353 plus_constant (argblock,
3354 - high_to_save)));
f046b3cc 3355#else
ceb83206
JL
3356 rtx stack_area
3357 = gen_rtx_MEM (save_mode,
3358 memory_address (save_mode,
3359 plus_constant (argblock, low_to_save)));
f046b3cc 3360#endif
e9a25f70
JL
3361 if (save_mode != BLKmode)
3362 emit_move_insn (stack_area, save_area);
3363 else
3364 emit_block_move (stack_area, validize_mem (save_area),
3365 GEN_INT (high_to_save - low_to_save + 1),
f046b3cc 3366 PARM_BOUNDARY / BITS_PER_UNIT);
e9a25f70 3367 }
f046b3cc
JL
3368#endif
3369
3370 /* If we saved any argument areas, restore them. */
3371 for (count = 0; count < nargs; count++)
3372 if (argvec[count].save_area)
3373 {
3374 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3375 rtx stack_area
38a448ca 3376 = gen_rtx_MEM (save_mode,
f046b3cc
JL
3377 memory_address (save_mode, plus_constant (argblock,
3378 argvec[count].offset.constant)));
3379
3380 emit_move_insn (stack_area, argvec[count].save_area);
3381 }
3382
3383 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3384 stack_usage_map = initial_stack_usage_map;
3385#endif
3386
fac0ad80 3387 return value;
322e3e34
RK
3388}
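/* Usage sketch, not from this file: a caller elsewhere in the compiler
   typically invokes the function above along these lines (the exact
   operands are illustrative):

	rtx res = emit_library_call_value (memcmp_libfunc, NULL_RTX, 1,
					   TYPE_MODE (integer_type_node), 3,
					   addr1, Pmode,
					   addr2, Pmode,
					   size, TYPE_MODE (sizetype));

   Each argument is given as an rtx/mode pair after NARGS, and the result,
   if there is one, comes back in RES.  */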
3389\f
51bbfa0c
RS
3390#if 0
3391/* Return an rtx which represents a suitable home on the stack
3392 given TYPE, the type of the argument looking for a home.
3393 This is called only for BLKmode arguments.
3394
3395 SIZE is the size needed for this target.
3396 ARGS_ADDR is the address of the bottom of the argument block for this call.
3397 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3398 if this machine uses push insns. */
3399
3400static rtx
3401target_for_arg (type, size, args_addr, offset)
3402 tree type;
3403 rtx size;
3404 rtx args_addr;
3405 struct args_size offset;
3406{
3407 rtx target;
3408 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3409
 3410  /* We avoid calling memory_address when possible,
3411 because we want to address as close to the stack
3412 as possible. For non-variable sized arguments,
3413 this will be stack-pointer relative addressing. */
3414 if (GET_CODE (offset_rtx) == CONST_INT)
3415 target = plus_constant (args_addr, INTVAL (offset_rtx));
3416 else
3417 {
3418 /* I have no idea how to guarantee that this
3419 will work in the presence of register parameters. */
38a448ca 3420 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
51bbfa0c
RS
3421 target = memory_address (QImode, target);
3422 }
3423
38a448ca 3424 return gen_rtx_MEM (BLKmode, target);
51bbfa0c
RS
3425}
3426#endif
3427\f
3428/* Store a single argument for a function call
3429 into the register or memory area where it must be passed.
3430 *ARG describes the argument value and where to pass it.
3431
3432 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 3433 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
3434
3435 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3436 so must be careful about how the stack is used.
3437
3438 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
 3440   argument stack.  If ACCUMULATE_OUTGOING_ARGS is defined, this indicates
3440 that we need not worry about saving and restoring the stack.
3441
3442 FNDECL is the declaration of the function we are calling. */
3443
3444static void
6f90e075
JW
3445store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl,
3446 reg_parm_stack_space)
51bbfa0c
RS
3447 struct arg_data *arg;
3448 rtx argblock;
3449 int may_be_alloca;
3450 int variable_size;
3451 tree fndecl;
6f90e075 3452 int reg_parm_stack_space;
51bbfa0c
RS
3453{
3454 register tree pval = arg->tree_value;
3455 rtx reg = 0;
3456 int partial = 0;
3457 int used = 0;
69d4ca36 3458#ifdef ACCUMULATE_OUTGOING_ARGS
51bbfa0c 3459 int i, lower_bound, upper_bound;
69d4ca36 3460#endif
51bbfa0c
RS
3461
3462 if (TREE_CODE (pval) == ERROR_MARK)
3463 return;
3464
cc79451b
RK
3465 /* Push a new temporary level for any temporaries we make for
3466 this argument. */
3467 push_temp_slots ();
3468
51bbfa0c
RS
3469#ifdef ACCUMULATE_OUTGOING_ARGS
3470 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3471 save any previous data at that location. */
3472 if (argblock && ! variable_size && arg->stack)
3473 {
3474#ifdef ARGS_GROW_DOWNWARD
0f41302f
MS
3475 /* stack_slot is negative, but we want to index stack_usage_map
3476 with positive values. */
51bbfa0c
RS
3477 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3478 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3479 else
50eb43ca 3480 upper_bound = 0;
51bbfa0c
RS
3481
3482 lower_bound = upper_bound - arg->size.constant;
3483#else
3484 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3485 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3486 else
3487 lower_bound = 0;
3488
3489 upper_bound = lower_bound + arg->size.constant;
3490#endif
3491
3492 for (i = lower_bound; i < upper_bound; i++)
3493 if (stack_usage_map[i]
3494#ifdef REG_PARM_STACK_SPACE
3495 /* Don't store things in the fixed argument area at this point;
3496 it has already been saved. */
6f90e075 3497 && i > reg_parm_stack_space
51bbfa0c
RS
3498#endif
3499 )
3500 break;
3501
3502 if (i != upper_bound)
3503 {
3504 /* We need to make a save area. See what mode we can make it. */
3505 enum machine_mode save_mode
3506 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3507 rtx stack_area
38a448ca
RH
3508 = gen_rtx_MEM (save_mode,
3509 memory_address (save_mode,
3510 XEXP (arg->stack_slot, 0)));
51bbfa0c
RS
3511
3512 if (save_mode == BLKmode)
3513 {
3514 arg->save_area = assign_stack_temp (BLKmode,
6fa51029 3515 arg->size.constant, 0);
3668e76e
JL
3516 MEM_IN_STRUCT_P (arg->save_area)
3517 = AGGREGATE_TYPE_P (TREE_TYPE (arg->tree_value));
cc79451b 3518 preserve_temp_slots (arg->save_area);
51bbfa0c 3519 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 3520 GEN_INT (arg->size.constant),
51bbfa0c
RS
3521 PARM_BOUNDARY / BITS_PER_UNIT);
3522 }
3523 else
3524 {
3525 arg->save_area = gen_reg_rtx (save_mode);
3526 emit_move_insn (arg->save_area, stack_area);
3527 }
3528 }
3529 }
3530#endif
3531
3532 /* If this isn't going to be placed on both the stack and in registers,
3533 set up the register and number of words. */
3534 if (! arg->pass_on_stack)
3535 reg = arg->reg, partial = arg->partial;
3536
3537 if (reg != 0 && partial == 0)
3538 /* Being passed entirely in a register. We shouldn't be called in
3539 this case. */
3540 abort ();
3541
4ab56118
RK
3542 /* If this arg needs special alignment, don't load the registers
3543 here. */
3544 if (arg->n_aligned_regs != 0)
3545 reg = 0;
4ab56118 3546
4ab56118 3547 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
3548 it directly into its stack slot. Otherwise, we can. */
3549 if (arg->value == 0)
d64f5a78
RS
3550 {
3551#ifdef ACCUMULATE_OUTGOING_ARGS
3552 /* stack_arg_under_construction is nonzero if a function argument is
3553 being evaluated directly into the outgoing argument list and
3554 expand_call must take special action to preserve the argument list
3555 if it is called recursively.
3556
3557 For scalar function arguments stack_usage_map is sufficient to
3558 determine which stack slots must be saved and restored. Scalar
3559 arguments in general have pass_on_stack == 0.
3560
3561 If this argument is initialized by a function which takes the
3562 address of the argument (a C++ constructor or a C function
3563 returning a BLKmode structure), then stack_usage_map is
3564 insufficient and expand_call must push the stack around the
3565 function call. Such arguments have pass_on_stack == 1.
3566
3567 Note that it is always safe to set stack_arg_under_construction,
3568 but this generates suboptimal code if set when not needed. */
3569
3570 if (arg->pass_on_stack)
3571 stack_arg_under_construction++;
3572#endif
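/* Example of the pass_on_stack == 1 case described above, for
   illustration only: in a call such as  g (make_big_struct ())  where
   make_big_struct returns a BLKmode structure, the return value is
   built directly in g's outgoing argument slot, so expand_call must
   protect the whole argument list around that nested call;
   stack_usage_map alone cannot describe the conflict.  */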
3a08477a
RK
3573 arg->value = expand_expr (pval,
3574 (partial
3575 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3576 ? NULL_RTX : arg->stack,
e5d70561 3577 VOIDmode, 0);
1efe6448
RK
3578
3579 /* If we are promoting object (or for any other reason) the mode
3580 doesn't agree, convert the mode. */
3581
7373d92d
RK
3582 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3583 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3584 arg->value, arg->unsignedp);
1efe6448 3585
d64f5a78
RS
3586#ifdef ACCUMULATE_OUTGOING_ARGS
3587 if (arg->pass_on_stack)
3588 stack_arg_under_construction--;
3589#endif
3590 }
51bbfa0c
RS
3591
3592 /* Don't allow anything left on stack from computation
3593 of argument to alloca. */
3594 if (may_be_alloca)
3595 do_pending_stack_adjust ();
3596
3597 if (arg->value == arg->stack)
7815214e
RK
3598 {
3599 /* If the value is already in the stack slot, we are done. */
3600 if (flag_check_memory_usage && GET_CODE (arg->stack) == MEM)
3601 {
3602 if (arg->mode == BLKmode)
3603 abort ();
3604
3605 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3606 XEXP (arg->stack, 0), ptr_mode,
3607 GEN_INT (GET_MODE_SIZE (arg->mode)),
3608 TYPE_MODE (sizetype),
956d6950
JL
3609 GEN_INT (MEMORY_USE_RW),
3610 TYPE_MODE (integer_type_node));
7815214e
RK
3611 }
3612 }
1efe6448 3613 else if (arg->mode != BLKmode)
51bbfa0c
RS
3614 {
3615 register int size;
3616
3617 /* Argument is a scalar, not entirely passed in registers.
3618 (If part is passed in registers, arg->partial says how much
3619 and emit_push_insn will take care of putting it there.)
3620
3621 Push it, and if its size is less than the
3622 amount of space allocated to it,
3623 also bump stack pointer by the additional space.
3624 Note that in C the default argument promotions
3625 will prevent such mismatches. */
3626
1efe6448 3627 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
3628 /* Compute how much space the push instruction will push.
3629 On many machines, pushing a byte will advance the stack
3630 pointer by a halfword. */
3631#ifdef PUSH_ROUNDING
3632 size = PUSH_ROUNDING (size);
3633#endif
3634 used = size;
3635
3636 /* Compute how much space the argument should get:
3637 round up to a multiple of the alignment for arguments. */
1efe6448 3638 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
3639 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3640 / (PARM_BOUNDARY / BITS_PER_UNIT))
3641 * (PARM_BOUNDARY / BITS_PER_UNIT));
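/* Worked example, illustrative only: for a prototyped callee taking a
   `short', pushing an HImode value on a machine where PUSH_ROUNDING
   rounds to 4 and PARM_BOUNDARY is 32 gives size == 4 and used == 4;
   with no push rounding, size == 2 and used == 4, and emit_push_insn
   below is passed used - size == 2 as the extra padding to allocate.  */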
3642
3643 /* This isn't already where we want it on the stack, so put it there.
3644 This can either be done with push or copy insns. */
ccf5d244
RK
3645 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
3646 0, partial, reg, used - size,
3647 argblock, ARGS_SIZE_RTX (arg->offset));
51bbfa0c
RS
3648 }
3649 else
3650 {
3651 /* BLKmode, at least partly to be pushed. */
3652
3653 register int excess;
3654 rtx size_rtx;
3655
3656 /* Pushing a nonscalar.
3657 If part is passed in registers, PARTIAL says how much
3658 and emit_push_insn will take care of putting it there. */
3659
3660 /* Round its size up to a multiple
3661 of the allocation unit for arguments. */
3662
3663 if (arg->size.var != 0)
3664 {
3665 excess = 0;
3666 size_rtx = ARGS_SIZE_RTX (arg->size);
3667 }
3668 else
3669 {
51bbfa0c
RS
3670 /* PUSH_ROUNDING has no effect on us, because
3671 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 3672 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 3673 + partial * UNITS_PER_WORD);
e4f93898 3674 size_rtx = expr_size (pval);
51bbfa0c
RS
3675 }
3676
1efe6448 3677 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c
RS
3678 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3679 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset));
3680 }
3681
3682
3683 /* Unless this is a partially-in-register argument, the argument is now
3684 in the stack.
3685
3686 ??? Note that this can change arg->value from arg->stack to
3687 arg->stack_slot and it matters when they are not the same.
3688 It isn't totally clear that this is correct in all cases. */
3689 if (partial == 0)
3690 arg->value = arg->stack_slot;
3691
3692 /* Once we have pushed something, pops can't safely
3693 be deferred during the rest of the arguments. */
3694 NO_DEFER_POP;
3695
3696 /* ANSI doesn't require a sequence point here,
3697 but PCC has one, so this will avoid some problems. */
3698 emit_queue ();
3699
db907e7b
RK
3700 /* Free any temporary slots made in processing this argument. Show
3701 that we might have taken the address of something and pushed that
3702 as an operand. */
3703 preserve_temp_slots (NULL_RTX);
51bbfa0c 3704 free_temp_slots ();
cc79451b 3705 pop_temp_slots ();
51bbfa0c
RS
3706
3707#ifdef ACCUMULATE_OUTGOING_ARGS
3708 /* Now mark the segment we just used. */
3709 if (argblock && ! variable_size && arg->stack)
3710 for (i = lower_bound; i < upper_bound; i++)
3711 stack_usage_map[i] = 1;
3712#endif
3713}