51bbfa0c 1/* Convert function calls to rtl insns, for GNU C compiler.
45d44c98 2 Copyright (C) 1989, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
20
21#include "config.h"
22#include "rtl.h"
23#include "tree.h"
24#include "flags.h"
25#include "expr.h"
4f90e4a0 26#ifdef __STDC__
04fe4385 27#include <stdarg.h>
4f90e4a0 28#else
04fe4385 29#include <varargs.h>
4f90e4a0 30#endif
31#include "insn-flags.h"
32
33/* Decide whether a function's arguments should be processed
34 from first to last or from last to first.
35
36 They should if the stack and args grow in opposite directions, but
37 only if we have push insns. */
51bbfa0c 38
51bbfa0c 39#ifdef PUSH_ROUNDING
bbc8a071 40
40083ddf 41#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
42#define PUSH_ARGS_REVERSED /* If it's last to first */
43#endif
bbc8a071 44
45#endif
46
47/* Like STACK_BOUNDARY but in units of bytes, not bits. */
48#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
49
50/* Data structure and subroutines used within expand_call. */
51
52struct arg_data
53{
54 /* Tree node for this argument. */
55 tree tree_value;
56 /* Mode for value; TYPE_MODE unless promoted. */
57 enum machine_mode mode;
58 /* Current RTL value for argument, or 0 if it isn't precomputed. */
59 rtx value;
 60 /* Initially-computed RTL value for argument; only for const functions. */
61 rtx initial_value;
 62 /* Register to pass this argument in, 0 if passed on stack, or a
cacbd532 63 PARALLEL if the arg is to be copied into multiple non-contiguous
64 registers. */
65 rtx reg;
66 /* If REG was promoted from the actual mode of the argument expression,
67 indicates whether the promotion is sign- or zero-extended. */
68 int unsignedp;
69 /* Number of registers to use. 0 means put the whole arg in registers.
70 Also 0 if not passed in registers. */
71 int partial;
72 /* Non-zero if argument must be passed on stack.
73 Note that some arguments may be passed on the stack
74 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
75 pass_on_stack identifies arguments that *cannot* go in registers. */
76 int pass_on_stack;
77 /* Offset of this argument from beginning of stack-args. */
78 struct args_size offset;
79 /* Similar, but offset to the start of the stack slot. Different from
80 OFFSET if this arg pads downward. */
81 struct args_size slot_offset;
 82 /* Size of this argument on the stack, rounded up for any padding it gets;
83 parts of the argument passed in registers do not count.
84 If REG_PARM_STACK_SPACE is defined, then register parms
85 are counted here as well. */
86 struct args_size size;
87 /* Location on the stack at which parameter should be stored. The store
88 has already been done if STACK == VALUE. */
89 rtx stack;
90 /* Location on the stack of the start of this argument slot. This can
91 differ from STACK if this arg pads downward. This location is known
92 to be aligned to FUNCTION_ARG_BOUNDARY. */
93 rtx stack_slot;
94#ifdef ACCUMULATE_OUTGOING_ARGS
95 /* Place that this stack area has been saved, if needed. */
96 rtx save_area;
97#endif
98 /* If an argument's alignment does not permit direct copying into registers,
99 copy in smaller-sized pieces into pseudos. These are stored in a
100 block pointed to by this field. The next field says how many
101 word-sized pseudos we made. */
102 rtx *aligned_regs;
103 int n_aligned_regs;
104};
105
106#ifdef ACCUMULATE_OUTGOING_ARGS
b94301c2 107/* A vector of one char per byte of stack space. A byte is non-zero if
108 the corresponding stack location has been used.
109 This vector is used to prevent a function call within an argument from
110 clobbering any stack already set up. */
111static char *stack_usage_map;
112
113/* Size of STACK_USAGE_MAP. */
114static int highest_outgoing_arg_in_use;
115
116/* stack_arg_under_construction is nonzero when an argument may be
117 initialized with a constructor call (including a C function that
118 returns a BLKmode struct) and expand_call must take special action
119 to make sure the object being constructed does not overlap the
120 argument list for the constructor call. */
121int stack_arg_under_construction;
122#endif
123
322e3e34 124static int calls_function PROTO((tree, int));
9f4d9f6c 125static int calls_function_1 PROTO((tree, int));
126static void emit_call_1 PROTO((rtx, tree, tree, int, int, rtx, rtx,
127 int, rtx, int));
128static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
129 tree, int));
51bbfa0c 130\f
131/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
132 `alloca'.
133
134 If WHICH is 0, return 1 if EXP contains a call to any function.
 135 Actually, we only need to return 1 if evaluating EXP would require pushing
136 arguments on the stack, but that is too difficult to compute, so we just
137 assume any function call might require the stack. */
51bbfa0c 138
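/* Chain of SAVE_EXPRs already examined by calls_function_1, kept so that
   the same SAVE_EXPR is not walked more than once. */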
139static tree calls_function_save_exprs;
140
51bbfa0c 141static int
1ce0cb53 142calls_function (exp, which)
51bbfa0c 143 tree exp;
1ce0cb53 144 int which;
145{
146 int val;
147 calls_function_save_exprs = 0;
148 val = calls_function_1 (exp, which);
149 calls_function_save_exprs = 0;
150 return val;
151}
152
153static int
154calls_function_1 (exp, which)
155 tree exp;
156 int which;
157{
158 register int i;
159 enum tree_code code = TREE_CODE (exp);
160 int type = TREE_CODE_CLASS (code);
161 int length = tree_code_length[(int) code];
51bbfa0c 162
ddd5a7c1 163 /* If this code is language-specific, we don't know what it will do. */
164 if ((int) code >= NUM_TREE_CODES)
165 return 1;
51bbfa0c 166
0207efa2 167 /* Only expressions and references can contain calls. */
168 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
169 && type != 'b')
170 return 0;
171
0207efa2 172 switch (code)
173 {
174 case CALL_EXPR:
175 if (which == 0)
176 return 1;
177 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
178 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
179 == FUNCTION_DECL))
180 {
181 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
182
183 if ((DECL_BUILT_IN (fndecl)
184 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
185 || (DECL_SAVED_INSNS (fndecl)
186 && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
187 & FUNCTION_FLAGS_CALLS_ALLOCA)))
188 return 1;
189 }
190
191 /* Third operand is RTL. */
192 length = 2;
193 break;
194
195 case SAVE_EXPR:
196 if (SAVE_EXPR_RTL (exp) != 0)
197 return 0;
198 if (value_member (exp, calls_function_save_exprs))
199 return 0;
200 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
201 calls_function_save_exprs);
202 return (TREE_OPERAND (exp, 0) != 0
203 && calls_function_1 (TREE_OPERAND (exp, 0), which));
204
205 case BLOCK:
206 {
207 register tree local;
208
209 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
1ce0cb53 210 if (DECL_INITIAL (local) != 0
1c8d7aef 211 && calls_function_1 (DECL_INITIAL (local), which))
212 return 1;
213 }
214 {
215 register tree subblock;
216
217 for (subblock = BLOCK_SUBBLOCKS (exp);
218 subblock;
219 subblock = TREE_CHAIN (subblock))
1c8d7aef 220 if (calls_function_1 (subblock, which))
221 return 1;
222 }
223 return 0;
224
225 case METHOD_CALL_EXPR:
226 length = 3;
227 break;
228
229 case WITH_CLEANUP_EXPR:
230 length = 1;
231 break;
232
233 case RTL_EXPR:
234 return 0;
235 }
236
237 for (i = 0; i < length; i++)
238 if (TREE_OPERAND (exp, i) != 0
1c8d7aef 239 && calls_function_1 (TREE_OPERAND (exp, i), which))
240 return 1;
241
242 return 0;
243}
244\f
245/* Force FUNEXP into a form suitable for the address of a CALL,
246 and return that as an rtx. Also load the static chain register
247 if FNDECL is a nested function.
248
249 CALL_FUSAGE points to a variable holding the prospective
250 CALL_INSN_FUNCTION_USAGE information. */
51bbfa0c 251
03dacb02 252rtx
77cac2f2 253prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
254 rtx funexp;
255 tree fndecl;
77cac2f2 256 rtx *call_fusage;
01368078 257 int reg_parm_seen;
258{
259 rtx static_chain_value = 0;
260
261 funexp = protect_from_queue (funexp, 0);
262
263 if (fndecl != 0)
0f41302f 264 /* Get possible static chain value for nested function in C. */
265 static_chain_value = lookup_static_chain (fndecl);
266
267 /* Make a valid memory address and copy constants thru pseudo-regs,
268 but not for a constant address if -fno-function-cse. */
269 if (GET_CODE (funexp) != SYMBOL_REF)
270 funexp =
271#ifdef SMALL_REGISTER_CLASSES
272 /* If we are using registers for parameters, force the
273 function address into a register now. */
274 reg_parm_seen ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
275 :
276#endif
277 memory_address (FUNCTION_MODE, funexp);
278 else
279 {
280#ifndef NO_FUNCTION_CSE
281 if (optimize && ! flag_no_function_cse)
282#ifdef NO_RECURSIVE_FUNCTION_CSE
283 if (fndecl != current_function_decl)
284#endif
285 funexp = force_reg (Pmode, funexp);
286#endif
287 }
288
289 if (static_chain_value != 0)
290 {
291 emit_move_insn (static_chain_rtx, static_chain_value);
292
293 if (GET_CODE (static_chain_rtx) == REG)
294 use_reg (call_fusage, static_chain_rtx);
295 }
296
297 return funexp;
298}
299
300/* Generate instructions to call function FUNEXP,
301 and optionally pop the results.
302 The CALL_INSN is the first insn generated.
303
 304 FNDECL is the declaration node of the function. This is given to the
305 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
306
307 FUNTYPE is the data type of the function, or, for a library call,
308 the identifier for the name of the call. This is given to the
309 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
310
311 STACK_SIZE is the number of bytes of arguments on the stack,
312 rounded up to STACK_BOUNDARY; zero if the size is variable.
313 This is both to put into the call insn and
314 to generate explicit popping code if necessary.
315
316 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
317 It is zero if this call doesn't want a structure value.
318
319 NEXT_ARG_REG is the rtx that results from executing
320 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
321 just after all the args have had their registers assigned.
322 This could be whatever you like, but normally it is the first
323 arg-register beyond those used for args in this call,
324 or 0 if all the arg-registers are used in this call.
325 It is passed on to `gen_call' so you can put this info in the call insn.
326
327 VALREG is a hard register in which a value is returned,
328 or 0 if the call does not return a value.
329
330 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
331 the args to this call were processed.
332 We restore `inhibit_defer_pop' to that value.
333
334 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
335 denote registers used by the called function.
336
337 IS_CONST is true if this is a `const' call. */
338
322e3e34 339static void
2c8da025 340emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
341 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
342 is_const)
51bbfa0c 343 rtx funexp;
2c8da025 344 tree fndecl;
345 tree funtype;
346 int stack_size;
347 int struct_value_size;
348 rtx next_arg_reg;
349 rtx valreg;
350 int old_inhibit_defer_pop;
77cac2f2 351 rtx call_fusage;
352 int is_const;
353{
354 rtx stack_size_rtx = GEN_INT (stack_size);
355 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
356 rtx call_insn;
357 int already_popped = 0;
358
359 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
360 and we don't want to load it into a register as an optimization,
361 because prepare_call_address already did it if it should be done. */
362 if (GET_CODE (funexp) != SYMBOL_REF)
363 funexp = memory_address (FUNCTION_MODE, funexp);
364
365#ifndef ACCUMULATE_OUTGOING_ARGS
366#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
367 if (HAVE_call_pop && HAVE_call_value_pop
368 && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
369 || stack_size == 0))
51bbfa0c 370 {
2c8da025 371 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
372 rtx pat;
373
374 /* If this subroutine pops its own args, record that in the call insn
375 if possible, for the sake of frame pointer elimination. */
2c8da025 376
377 if (valreg)
378 pat = gen_call_value_pop (valreg,
379 gen_rtx (MEM, FUNCTION_MODE, funexp),
380 stack_size_rtx, next_arg_reg, n_pop);
381 else
382 pat = gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, funexp),
383 stack_size_rtx, next_arg_reg, n_pop);
384
385 emit_call_insn (pat);
386 already_popped = 1;
387 }
388 else
389#endif
390#endif
391
392#if defined (HAVE_call) && defined (HAVE_call_value)
393 if (HAVE_call && HAVE_call_value)
394 {
395 if (valreg)
396 emit_call_insn (gen_call_value (valreg,
397 gen_rtx (MEM, FUNCTION_MODE, funexp),
398 stack_size_rtx, next_arg_reg,
399 NULL_RTX));
400 else
401 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp),
402 stack_size_rtx, next_arg_reg,
403 struct_value_size_rtx));
404 }
405 else
406#endif
407 abort ();
408
77cac2f2 409 /* Find the CALL insn we just emitted. */
410 for (call_insn = get_last_insn ();
411 call_insn && GET_CODE (call_insn) != CALL_INSN;
412 call_insn = PREV_INSN (call_insn))
413 ;
414
415 if (! call_insn)
416 abort ();
417
418 /* Put the register usage information on the CALL. If there is already
419 some usage information, put ours at the end. */
420 if (CALL_INSN_FUNCTION_USAGE (call_insn))
421 {
422 rtx link;
423
424 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
425 link = XEXP (link, 1))
426 ;
427
428 XEXP (link, 1) = call_fusage;
429 }
430 else
431 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
432
433 /* If this is a const call, then set the insn's unchanging bit. */
434 if (is_const)
435 CONST_CALL_P (call_insn) = 1;
436
437 /* Restore this now, so that we do defer pops for this call's args
438 if the context of the call as a whole permits. */
439 inhibit_defer_pop = old_inhibit_defer_pop;
440
441#ifndef ACCUMULATE_OUTGOING_ARGS
442 /* If returning from the subroutine does not automatically pop the args,
443 we need an instruction to pop them sooner or later.
444 Perhaps do it now; perhaps just record how much space to pop later.
445
446 If returning from the subroutine does pop the args, indicate that the
447 stack pointer will be changed. */
448
2c8da025 449 if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
450 {
451 if (!already_popped)
77cac2f2 452 CALL_INSN_FUNCTION_USAGE (call_insn) =
96fd013f 453 gen_rtx (EXPR_LIST, VOIDmode,
984d9166 454 gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx),
77cac2f2 455 CALL_INSN_FUNCTION_USAGE (call_insn));
2c8da025 456 stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
e5d70561 457 stack_size_rtx = GEN_INT (stack_size);
458 }
459
460 if (stack_size != 0)
461 {
70a73141 462 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
463 pending_stack_adjust += stack_size;
464 else
465 adjust_stack (stack_size_rtx);
466 }
467#endif
468}
469
470/* Generate all the code for a function call
471 and return an rtx for its value.
472 Store the value in TARGET (specified as an rtx) if convenient.
473 If the value is stored in TARGET then TARGET is returned.
474 If IGNORE is nonzero, then we ignore the value of the function call. */
475
476rtx
8129842c 477expand_call (exp, target, ignore)
478 tree exp;
479 rtx target;
480 int ignore;
481{
482 /* List of actual parameters. */
483 tree actparms = TREE_OPERAND (exp, 1);
484 /* RTX for the function to be called. */
485 rtx funexp;
486 /* Tree node for the function to be called (not the address!). */
487 tree funtree;
488 /* Data type of the function. */
489 tree funtype;
490 /* Declaration of the function being called,
491 or 0 if the function is computed (not known by name). */
492 tree fndecl = 0;
493 char *name = 0;
494
495 /* Register in which non-BLKmode value will be returned,
496 or 0 if no value or if value is BLKmode. */
497 rtx valreg;
498 /* Address where we should return a BLKmode value;
499 0 if value not BLKmode. */
500 rtx structure_value_addr = 0;
501 /* Nonzero if that address is being passed by treating it as
502 an extra, implicit first parameter. Otherwise,
503 it is passed by being copied directly into struct_value_rtx. */
504 int structure_value_addr_parm = 0;
505 /* Size of aggregate value wanted, or zero if none wanted
506 or if we are using the non-reentrant PCC calling convention
507 or expecting the value in registers. */
508 int struct_value_size = 0;
509 /* Nonzero if called function returns an aggregate in memory PCC style,
510 by returning the address of where to find it. */
511 int pcc_struct_value = 0;
512
513 /* Number of actual parameters in this call, including struct value addr. */
514 int num_actuals;
515 /* Number of named args. Args after this are anonymous ones
516 and they must all go on the stack. */
517 int n_named_args;
518 /* Count arg position in order args appear. */
519 int argpos;
520
521 /* Vector of information about each argument.
522 Arguments are numbered in the order they will be pushed,
523 not the order they are written. */
524 struct arg_data *args;
525
526 /* Total size in bytes of all the stack-parms scanned so far. */
527 struct args_size args_size;
528 /* Size of arguments before any adjustments (such as rounding). */
529 struct args_size original_args_size;
530 /* Data on reg parms scanned so far. */
531 CUMULATIVE_ARGS args_so_far;
532 /* Nonzero if a reg parm has been scanned. */
533 int reg_parm_seen;
efd65a8b 534 /* Nonzero if this is an indirect function call. */
535
536 /* Nonzero if we must avoid push-insns in the args for this call.
537 If stack space is allocated for register parameters, but not by the
538 caller, then it is preallocated in the fixed part of the stack frame.
539 So the entire argument block must then be preallocated (i.e., we
540 ignore PUSH_ROUNDING in that case). */
541
542#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
543 int must_preallocate = 1;
544#else
545#ifdef PUSH_ROUNDING
546 int must_preallocate = 0;
547#else
548 int must_preallocate = 1;
549#endif
550#endif
551
f72aed24 552 /* Size of the stack reserved for parameter registers. */
553 int reg_parm_stack_space = 0;
554
555 /* 1 if scanning parms front to back, -1 if scanning back to front. */
556 int inc;
557 /* Address of space preallocated for stack parms
558 (on machines that lack push insns), or 0 if space not preallocated. */
559 rtx argblock = 0;
560
561 /* Nonzero if it is plausible that this is a call to alloca. */
562 int may_be_alloca;
563 /* Nonzero if this is a call to setjmp or a related function. */
564 int returns_twice;
565 /* Nonzero if this is a call to `longjmp'. */
566 int is_longjmp;
567 /* Nonzero if this is a call to an inline function. */
568 int is_integrable = 0;
569 /* Nonzero if this is a call to a `const' function.
570 Note that only explicitly named functions are handled as `const' here. */
571 int is_const = 0;
572 /* Nonzero if this is a call to a `volatile' function. */
573 int is_volatile = 0;
574#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
575 /* Define the boundary of the register parm stack space that needs to be
 576 saved, if any. */
577 int low_to_save = -1, high_to_save;
578 rtx save_area = 0; /* Place that it is saved */
579#endif
580
581#ifdef ACCUMULATE_OUTGOING_ARGS
582 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
583 char *initial_stack_usage_map = stack_usage_map;
584#endif
585
586 rtx old_stack_level = 0;
79be3418 587 int old_pending_adj = 0;
2f4aa534 588 int old_stack_arg_under_construction;
589 int old_inhibit_defer_pop = inhibit_defer_pop;
590 tree old_cleanups = cleanups_this_call;
77cac2f2 591 rtx call_fusage = 0;
51bbfa0c 592 register tree p;
4ab56118 593 register int i, j;
594
595 /* See if we can find a DECL-node for the actual function.
596 As a result, decide whether this is a call to an integrable function. */
597
598 p = TREE_OPERAND (exp, 0);
599 if (TREE_CODE (p) == ADDR_EXPR)
600 {
601 fndecl = TREE_OPERAND (p, 0);
602 if (TREE_CODE (fndecl) != FUNCTION_DECL)
fdff8c6d 603 fndecl = 0;
604 else
605 {
606 if (!flag_no_inline
607 && fndecl != current_function_decl
aa10adff 608 && DECL_INLINE (fndecl)
609 && DECL_SAVED_INSNS (fndecl)
610 && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
611 is_integrable = 1;
612 else if (! TREE_ADDRESSABLE (fndecl))
613 {
13d39dbc 614 /* In case this function later becomes inlinable,
615 record that there was already a non-inline call to it.
616
617 Use abstraction instead of setting TREE_ADDRESSABLE
618 directly. */
619 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
620 && optimize > 0)
621 {
622 warning_with_decl (fndecl, "can't inline call to `%s'");
623 warning ("called from here");
624 }
625 mark_addressable (fndecl);
626 }
627
628 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
629 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
51bbfa0c 630 is_const = 1;
631
632 if (TREE_THIS_VOLATILE (fndecl))
633 is_volatile = 1;
634 }
635 }
636
 637 /* If we don't have a specific function to call, see if we have a
638 constant or `noreturn' function from the type. */
639 if (fndecl == 0)
640 {
641 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
642 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
643 }
644
645#ifdef REG_PARM_STACK_SPACE
646#ifdef MAYBE_REG_PARM_STACK_SPACE
647 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
648#else
649 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
650#endif
651#endif
652
653 /* Warn if this value is an aggregate type,
654 regardless of which calling convention we are using for it. */
05e3bdb9 655 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
656 warning ("function call has aggregate value");
657
658 /* Set up a place to return a structure. */
659
660 /* Cater to broken compilers. */
661 if (aggregate_value_p (exp))
662 {
663 /* This call returns a big structure. */
664 is_const = 0;
665
666#ifdef PCC_STATIC_STRUCT_RETURN
667 {
668 pcc_struct_value = 1;
669 /* Easier than making that case work right. */
670 if (is_integrable)
671 {
672 /* In case this is a static function, note that it has been
673 used. */
674 if (! TREE_ADDRESSABLE (fndecl))
675 mark_addressable (fndecl);
676 is_integrable = 0;
677 }
678 }
679#else /* not PCC_STATIC_STRUCT_RETURN */
680 {
681 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
51bbfa0c 682
683 if (target && GET_CODE (target) == MEM)
684 structure_value_addr = XEXP (target, 0);
685 else
686 {
687 /* Assign a temporary on the stack to hold the value. */
51bbfa0c 688
689 /* For variable-sized objects, we must be called with a target
690 specified. If we were to allocate space on the stack here,
691 we would have no way of knowing when to free it. */
51bbfa0c 692
693 if (struct_value_size < 0)
694 abort ();
695
696 structure_value_addr
697 = XEXP (assign_stack_temp (BLKmode, struct_value_size, 1), 0);
698 MEM_IN_STRUCT_P (structure_value_addr)
699 = AGGREGATE_TYPE_P (TREE_TYPE (exp));
700 target = 0;
701 }
702 }
703#endif /* not PCC_STATIC_STRUCT_RETURN */
704 }
705
706 /* If called function is inline, try to integrate it. */
707
708 if (is_integrable)
709 {
710 rtx temp;
2f4aa534 711 rtx before_call = get_last_insn ();
712
713 temp = expand_inline_function (fndecl, actparms, target,
714 ignore, TREE_TYPE (exp),
715 structure_value_addr);
716
717 /* If inlining succeeded, return. */
854e97f0 718 if ((HOST_WIDE_INT) temp != -1)
51bbfa0c 719 {
720 if (flag_short_temps)
721 {
722 /* Perform all cleanups needed for the arguments of this
723 call (i.e. destructors in C++). It is ok if these
724 destructors clobber RETURN_VALUE_REG, because the
725 only time we care about this is when TARGET is that
726 register. But in C++, we take care to never return
727 that register directly. */
728 expand_cleanups_to (old_cleanups);
729 }
730
731#ifdef ACCUMULATE_OUTGOING_ARGS
732 /* If the outgoing argument list must be preserved, push
733 the stack before executing the inlined function if it
734 makes any calls. */
735
736 for (i = reg_parm_stack_space - 1; i >= 0; i--)
737 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
738 break;
739
740 if (stack_arg_under_construction || i >= 0)
741 {
742 rtx first_insn
743 = before_call ? NEXT_INSN (before_call) : get_insns ();
744 rtx insn, seq;
2f4aa534 745
746 /* Look for a call in the inline function code.
747 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
748 nonzero then there is a call and it is not necessary
749 to scan the insns. */
750
751 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
a1917650 752 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
753 if (GET_CODE (insn) == CALL_INSN)
754 break;
755
756 if (insn)
757 {
758 /* Reserve enough stack space so that the largest
759 argument list of any function call in the inline
760 function does not overlap the argument list being
761 evaluated. This is usually an overestimate because
762 allocate_dynamic_stack_space reserves space for an
763 outgoing argument list in addition to the requested
764 space, but there is no way to ask for stack space such
765 that an argument list of a certain length can be
766 safely constructed. */
767
768 int adjust = OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl));
769#ifdef REG_PARM_STACK_SPACE
770 /* Add the stack space reserved for register arguments
771 in the inline function. What is really needed is the
772 largest value of reg_parm_stack_space in the inline
773 function, but that is not available. Using the current
774 value of reg_parm_stack_space is wrong, but gives
775 correct results on all supported machines. */
776 adjust += reg_parm_stack_space;
777#endif
2f4aa534 778 start_sequence ();
ccf5d244 779 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
780 allocate_dynamic_stack_space (GEN_INT (adjust),
781 NULL_RTX, BITS_PER_UNIT);
782 seq = get_insns ();
783 end_sequence ();
a1917650 784 emit_insns_before (seq, first_insn);
e5d70561 785 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
786 }
787 }
d64f5a78 788#endif
789
790 /* If the result is equivalent to TARGET, return TARGET to simplify
791 checks in store_expr. They can be equivalent but not equal in the
792 case of a function that returns BLKmode. */
793 if (temp != target && rtx_equal_p (temp, target))
794 return target;
795 return temp;
796 }
797
798 /* If inlining failed, mark FNDECL as needing to be compiled
799 separately after all. If function was declared inline,
800 give a warning. */
801 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
da8c1713 802 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
803 {
804 warning_with_decl (fndecl, "inlining failed in call to `%s'");
805 warning ("called from here");
806 }
807 mark_addressable (fndecl);
808 }
809
810 /* When calling a const function, we must pop the stack args right away,
811 so that the pop is deleted or moved with the call. */
812 if (is_const)
813 NO_DEFER_POP;
814
815 function_call_count++;
816
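 /* Record the callee's name, if known; it is used below to recognize
    calls to alloca and to setjmp/longjmp-style functions. */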
817 if (fndecl && DECL_NAME (fndecl))
818 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
819
820#if 0
821 /* Unless it's a call to a specific function that isn't alloca,
822 if it has one argument, we must assume it might be alloca. */
823
824 may_be_alloca =
825 (!(fndecl != 0 && strcmp (name, "alloca"))
826 && actparms != 0
827 && TREE_CHAIN (actparms) == 0);
828#else
829 /* We assume that alloca will always be called by name. It
830 makes no sense to pass it as a pointer-to-function to
831 anything that does not understand its behavior. */
832 may_be_alloca =
833 (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
834 && name[0] == 'a'
835 && ! strcmp (name, "alloca"))
836 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
837 && name[0] == '_'
838 && ! strcmp (name, "__builtin_alloca"))));
839#endif
840
841 /* See if this is a call to a function that can return more than once
842 or a call to longjmp. */
843
844 returns_twice = 0;
845 is_longjmp = 0;
846
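 /* The length test cheaply rejects any name longer than the longest
    setjmp/longjmp-style name recognized below ("setjmp_syscall"). */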
847 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15)
848 {
849 char *tname = name;
850
8d515633 851 /* Disregard prefix _, __ or __x. */
51bbfa0c 852 if (name[0] == '_')
853 {
854 if (name[1] == '_' && name[2] == 'x')
855 tname += 3;
856 else if (name[1] == '_')
857 tname += 2;
858 else
859 tname += 1;
860 }
861
862 if (tname[0] == 's')
863 {
864 returns_twice
865 = ((tname[1] == 'e'
866 && (! strcmp (tname, "setjmp")
867 || ! strcmp (tname, "setjmp_syscall")))
868 || (tname[1] == 'i'
869 && ! strcmp (tname, "sigsetjmp"))
870 || (tname[1] == 'a'
871 && ! strcmp (tname, "savectx")));
872 if (tname[1] == 'i'
873 && ! strcmp (tname, "siglongjmp"))
874 is_longjmp = 1;
875 }
876 else if ((tname[0] == 'q' && tname[1] == 's'
877 && ! strcmp (tname, "qsetjmp"))
878 || (tname[0] == 'v' && tname[1] == 'f'
879 && ! strcmp (tname, "vfork")))
880 returns_twice = 1;
881
882 else if (tname[0] == 'l' && tname[1] == 'o'
883 && ! strcmp (tname, "longjmp"))
884 is_longjmp = 1;
885 }
886
887 if (may_be_alloca)
888 current_function_calls_alloca = 1;
889
890 /* Don't let pending stack adjusts add up to too much.
891 Also, do all pending adjustments now
892 if there is any chance this might be a call to alloca. */
893
894 if (pending_stack_adjust >= 32
895 || (pending_stack_adjust > 0 && may_be_alloca))
896 do_pending_stack_adjust ();
897
898 /* Operand 0 is a pointer-to-function; get the type of the function. */
899 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
900 if (TREE_CODE (funtype) != POINTER_TYPE)
901 abort ();
902 funtype = TREE_TYPE (funtype);
903
904 /* Push the temporary stack slot level so that we can free any temporaries
905 we make. */
906 push_temp_slots ();
907
908 /* Start updating where the next arg would go.
909
910 On some machines (such as the PA) indirect calls have a different
911 calling convention than normal calls. The last argument in
912 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
913 or not. */
914 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
915
916 /* If struct_value_rtx is 0, it means pass the address
917 as if it were an extra parameter. */
918 if (structure_value_addr && struct_value_rtx == 0)
919 {
920 /* If structure_value_addr is a REG other than
 921 virtual_outgoing_args_rtx, we can always use it. If it
922 is not a REG, we must always copy it into a register.
923 If it is virtual_outgoing_args_rtx, we must copy it to another
924 register in some cases. */
925 rtx temp = (GET_CODE (structure_value_addr) != REG
d64f5a78 926#ifdef ACCUMULATE_OUTGOING_ARGS
927 || (stack_arg_under_construction
928 && structure_value_addr == virtual_outgoing_args_rtx)
d64f5a78 929#endif
930 ? copy_addr_to_reg (structure_value_addr)
931 : structure_value_addr);
d64f5a78 932
933 actparms
934 = tree_cons (error_mark_node,
935 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2f4aa534 936 temp),
937 actparms);
938 structure_value_addr_parm = 1;
939 }
940
941 /* Count the arguments and set NUM_ACTUALS. */
942 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
943 num_actuals = i;
944
945 /* Compute number of named args.
946 Normally, don't include the last named arg if anonymous args follow.
947 We do include the last named arg if STRICT_ARGUMENT_NAMING is defined.
948 (If no anonymous args follow, the result of list_length is actually
949 one too large. This is harmless.)
51bbfa0c 950
951 If SETUP_INCOMING_VARARGS is defined and STRICT_ARGUMENT_NAMING is not,
952 this machine will be able to place unnamed args that were passed in
953 registers into the stack. So treat all args as named. This allows the
 954 insns emitted for a specific argument list to be independent of the
955 function declaration.
956
957 If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
958 way to pass unnamed args in registers, so we must force them into
959 memory. */
469225d8 960#if !defined(SETUP_INCOMING_VARARGS) || defined(STRICT_ARGUMENT_NAMING)
961 if (TYPE_ARG_TYPES (funtype) != 0)
962 n_named_args
0ee902cb 963 = (list_length (TYPE_ARG_TYPES (funtype))
469225d8 964#ifndef STRICT_ARGUMENT_NAMING
965 /* Don't include the last named arg. */
966 - 1
469225d8 967#endif
968 /* Count the struct value address, if it is passed as a parm. */
969 + structure_value_addr_parm);
970 else
971#endif
972 /* If we know nothing, treat all args as named. */
973 n_named_args = num_actuals;
974
975 /* Make a vector to hold all the information about each arg. */
976 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
4c9a05bc 977 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
978
979 args_size.constant = 0;
980 args_size.var = 0;
981
982 /* In this loop, we consider args in the order they are written.
0ee902cb 983 We fill up ARGS from the front or from the back if necessary
51bbfa0c
RS
984 so that in any case the first arg to be pushed ends up at the front. */
985
986#ifdef PUSH_ARGS_REVERSED
987 i = num_actuals - 1, inc = -1;
988 /* In this case, must reverse order of args
989 so that we compute and push the last arg first. */
990#else
991 i = 0, inc = 1;
992#endif
993
994 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
995 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
996 {
997 tree type = TREE_TYPE (TREE_VALUE (p));
321e0bba 998 int unsignedp;
84b55618 999 enum machine_mode mode;
1000
1001 args[i].tree_value = TREE_VALUE (p);
1002
1003 /* Replace erroneous argument with constant zero. */
1004 if (type == error_mark_node || TYPE_SIZE (type) == 0)
1005 args[i].tree_value = integer_zero_node, type = integer_type_node;
1006
1007 /* If TYPE is a transparent union, pass things the way we would
1008 pass the first field of the union. We have already verified that
1009 the modes are the same. */
1010 if (TYPE_TRANSPARENT_UNION (type))
1011 type = TREE_TYPE (TYPE_FIELDS (type));
1012
1013 /* Decide where to pass this arg.
1014
1015 args[i].reg is nonzero if all or part is passed in registers.
1016
1017 args[i].partial is nonzero if part but not all is passed in registers,
1018 and the exact value says how many words are passed in registers.
1019
1020 args[i].pass_on_stack is nonzero if the argument must at least be
1021 computed on the stack. It may then be loaded back into registers
1022 if args[i].reg is nonzero.
1023
1024 These decisions are driven by the FUNCTION_... macros and must agree
1025 with those made by function.c. */
1026
51bbfa0c 1027 /* See if this argument should be passed by invisible reference. */
1028 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1029 && contains_placeholder_p (TYPE_SIZE (type)))
657bb6dc 1030 || TREE_ADDRESSABLE (type)
1031#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1032 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
1033 type, argpos < n_named_args)
1034#endif
1035 )
51bbfa0c 1036 {
1037#ifdef FUNCTION_ARG_CALLEE_COPIES
1038 if (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type), type,
1039 argpos < n_named_args)
1040 /* If it's in a register, we must make a copy of it too. */
1041 /* ??? Is this a sufficient test? Is there a better one? */
1042 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1043 && REG_P (DECL_RTL (args[i].tree_value)))
1044 && ! TREE_ADDRESSABLE (type))
51bbfa0c 1045 {
1046 args[i].tree_value = build1 (ADDR_EXPR,
1047 build_pointer_type (type),
1048 args[i].tree_value);
1049 type = build_pointer_type (type);
1050 }
1051 else
5e0de251 1052#endif
82c0ff02 1053 {
1054 /* We make a copy of the object and pass the address to the
1055 function being called. */
1056 rtx copy;
51bbfa0c 1057
1058 if (TYPE_SIZE (type) == 0
1059 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1060 {
1061 /* This is a variable-sized object. Make space on the stack
1062 for it. */
1063 rtx size_rtx = expr_size (TREE_VALUE (p));
1064
1065 if (old_stack_level == 0)
1066 {
1067 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1068 old_pending_adj = pending_stack_adjust;
1069 pending_stack_adjust = 0;
1070 }
1071
1072 copy = gen_rtx (MEM, BLKmode,
1073 allocate_dynamic_stack_space (size_rtx,
1074 NULL_RTX,
1075 TYPE_ALIGN (type)));
1076 }
1077 else
1078 {
1079 int size = int_size_in_bytes (type);
6fa51029 1080 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
5e0de251 1081 }
51bbfa0c 1082
05e3bdb9 1083 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
6e87e69e 1084
5e0de251 1085 store_expr (args[i].tree_value, copy, 0);
ba3a053e 1086 is_const = 0;
1087
1088 args[i].tree_value = build1 (ADDR_EXPR,
1089 build_pointer_type (type),
1090 make_tree (type, copy));
1091 type = build_pointer_type (type);
1092 }
51bbfa0c 1093 }
51bbfa0c 1094
84b55618 1095 mode = TYPE_MODE (type);
321e0bba 1096 unsignedp = TREE_UNSIGNED (type);
1097
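 /* Promote the argument to a wider mode if the target conventions ask
    for it; promote_mode may also adjust UNSIGNEDP, which is used when
    the value is converted later on. */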
1098#ifdef PROMOTE_FUNCTION_ARGS
321e0bba 1099 mode = promote_mode (type, mode, &unsignedp, 1);
1100#endif
1101
321e0bba 1102 args[i].unsignedp = unsignedp;
1efe6448 1103 args[i].mode = mode;
84b55618 1104 args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
1105 argpos < n_named_args);
1106#ifdef FUNCTION_ARG_PARTIAL_NREGS
1107 if (args[i].reg)
1108 args[i].partial
84b55618 1109 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
1110 argpos < n_named_args);
1111#endif
1112
84b55618 1113 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
51bbfa0c 1114
1115 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1116 it means that we are to pass this arg in the register(s) designated
1117 by the PARALLEL, but also to pass it in the stack. */
1118 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1119 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1120 args[i].pass_on_stack = 1;
1121
1122 /* If this is an addressable type, we must preallocate the stack
1123 since we must evaluate the object into its final location.
1124
1125 If this is to be passed in both registers and the stack, it is simpler
1126 to preallocate. */
1127 if (TREE_ADDRESSABLE (type)
1128 || (args[i].pass_on_stack && args[i].reg != 0))
1129 must_preallocate = 1;
1130
1131 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1132 we cannot consider this function call constant. */
1133 if (TREE_ADDRESSABLE (type))
1134 is_const = 0;
1135
1136 /* Compute the stack-size of this argument. */
1137 if (args[i].reg == 0 || args[i].partial != 0
1138#ifdef REG_PARM_STACK_SPACE
6f90e075 1139 || reg_parm_stack_space > 0
1140#endif
1141 || args[i].pass_on_stack)
1efe6448 1142 locate_and_pad_parm (mode, type,
1143#ifdef STACK_PARMS_IN_REG_PARM_AREA
1144 1,
1145#else
1146 args[i].reg != 0,
1147#endif
1148 fndecl, &args_size, &args[i].offset,
1149 &args[i].size);
1150
1151#ifndef ARGS_GROW_DOWNWARD
1152 args[i].slot_offset = args_size;
1153#endif
1154
1155#ifndef REG_PARM_STACK_SPACE
1156 /* If a part of the arg was put into registers,
1157 don't include that part in the amount pushed. */
1158 if (! args[i].pass_on_stack)
1159 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1160 / (PARM_BOUNDARY / BITS_PER_UNIT)
1161 * (PARM_BOUNDARY / BITS_PER_UNIT));
1162#endif
1163
1164 /* Update ARGS_SIZE, the total stack space for args so far. */
1165
1166 args_size.constant += args[i].size.constant;
1167 if (args[i].size.var)
1168 {
1169 ADD_PARM_SIZE (args_size, args[i].size.var);
1170 }
1171
1172 /* Since the slot offset points to the bottom of the slot,
1173 we must record it after incrementing if the args grow down. */
1174#ifdef ARGS_GROW_DOWNWARD
1175 args[i].slot_offset = args_size;
1176
1177 args[i].slot_offset.constant = -args_size.constant;
1178 if (args_size.var)
1179 {
1180 SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
1181 }
1182#endif
1183
1184 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1185 have been used, etc. */
1186
1187 FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
1188 argpos < n_named_args);
1189 }
1190
1191#ifdef FINAL_REG_PARM_STACK_SPACE
1192 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1193 args_size.var);
1194#endif
1195
1196 /* Compute the actual size of the argument block required. The variable
1197 and constant sizes must be combined, the size may have to be rounded,
1198 and there may be a minimum required size. */
1199
1200 original_args_size = args_size;
1201 if (args_size.var)
1202 {
1203 /* If this function requires a variable-sized argument list, don't try to
1204 make a cse'able block for this call. We may be able to do this
1205 eventually, but it is too complicated to keep track of what insns go
1206 in the cse'able block and which don't. */
1207
1208 is_const = 0;
1209 must_preallocate = 1;
1210
1211 args_size.var = ARGS_SIZE_TREE (args_size);
1212 args_size.constant = 0;
1213
1214#ifdef STACK_BOUNDARY
1215 if (STACK_BOUNDARY != BITS_PER_UNIT)
1216 args_size.var = round_up (args_size.var, STACK_BYTES);
1217#endif
1218
1219#ifdef REG_PARM_STACK_SPACE
6f90e075 1220 if (reg_parm_stack_space > 0)
1221 {
1222 args_size.var
1223 = size_binop (MAX_EXPR, args_size.var,
1224 size_int (REG_PARM_STACK_SPACE (fndecl)));
1225
1226#ifndef OUTGOING_REG_PARM_STACK_SPACE
1227 /* The area corresponding to register parameters is not to count in
1228 the size of the block we need. So make the adjustment. */
1229 args_size.var
1230 = size_binop (MINUS_EXPR, args_size.var,
6f90e075 1231 size_int (reg_parm_stack_space));
1232#endif
1233 }
1234#endif
1235 }
1236 else
1237 {
1238#ifdef STACK_BOUNDARY
1239 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1240 / STACK_BYTES) * STACK_BYTES);
1241#endif
1242
1243#ifdef REG_PARM_STACK_SPACE
1244 args_size.constant = MAX (args_size.constant,
6f90e075 1245 reg_parm_stack_space);
1246#ifdef MAYBE_REG_PARM_STACK_SPACE
1247 if (reg_parm_stack_space == 0)
1248 args_size.constant = 0;
1249#endif
51bbfa0c 1250#ifndef OUTGOING_REG_PARM_STACK_SPACE
6f90e075 1251 args_size.constant -= reg_parm_stack_space;
1252#endif
1253#endif
1254 }
1255
1256 /* See if we have or want to preallocate stack space.
1257
1258 If we would have to push a partially-in-regs parm
1259 before other stack parms, preallocate stack space instead.
1260
1261 If the size of some parm is not a multiple of the required stack
1262 alignment, we must preallocate.
1263
1264 If the total size of arguments that would otherwise create a copy in
1265 a temporary (such as a CALL) is more than half the total argument list
1266 size, preallocation is faster.
1267
1268 Another reason to preallocate is if we have a machine (like the m88k)
1269 where stack alignment is required to be maintained between every
1270 pair of insns, not just when the call is made. However, we assume here
1271 that such machines either do not have push insns (and hence preallocation
1272 would occur anyway) or the problem is taken care of with
1273 PUSH_ROUNDING. */
1274
1275 if (! must_preallocate)
1276 {
1277 int partial_seen = 0;
1278 int copy_to_evaluate_size = 0;
1279
1280 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1281 {
1282 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1283 partial_seen = 1;
1284 else if (partial_seen && args[i].reg == 0)
1285 must_preallocate = 1;
1286
1287 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1288 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1289 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1290 || TREE_CODE (args[i].tree_value) == COND_EXPR
1291 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1292 copy_to_evaluate_size
1293 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1294 }
1295
1296 if (copy_to_evaluate_size * 2 >= args_size.constant
1297 && args_size.constant > 0)
1298 must_preallocate = 1;
1299 }
1300
1301 /* If the structure value address will reference the stack pointer, we must
1302 stabilize it. We don't need to do this if we know that we are not going
1303 to adjust the stack pointer in processing this call. */
1304
1305 if (structure_value_addr
1306 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1307 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1308 && (args_size.var
1309#ifndef ACCUMULATE_OUTGOING_ARGS
1310 || args_size.constant
1311#endif
1312 ))
1313 structure_value_addr = copy_to_reg (structure_value_addr);
1314
1315 /* If this function call is cse'able, precompute all the parameters.
1316 Note that if the parameter is constructed into a temporary, this will
1317 cause an additional copy because the parameter will be constructed
1318 into a temporary location and then copied into the outgoing arguments.
1319 If a parameter contains a call to alloca and this function uses the
1320 stack, precompute the parameter. */
1321
1322 /* If we preallocated the stack space, and some arguments must be passed
1323 on the stack, then we must precompute any parameter which contains a
1324 function call which will store arguments on the stack.
1325 Otherwise, evaluating the parameter may clobber previous parameters
1326 which have already been stored into the stack. */
1327
1328 for (i = 0; i < num_actuals; i++)
1329 if (is_const
1330 || ((args_size.var != 0 || args_size.constant != 0)
1331 && calls_function (args[i].tree_value, 1))
1332 || (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
1333 && calls_function (args[i].tree_value, 0)))
51bbfa0c 1334 {
1335 /* If this is an addressable type, we cannot pre-evaluate it. */
1336 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1337 abort ();
1338
1339 push_temp_slots ();
1340
51bbfa0c 1341 args[i].initial_value = args[i].value
e5d70561 1342 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1efe6448 1343
51bbfa0c 1344 preserve_temp_slots (args[i].value);
cc79451b 1345 pop_temp_slots ();
1346
1347 /* ANSI doesn't require a sequence point here,
1348 but PCC has one, so this will avoid some problems. */
1349 emit_queue ();
1350
1351 args[i].initial_value = args[i].value
1352 = protect_from_queue (args[i].initial_value, 0);
1353
1354 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1355 args[i].value
1356 = convert_modes (args[i].mode,
1357 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1358 args[i].value, args[i].unsignedp);
1359 }
1360
1361 /* Now we are about to start emitting insns that can be deleted
1362 if a libcall is deleted. */
1363 if (is_const)
1364 start_sequence ();
1365
1366 /* If we have no actual push instructions, or shouldn't use them,
1367 make space for all args right now. */
1368
1369 if (args_size.var != 0)
1370 {
1371 if (old_stack_level == 0)
1372 {
e5d70561 1373 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1374 old_pending_adj = pending_stack_adjust;
1375 pending_stack_adjust = 0;
d64f5a78 1376#ifdef ACCUMULATE_OUTGOING_ARGS
1377 /* stack_arg_under_construction says whether a stack arg is
1378 being constructed at the old stack level. Pushing the stack
1379 gets a clean outgoing argument block. */
1380 old_stack_arg_under_construction = stack_arg_under_construction;
1381 stack_arg_under_construction = 0;
d64f5a78 1382#endif
1383 }
1384 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
1385 }
26a258fe 1386 else
1387 {
1388 /* Note that we must go through the motions of allocating an argument
1389 block even if the size is zero because we may be storing args
1390 in the area reserved for register arguments, which may be part of
1391 the stack frame. */
26a258fe 1392
1393 int needed = args_size.constant;
1394
1395 /* Store the maximum argument space used. It will be pushed by
1396 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
1397 checking). */
51bbfa0c
RS
1398
1399 if (needed > current_function_outgoing_args_size)
1400 current_function_outgoing_args_size = needed;
1401
1402 if (must_preallocate)
1403 {
1404#ifdef ACCUMULATE_OUTGOING_ARGS
1405 /* Since the stack pointer will never be pushed, it is possible for
1406 the evaluation of a parm to clobber something we have already
1407 written to the stack. Since most function calls on RISC machines
1408 do not use the stack, this is uncommon, but must work correctly.
1409
1410 Therefore, we save any area of the stack that was already written
1411 and that we are using. Here we set up to do this by making a new
1412 stack usage map from the old one. The actual save will be done
1413 by store_one_arg.
1414
1415 Another approach might be to try to reorder the argument
1416 evaluations to avoid this conflicting stack usage. */
1417
51bbfa0c 1418#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1419 /* Since we will be writing into the entire argument area, the
1420 map must be allocated for its entire size, not just the part that
1421 is the responsibility of the caller. */
1422 needed += reg_parm_stack_space;
1423#endif
1424
1425#ifdef ARGS_GROW_DOWNWARD
1426 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1427 needed + 1);
51bbfa0c 1428#else
1429 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1430 needed);
51bbfa0c 1431#endif
26a258fe 1432 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
51bbfa0c 1433
1434 if (initial_highest_arg_in_use)
1435 bcopy (initial_stack_usage_map, stack_usage_map,
1436 initial_highest_arg_in_use);
51bbfa0c 1437
1438 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
1439 bzero (&stack_usage_map[initial_highest_arg_in_use],
1440 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
1441 needed = 0;
2f4aa534 1442
1443 /* The address of the outgoing argument list must not be copied to a
1444 register here, because argblock would be left pointing to the
1445 wrong place after the call to allocate_dynamic_stack_space below.
1446 */
2f4aa534 1447
26a258fe 1448 argblock = virtual_outgoing_args_rtx;
2f4aa534 1449
51bbfa0c 1450#else /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 1451 if (inhibit_defer_pop == 0)
51bbfa0c 1452 {
1453 /* Try to reuse some or all of the pending_stack_adjust
1454 to get this space. Maybe we can avoid any pushing. */
1455 if (needed > pending_stack_adjust)
1456 {
1457 needed -= pending_stack_adjust;
1458 pending_stack_adjust = 0;
1459 }
1460 else
1461 {
1462 pending_stack_adjust -= needed;
1463 needed = 0;
1464 }
51bbfa0c 1465 }
1466 /* Special case this because overhead of `push_block' in this
1467 case is non-trivial. */
1468 if (needed == 0)
1469 argblock = virtual_outgoing_args_rtx;
51bbfa0c 1470 else
1471 argblock = push_block (GEN_INT (needed), 0, 0);
1472
1473 /* We only really need to call `copy_to_reg' in the case where push
1474 insns are going to be used to pass ARGBLOCK to a function
1475 call in ARGS. In that case, the stack pointer changes value
1476 from the allocation point to the call point, and hence
1477 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
1478 But might as well always do it. */
1479 argblock = copy_to_reg (argblock);
51bbfa0c 1480#endif /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 1481 }
1482 }
1483
1484#ifdef ACCUMULATE_OUTGOING_ARGS
1485 /* The save/restore code in store_one_arg handles all cases except one:
1486 a constructor call (including a C function returning a BLKmode struct)
1487 to initialize an argument. */
1488 if (stack_arg_under_construction)
1489 {
1490#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
e5d70561 1491 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
bfbf933a 1492#else
e5d70561 1493 rtx push_size = GEN_INT (args_size.constant);
1494#endif
1495 if (old_stack_level == 0)
1496 {
e5d70561 1497 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1498 old_pending_adj = pending_stack_adjust;
1499 pending_stack_adjust = 0;
1500 /* stack_arg_under_construction says whether a stack arg is
1501 being constructed at the old stack level. Pushing the stack
1502 gets a clean outgoing argument block. */
1503 old_stack_arg_under_construction = stack_arg_under_construction;
1504 stack_arg_under_construction = 0;
1505 /* Make a new map for the new argument list. */
1506 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
1507 bzero (stack_usage_map, highest_outgoing_arg_in_use);
1508 highest_outgoing_arg_in_use = 0;
1509 }
e5d70561 1510 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
1511 }
1512 /* If argument evaluation might modify the stack pointer, copy the
1513 address of the argument list to a register. */
1514 for (i = 0; i < num_actuals; i++)
1515 if (args[i].pass_on_stack)
1516 {
1517 argblock = copy_addr_to_reg (argblock);
1518 break;
1519 }
1520#endif
1521
1522
1523 /* If we preallocated stack space, compute the address of each argument.
1524 We need not ensure it is a valid memory address here; it will be
1525 validized when it is used. */
1526 if (argblock)
1527 {
1528 rtx arg_reg = argblock;
1529 int arg_offset = 0;
1530
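 /* If ARGBLOCK is already of the form (plus reg constant), split it so
    that each argument address below can be formed as that register plus
    a compile-time constant offset. */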
1531 if (GET_CODE (argblock) == PLUS)
1532 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1533
1534 for (i = 0; i < num_actuals; i++)
1535 {
1536 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1537 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1538 rtx addr;
1539
1540 /* Skip this parm if it will not be passed on the stack. */
1541 if (! args[i].pass_on_stack && args[i].reg != 0)
1542 continue;
1543
1544 if (GET_CODE (offset) == CONST_INT)
1545 addr = plus_constant (arg_reg, INTVAL (offset));
1546 else
1547 addr = gen_rtx (PLUS, Pmode, arg_reg, offset);
1548
1549 addr = plus_constant (addr, arg_offset);
1efe6448 1550 args[i].stack = gen_rtx (MEM, args[i].mode, addr);
0c0600d5 1551 MEM_IN_STRUCT_P (args[i].stack)
05e3bdb9 1552 = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));
51bbfa0c
RS
1553
1554 if (GET_CODE (slot_offset) == CONST_INT)
1555 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1556 else
1557 addr = gen_rtx (PLUS, Pmode, arg_reg, slot_offset);
1558
1559 addr = plus_constant (addr, arg_offset);
1efe6448 1560 args[i].stack_slot = gen_rtx (MEM, args[i].mode, addr);
51bbfa0c
RS
1561 }
1562 }
1563
1564#ifdef PUSH_ARGS_REVERSED
1565#ifdef STACK_BOUNDARY
1566 /* If we push args individually in reverse order, perform stack alignment
1567 before the first push (the last arg). */
1568 if (argblock == 0)
e5d70561
RK
1569 anti_adjust_stack (GEN_INT (args_size.constant
1570 - original_args_size.constant));
51bbfa0c
RS
1571#endif
1572#endif
1573
1574 /* Don't try to defer pops if preallocating, not even from the first arg,
1575 since ARGBLOCK probably refers to the SP. */
1576 if (argblock)
1577 NO_DEFER_POP;
1578
1579 /* Get the function to call, in the form of RTL. */
1580 if (fndecl)
ef5d30c9
RK
1581 {
1582 /* If this is the first use of the function, see if we need to
1583 make an external definition for it. */
1584 if (! TREE_USED (fndecl))
1585 {
1586 assemble_external (fndecl);
1587 TREE_USED (fndecl) = 1;
1588 }
1589
1590 /* Get a SYMBOL_REF rtx for the function address. */
1591 funexp = XEXP (DECL_RTL (fndecl), 0);
1592 }
51bbfa0c
RS
1593 else
1594 /* Generate an rtx (probably a pseudo-register) for the address. */
1595 {
cc79451b 1596 push_temp_slots ();
e5d70561 1597 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
cc79451b 1598 pop_temp_slots (); /* FUNEXP can't be BLKmode */
51bbfa0c
RS
1599 emit_queue ();
1600 }
1601
1602 /* Figure out the register where the value, if any, will come back. */
1603 valreg = 0;
1604 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1605 && ! structure_value_addr)
1606 {
1607 if (pcc_struct_value)
1608 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1609 fndecl);
1610 else
1611 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1612 }
1613
1614 /* Precompute all register parameters. It isn't safe to compute anything
0f41302f 1615 once we have started filling any specific hard regs. */
51bbfa0c
RS
1616 reg_parm_seen = 0;
1617 for (i = 0; i < num_actuals; i++)
1618 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1619 {
1620 reg_parm_seen = 1;
1621
1622 if (args[i].value == 0)
1623 {
cc79451b 1624 push_temp_slots ();
e5d70561
RK
1625 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1626 VOIDmode, 0);
51bbfa0c 1627 preserve_temp_slots (args[i].value);
cc79451b 1628 pop_temp_slots ();
51bbfa0c
RS
1629
1630 /* ANSI doesn't require a sequence point here,
1631 but PCC has one, so this will avoid some problems. */
1632 emit_queue ();
1633 }
84b55618
RK
1634
1635 /* If we are to promote the function arg to a wider mode,
1636 do it now. */
84b55618 1637
843fec55
RK
1638 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
1639 args[i].value
1640 = convert_modes (args[i].mode,
1641 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1642 args[i].value, args[i].unsignedp);
ebef2728
RK
1643
1644 /* If the value is expensive, and we are inside an appropriately
1645 short loop, put the value into a pseudo and then put the pseudo
01368078
RK
1646 into the hard reg.
1647
1648 For small register classes, also do this if this call uses
1649 register parameters. This is to avoid reload conflicts while
 1650	     loading the parameter registers.  */
ebef2728
RK
1651
1652 if ((! (GET_CODE (args[i].value) == REG
1653 || (GET_CODE (args[i].value) == SUBREG
1654 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
1655 && args[i].mode != BLKmode
1656 && rtx_cost (args[i].value, SET) > 2
01368078 1657#ifdef SMALL_REGISTER_CLASSES
4f48d56a 1658 && (reg_parm_seen || preserve_subexpressions_p ())
01368078 1659#else
4f48d56a 1660 && preserve_subexpressions_p ()
01368078 1661#endif
4f48d56a 1662 )
ebef2728 1663 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
51bbfa0c
RS
1664 }
1665
1666#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1667 /* The argument list is the property of the called routine and it
1668 may clobber it. If the fixed area has been used for previous
1669 parameters, we must save and restore it.
1670
 1671	     Here we compute the boundary of the area that needs to be saved, if any.  */
1672
b94301c2
RS
1673#ifdef ARGS_GROW_DOWNWARD
1674 for (i = 0; i < reg_parm_stack_space + 1; i++)
1675#else
6f90e075 1676 for (i = 0; i < reg_parm_stack_space; i++)
b94301c2 1677#endif
51bbfa0c
RS
1678 {
1679 if (i >= highest_outgoing_arg_in_use
1680 || stack_usage_map[i] == 0)
1681 continue;
1682
1683 if (low_to_save == -1)
1684 low_to_save = i;
1685
1686 high_to_save = i;
1687 }
1688
1689 if (low_to_save >= 0)
1690 {
1691 int num_to_save = high_to_save - low_to_save + 1;
1692 enum machine_mode save_mode
1693 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1694 rtx stack_area;
1695
1696 /* If we don't have the required alignment, must do this in BLKmode. */
1697 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1698 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1699 save_mode = BLKmode;
1700
1701 stack_area = gen_rtx (MEM, save_mode,
1702 memory_address (save_mode,
b94301c2
RS
1703
1704#ifdef ARGS_GROW_DOWNWARD
1705 plus_constant (argblock,
1706 - high_to_save)
1707#else
51bbfa0c 1708 plus_constant (argblock,
b94301c2
RS
1709 low_to_save)
1710#endif
1711 ));
51bbfa0c
RS
1712 if (save_mode == BLKmode)
1713 {
6fa51029 1714 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3668e76e 1715 MEM_IN_STRUCT_P (save_area) = 0;
51bbfa0c 1716 emit_block_move (validize_mem (save_area), stack_area,
e5d70561 1717 GEN_INT (num_to_save),
51bbfa0c
RS
1718 PARM_BOUNDARY / BITS_PER_UNIT);
1719 }
1720 else
1721 {
1722 save_area = gen_reg_rtx (save_mode);
1723 emit_move_insn (save_area, stack_area);
1724 }
1725 }
1726#endif
1727
1728
1729 /* Now store (and compute if necessary) all non-register parms.
1730 These come before register parms, since they can require block-moves,
1731 which could clobber the registers used for register parms.
1732 Parms which have partial registers are not stored here,
1733 but we do preallocate space here if they want that. */
1734
1735 for (i = 0; i < num_actuals; i++)
1736 if (args[i].reg == 0 || args[i].pass_on_stack)
1737 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1738 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c 1739
4ab56118
RK
1740 /* If we have a parm that is passed in registers but not in memory
1741 and whose alignment does not permit a direct copy into registers,
1742 make a group of pseudos that correspond to each register that we
1743 will later fill. */
1744
45d44c98
RK
1745 if (STRICT_ALIGNMENT)
1746 for (i = 0; i < num_actuals; i++)
1747 if (args[i].reg != 0 && ! args[i].pass_on_stack
4ab56118 1748 && args[i].mode == BLKmode
45d44c98
RK
1749 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1750 < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1751 {
1752 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1753 int big_endian_correction = 0;
4ab56118 1754
45d44c98
RK
1755 args[i].n_aligned_regs
1756 = args[i].partial ? args[i].partial
1757 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
4ab56118 1758
45d44c98
RK
1759 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1760 * args[i].n_aligned_regs);
4ab56118 1761
45d44c98
RK
1762 /* Structures smaller than a word are aligned to the least
1763 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1764 this means we must skip the empty high order bytes when
1765 calculating the bit offset. */
1766 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1767 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
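	    /* Worked example (illustrative only, not in the original source):
	       with BITS_PER_WORD == 32 and a 3-byte structure, the correction
	       is 32 - 3 * 8 == 8 bits, so the stores below begin one byte
	       into the destination word, skipping the empty high-order byte
	       on a BYTES_BIG_ENDIAN target.  */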
8498efd0 1768
45d44c98
RK
1769 for (j = 0; j < args[i].n_aligned_regs; j++)
1770 {
1771 rtx reg = gen_reg_rtx (word_mode);
1772 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1773 int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1774 int bitpos;
1775
1776 args[i].aligned_regs[j] = reg;
1777
1778 /* Clobber REG and move each partword into it. Ensure we don't
1779 go past the end of the structure. Note that the loop below
1780 works because we've already verified that padding
1781 and endianness are compatible. */
1782
1783 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
1784
1785 for (bitpos = 0;
1786 bitpos < BITS_PER_WORD && bytes > 0;
1787 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
1788 {
1789 int xbitpos = bitpos + big_endian_correction;
1790
1791 store_bit_field (reg, bitsize, xbitpos, word_mode,
1792 extract_bit_field (word, bitsize, bitpos, 1,
1793 NULL_RTX, word_mode,
1794 word_mode,
1795 bitsize / BITS_PER_UNIT,
1796 BITS_PER_WORD),
1797 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
1798 }
1799 }
1800 }
4ab56118 1801
51bbfa0c
RS
1802 /* Now store any partially-in-registers parm.
1803 This is the last place a block-move can happen. */
1804 if (reg_parm_seen)
1805 for (i = 0; i < num_actuals; i++)
1806 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1807 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1808 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c
RS
1809
1810#ifndef PUSH_ARGS_REVERSED
1811#ifdef STACK_BOUNDARY
1812 /* If we pushed args in forward order, perform stack alignment
1813 after pushing the last arg. */
1814 if (argblock == 0)
e5d70561
RK
1815 anti_adjust_stack (GEN_INT (args_size.constant
1816 - original_args_size.constant));
51bbfa0c
RS
1817#endif
1818#endif
1819
756e0e12
RS
1820 /* If register arguments require space on the stack and stack space
1821 was not preallocated, allocate stack space here for arguments
1822 passed in registers. */
6e716e89 1823#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
756e0e12 1824 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 1825 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
1826#endif
1827
51bbfa0c
RS
1828 /* Pass the function the address in which to return a structure value. */
1829 if (structure_value_addr && ! structure_value_addr_parm)
1830 {
1831 emit_move_insn (struct_value_rtx,
1832 force_reg (Pmode,
e5d70561
RK
1833 force_operand (structure_value_addr,
1834 NULL_RTX)));
51bbfa0c 1835 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 1836 use_reg (&call_fusage, struct_value_rtx);
51bbfa0c
RS
1837 }
1838
77cac2f2 1839 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
8b0f9101 1840
51bbfa0c
RS
1841 /* Now do the register loads required for any wholly-register parms or any
1842 parms which are passed both on the stack and in a register. Their
1843 expressions were already evaluated.
1844
1845 Mark all register-parms as living through the call, putting these USE
77cac2f2 1846 insns in the CALL_INSN_FUNCTION_USAGE field. */
51bbfa0c
RS
1847
1848 for (i = 0; i < num_actuals; i++)
1849 {
cacbd532 1850 rtx reg = args[i].reg;
51bbfa0c 1851 int partial = args[i].partial;
cacbd532 1852 int nregs;
51bbfa0c 1853
cacbd532 1854 if (reg)
51bbfa0c 1855 {
6b972c4f
JW
1856 /* Set to non-negative if must move a word at a time, even if just
 1857	     one word (e.g., partial == 1 && mode == DFmode).  Set to -1 if
1858 we just use a normal move insn. This value can be zero if the
1859 argument is a zero size structure with no fields. */
51bbfa0c
RS
1860 nregs = (partial ? partial
1861 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
6b972c4f
JW
1862 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1863 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1864 : -1));
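	  /* Worked example (illustrative only): a 6-byte BLKmode argument
	     with UNITS_PER_WORD == 4 gives nregs == (6 + 3) / 4 == 2, so two
	     words are moved below; a non-partial DFmode argument gives
	     nregs == -1 and is handled by a single move insn.  */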
51bbfa0c 1865
cacbd532
JW
1866 /* Handle calls that pass values in multiple non-contiguous
1867 locations. The Irix 6 ABI has examples of this. */
1868
1869 if (GET_CODE (reg) == PARALLEL)
1870 emit_group_load (reg, args[i].value);
1871
51bbfa0c
RS
1872 /* If simple case, just do move. If normal partial, store_one_arg
1873 has already loaded the register for us. In all other cases,
1874 load the register(s) from memory. */
1875
cacbd532 1876 else if (nregs == -1)
51bbfa0c 1877 emit_move_insn (reg, args[i].value);
4ab56118 1878
4ab56118
RK
1879 /* If we have pre-computed the values to put in the registers in
1880 the case of non-aligned structures, copy them in now. */
1881
1882 else if (args[i].n_aligned_regs != 0)
1883 for (j = 0; j < args[i].n_aligned_regs; j++)
1884 emit_move_insn (gen_rtx (REG, word_mode, REGNO (reg) + j),
1885 args[i].aligned_regs[j]);
4ab56118 1886
cacbd532 1887 else if (partial == 0 || args[i].pass_on_stack)
6b972c4f
JW
1888 move_block_to_reg (REGNO (reg),
1889 validize_mem (args[i].value), nregs,
1890 args[i].mode);
0304dfbb 1891
cacbd532
JW
1892 /* Handle calls that pass values in multiple non-contiguous
1893 locations. The Irix 6 ABI has examples of this. */
1894 if (GET_CODE (reg) == PARALLEL)
1895 use_group_regs (&call_fusage, reg);
1896 else if (nregs == -1)
0304dfbb
DE
1897 use_reg (&call_fusage, reg);
1898 else
1899 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
51bbfa0c
RS
1900 }
1901 }
1902
1903 /* Perform postincrements before actually calling the function. */
1904 emit_queue ();
1905
1906 /* All arguments and registers used for the call must be set up by now! */
1907
51bbfa0c 1908 /* Generate the actual call instruction. */
2c8da025 1909 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
51bbfa0c 1910 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
77cac2f2 1911 valreg, old_inhibit_defer_pop, call_fusage, is_const);
51bbfa0c
RS
1912
1913 /* If call is cse'able, make appropriate pair of reg-notes around it.
1914 Test valreg so we don't crash; may safely ignore `const'
1915 if return type is void. */
1916 if (is_const && valreg != 0)
1917 {
1918 rtx note = 0;
1919 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1920 rtx insns;
1921
1922 /* Construct an "equal form" for the value which mentions all the
1923 arguments in order as well as the function name. */
1924#ifdef PUSH_ARGS_REVERSED
1925 for (i = 0; i < num_actuals; i++)
1926 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1927#else
1928 for (i = num_actuals - 1; i >= 0; i--)
1929 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1930#endif
1931 note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note);
1932
1933 insns = get_insns ();
1934 end_sequence ();
1935
1936 emit_libcall_block (insns, temp, valreg, note);
1937
1938 valreg = temp;
1939 }
4f48d56a
RK
1940 else if (is_const)
1941 {
1942 /* Otherwise, just write out the sequence without a note. */
1943 rtx insns = get_insns ();
1944
1945 end_sequence ();
1946 emit_insns (insns);
1947 }
51bbfa0c
RS
1948
1949 /* For calls to `setjmp', etc., inform flow.c it should complain
1950 if nonvolatile values are live. */
1951
1952 if (returns_twice)
1953 {
1954 emit_note (name, NOTE_INSN_SETJMP);
1955 current_function_calls_setjmp = 1;
1956 }
1957
1958 if (is_longjmp)
1959 current_function_calls_longjmp = 1;
1960
1961 /* Notice functions that cannot return.
1962 If optimizing, insns emitted below will be dead.
1963 If not optimizing, they will exist, which is useful
1964 if the user uses the `return' command in the debugger. */
1965
1966 if (is_volatile || is_longjmp)
1967 emit_barrier ();
1968
51bbfa0c
RS
1969 /* If value type not void, return an rtx for the value. */
1970
1971 /* If there are cleanups to be called, don't use a hard reg as target. */
1972 if (cleanups_this_call != old_cleanups
1973 && target && REG_P (target)
1974 && REGNO (target) < FIRST_PSEUDO_REGISTER)
1975 target = 0;
1976
1977 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
1978 || ignore)
1979 {
1980 target = const0_rtx;
1981 }
1982 else if (structure_value_addr)
1983 {
1984 if (target == 0 || GET_CODE (target) != MEM)
29008b51
JW
1985 {
1986 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1987 memory_address (TYPE_MODE (TREE_TYPE (exp)),
1988 structure_value_addr));
05e3bdb9 1989 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
29008b51 1990 }
51bbfa0c
RS
1991 }
1992 else if (pcc_struct_value)
1993 {
1994 if (target == 0)
29008b51 1995 {
30082223
RS
 1996	  /* We used to leave the value in the location that it is
1997 returned in, but that causes problems if it is used more
1998 than once in one expression. Rather than trying to track
1999 when a copy is required, we always copy when TARGET is
2000 not specified. This calling sequence is only used on
2001 a few machines and TARGET is usually nonzero. */
2002 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
6d8b61b9
RS
2003 {
2004 target = assign_stack_temp (BLKmode,
2005 int_size_in_bytes (TREE_TYPE (exp)),
2006 0);
2007
05e3bdb9 2008 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
3b780899 2009
6d8b61b9
RS
2010 /* Save this temp slot around the pop below. */
2011 preserve_temp_slots (target);
2012 }
30082223
RS
2013 else
2014 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
29008b51 2015 }
30082223
RS
2016
2017 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
51bbfa0c
RS
2018 emit_move_insn (target, gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
2019 copy_to_reg (valreg)));
2020 else
2021 emit_block_move (target, gen_rtx (MEM, BLKmode, copy_to_reg (valreg)),
2022 expr_size (exp),
2023 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2024 }
84b55618
RK
2025 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2026 && GET_MODE (target) == GET_MODE (valreg))
51bbfa0c
RS
2027 /* TARGET and VALREG cannot be equal at this point because the latter
2028 would not have REG_FUNCTION_VALUE_P true, while the former would if
2029 it were referring to the same register.
2030
2031 If they refer to the same register, this move will be a no-op, except
2032 when function inlining is being done. */
2033 emit_move_insn (target, valreg);
cacbd532
JW
2034 /* Handle calls that return values in multiple non-contiguous locations.
2035 The Irix 6 ABI has examples of this. */
2036 else if (GET_CODE (valreg) == PARALLEL)
2037 {
2038 if (target == 0)
2039 {
2040 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2041 target = assign_stack_temp (BLKmode, bytes, 0);
2042 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2043 preserve_temp_slots (target);
2044 }
2045
2046 emit_group_store (target, valreg);
2047 }
766b19fb
JL
2048 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2049 {
2050 /* Some machines (the PA for example) want to return all small
2051 structures in registers regardless of the structure's alignment.
2052
2053 Deal with them explicitly by copying from the return registers
2054 into the target MEM locations. */
2055 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2056 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2057 int i;
2058 enum machine_mode tmpmode;
1b5c5873
RK
2059 rtx src, dst;
2060 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (exp)), BITS_PER_WORD);
2061 int bitpos, xbitpos, big_endian_correction = 0;
766b19fb
JL
2062
2063 if (target == 0)
822e3422
RK
2064 {
2065 target = assign_stack_temp (BLKmode, bytes, 0);
2066 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2067 preserve_temp_slots (target);
2068 }
766b19fb 2069
e934eef9
RK
2070 /* This code assumes valreg is at least a full word. If it isn't,
2071 copy it into a new pseudo which is a full word. */
2072 if (GET_MODE (valreg) != BLKmode
2073 && GET_MODE_SIZE (GET_MODE (valreg)) < UNITS_PER_WORD)
144a3150 2074 valreg = convert_to_mode (word_mode, valreg,
e934eef9
RK
2075 TREE_UNSIGNED (TREE_TYPE (exp)));
2076
1b5c5873
RK
2077 /* Structures whose size is not a multiple of a word are aligned
2078 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2079 machine, this means we must skip the empty high order bytes when
2080 calculating the bit offset. */
2081 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2082 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2083 * BITS_PER_UNIT));
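	  /* Worked example (illustrative only): with UNITS_PER_WORD == 4 and
	     a 6-byte return value, bytes % UNITS_PER_WORD == 2, so the
	     correction is 32 - 2 * 8 == 16 and XBITPOS starts 16 bits into
	     the return register on a BYTES_BIG_ENDIAN target.  */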
2084
 2085      /* Copy the structure BITSIZE bits at a time.
2086
2087 We could probably emit more efficient code for machines
766b19fb
JL
2088 which do not use strict alignment, but it doesn't seem
2089 worth the effort at the current time. */
1b5c5873
RK
2090 for (bitpos = 0, xbitpos = big_endian_correction;
2091 bitpos < bytes * BITS_PER_UNIT;
2092 bitpos += bitsize, xbitpos += bitsize)
766b19fb 2093 {
1b5c5873
RK
2094
2095 /* We need a new source operand each time xbitpos is on a
2096 word boundary and when xbitpos == big_endian_correction
2097 (the first time through). */
2098 if (xbitpos % BITS_PER_WORD == 0
2099 || xbitpos == big_endian_correction)
2100 src = operand_subword_force (valreg,
2101 xbitpos / BITS_PER_WORD,
2102 BLKmode);
2103
2104 /* We need a new destination operand each time bitpos is on
2105 a word boundary. */
2106 if (bitpos % BITS_PER_WORD == 0)
2107 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, BLKmode);
766b19fb 2108
1b5c5873
RK
2109 /* Use xbitpos for the source extraction (right justified) and
 2110	     bitpos for the destination store (left justified).  */
2111 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2112 extract_bit_field (src, bitsize,
2113 xbitpos % BITS_PER_WORD, 1,
2114 NULL_RTX, word_mode,
2115 word_mode,
2116 bitsize / BITS_PER_UNIT,
2117 BITS_PER_WORD),
2118 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
766b19fb
JL
2119 }
2120 }
51bbfa0c
RS
2121 else
2122 target = copy_to_reg (valreg);
2123
84b55618 2124#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
2125 /* If we promoted this return value, make the proper SUBREG. TARGET
2126 might be const0_rtx here, so be careful. */
2127 if (GET_CODE (target) == REG
766b19fb 2128 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
5d2ac65e 2129 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 2130 {
321e0bba
RK
2131 tree type = TREE_TYPE (exp);
2132 int unsignedp = TREE_UNSIGNED (type);
84b55618 2133
321e0bba
RK
2134 /* If we don't promote as expected, something is wrong. */
2135 if (GET_MODE (target)
2136 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
5d2ac65e
RK
2137 abort ();
2138
321e0bba 2139 target = gen_rtx (SUBREG, TYPE_MODE (type), target, 0);
84b55618
RK
2140 SUBREG_PROMOTED_VAR_P (target) = 1;
2141 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2142 }
2143#endif
2144
5dab5552
MS
2145 if (flag_short_temps)
2146 {
2147 /* Perform all cleanups needed for the arguments of this call
2148 (i.e. destructors in C++). */
2149 expand_cleanups_to (old_cleanups);
2150 }
51bbfa0c 2151
2f4aa534
RS
2152 /* If size of args is variable or this was a constructor call for a stack
2153 argument, restore saved stack-pointer value. */
51bbfa0c
RS
2154
2155 if (old_stack_level)
2156 {
e5d70561 2157 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 2158 pending_stack_adjust = old_pending_adj;
d64f5a78 2159#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2160 stack_arg_under_construction = old_stack_arg_under_construction;
2161 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2162 stack_usage_map = initial_stack_usage_map;
d64f5a78 2163#endif
51bbfa0c 2164 }
51bbfa0c
RS
2165#ifdef ACCUMULATE_OUTGOING_ARGS
2166 else
2167 {
2168#ifdef REG_PARM_STACK_SPACE
2169 if (save_area)
2170 {
2171 enum machine_mode save_mode = GET_MODE (save_area);
2172 rtx stack_area
2173 = gen_rtx (MEM, save_mode,
2174 memory_address (save_mode,
b94301c2
RS
2175#ifdef ARGS_GROW_DOWNWARD
2176 plus_constant (argblock, - high_to_save)
2177#else
2178 plus_constant (argblock, low_to_save)
2179#endif
2180 ));
51bbfa0c
RS
2181
2182 if (save_mode != BLKmode)
2183 emit_move_insn (stack_area, save_area);
2184 else
2185 emit_block_move (stack_area, validize_mem (save_area),
e5d70561
RK
2186 GEN_INT (high_to_save - low_to_save + 1),
2187 PARM_BOUNDARY / BITS_PER_UNIT);
51bbfa0c
RS
2188 }
2189#endif
2190
2191 /* If we saved any argument areas, restore them. */
2192 for (i = 0; i < num_actuals; i++)
2193 if (args[i].save_area)
2194 {
2195 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2196 rtx stack_area
2197 = gen_rtx (MEM, save_mode,
2198 memory_address (save_mode,
2199 XEXP (args[i].stack_slot, 0)));
2200
2201 if (save_mode != BLKmode)
2202 emit_move_insn (stack_area, args[i].save_area);
2203 else
2204 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 2205 GEN_INT (args[i].size.constant),
51bbfa0c
RS
2206 PARM_BOUNDARY / BITS_PER_UNIT);
2207 }
2208
2209 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2210 stack_usage_map = initial_stack_usage_map;
2211 }
2212#endif
2213
59257ff7
RK
2214 /* If this was alloca, record the new stack level for nonlocal gotos.
2215 Check for the handler slots since we might not have a save area
0f41302f 2216 for non-local gotos. */
59257ff7
RK
2217
2218 if (may_be_alloca && nonlocal_goto_handler_slot != 0)
e5d70561 2219 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
2220
2221 pop_temp_slots ();
2222
2223 return target;
2224}
2225\f
322e3e34
RK
2226/* Output a library call to function FUN (a SYMBOL_REF rtx)
2227 (emitting the queue unless NO_QUEUE is nonzero),
2228 for a value of mode OUTMODE,
2229 with NARGS different arguments, passed as alternating rtx values
2230 and machine_modes to convert them to.
2231 The rtx values should have been passed through protect_from_queue already.
2232
2233 NO_QUEUE will be true if and only if the library call is a `const' call
2234 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2235 to the variable is_const in expand_call.
2236
2237 NO_QUEUE must be true for const calls, because if it isn't, then
2238 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2239 and will be lost if the libcall sequence is optimized away.
2240
2241 NO_QUEUE must be false for non-const calls, because if it isn't, the
2242 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2243 optimized. For instance, the instruction scheduler may incorrectly
2244 move memory references across the non-const call. */
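/* Illustrative usage (a sketch, not taken from the compiler sources): a
   caller that wants `memcpy'-style semantics, where DEST_ADDR, SRC_ADDR and
   LEN are hypothetical rtx operands already protected from the queue, might
   emit

	emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "memcpy"),
			   0, VOIDmode, 3,
			   dest_addr, Pmode,
			   src_addr, Pmode,
			   len, SImode);

   NO_QUEUE is 0 here because the call is not `const'; the SImode mode for
   LEN is an assumption that fits a typical 32-bit target.  */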
2245
2246void
4f90e4a0
RK
2247emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2248 int nargs, ...))
322e3e34 2249{
4f90e4a0
RK
2250#ifndef __STDC__
2251 rtx orgfun;
2252 int no_queue;
2253 enum machine_mode outmode;
2254 int nargs;
2255#endif
322e3e34
RK
2256 va_list p;
2257 /* Total size in bytes of all the stack-parms scanned so far. */
2258 struct args_size args_size;
2259 /* Size of arguments before any adjustments (such as rounding). */
2260 struct args_size original_args_size;
2261 register int argnum;
322e3e34 2262 rtx fun;
322e3e34
RK
2263 int inc;
2264 int count;
2265 rtx argblock = 0;
2266 CUMULATIVE_ARGS args_so_far;
2267 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2268 struct args_size offset; struct args_size size; };
2269 struct arg *argvec;
2270 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2271 rtx call_fusage = 0;
322e3e34 2272
4f90e4a0
RK
2273 VA_START (p, nargs);
2274
2275#ifndef __STDC__
2276 orgfun = va_arg (p, rtx);
322e3e34
RK
2277 no_queue = va_arg (p, int);
2278 outmode = va_arg (p, enum machine_mode);
2279 nargs = va_arg (p, int);
4f90e4a0
RK
2280#endif
2281
2282 fun = orgfun;
322e3e34
RK
2283
2284 /* Copy all the libcall-arguments out of the varargs data
2285 and into a vector ARGVEC.
2286
2287 Compute how to pass each argument. We only support a very small subset
2288 of the full argument passing conventions to limit complexity here since
2289 library functions shouldn't have many args. */
2290
2291 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2292
eecb6f50 2293 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2294
2295 args_size.constant = 0;
2296 args_size.var = 0;
2297
888aa7a9
RS
2298 push_temp_slots ();
2299
322e3e34
RK
2300 for (count = 0; count < nargs; count++)
2301 {
2302 rtx val = va_arg (p, rtx);
2303 enum machine_mode mode = va_arg (p, enum machine_mode);
2304
2305 /* We cannot convert the arg value to the mode the library wants here;
2306 must do it earlier where we know the signedness of the arg. */
2307 if (mode == BLKmode
2308 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2309 abort ();
2310
2311 /* On some machines, there's no way to pass a float to a library fcn.
2312 Pass it as a double instead. */
2313#ifdef LIBGCC_NEEDS_DOUBLE
2314 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2315 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2316#endif
2317
2318 /* There's no need to call protect_from_queue, because
2319 either emit_move_insn or emit_push_insn will do that. */
2320
2321 /* Make sure it is a reasonable operand for a move or push insn. */
2322 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2323 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2324 val = force_operand (val, NULL_RTX);
2325
322e3e34
RK
2326#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2327 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2328 {
a44492f0
RK
2329 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2330 be viewed as just an efficiency improvement. */
888aa7a9
RS
2331 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2332 emit_move_insn (slot, val);
8301b6e2 2333 val = force_operand (XEXP (slot, 0), NULL_RTX);
a44492f0 2334 mode = Pmode;
888aa7a9 2335 }
322e3e34
RK
2336#endif
2337
888aa7a9
RS
2338 argvec[count].value = val;
2339 argvec[count].mode = mode;
2340
322e3e34 2341 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 2342 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
2343 abort ();
2344#ifdef FUNCTION_ARG_PARTIAL_NREGS
2345 argvec[count].partial
2346 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2347#else
2348 argvec[count].partial = 0;
2349#endif
2350
2351 locate_and_pad_parm (mode, NULL_TREE,
2352 argvec[count].reg && argvec[count].partial == 0,
2353 NULL_TREE, &args_size, &argvec[count].offset,
2354 &argvec[count].size);
2355
2356 if (argvec[count].size.var)
2357 abort ();
2358
2359#ifndef REG_PARM_STACK_SPACE
2360 if (argvec[count].partial)
2361 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2362#endif
2363
2364 if (argvec[count].reg == 0 || argvec[count].partial != 0
2365#ifdef REG_PARM_STACK_SPACE
2366 || 1
2367#endif
2368 )
2369 args_size.constant += argvec[count].size.constant;
2370
2371#ifdef ACCUMULATE_OUTGOING_ARGS
2372 /* If this arg is actually passed on the stack, it might be
2373 clobbering something we already put there (this library call might
2374 be inside the evaluation of an argument to a function whose call
2375 requires the stack). This will only occur when the library call
2376 has sufficient args to run out of argument registers. Abort in
2377 this case; if this ever occurs, code must be added to save and
2378 restore the arg slot. */
2379
2380 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2381 abort ();
2382#endif
2383
0f41302f 2384 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
2385 }
2386 va_end (p);
2387
2388 /* If this machine requires an external definition for library
2389 functions, write one out. */
2390 assemble_external_libcall (fun);
2391
2392 original_args_size = args_size;
2393#ifdef STACK_BOUNDARY
2394 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2395 / STACK_BYTES) * STACK_BYTES);
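  /* For instance, with STACK_BOUNDARY == 64 (so STACK_BYTES == 8), a 20-byte
     argument block is rounded up to 24 bytes here.  */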
2396#endif
2397
2398#ifdef REG_PARM_STACK_SPACE
2399 args_size.constant = MAX (args_size.constant,
2400 REG_PARM_STACK_SPACE (NULL_TREE));
2401#ifndef OUTGOING_REG_PARM_STACK_SPACE
2402 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2403#endif
2404#endif
2405
322e3e34
RK
2406 if (args_size.constant > current_function_outgoing_args_size)
2407 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2408
2409#ifdef ACCUMULATE_OUTGOING_ARGS
322e3e34
RK
2410 args_size.constant = 0;
2411#endif
2412
2413#ifndef PUSH_ROUNDING
2414 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2415#endif
2416
2417#ifdef PUSH_ARGS_REVERSED
2418#ifdef STACK_BOUNDARY
2419 /* If we push args individually in reverse order, perform stack alignment
2420 before the first push (the last arg). */
2421 if (argblock == 0)
2422 anti_adjust_stack (GEN_INT (args_size.constant
2423 - original_args_size.constant));
2424#endif
2425#endif
2426
2427#ifdef PUSH_ARGS_REVERSED
2428 inc = -1;
2429 argnum = nargs - 1;
2430#else
2431 inc = 1;
2432 argnum = 0;
2433#endif
2434
2435 /* Push the args that need to be pushed. */
2436
2437 for (count = 0; count < nargs; count++, argnum += inc)
2438 {
2439 register enum machine_mode mode = argvec[argnum].mode;
2440 register rtx val = argvec[argnum].value;
2441 rtx reg = argvec[argnum].reg;
2442 int partial = argvec[argnum].partial;
2443
2444 if (! (reg != 0 && partial == 0))
2445 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2446 argblock, GEN_INT (argvec[count].offset.constant));
2447 NO_DEFER_POP;
2448 }
2449
2450#ifndef PUSH_ARGS_REVERSED
2451#ifdef STACK_BOUNDARY
2452 /* If we pushed args in forward order, perform stack alignment
2453 after pushing the last arg. */
2454 if (argblock == 0)
2455 anti_adjust_stack (GEN_INT (args_size.constant
2456 - original_args_size.constant));
2457#endif
2458#endif
2459
2460#ifdef PUSH_ARGS_REVERSED
2461 argnum = nargs - 1;
2462#else
2463 argnum = 0;
2464#endif
2465
77cac2f2 2466 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 2467
322e3e34
RK
2468 /* Now load any reg parms into their regs. */
2469
2470 for (count = 0; count < nargs; count++, argnum += inc)
2471 {
2472 register enum machine_mode mode = argvec[argnum].mode;
2473 register rtx val = argvec[argnum].value;
2474 rtx reg = argvec[argnum].reg;
2475 int partial = argvec[argnum].partial;
2476
2477 if (reg != 0 && partial == 0)
2478 emit_move_insn (reg, val);
2479 NO_DEFER_POP;
2480 }
2481
2482 /* For version 1.37, try deleting this entirely. */
2483 if (! no_queue)
2484 emit_queue ();
2485
2486 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
2487 for (count = 0; count < nargs; count++)
2488 if (argvec[count].reg != 0)
77cac2f2 2489 use_reg (&call_fusage, argvec[count].reg);
322e3e34 2490
322e3e34
RK
2491 /* Don't allow popping to be deferred, since then
2492 cse'ing of library calls could delete a call and leave the pop. */
2493 NO_DEFER_POP;
2494
2495 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2496 will set inhibit_defer_pop to that value. */
2497
2c8da025
RK
2498 emit_call_1 (fun,
2499 get_identifier (XSTR (orgfun, 0)),
2500 get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
322e3e34
RK
2501 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2502 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 2503 old_inhibit_defer_pop + 1, call_fusage, no_queue);
322e3e34 2504
888aa7a9
RS
2505 pop_temp_slots ();
2506
322e3e34
RK
2507 /* Now restore inhibit_defer_pop to its actual original value. */
2508 OK_DEFER_POP;
2509}
2510\f
2511/* Like emit_library_call except that an extra argument, VALUE,
2512 comes second and says where to store the result.
fac0ad80
RS
2513 (If VALUE is zero, this function chooses a convenient way
 2514	 to return the value.)
322e3e34 2515
fac0ad80
RS
2516 This function returns an rtx for where the value is to be found.
2517 If VALUE is nonzero, VALUE is returned. */
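/* Illustrative usage (a sketch, not taken from the compiler sources): a
   signed SImode division through a runtime routine, with hypothetical
   operands OP0 and OP1, might be emitted as

	rtx quotient
	  = emit_library_call_value (gen_rtx (SYMBOL_REF, Pmode, "__divsi3"),
				     NULL_RTX, 1, SImode, 2,
				     op0, SImode, op1, SImode);

   NO_QUEUE is 1 because the call is `const', and since VALUE is NULL_RTX the
   function chooses where QUOTIENT lives.  */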
2518
2519rtx
4f90e4a0
RK
2520emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2521 enum machine_mode outmode, int nargs, ...))
322e3e34 2522{
4f90e4a0
RK
2523#ifndef __STDC__
2524 rtx orgfun;
2525 rtx value;
2526 int no_queue;
2527 enum machine_mode outmode;
2528 int nargs;
2529#endif
322e3e34
RK
2530 va_list p;
2531 /* Total size in bytes of all the stack-parms scanned so far. */
2532 struct args_size args_size;
2533 /* Size of arguments before any adjustments (such as rounding). */
2534 struct args_size original_args_size;
2535 register int argnum;
322e3e34 2536 rtx fun;
322e3e34
RK
2537 int inc;
2538 int count;
2539 rtx argblock = 0;
2540 CUMULATIVE_ARGS args_so_far;
2541 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2542 struct args_size offset; struct args_size size; };
2543 struct arg *argvec;
2544 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2545 rtx call_fusage = 0;
322e3e34 2546 rtx mem_value = 0;
fac0ad80 2547 int pcc_struct_value = 0;
4f389214 2548 int struct_value_size = 0;
d61bee95 2549 int is_const;
322e3e34 2550
4f90e4a0
RK
2551 VA_START (p, nargs);
2552
2553#ifndef __STDC__
2554 orgfun = va_arg (p, rtx);
322e3e34
RK
2555 value = va_arg (p, rtx);
2556 no_queue = va_arg (p, int);
2557 outmode = va_arg (p, enum machine_mode);
2558 nargs = va_arg (p, int);
4f90e4a0
RK
2559#endif
2560
d61bee95 2561 is_const = no_queue;
4f90e4a0 2562 fun = orgfun;
322e3e34
RK
2563
2564 /* If this kind of value comes back in memory,
2565 decide where in memory it should come back. */
fac0ad80 2566 if (aggregate_value_p (type_for_mode (outmode, 0)))
322e3e34 2567 {
fac0ad80
RS
2568#ifdef PCC_STATIC_STRUCT_RETURN
2569 rtx pointer_reg
2570 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2571 0);
2572 mem_value = gen_rtx (MEM, outmode, pointer_reg);
2573 pcc_struct_value = 1;
2574 if (value == 0)
2575 value = gen_reg_rtx (outmode);
2576#else /* not PCC_STATIC_STRUCT_RETURN */
4f389214 2577 struct_value_size = GET_MODE_SIZE (outmode);
fac0ad80 2578 if (value != 0 && GET_CODE (value) == MEM)
322e3e34
RK
2579 mem_value = value;
2580 else
2581 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
fac0ad80 2582#endif
779c643a
JW
2583
2584 /* This call returns a big structure. */
2585 is_const = 0;
322e3e34
RK
2586 }
2587
2588 /* ??? Unfinished: must pass the memory address as an argument. */
2589
2590 /* Copy all the libcall-arguments out of the varargs data
2591 and into a vector ARGVEC.
2592
2593 Compute how to pass each argument. We only support a very small subset
2594 of the full argument passing conventions to limit complexity here since
2595 library functions shouldn't have many args. */
2596
2597 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2598
eecb6f50 2599 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2600
2601 args_size.constant = 0;
2602 args_size.var = 0;
2603
2604 count = 0;
2605
888aa7a9
RS
2606 push_temp_slots ();
2607
322e3e34
RK
2608 /* If there's a structure value address to be passed,
2609 either pass it in the special place, or pass it as an extra argument. */
fac0ad80 2610 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
322e3e34
RK
2611 {
2612 rtx addr = XEXP (mem_value, 0);
fac0ad80 2613 nargs++;
322e3e34 2614
fac0ad80
RS
2615 /* Make sure it is a reasonable operand for a move or push insn. */
2616 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2617 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2618 addr = force_operand (addr, NULL_RTX);
322e3e34 2619
fac0ad80 2620 argvec[count].value = addr;
4fc3dcd5 2621 argvec[count].mode = Pmode;
fac0ad80 2622 argvec[count].partial = 0;
322e3e34 2623
4fc3dcd5 2624 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
322e3e34 2625#ifdef FUNCTION_ARG_PARTIAL_NREGS
4fc3dcd5 2626 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
fac0ad80 2627 abort ();
322e3e34
RK
2628#endif
2629
4fc3dcd5 2630 locate_and_pad_parm (Pmode, NULL_TREE,
fac0ad80
RS
2631 argvec[count].reg && argvec[count].partial == 0,
2632 NULL_TREE, &args_size, &argvec[count].offset,
2633 &argvec[count].size);
322e3e34
RK
2634
2635
fac0ad80 2636 if (argvec[count].reg == 0 || argvec[count].partial != 0
322e3e34 2637#ifdef REG_PARM_STACK_SPACE
fac0ad80 2638 || 1
322e3e34 2639#endif
fac0ad80
RS
2640 )
2641 args_size.constant += argvec[count].size.constant;
322e3e34 2642
0f41302f 2643 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
fac0ad80
RS
2644
2645 count++;
322e3e34
RK
2646 }
2647
2648 for (; count < nargs; count++)
2649 {
2650 rtx val = va_arg (p, rtx);
2651 enum machine_mode mode = va_arg (p, enum machine_mode);
2652
2653 /* We cannot convert the arg value to the mode the library wants here;
2654 must do it earlier where we know the signedness of the arg. */
2655 if (mode == BLKmode
2656 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2657 abort ();
2658
2659 /* On some machines, there's no way to pass a float to a library fcn.
2660 Pass it as a double instead. */
2661#ifdef LIBGCC_NEEDS_DOUBLE
2662 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2663 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2664#endif
2665
2666 /* There's no need to call protect_from_queue, because
2667 either emit_move_insn or emit_push_insn will do that. */
2668
2669 /* Make sure it is a reasonable operand for a move or push insn. */
2670 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2671 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2672 val = force_operand (val, NULL_RTX);
2673
322e3e34
RK
2674#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2675 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2676 {
a44492f0
RK
2677 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2678 be viewed as just an efficiency improvement. */
888aa7a9
RS
2679 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2680 emit_move_insn (slot, val);
2681 val = XEXP (slot, 0);
2682 mode = Pmode;
2683 }
322e3e34
RK
2684#endif
2685
888aa7a9
RS
2686 argvec[count].value = val;
2687 argvec[count].mode = mode;
2688
322e3e34 2689 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 2690 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
2691 abort ();
2692#ifdef FUNCTION_ARG_PARTIAL_NREGS
2693 argvec[count].partial
2694 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2695#else
2696 argvec[count].partial = 0;
2697#endif
2698
2699 locate_and_pad_parm (mode, NULL_TREE,
2700 argvec[count].reg && argvec[count].partial == 0,
2701 NULL_TREE, &args_size, &argvec[count].offset,
2702 &argvec[count].size);
2703
2704 if (argvec[count].size.var)
2705 abort ();
2706
2707#ifndef REG_PARM_STACK_SPACE
2708 if (argvec[count].partial)
2709 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2710#endif
2711
2712 if (argvec[count].reg == 0 || argvec[count].partial != 0
2713#ifdef REG_PARM_STACK_SPACE
2714 || 1
2715#endif
2716 )
2717 args_size.constant += argvec[count].size.constant;
2718
2719#ifdef ACCUMULATE_OUTGOING_ARGS
2720 /* If this arg is actually passed on the stack, it might be
2721 clobbering something we already put there (this library call might
2722 be inside the evaluation of an argument to a function whose call
2723 requires the stack). This will only occur when the library call
2724 has sufficient args to run out of argument registers. Abort in
2725 this case; if this ever occurs, code must be added to save and
2726 restore the arg slot. */
2727
2728 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2729 abort ();
2730#endif
2731
0f41302f 2732 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
2733 }
2734 va_end (p);
2735
2736 /* If this machine requires an external definition for library
2737 functions, write one out. */
2738 assemble_external_libcall (fun);
2739
2740 original_args_size = args_size;
2741#ifdef STACK_BOUNDARY
2742 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2743 / STACK_BYTES) * STACK_BYTES);
2744#endif
2745
2746#ifdef REG_PARM_STACK_SPACE
2747 args_size.constant = MAX (args_size.constant,
2748 REG_PARM_STACK_SPACE (NULL_TREE));
2749#ifndef OUTGOING_REG_PARM_STACK_SPACE
2750 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2751#endif
2752#endif
2753
322e3e34
RK
2754 if (args_size.constant > current_function_outgoing_args_size)
2755 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2756
2757#ifdef ACCUMULATE_OUTGOING_ARGS
322e3e34
RK
2758 args_size.constant = 0;
2759#endif
2760
2761#ifndef PUSH_ROUNDING
2762 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2763#endif
2764
2765#ifdef PUSH_ARGS_REVERSED
2766#ifdef STACK_BOUNDARY
2767 /* If we push args individually in reverse order, perform stack alignment
2768 before the first push (the last arg). */
2769 if (argblock == 0)
2770 anti_adjust_stack (GEN_INT (args_size.constant
2771 - original_args_size.constant));
2772#endif
2773#endif
2774
2775#ifdef PUSH_ARGS_REVERSED
2776 inc = -1;
2777 argnum = nargs - 1;
2778#else
2779 inc = 1;
2780 argnum = 0;
2781#endif
2782
2783 /* Push the args that need to be pushed. */
2784
2785 for (count = 0; count < nargs; count++, argnum += inc)
2786 {
2787 register enum machine_mode mode = argvec[argnum].mode;
2788 register rtx val = argvec[argnum].value;
2789 rtx reg = argvec[argnum].reg;
2790 int partial = argvec[argnum].partial;
2791
2792 if (! (reg != 0 && partial == 0))
2793 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2794 argblock, GEN_INT (argvec[count].offset.constant));
2795 NO_DEFER_POP;
2796 }
2797
2798#ifndef PUSH_ARGS_REVERSED
2799#ifdef STACK_BOUNDARY
2800 /* If we pushed args in forward order, perform stack alignment
2801 after pushing the last arg. */
2802 if (argblock == 0)
2803 anti_adjust_stack (GEN_INT (args_size.constant
2804 - original_args_size.constant));
2805#endif
2806#endif
2807
2808#ifdef PUSH_ARGS_REVERSED
2809 argnum = nargs - 1;
2810#else
2811 argnum = 0;
2812#endif
2813
77cac2f2 2814 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 2815
322e3e34
RK
2816 /* Now load any reg parms into their regs. */
2817
322e3e34
RK
2818 for (count = 0; count < nargs; count++, argnum += inc)
2819 {
2820 register enum machine_mode mode = argvec[argnum].mode;
2821 register rtx val = argvec[argnum].value;
2822 rtx reg = argvec[argnum].reg;
2823 int partial = argvec[argnum].partial;
2824
2825 if (reg != 0 && partial == 0)
2826 emit_move_insn (reg, val);
2827 NO_DEFER_POP;
2828 }
2829
2830#if 0
2831 /* For version 1.37, try deleting this entirely. */
2832 if (! no_queue)
2833 emit_queue ();
2834#endif
2835
2836 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
2837 for (count = 0; count < nargs; count++)
2838 if (argvec[count].reg != 0)
77cac2f2 2839 use_reg (&call_fusage, argvec[count].reg);
322e3e34 2840
fac0ad80
RS
2841 /* Pass the function the address in which to return a structure value. */
2842 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
2843 {
2844 emit_move_insn (struct_value_rtx,
2845 force_reg (Pmode,
2846 force_operand (XEXP (mem_value, 0),
2847 NULL_RTX)));
2848 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 2849 use_reg (&call_fusage, struct_value_rtx);
fac0ad80
RS
2850 }
2851
322e3e34
RK
2852 /* Don't allow popping to be deferred, since then
2853 cse'ing of library calls could delete a call and leave the pop. */
2854 NO_DEFER_POP;
2855
2856 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2857 will set inhibit_defer_pop to that value. */
2858
2c8da025
RK
2859 emit_call_1 (fun,
2860 get_identifier (XSTR (orgfun, 0)),
2861 get_identifier (XSTR (orgfun, 0)), args_size.constant,
4f389214 2862 struct_value_size,
322e3e34 2863 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
fac0ad80
RS
2864 (outmode != VOIDmode && mem_value == 0
2865 ? hard_libcall_value (outmode) : NULL_RTX),
77cac2f2 2866 old_inhibit_defer_pop + 1, call_fusage, is_const);
322e3e34
RK
2867
2868 /* Now restore inhibit_defer_pop to its actual original value. */
2869 OK_DEFER_POP;
2870
888aa7a9
RS
2871 pop_temp_slots ();
2872
322e3e34
RK
2873 /* Copy the value to the right place. */
2874 if (outmode != VOIDmode)
2875 {
2876 if (mem_value)
2877 {
2878 if (value == 0)
fac0ad80 2879 value = mem_value;
322e3e34
RK
2880 if (value != mem_value)
2881 emit_move_insn (value, mem_value);
2882 }
2883 else if (value != 0)
2884 emit_move_insn (value, hard_libcall_value (outmode));
fac0ad80
RS
2885 else
2886 value = hard_libcall_value (outmode);
322e3e34 2887 }
fac0ad80
RS
2888
2889 return value;
322e3e34
RK
2890}
2891\f
51bbfa0c
RS
2892#if 0
2893/* Return an rtx which represents a suitable home on the stack
2894 given TYPE, the type of the argument looking for a home.
2895 This is called only for BLKmode arguments.
2896
2897 SIZE is the size needed for this target.
2898 ARGS_ADDR is the address of the bottom of the argument block for this call.
2899 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
2900 if this machine uses push insns. */
2901
2902static rtx
2903target_for_arg (type, size, args_addr, offset)
2904 tree type;
2905 rtx size;
2906 rtx args_addr;
2907 struct args_size offset;
2908{
2909 rtx target;
2910 rtx offset_rtx = ARGS_SIZE_RTX (offset);
2911
 2912  /* We do not call memory_address if we can avoid it,
2913 because we want to address as close to the stack
2914 as possible. For non-variable sized arguments,
2915 this will be stack-pointer relative addressing. */
2916 if (GET_CODE (offset_rtx) == CONST_INT)
2917 target = plus_constant (args_addr, INTVAL (offset_rtx));
2918 else
2919 {
2920 /* I have no idea how to guarantee that this
2921 will work in the presence of register parameters. */
2922 target = gen_rtx (PLUS, Pmode, args_addr, offset_rtx);
2923 target = memory_address (QImode, target);
2924 }
2925
2926 return gen_rtx (MEM, BLKmode, target);
2927}
2928#endif
2929\f
2930/* Store a single argument for a function call
2931 into the register or memory area where it must be passed.
2932 *ARG describes the argument value and where to pass it.
2933
2934 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 2935 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
2936
2937 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
2938 so must be careful about how the stack is used.
2939
2940 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
2941 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
2942 that we need not worry about saving and restoring the stack.
2943
2944 FNDECL is the declaration of the function we are calling. */
2945
2946static void
6f90e075
JW
2947store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl,
2948 reg_parm_stack_space)
51bbfa0c
RS
2949 struct arg_data *arg;
2950 rtx argblock;
2951 int may_be_alloca;
2952 int variable_size;
2953 tree fndecl;
6f90e075 2954 int reg_parm_stack_space;
51bbfa0c
RS
2955{
2956 register tree pval = arg->tree_value;
2957 rtx reg = 0;
2958 int partial = 0;
2959 int used = 0;
2960 int i, lower_bound, upper_bound;
2961
2962 if (TREE_CODE (pval) == ERROR_MARK)
2963 return;
2964
cc79451b
RK
2965 /* Push a new temporary level for any temporaries we make for
2966 this argument. */
2967 push_temp_slots ();
2968
51bbfa0c
RS
2969#ifdef ACCUMULATE_OUTGOING_ARGS
2970 /* If this is being stored into a pre-allocated, fixed-size, stack area,
2971 save any previous data at that location. */
2972 if (argblock && ! variable_size && arg->stack)
2973 {
2974#ifdef ARGS_GROW_DOWNWARD
0f41302f
MS
2975 /* stack_slot is negative, but we want to index stack_usage_map
2976 with positive values. */
51bbfa0c
RS
2977 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2978 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
2979 else
2980 abort ();
2981
2982 lower_bound = upper_bound - arg->size.constant;
2983#else
2984 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2985 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
2986 else
2987 lower_bound = 0;
2988
2989 upper_bound = lower_bound + arg->size.constant;
2990#endif
2991
2992 for (i = lower_bound; i < upper_bound; i++)
2993 if (stack_usage_map[i]
2994#ifdef REG_PARM_STACK_SPACE
2995 /* Don't store things in the fixed argument area at this point;
2996 it has already been saved. */
6f90e075 2997 && i > reg_parm_stack_space
51bbfa0c
RS
2998#endif
2999 )
3000 break;
3001
3002 if (i != upper_bound)
3003 {
3004 /* We need to make a save area. See what mode we can make it. */
3005 enum machine_mode save_mode
3006 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3007 rtx stack_area
3008 = gen_rtx (MEM, save_mode,
3009 memory_address (save_mode, XEXP (arg->stack_slot, 0)));
3010
3011 if (save_mode == BLKmode)
3012 {
3013 arg->save_area = assign_stack_temp (BLKmode,
6fa51029 3014 arg->size.constant, 0);
3668e76e
JL
3015 MEM_IN_STRUCT_P (arg->save_area)
3016 = AGGREGATE_TYPE_P (TREE_TYPE (arg->tree_value));
cc79451b 3017 preserve_temp_slots (arg->save_area);
51bbfa0c 3018 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 3019 GEN_INT (arg->size.constant),
51bbfa0c
RS
3020 PARM_BOUNDARY / BITS_PER_UNIT);
3021 }
3022 else
3023 {
3024 arg->save_area = gen_reg_rtx (save_mode);
3025 emit_move_insn (arg->save_area, stack_area);
3026 }
3027 }
3028 }
3029#endif
3030
3031 /* If this isn't going to be placed on both the stack and in registers,
3032 set up the register and number of words. */
3033 if (! arg->pass_on_stack)
3034 reg = arg->reg, partial = arg->partial;
3035
3036 if (reg != 0 && partial == 0)
3037 /* Being passed entirely in a register. We shouldn't be called in
3038 this case. */
3039 abort ();
3040
4ab56118
RK
3041 /* If this arg needs special alignment, don't load the registers
3042 here. */
3043 if (arg->n_aligned_regs != 0)
3044 reg = 0;
4ab56118 3045
4ab56118 3046 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
3047 it directly into its stack slot. Otherwise, we can. */
3048 if (arg->value == 0)
d64f5a78
RS
3049 {
3050#ifdef ACCUMULATE_OUTGOING_ARGS
3051 /* stack_arg_under_construction is nonzero if a function argument is
3052 being evaluated directly into the outgoing argument list and
3053 expand_call must take special action to preserve the argument list
3054 if it is called recursively.
3055
3056 For scalar function arguments stack_usage_map is sufficient to
3057 determine which stack slots must be saved and restored. Scalar
3058 arguments in general have pass_on_stack == 0.
3059
3060 If this argument is initialized by a function which takes the
3061 address of the argument (a C++ constructor or a C function
3062 returning a BLKmode structure), then stack_usage_map is
3063 insufficient and expand_call must push the stack around the
3064 function call. Such arguments have pass_on_stack == 1.
3065
3066 Note that it is always safe to set stack_arg_under_construction,
3067 but this generates suboptimal code if set when not needed. */
3068
3069 if (arg->pass_on_stack)
3070 stack_arg_under_construction++;
3071#endif
3a08477a
RK
3072 arg->value = expand_expr (pval,
3073 (partial
3074 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3075 ? NULL_RTX : arg->stack,
e5d70561 3076 VOIDmode, 0);
1efe6448
RK
3077
3078 /* If we are promoting object (or for any other reason) the mode
3079 doesn't agree, convert the mode. */
3080
7373d92d
RK
3081 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3082 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3083 arg->value, arg->unsignedp);
1efe6448 3084
d64f5a78
RS
3085#ifdef ACCUMULATE_OUTGOING_ARGS
3086 if (arg->pass_on_stack)
3087 stack_arg_under_construction--;
3088#endif
3089 }
51bbfa0c
RS
3090
3091 /* Don't allow anything left on stack from computation
3092 of argument to alloca. */
3093 if (may_be_alloca)
3094 do_pending_stack_adjust ();
3095
3096 if (arg->value == arg->stack)
3097 /* If the value is already in the stack slot, we are done. */
3098 ;
1efe6448 3099 else if (arg->mode != BLKmode)
51bbfa0c
RS
3100 {
3101 register int size;
3102
3103 /* Argument is a scalar, not entirely passed in registers.
3104 (If part is passed in registers, arg->partial says how much
3105 and emit_push_insn will take care of putting it there.)
3106
3107 Push it, and if its size is less than the
3108 amount of space allocated to it,
3109 also bump stack pointer by the additional space.
3110 Note that in C the default argument promotions
3111 will prevent such mismatches. */
3112
1efe6448 3113 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
3114 /* Compute how much space the push instruction will push.
3115 On many machines, pushing a byte will advance the stack
3116 pointer by a halfword. */
3117#ifdef PUSH_ROUNDING
3118 size = PUSH_ROUNDING (size);
3119#endif
3120 used = size;
3121
3122 /* Compute how much space the argument should get:
3123 round up to a multiple of the alignment for arguments. */
1efe6448 3124 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
3125 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3126 / (PARM_BOUNDARY / BITS_PER_UNIT))
3127 * (PARM_BOUNDARY / BITS_PER_UNIT));
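      /* For example, a 2-byte scalar with PARM_BOUNDARY == 32 gives
	 used == ((2 + 3) / 4) * 4 == 4, so the stack pointer is bumped by
	 the 2 extra bytes of padding.  */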
3128
3129 /* This isn't already where we want it on the stack, so put it there.
3130 This can either be done with push or copy insns. */
ccf5d244
RK
3131 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
3132 0, partial, reg, used - size,
3133 argblock, ARGS_SIZE_RTX (arg->offset));
51bbfa0c
RS
3134 }
3135 else
3136 {
3137 /* BLKmode, at least partly to be pushed. */
3138
3139 register int excess;
3140 rtx size_rtx;
3141
3142 /* Pushing a nonscalar.
3143 If part is passed in registers, PARTIAL says how much
3144 and emit_push_insn will take care of putting it there. */
3145
3146 /* Round its size up to a multiple
3147 of the allocation unit for arguments. */
3148
3149 if (arg->size.var != 0)
3150 {
3151 excess = 0;
3152 size_rtx = ARGS_SIZE_RTX (arg->size);
3153 }
3154 else
3155 {
51bbfa0c
RS
3156 /* PUSH_ROUNDING has no effect on us, because
3157 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 3158 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 3159 + partial * UNITS_PER_WORD);
e4f93898 3160 size_rtx = expr_size (pval);
51bbfa0c
RS
3161 }
3162
1efe6448 3163 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c
RS
3164 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3165 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset));
3166 }
3167
3168
3169 /* Unless this is a partially-in-register argument, the argument is now
3170 in the stack.
3171
3172 ??? Note that this can change arg->value from arg->stack to
3173 arg->stack_slot and it matters when they are not the same.
3174 It isn't totally clear that this is correct in all cases. */
3175 if (partial == 0)
3176 arg->value = arg->stack_slot;
3177
3178 /* Once we have pushed something, pops can't safely
3179 be deferred during the rest of the arguments. */
3180 NO_DEFER_POP;
3181
3182 /* ANSI doesn't require a sequence point here,
3183 but PCC has one, so this will avoid some problems. */
3184 emit_queue ();
3185
db907e7b
RK
3186 /* Free any temporary slots made in processing this argument. Show
3187 that we might have taken the address of something and pushed that
3188 as an operand. */
3189 preserve_temp_slots (NULL_RTX);
51bbfa0c 3190 free_temp_slots ();
cc79451b 3191 pop_temp_slots ();
51bbfa0c
RS
3192
3193#ifdef ACCUMULATE_OUTGOING_ARGS
3194 /* Now mark the segment we just used. */
3195 if (argblock && ! variable_size && arg->stack)
3196 for (i = lower_bound; i < upper_bound; i++)
3197 stack_usage_map[i] = 1;
3198#endif
3199}