/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif
#include "insn-flags.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
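/* For example, with STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8,
   STACK_BYTES is 8, and the constant argument-block sizes computed
   below are rounded up to a multiple of 8 bytes.  */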

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Non-zero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets;
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
#endif
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

#ifdef ACCUMULATE_OUTGOING_ARGS
/* A vector of one char per byte of stack space.  A byte is non-zero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
#endif

static int calls_function	PROTO((tree, int));
static int calls_function_1	PROTO((tree, int));
static void emit_call_1		PROTO((rtx, tree, tree, int, int, rtx, rtx,
				       int, rtx, int));
static void store_one_arg	PROTO ((struct arg_data *, rtx, int, int,
					tree, int));

/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we only need to return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

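/* Chain of SAVE_EXPRs already examined by calls_function_1, kept so that
   a SAVE_EXPR shared between operands is walked only once per call to
   calls_function.  */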
static tree calls_function_save_exprs;

static int
calls_function (exp, which)
     tree exp;
     int which;
{
  int val;
  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}

static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  register int i;
  enum tree_code code = TREE_CODE (exp);
  int type = TREE_CODE_CLASS (code);
  int length = tree_code_length[(int) code];

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  /* Only expressions and references can contain calls.  */
  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
      && type != 'b')
    return 0;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
	return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == FUNCTION_DECL))
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

	  if ((DECL_BUILT_IN (fndecl)
	       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
	      || (DECL_SAVED_INSNS (fndecl)
		  && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
		      & FUNCTION_FLAGS_CALLS_ALLOCA)))
	    return 1;
	}

      /* Third operand is RTL.  */
      length = 2;
      break;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return 0;
      if (value_member (exp, calls_function_save_exprs))
	return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
					     calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
	      && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
	register tree local;

	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
	  if (DECL_INITIAL (local) != 0
	      && calls_function_1 (DECL_INITIAL (local), which))
	    return 1;
      }
      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (exp);
	     subblock;
	     subblock = TREE_CHAIN (subblock))
	  if (calls_function_1 (subblock, which))
	    return 1;
      }
      return 0;

    case METHOD_CALL_EXPR:
      length = 3;
      break;

    case WITH_CLEANUP_EXPR:
      length = 1;
      break;

    case RTL_EXPR:
      return 0;
    }

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
	&& calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp =
#ifdef SMALL_REGISTER_CLASSES
      /* If we are using registers for parameters, force the
	 function address into a register now.  */
      (SMALL_REGISTER_CLASSES && reg_parm_seen)
      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
      :
#endif
      memory_address (FUNCTION_MODE, funexp);
  else
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   rounded up to STACK_BOUNDARY; zero if the size is variable.
   This is both to put into the call insn and
   to generate explicit popping code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.

   IS_CONST is true if this is a `const' call.  */

static void
emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
	     next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
	     is_const)
     rtx funexp;
     tree fndecl;
     tree funtype;
     int stack_size;
     int struct_value_size;
     rtx next_arg_reg;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int is_const;
{
  rtx stack_size_rtx = GEN_INT (stack_size);
  rtx struct_value_size_rtx = GEN_INT (struct_value_size);
  rtx call_insn;
  int already_popped = 0;

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#ifndef ACCUMULATE_OUTGOING_ARGS
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  if (HAVE_call_pop && HAVE_call_value_pop
      && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
	  || stack_size == 0))
    {
      rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = gen_call_value_pop (valreg,
				  gen_rtx (MEM, FUNCTION_MODE, funexp),
				  stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, funexp),
			    stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (gen_call_value (valreg,
					gen_rtx (MEM, FUNCTION_MODE, funexp),
					stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp),
				  stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (is_const)
    CONST_CALL_P (call_insn) = 1;

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

#ifndef ACCUMULATE_OUTGOING_ARGS
  /* If returning from the subroutine does not automatically pop the args,
     we need an instruction to pop them sooner or later.
     Perhaps do it now; perhaps just record how much space to pop later.

     If returning from the subroutine does pop the args, indicate that the
     stack pointer will be changed.  */

  if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn) =
	  gen_rtx (EXPR_LIST, VOIDmode,
		   gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx),
		   CALL_INSN_FUNCTION_USAGE (call_insn));
      stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
      stack_size_rtx = GEN_INT (stack_size);
    }

  if (stack_size != 0)
    {
      if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
	pending_stack_adjust += stack_size;
      else
	adjust_stack (stack_size_rtx);
    }
#endif
}

/* Generate all the code for a function call
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (exp, target, ignore)
     tree exp;
     rtx target;
     int ignore;
{
  /* List of actual parameters.  */
  tree actparms = TREE_OPERAND (exp, 1);
  /* RTX for the function to be called.  */
  rtx funexp;
  /* Tree node for the function to be called (not the address!).  */
  tree funtree;
  /* Data type of the function.  */
  tree funtype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  char *name = 0;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  int struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Count arg position in order args appear.  */
  int argpos;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
  int must_preallocate = 1;
#else
#ifdef PUSH_ROUNDING
  int must_preallocate = 0;
#else
  int must_preallocate = 1;
#endif
#endif

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;
  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Nonzero if it is plausible that this is a call to alloca.  */
  int may_be_alloca;
  /* Nonzero if this is a call to setjmp or a related function.  */
  int returns_twice;
  /* Nonzero if this is a call to `longjmp'.  */
  int is_longjmp;
  /* Nonzero if this is a call to an inline function.  */
  int is_integrable = 0;
  /* Nonzero if this is a call to a `const' function.
     Note that only explicitly named functions are handled as `const' here.  */
  int is_const = 0;
  /* Nonzero if this is a call to a `volatile' function.  */
  int is_volatile = 0;
#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = -1, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
#endif

  rtx old_stack_level = 0;
  int old_pending_adj = 0;
  int old_stack_arg_under_construction;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  tree old_cleanups = cleanups_this_call;
  rtx call_fusage = 0;
  register tree p;
  register int i, j;

  /* See if we can find a DECL-node for the actual function.
     As a result, decide whether this is a call to an integrable function.  */

  p = TREE_OPERAND (exp, 0);
  if (TREE_CODE (p) == ADDR_EXPR)
    {
      fndecl = TREE_OPERAND (p, 0);
      if (TREE_CODE (fndecl) != FUNCTION_DECL)
	fndecl = 0;
      else
	{
	  if (!flag_no_inline
	      && fndecl != current_function_decl
	      && DECL_INLINE (fndecl)
	      && DECL_SAVED_INSNS (fndecl)
	      && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
	    is_integrable = 1;
	  else if (! TREE_ADDRESSABLE (fndecl))
	    {
	      /* In case this function later becomes inlinable,
		 record that there was already a non-inline call to it.

		 Use abstraction instead of setting TREE_ADDRESSABLE
		 directly.  */
	      if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
		  && optimize > 0)
		{
		  warning_with_decl (fndecl, "can't inline call to `%s'");
		  warning ("called from here");
		}
	      mark_addressable (fndecl);
	    }

	  if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
	      && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
	    is_const = 1;

	  if (TREE_THIS_VOLATILE (fndecl))
	    is_volatile = 1;
	}
    }

  /* If we don't have a specific function to call, see if we have a
     constant or `noreturn' function from the type.  */
  if (fndecl == 0)
    {
      is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
      is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
    }

#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
#endif

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    warning ("function call has aggregate value");

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp))
    {
      /* This call returns a big structure.  */
      is_const = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      {
	pcc_struct_value = 1;
	/* Easier than making that case work right.  */
	if (is_integrable)
	  {
	    /* In case this is a static function, note that it has been
	       used.  */
	    if (! TREE_ADDRESSABLE (fndecl))
	      mark_addressable (fndecl);
	    is_integrable = 0;
	  }
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
	struct_value_size = int_size_in_bytes (TREE_TYPE (exp));

	if (target && GET_CODE (target) == MEM)
	  structure_value_addr = XEXP (target, 0);
	else
	  {
	    /* Assign a temporary on the stack to hold the value.  */

	    /* For variable-sized objects, we must be called with a target
	       specified.  If we were to allocate space on the stack here,
	       we would have no way of knowing when to free it.  */

	    if (struct_value_size < 0)
	      abort ();

	    structure_value_addr
	      = XEXP (assign_stack_temp (BLKmode, struct_value_size, 1), 0);
	    MEM_IN_STRUCT_P (structure_value_addr)
	      = AGGREGATE_TYPE_P (TREE_TYPE (exp));
	    target = 0;
	  }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* If called function is inline, try to integrate it.  */

  if (is_integrable)
    {
      rtx temp;
      rtx before_call = get_last_insn ();

      temp = expand_inline_function (fndecl, actparms, target,
				     ignore, TREE_TYPE (exp),
				     structure_value_addr);

      /* If inlining succeeded, return.  */
      if ((HOST_WIDE_INT) temp != -1)
	{
	  if (flag_short_temps)
	    {
	      /* Perform all cleanups needed for the arguments of this
		 call (i.e. destructors in C++).  It is ok if these
		 destructors clobber RETURN_VALUE_REG, because the
		 only time we care about this is when TARGET is that
		 register.  But in C++, we take care to never return
		 that register directly.  */
	      expand_cleanups_to (old_cleanups);
	    }

#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* If the outgoing argument list must be preserved, push
	     the stack before executing the inlined function if it
	     makes any calls.  */

	  for (i = reg_parm_stack_space - 1; i >= 0; i--)
	    if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
	      break;

	  if (stack_arg_under_construction || i >= 0)
	    {
	      rtx first_insn
		= before_call ? NEXT_INSN (before_call) : get_insns ();
	      rtx insn, seq;

	      /* Look for a call in the inline function code.
		 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
		 nonzero then there is a call and it is not necessary
		 to scan the insns.  */

	      if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
		for (insn = first_insn; insn; insn = NEXT_INSN (insn))
		  if (GET_CODE (insn) == CALL_INSN)
		    break;

	      if (insn)
		{
		  /* Reserve enough stack space so that the largest
		     argument list of any function call in the inline
		     function does not overlap the argument list being
		     evaluated.  This is usually an overestimate because
		     allocate_dynamic_stack_space reserves space for an
		     outgoing argument list in addition to the requested
		     space, but there is no way to ask for stack space such
		     that an argument list of a certain length can be
		     safely constructed.  */

		  int adjust = OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl));
#ifdef REG_PARM_STACK_SPACE
		  /* Add the stack space reserved for register arguments
		     in the inline function.  What is really needed is the
		     largest value of reg_parm_stack_space in the inline
		     function, but that is not available.  Using the current
		     value of reg_parm_stack_space is wrong, but gives
		     correct results on all supported machines.  */
		  adjust += reg_parm_stack_space;
#endif
		  start_sequence ();
		  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		  allocate_dynamic_stack_space (GEN_INT (adjust),
						NULL_RTX, BITS_PER_UNIT);
		  seq = get_insns ();
		  end_sequence ();
		  emit_insns_before (seq, first_insn);
		  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
		}
	    }
#endif

	  /* If the result is equivalent to TARGET, return TARGET to simplify
	     checks in store_expr.  They can be equivalent but not equal in the
	     case of a function that returns BLKmode.  */
	  if (temp != target && rtx_equal_p (temp, target))
	    return target;
	  return temp;
	}

      /* If inlining failed, mark FNDECL as needing to be compiled
	 separately after all.  If function was declared inline,
	 give a warning.  */
      if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
	  && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
	{
	  warning_with_decl (fndecl, "inlining failed in call to `%s'");
	  warning ("called from here");
	}
      mark_addressable (fndecl);
    }

  /* When calling a const function, we must pop the stack args right away,
     so that the pop is deleted or moved with the call.  */
  if (is_const)
    NO_DEFER_POP;

  function_call_count++;

  if (fndecl && DECL_NAME (fndecl))
    name = IDENTIFIER_POINTER (DECL_NAME (fndecl));

#if 0
  /* Unless it's a call to a specific function that isn't alloca,
     if it has one argument, we must assume it might be alloca.  */

  may_be_alloca =
    (!(fndecl != 0 && strcmp (name, "alloca"))
     && actparms != 0
     && TREE_CHAIN (actparms) == 0);
#else
  /* We assume that alloca will always be called by name.  It
     makes no sense to pass it as a pointer-to-function to
     anything that does not understand its behavior.  */
  may_be_alloca =
    (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
	       && name[0] == 'a'
	       && ! strcmp (name, "alloca"))
	      || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
		  && name[0] == '_'
		  && ! strcmp (name, "__builtin_alloca"))));
#endif

  /* See if this is a call to a function that can return more than once
     or a call to longjmp.  */

  returns_twice = 0;
  is_longjmp = 0;

  if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15)
    {
      char *tname = name;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  returns_twice
	    = ((tname[1] == 'e'
		&& (! strcmp (tname, "setjmp")
		    || ! strcmp (tname, "setjmp_syscall")))
	       || (tname[1] == 'i'
		   && ! strcmp (tname, "sigsetjmp"))
	       || (tname[1] == 'a'
		   && ! strcmp (tname, "savectx")));
	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    is_longjmp = 1;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork")))
	returns_twice = 1;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	is_longjmp = 1;
    }

  if (may_be_alloca)
    current_function_calls_alloca = 1;

  /* Don't let pending stack adjusts add up to too much.
     Also, do all pending adjustments now
     if there is any chance this might be a call to alloca.  */

  if (pending_stack_adjust >= 32
      || (pending_stack_adjust > 0 && may_be_alloca))
    do_pending_stack_adjust ();

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
  if (TREE_CODE (funtype) != POINTER_TYPE)
    abort ();
  funtype = TREE_TYPE (funtype);

  /* Push the temporary stack slot level so that we can free any temporaries
     we make.  */
  push_temp_slots ();

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The last argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  */
  if (structure_value_addr && struct_value_rtx == 0)
    {
      /* If structure_value_addr is a REG other than
	 virtual_outgoing_args_rtx, we can always use it.  If it
	 is not a REG, we must always copy it into a register.
	 If it is virtual_outgoing_args_rtx, we must copy it to another
	 register in some cases.  */
      rtx temp = (GET_CODE (structure_value_addr) != REG
#ifdef ACCUMULATE_OUTGOING_ARGS
		  || (stack_arg_under_construction
		      && structure_value_addr == virtual_outgoing_args_rtx)
#endif
		  ? copy_addr_to_reg (structure_value_addr)
		  : structure_value_addr);

      actparms
	= tree_cons (error_mark_node,
		     make_tree (build_pointer_type (TREE_TYPE (funtype)),
				temp),
		     actparms);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
  num_actuals = i;

  /* Compute number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if STRICT_ARGUMENT_NAMING is defined.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If SETUP_INCOMING_VARARGS is defined and STRICT_ARGUMENT_NAMING is not,
     this machine will be able to place unnamed args that were passed in
     registers into the stack.  So treat all args as named.  This allows the
     insns emitted for a specific argument list to be independent of the
     function declaration.

     If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
     way to pass unnamed args in registers, so we must force them into
     memory.  */
#if !defined(SETUP_INCOMING_VARARGS) || defined(STRICT_ARGUMENT_NAMING)
  if (TYPE_ARG_TYPES (funtype) != 0)
    n_named_args
      = (list_length (TYPE_ARG_TYPES (funtype))
#ifndef STRICT_ARGUMENT_NAMING
	 /* Don't include the last named arg.  */
	 - 1
#endif
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else
#endif
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Make a vector to hold all the information about each arg.  */
  args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
  bzero ((char *) args, num_actuals * sizeof (struct arg_data));

  args_size.constant = 0;
  args_size.var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

#ifdef PUSH_ARGS_REVERSED
  i = num_actuals - 1, inc = -1;
  /* In this case, must reverse order of args
     so that we compute and push the last arg first.  */
#else
  i = 0, inc = 1;
#endif

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || TYPE_SIZE (type) == 0)
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (type)))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
#ifdef FUNCTION_ARG_CALLEE_COPIES
	  if (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type), type,
					  argpos < n_named_args)
	      /* If it's in a register, we must make a copy of it too.  */
	      /* ??? Is this a sufficient test?  Is there a better one? */
	      && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		   && REG_P (DECL_RTL (args[i].tree_value)))
	      && ! TREE_ADDRESSABLE (type))
	    {
	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
#endif
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (TYPE_SIZE (type) == 0
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  if (old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		      old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx (MEM, BLKmode,
				  allocate_dynamic_stack_space (size_rtx,
								NULL_RTX,
								TYPE_ALIGN (type)));
		}
	      else
		{
		  int size = int_size_in_bytes (type);
		  copy = assign_stack_temp (TYPE_MODE (type), size, 0);
		}

	      MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);

	      store_expr (args[i].tree_value, copy, 0);
	      is_const = 0;

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;
      args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
	args[i].partial
	  = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
					argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	is_const = 0;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
#ifdef REG_PARM_STACK_SPACE
	  || reg_parm_stack_space > 0
#endif
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     fndecl, &args_size, &args[i].offset,
			     &args[i].size);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = args_size;
#endif

#ifndef REG_PARM_STACK_SPACE
      /* If a part of the arg was put into registers,
	 don't include that part in the amount pushed.  */
      if (! args[i].pass_on_stack)
	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
				  / (PARM_BOUNDARY / BITS_PER_UNIT)
				  * (PARM_BOUNDARY / BITS_PER_UNIT));
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size.constant += args[i].size.constant;
      if (args[i].size.var)
	{
	  ADD_PARM_SIZE (args_size, args[i].size.var);
	}

      /* Since the slot offset points to the bottom of the slot,
	 we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = args_size;

      args[i].slot_offset.constant = -args_size.constant;
      if (args_size.var)
	{
	  SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
	}
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }

#ifdef FINAL_REG_PARM_STACK_SPACE
  reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
						     args_size.var);
#endif

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  original_args_size = args_size;
  if (args_size.var)
    {
      /* If this function requires a variable-sized argument list, don't try to
	 make a cse'able block for this call.  We may be able to do this
	 eventually, but it is too complicated to keep track of what insns go
	 in the cse'able block and which don't.  */

      is_const = 0;
      must_preallocate = 1;

      args_size.var = ARGS_SIZE_TREE (args_size);
      args_size.constant = 0;

#ifdef STACK_BOUNDARY
      if (STACK_BOUNDARY != BITS_PER_UNIT)
	args_size.var = round_up (args_size.var, STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
      if (reg_parm_stack_space > 0)
	{
	  args_size.var
	    = size_binop (MAX_EXPR, args_size.var,
			  size_int (REG_PARM_STACK_SPACE (fndecl)));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size.var
	    = size_binop (MINUS_EXPR, args_size.var,
			  size_int (reg_parm_stack_space));
#endif
	}
#endif
    }
  else
    {
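      /* Round the constant argument-block size up to a multiple of
	 STACK_BYTES, then apply the same REG_PARM_STACK_SPACE
	 adjustments as in the variable-size case above.  */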
#ifdef STACK_BOUNDARY
      args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
			     / STACK_BYTES) * STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
      args_size.constant = MAX (args_size.constant,
				reg_parm_stack_space);
#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
	args_size.constant = 0;
#endif
#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size.constant -= reg_parm_stack_space;
#endif
#endif
    }

  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size.constant
	  && args_size.constant > 0)
	must_preallocate = 1;
    }

  /* If the structure value address will reference the stack pointer, we must
     stabilize it.  We don't need to do this if we know that we are not going
     to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
	  || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
      && (args_size.var
#ifndef ACCUMULATE_OUTGOING_ARGS
	  || args_size.constant
#endif
	  ))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  */

  for (i = 0; i < num_actuals; i++)
    if (is_const
	|| ((args_size.var != 0 || args_size.constant != 0)
	    && calls_function (args[i].tree_value, 1))
	|| (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
	    && calls_function (args[i].tree_value, 0)))
      {
	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	push_temp_slots ();

	args[i].initial_value = args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	preserve_temp_slots (args[i].value);
	pop_temp_slots ();

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].initial_value, 0);

	if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);
      }

  /* Now we are about to start emitting insns that can be deleted
     if a libcall is deleted.  */
  if (is_const)
    start_sequence ();

  /* If we have no actual push instructions, or shouldn't use them,
     make space for all args right now.  */

  if (args_size.var != 0)
    {
      if (old_stack_level == 0)
	{
	  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
	  old_pending_adj = pending_stack_adjust;
	  pending_stack_adjust = 0;
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* stack_arg_under_construction says whether a stack arg is
	     being constructed at the old stack level.  Pushing the stack
	     gets a clean outgoing argument block.  */
	  old_stack_arg_under_construction = stack_arg_under_construction;
	  stack_arg_under_construction = 0;
#endif
	}
      argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
    }
  else
    {
      /* Note that we must go through the motions of allocating an argument
	 block even if the size is zero because we may be storing args
	 in the area reserved for register arguments, which may be part of
	 the stack frame.  */

      int needed = args_size.constant;

      /* Store the maximum argument space used.  It will be pushed by
	 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
	 checking).  */

      if (needed > current_function_outgoing_args_size)
	current_function_outgoing_args_size = needed;

      if (must_preallocate)
	{
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* Since the stack pointer will never be pushed, it is possible for
	     the evaluation of a parm to clobber something we have already
	     written to the stack.  Since most function calls on RISC machines
	     do not use the stack, this is uncommon, but must work correctly.

	     Therefore, we save any area of the stack that was already written
	     and that we are using.  Here we set up to do this by making a new
	     stack usage map from the old one.  The actual save will be done
	     by store_one_arg.

	     Another approach might be to try to reorder the argument
	     evaluations to avoid this conflicting stack usage.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
	  /* Since we will be writing into the entire argument area, the
	     map must be allocated for its entire size, not just the part that
	     is the responsibility of the caller.  */
	  needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
	  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					     needed + 1);
#else
	  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					     needed);
#endif
	  stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);

	  if (initial_highest_arg_in_use)
	    bcopy (initial_stack_usage_map, stack_usage_map,
		   initial_highest_arg_in_use);

	  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
	    bzero (&stack_usage_map[initial_highest_arg_in_use],
		   highest_outgoing_arg_in_use - initial_highest_arg_in_use);
	  needed = 0;

	  /* The address of the outgoing argument list must not be copied to a
	     register here, because argblock would be left pointing to the
	     wrong place after the call to allocate_dynamic_stack_space
	     below.  */

	  argblock = virtual_outgoing_args_rtx;

#else /* not ACCUMULATE_OUTGOING_ARGS */
	  if (inhibit_defer_pop == 0)
	    {
	      /* Try to reuse some or all of the pending_stack_adjust
		 to get this space.  Maybe we can avoid any pushing.  */
	      if (needed > pending_stack_adjust)
		{
		  needed -= pending_stack_adjust;
		  pending_stack_adjust = 0;
		}
	      else
		{
		  pending_stack_adjust -= needed;
		  needed = 0;
		}
	    }
	  /* Special case this because overhead of `push_block' in this
	     case is non-trivial.  */
	  if (needed == 0)
	    argblock = virtual_outgoing_args_rtx;
	  else
	    argblock = push_block (GEN_INT (needed), 0, 0);

	  /* We only really need to call `copy_to_reg' in the case where push
	     insns are going to be used to pass ARGBLOCK to a function
	     call in ARGS.  In that case, the stack pointer changes value
	     from the allocation point to the call point, and hence
	     the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
	     But might as well always do it.  */
	  argblock = copy_to_reg (argblock);
#endif /* not ACCUMULATE_OUTGOING_ARGS */
	}
    }

#ifdef ACCUMULATE_OUTGOING_ARGS
  /* The save/restore code in store_one_arg handles all cases except one:
     a constructor call (including a C function returning a BLKmode struct)
     to initialize an argument.  */
  if (stack_arg_under_construction)
    {
#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
      rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
#else
      rtx push_size = GEN_INT (args_size.constant);
#endif
      if (old_stack_level == 0)
	{
	  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
	  old_pending_adj = pending_stack_adjust;
	  pending_stack_adjust = 0;
	  /* stack_arg_under_construction says whether a stack arg is
	     being constructed at the old stack level.  Pushing the stack
	     gets a clean outgoing argument block.  */
	  old_stack_arg_under_construction = stack_arg_under_construction;
	  stack_arg_under_construction = 0;
	  /* Make a new map for the new argument list.  */
	  stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
	  bzero (stack_usage_map, highest_outgoing_arg_in_use);
	  highest_outgoing_arg_in_use = 0;
	}
      allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
    }
  /* If argument evaluation might modify the stack pointer, copy the
     address of the argument list to a register.  */
  for (i = 0; i < num_actuals; i++)
    if (args[i].pass_on_stack)
      {
	argblock = copy_addr_to_reg (argblock);
	break;
      }
#endif

  /* If we preallocated stack space, compute the address of each argument.
     We need not ensure it is a valid memory address here; it will be
     validized when it is used.  */
  if (argblock)
    {
      rtx arg_reg = argblock;
      int arg_offset = 0;

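      /* If ARGBLOCK is already of the form (plus BASE CONST_INT), split it
	 into the base register and the constant offset so that each argument
	 address below can be formed with plus_constant.  */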
1534 if (GET_CODE (argblock) == PLUS)
1535 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1536
1537 for (i = 0; i < num_actuals; i++)
1538 {
1539 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1540 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1541 rtx addr;
1542
1543 /* Skip this parm if it will not be passed on the stack. */
1544 if (! args[i].pass_on_stack && args[i].reg != 0)
1545 continue;
1546
1547 if (GET_CODE (offset) == CONST_INT)
1548 addr = plus_constant (arg_reg, INTVAL (offset));
1549 else
1550 addr = gen_rtx (PLUS, Pmode, arg_reg, offset);
1551
1552 addr = plus_constant (addr, arg_offset);
1efe6448 1553 args[i].stack = gen_rtx (MEM, args[i].mode, addr);
0c0600d5 1554 MEM_IN_STRUCT_P (args[i].stack)
05e3bdb9 1555 = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));
51bbfa0c
RS
1556
1557 if (GET_CODE (slot_offset) == CONST_INT)
1558 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1559 else
1560 addr = gen_rtx (PLUS, Pmode, arg_reg, slot_offset);
1561
1562 addr = plus_constant (addr, arg_offset);
1efe6448 1563 args[i].stack_slot = gen_rtx (MEM, args[i].mode, addr);
51bbfa0c
RS
1564 }
1565 }
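      /* For example (illustrative): if ARGBLOCK is (plus R 16) for some base
	 register R and an argument's offset.constant is 8, the MEM built for
	 args[i].stack above addresses (plus R 24); args[i].stack_slot is
	 formed the same way from slot_offset.  */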
1566
1567#ifdef PUSH_ARGS_REVERSED
1568#ifdef STACK_BOUNDARY
1569 /* If we push args individually in reverse order, perform stack alignment
1570 before the first push (the last arg). */
1571 if (argblock == 0)
e5d70561
RK
1572 anti_adjust_stack (GEN_INT (args_size.constant
1573 - original_args_size.constant));
51bbfa0c
RS
1574#endif
1575#endif
1576
1577 /* Don't try to defer pops if preallocating, not even from the first arg,
1578 since ARGBLOCK probably refers to the SP. */
1579 if (argblock)
1580 NO_DEFER_POP;
1581
1582 /* Get the function to call, in the form of RTL. */
1583 if (fndecl)
ef5d30c9
RK
1584 {
1585 /* If this is the first use of the function, see if we need to
1586 make an external definition for it. */
1587 if (! TREE_USED (fndecl))
1588 {
1589 assemble_external (fndecl);
1590 TREE_USED (fndecl) = 1;
1591 }
1592
1593 /* Get a SYMBOL_REF rtx for the function address. */
1594 funexp = XEXP (DECL_RTL (fndecl), 0);
1595 }
51bbfa0c
RS
1596 else
1597 /* Generate an rtx (probably a pseudo-register) for the address. */
1598 {
cc79451b 1599 push_temp_slots ();
e5d70561 1600 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
cc79451b 1601 pop_temp_slots (); /* FUNEXP can't be BLKmode */
51bbfa0c
RS
1602 emit_queue ();
1603 }
1604
1605 /* Figure out the register where the value, if any, will come back. */
1606 valreg = 0;
1607 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1608 && ! structure_value_addr)
1609 {
1610 if (pcc_struct_value)
1611 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1612 fndecl);
1613 else
1614 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1615 }
1616
1617 /* Precompute all register parameters. It isn't safe to compute anything
0f41302f 1618 once we have started filling any specific hard regs. */
51bbfa0c
RS
1619 reg_parm_seen = 0;
1620 for (i = 0; i < num_actuals; i++)
1621 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1622 {
1623 reg_parm_seen = 1;
1624
1625 if (args[i].value == 0)
1626 {
cc79451b 1627 push_temp_slots ();
e5d70561
RK
1628 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1629 VOIDmode, 0);
51bbfa0c 1630 preserve_temp_slots (args[i].value);
cc79451b 1631 pop_temp_slots ();
51bbfa0c
RS
1632
1633 /* ANSI doesn't require a sequence point here,
1634 but PCC has one, so this will avoid some problems. */
1635 emit_queue ();
1636 }
84b55618
RK
1637
1638 /* If we are to promote the function arg to a wider mode,
1639 do it now. */
84b55618 1640
843fec55
RK
1641 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
1642 args[i].value
1643 = convert_modes (args[i].mode,
1644 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1645 args[i].value, args[i].unsignedp);
ebef2728
RK
1646
1647 /* If the value is expensive, and we are inside an appropriately
1648 short loop, put the value into a pseudo and then put the pseudo
01368078
RK
1649 into the hard reg.
1650
1651 For small register classes, also do this if this call uses
1652 register parameters. This is to avoid reload conflicts while
 1653 loading the parameter registers. */
ebef2728
RK
1654
1655 if ((! (GET_CODE (args[i].value) == REG
1656 || (GET_CODE (args[i].value) == SUBREG
1657 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
1658 && args[i].mode != BLKmode
1659 && rtx_cost (args[i].value, SET) > 2
01368078 1660#ifdef SMALL_REGISTER_CLASSES
f95182a4
ILT
1661 && ((SMALL_REGISTER_CLASSES && reg_parm_seen)
1662 || preserve_subexpressions_p ())
01368078 1663#else
4f48d56a 1664 && preserve_subexpressions_p ()
01368078 1665#endif
4f48d56a 1666 )
ebef2728 1667 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
51bbfa0c
RS
1668 }
1669
1670#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1671 /* The argument list is the property of the called routine and it
1672 may clobber it. If the fixed area has been used for previous
1673 parameters, we must save and restore it.
1674
 1675 Here we compute the boundary of the area that needs to be saved, if any. */
1676
b94301c2
RS
1677#ifdef ARGS_GROW_DOWNWARD
1678 for (i = 0; i < reg_parm_stack_space + 1; i++)
1679#else
6f90e075 1680 for (i = 0; i < reg_parm_stack_space; i++)
b94301c2 1681#endif
51bbfa0c
RS
1682 {
1683 if (i >= highest_outgoing_arg_in_use
1684 || stack_usage_map[i] == 0)
1685 continue;
1686
1687 if (low_to_save == -1)
1688 low_to_save = i;
1689
1690 high_to_save = i;
1691 }
1692
1693 if (low_to_save >= 0)
1694 {
1695 int num_to_save = high_to_save - low_to_save + 1;
1696 enum machine_mode save_mode
1697 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1698 rtx stack_area;
1699
1700 /* If we don't have the required alignment, must do this in BLKmode. */
1701 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1702 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1703 save_mode = BLKmode;
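      /* For instance (on a typical 32-bit target), NUM_TO_SAVE == 4 yields
	 SImode above, but if LOW_TO_SAVE is not 4-byte aligned we fall back
	 to BLKmode and the area is block-moved into a stack temporary below
	 instead of being copied into a pseudo.  */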
1704
1705 stack_area = gen_rtx (MEM, save_mode,
1706 memory_address (save_mode,
b94301c2
RS
1707
1708#ifdef ARGS_GROW_DOWNWARD
1709 plus_constant (argblock,
1710 - high_to_save)
1711#else
51bbfa0c 1712 plus_constant (argblock,
b94301c2
RS
1713 low_to_save)
1714#endif
1715 ));
51bbfa0c
RS
1716 if (save_mode == BLKmode)
1717 {
6fa51029 1718 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3668e76e 1719 MEM_IN_STRUCT_P (save_area) = 0;
51bbfa0c 1720 emit_block_move (validize_mem (save_area), stack_area,
e5d70561 1721 GEN_INT (num_to_save),
51bbfa0c
RS
1722 PARM_BOUNDARY / BITS_PER_UNIT);
1723 }
1724 else
1725 {
1726 save_area = gen_reg_rtx (save_mode);
1727 emit_move_insn (save_area, stack_area);
1728 }
1729 }
1730#endif
1731
1732
1733 /* Now store (and compute if necessary) all non-register parms.
1734 These come before register parms, since they can require block-moves,
1735 which could clobber the registers used for register parms.
1736 Parms which have partial registers are not stored here,
1737 but we do preallocate space here if they want that. */
1738
1739 for (i = 0; i < num_actuals; i++)
1740 if (args[i].reg == 0 || args[i].pass_on_stack)
1741 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1742 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c 1743
4ab56118
RK
1744 /* If we have a parm that is passed in registers but not in memory
1745 and whose alignment does not permit a direct copy into registers,
1746 make a group of pseudos that correspond to each register that we
1747 will later fill. */
1748
45d44c98
RK
1749 if (STRICT_ALIGNMENT)
1750 for (i = 0; i < num_actuals; i++)
1751 if (args[i].reg != 0 && ! args[i].pass_on_stack
4ab56118 1752 && args[i].mode == BLKmode
45d44c98
RK
1753 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1754 < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1755 {
1756 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1757 int big_endian_correction = 0;
4ab56118 1758
45d44c98
RK
1759 args[i].n_aligned_regs
1760 = args[i].partial ? args[i].partial
1761 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
4ab56118 1762
45d44c98
RK
1763 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1764 * args[i].n_aligned_regs);
4ab56118 1765
45d44c98
RK
1766 /* Structures smaller than a word are aligned to the least
1767 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1768 this means we must skip the empty high order bytes when
1769 calculating the bit offset. */
1770 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1771 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
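	  /* Worked example: a 3-byte structure on a 32-bit big-endian target
	     gives big_endian_correction = 32 - 24 = 8, i.e. the one empty
	     high-order byte of the word is skipped.  */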
8498efd0 1772
45d44c98
RK
1773 for (j = 0; j < args[i].n_aligned_regs; j++)
1774 {
1775 rtx reg = gen_reg_rtx (word_mode);
1776 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1777 int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1778 int bitpos;
1779
1780 args[i].aligned_regs[j] = reg;
1781
1782 /* Clobber REG and move each partword into it. Ensure we don't
1783 go past the end of the structure. Note that the loop below
1784 works because we've already verified that padding
1785 and endianness are compatible. */
1786
1787 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
1788
1789 for (bitpos = 0;
1790 bitpos < BITS_PER_WORD && bytes > 0;
1791 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
1792 {
1793 int xbitpos = bitpos + big_endian_correction;
1794
1795 store_bit_field (reg, bitsize, xbitpos, word_mode,
1796 extract_bit_field (word, bitsize, bitpos, 1,
1797 NULL_RTX, word_mode,
1798 word_mode,
1799 bitsize / BITS_PER_UNIT,
1800 BITS_PER_WORD),
1801 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
1802 }
1803 }
1804 }
4ab56118 1805
51bbfa0c
RS
1806 /* Now store any partially-in-registers parm.
1807 This is the last place a block-move can happen. */
1808 if (reg_parm_seen)
1809 for (i = 0; i < num_actuals; i++)
1810 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1811 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1812 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c
RS
1813
1814#ifndef PUSH_ARGS_REVERSED
1815#ifdef STACK_BOUNDARY
1816 /* If we pushed args in forward order, perform stack alignment
1817 after pushing the last arg. */
1818 if (argblock == 0)
e5d70561
RK
1819 anti_adjust_stack (GEN_INT (args_size.constant
1820 - original_args_size.constant));
51bbfa0c
RS
1821#endif
1822#endif
1823
756e0e12
RS
1824 /* If register arguments require space on the stack and stack space
1825 was not preallocated, allocate stack space here for arguments
1826 passed in registers. */
6e716e89 1827#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
756e0e12 1828 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 1829 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
1830#endif
1831
51bbfa0c
RS
1832 /* Pass the function the address in which to return a structure value. */
1833 if (structure_value_addr && ! structure_value_addr_parm)
1834 {
1835 emit_move_insn (struct_value_rtx,
1836 force_reg (Pmode,
e5d70561
RK
1837 force_operand (structure_value_addr,
1838 NULL_RTX)));
51bbfa0c 1839 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 1840 use_reg (&call_fusage, struct_value_rtx);
51bbfa0c
RS
1841 }
1842
77cac2f2 1843 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
8b0f9101 1844
51bbfa0c
RS
1845 /* Now do the register loads required for any wholly-register parms or any
1846 parms which are passed both on the stack and in a register. Their
1847 expressions were already evaluated.
1848
1849 Mark all register-parms as living through the call, putting these USE
77cac2f2 1850 insns in the CALL_INSN_FUNCTION_USAGE field. */
51bbfa0c
RS
1851
1852 for (i = 0; i < num_actuals; i++)
1853 {
cacbd532 1854 rtx reg = args[i].reg;
51bbfa0c 1855 int partial = args[i].partial;
cacbd532 1856 int nregs;
51bbfa0c 1857
cacbd532 1858 if (reg)
51bbfa0c 1859 {
6b972c4f
JW
 1860	  /* Set to non-negative if we must move a word at a time, even if just
 1861	     one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1862 we just use a normal move insn. This value can be zero if the
1863 argument is a zero size structure with no fields. */
51bbfa0c
RS
1864 nregs = (partial ? partial
1865 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
6b972c4f
JW
1866 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1867 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1868 : -1));
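	  /* For example, a 10-byte BLKmode argument with 4-byte words gives
	     nregs = (10 + 3) / 4 = 3, while a non-BLKmode argument with no
	     partial count gets nregs == -1 and is handled by a single move
	     insn below.  */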
51bbfa0c 1869
cacbd532
JW
1870 /* Handle calls that pass values in multiple non-contiguous
1871 locations. The Irix 6 ABI has examples of this. */
1872
1873 if (GET_CODE (reg) == PARALLEL)
1874 emit_group_load (reg, args[i].value);
1875
51bbfa0c
RS
1876 /* If simple case, just do move. If normal partial, store_one_arg
1877 has already loaded the register for us. In all other cases,
1878 load the register(s) from memory. */
1879
cacbd532 1880 else if (nregs == -1)
51bbfa0c 1881 emit_move_insn (reg, args[i].value);
4ab56118 1882
4ab56118
RK
1883 /* If we have pre-computed the values to put in the registers in
1884 the case of non-aligned structures, copy them in now. */
1885
1886 else if (args[i].n_aligned_regs != 0)
1887 for (j = 0; j < args[i].n_aligned_regs; j++)
1888 emit_move_insn (gen_rtx (REG, word_mode, REGNO (reg) + j),
1889 args[i].aligned_regs[j]);
4ab56118 1890
cacbd532 1891 else if (partial == 0 || args[i].pass_on_stack)
6b972c4f
JW
1892 move_block_to_reg (REGNO (reg),
1893 validize_mem (args[i].value), nregs,
1894 args[i].mode);
0304dfbb 1895
cacbd532
JW
1896 /* Handle calls that pass values in multiple non-contiguous
1897 locations. The Irix 6 ABI has examples of this. */
1898 if (GET_CODE (reg) == PARALLEL)
1899 use_group_regs (&call_fusage, reg);
1900 else if (nregs == -1)
0304dfbb
DE
1901 use_reg (&call_fusage, reg);
1902 else
1903 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
51bbfa0c
RS
1904 }
1905 }
1906
1907 /* Perform postincrements before actually calling the function. */
1908 emit_queue ();
1909
1910 /* All arguments and registers used for the call must be set up by now! */
1911
51bbfa0c 1912 /* Generate the actual call instruction. */
2c8da025 1913 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
51bbfa0c 1914 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
77cac2f2 1915 valreg, old_inhibit_defer_pop, call_fusage, is_const);
51bbfa0c
RS
1916
1917 /* If call is cse'able, make appropriate pair of reg-notes around it.
1918 Test valreg so we don't crash; may safely ignore `const'
80a3ad45
JW
1919 if return type is void. Disable for PARALLEL return values, because
1920 we have no way to move such values into a pseudo register. */
1921 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
51bbfa0c
RS
1922 {
1923 rtx note = 0;
1924 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1925 rtx insns;
1926
1927 /* Construct an "equal form" for the value which mentions all the
1928 arguments in order as well as the function name. */
1929#ifdef PUSH_ARGS_REVERSED
1930 for (i = 0; i < num_actuals; i++)
1931 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1932#else
1933 for (i = num_actuals - 1; i >= 0; i--)
1934 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1935#endif
1936 note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note);
1937
1938 insns = get_insns ();
1939 end_sequence ();
1940
1941 emit_libcall_block (insns, temp, valreg, note);
1942
1943 valreg = temp;
1944 }
4f48d56a
RK
1945 else if (is_const)
1946 {
1947 /* Otherwise, just write out the sequence without a note. */
1948 rtx insns = get_insns ();
1949
1950 end_sequence ();
1951 emit_insns (insns);
1952 }
51bbfa0c
RS
1953
1954 /* For calls to `setjmp', etc., inform flow.c it should complain
1955 if nonvolatile values are live. */
1956
1957 if (returns_twice)
1958 {
1959 emit_note (name, NOTE_INSN_SETJMP);
1960 current_function_calls_setjmp = 1;
1961 }
1962
1963 if (is_longjmp)
1964 current_function_calls_longjmp = 1;
1965
1966 /* Notice functions that cannot return.
1967 If optimizing, insns emitted below will be dead.
1968 If not optimizing, they will exist, which is useful
1969 if the user uses the `return' command in the debugger. */
1970
1971 if (is_volatile || is_longjmp)
1972 emit_barrier ();
1973
51bbfa0c
RS
1974 /* If value type not void, return an rtx for the value. */
1975
1976 /* If there are cleanups to be called, don't use a hard reg as target. */
1977 if (cleanups_this_call != old_cleanups
1978 && target && REG_P (target)
1979 && REGNO (target) < FIRST_PSEUDO_REGISTER)
1980 target = 0;
1981
1982 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
1983 || ignore)
1984 {
1985 target = const0_rtx;
1986 }
1987 else if (structure_value_addr)
1988 {
1989 if (target == 0 || GET_CODE (target) != MEM)
29008b51
JW
1990 {
1991 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1992 memory_address (TYPE_MODE (TREE_TYPE (exp)),
1993 structure_value_addr));
05e3bdb9 1994 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
29008b51 1995 }
51bbfa0c
RS
1996 }
1997 else if (pcc_struct_value)
1998 {
1999 if (target == 0)
29008b51 2000 {
30082223
RS
 2001	  /* We used to leave the value in the location that it is
2002 returned in, but that causes problems if it is used more
2003 than once in one expression. Rather than trying to track
2004 when a copy is required, we always copy when TARGET is
2005 not specified. This calling sequence is only used on
2006 a few machines and TARGET is usually nonzero. */
2007 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
6d8b61b9
RS
2008 {
2009 target = assign_stack_temp (BLKmode,
2010 int_size_in_bytes (TREE_TYPE (exp)),
2011 0);
2012
05e3bdb9 2013 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
3b780899 2014
6d8b61b9
RS
2015 /* Save this temp slot around the pop below. */
2016 preserve_temp_slots (target);
2017 }
30082223
RS
2018 else
2019 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
29008b51 2020 }
30082223
RS
2021
2022 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
51bbfa0c
RS
2023 emit_move_insn (target, gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
2024 copy_to_reg (valreg)));
2025 else
2026 emit_block_move (target, gen_rtx (MEM, BLKmode, copy_to_reg (valreg)),
2027 expr_size (exp),
2028 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2029 }
cacbd532
JW
2030 /* Handle calls that return values in multiple non-contiguous locations.
2031 The Irix 6 ABI has examples of this. */
2032 else if (GET_CODE (valreg) == PARALLEL)
2033 {
2034 if (target == 0)
2035 {
2036 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2b4092f2 2037 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
cacbd532
JW
2038 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2039 preserve_temp_slots (target);
2040 }
2041
2042 emit_group_store (target, valreg);
2043 }
059c3d84
JW
2044 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2045 && GET_MODE (target) == GET_MODE (valreg))
2046 /* TARGET and VALREG cannot be equal at this point because the latter
2047 would not have REG_FUNCTION_VALUE_P true, while the former would if
2048 it were referring to the same register.
2049
2050 If they refer to the same register, this move will be a no-op, except
2051 when function inlining is being done. */
2052 emit_move_insn (target, valreg);
766b19fb
JL
2053 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2054 {
2055 /* Some machines (the PA for example) want to return all small
2056 structures in registers regardless of the structure's alignment.
2057
2058 Deal with them explicitly by copying from the return registers
2059 into the target MEM locations. */
2060 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2061 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2062 int i;
2063 enum machine_mode tmpmode;
1b5c5873
RK
2064 rtx src, dst;
2065 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (exp)), BITS_PER_WORD);
2066 int bitpos, xbitpos, big_endian_correction = 0;
766b19fb
JL
2067
2068 if (target == 0)
822e3422
RK
2069 {
2070 target = assign_stack_temp (BLKmode, bytes, 0);
2071 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2072 preserve_temp_slots (target);
2073 }
766b19fb 2074
e934eef9
RK
2075 /* This code assumes valreg is at least a full word. If it isn't,
2076 copy it into a new pseudo which is a full word. */
2077 if (GET_MODE (valreg) != BLKmode
2078 && GET_MODE_SIZE (GET_MODE (valreg)) < UNITS_PER_WORD)
144a3150 2079 valreg = convert_to_mode (word_mode, valreg,
e934eef9
RK
2080 TREE_UNSIGNED (TREE_TYPE (exp)));
2081
1b5c5873
RK
2082 /* Structures whose size is not a multiple of a word are aligned
2083 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2084 machine, this means we must skip the empty high order bytes when
2085 calculating the bit offset. */
2086 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2087 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2088 * BITS_PER_UNIT));
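      /* Worked example: a 6-byte return value with 4-byte words leaves
	 6 % 4 == 2 bytes in its last word, so big_endian_correction is
	 32 - 16 = 16 bits on a 32-bit big-endian machine.  */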
2089
 2090      /* Copy the structure BITSIZE bits at a time.
2091
2092 We could probably emit more efficient code for machines
766b19fb
JL
2093 which do not use strict alignment, but it doesn't seem
2094 worth the effort at the current time. */
1b5c5873
RK
2095 for (bitpos = 0, xbitpos = big_endian_correction;
2096 bitpos < bytes * BITS_PER_UNIT;
2097 bitpos += bitsize, xbitpos += bitsize)
766b19fb 2098 {
1b5c5873
RK
2099
2100 /* We need a new source operand each time xbitpos is on a
2101 word boundary and when xbitpos == big_endian_correction
2102 (the first time through). */
2103 if (xbitpos % BITS_PER_WORD == 0
2104 || xbitpos == big_endian_correction)
2105 src = operand_subword_force (valreg,
2106 xbitpos / BITS_PER_WORD,
2107 BLKmode);
2108
2109 /* We need a new destination operand each time bitpos is on
2110 a word boundary. */
2111 if (bitpos % BITS_PER_WORD == 0)
2112 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, BLKmode);
766b19fb 2113
1b5c5873
RK
2114 /* Use xbitpos for the source extraction (right justified) and
 2115	     bitpos for the destination store (left justified). */
2116 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2117 extract_bit_field (src, bitsize,
2118 xbitpos % BITS_PER_WORD, 1,
2119 NULL_RTX, word_mode,
2120 word_mode,
2121 bitsize / BITS_PER_UNIT,
2122 BITS_PER_WORD),
2123 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
766b19fb
JL
2124 }
2125 }
51bbfa0c
RS
2126 else
2127 target = copy_to_reg (valreg);
2128
84b55618 2129#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
2130 /* If we promoted this return value, make the proper SUBREG. TARGET
2131 might be const0_rtx here, so be careful. */
2132 if (GET_CODE (target) == REG
766b19fb 2133 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
5d2ac65e 2134 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 2135 {
321e0bba
RK
2136 tree type = TREE_TYPE (exp);
2137 int unsignedp = TREE_UNSIGNED (type);
84b55618 2138
321e0bba
RK
2139 /* If we don't promote as expected, something is wrong. */
2140 if (GET_MODE (target)
2141 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
5d2ac65e
RK
2142 abort ();
2143
321e0bba 2144 target = gen_rtx (SUBREG, TYPE_MODE (type), target, 0);
84b55618
RK
2145 SUBREG_PROMOTED_VAR_P (target) = 1;
2146 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2147 }
2148#endif
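  /* For example (illustrative), a function returning `short' on a target
     that promotes return values to SImode comes back in an SImode register;
     the code above rebuilds it as (subreg:HI (reg:SI ...) 0) with
     SUBREG_PROMOTED_VAR_P set, so later code knows the value has already
     been sign- or zero-extended.  */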
2149
5dab5552
MS
2150 if (flag_short_temps)
2151 {
2152 /* Perform all cleanups needed for the arguments of this call
2153 (i.e. destructors in C++). */
2154 expand_cleanups_to (old_cleanups);
2155 }
51bbfa0c 2156
2f4aa534
RS
2157 /* If size of args is variable or this was a constructor call for a stack
2158 argument, restore saved stack-pointer value. */
51bbfa0c
RS
2159
2160 if (old_stack_level)
2161 {
e5d70561 2162 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 2163 pending_stack_adjust = old_pending_adj;
d64f5a78 2164#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2165 stack_arg_under_construction = old_stack_arg_under_construction;
2166 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2167 stack_usage_map = initial_stack_usage_map;
d64f5a78 2168#endif
51bbfa0c 2169 }
51bbfa0c
RS
2170#ifdef ACCUMULATE_OUTGOING_ARGS
2171 else
2172 {
2173#ifdef REG_PARM_STACK_SPACE
2174 if (save_area)
2175 {
2176 enum machine_mode save_mode = GET_MODE (save_area);
2177 rtx stack_area
2178 = gen_rtx (MEM, save_mode,
2179 memory_address (save_mode,
b94301c2
RS
2180#ifdef ARGS_GROW_DOWNWARD
2181 plus_constant (argblock, - high_to_save)
2182#else
2183 plus_constant (argblock, low_to_save)
2184#endif
2185 ));
51bbfa0c
RS
2186
2187 if (save_mode != BLKmode)
2188 emit_move_insn (stack_area, save_area);
2189 else
2190 emit_block_move (stack_area, validize_mem (save_area),
e5d70561
RK
2191 GEN_INT (high_to_save - low_to_save + 1),
2192 PARM_BOUNDARY / BITS_PER_UNIT);
51bbfa0c
RS
2193 }
2194#endif
2195
2196 /* If we saved any argument areas, restore them. */
2197 for (i = 0; i < num_actuals; i++)
2198 if (args[i].save_area)
2199 {
2200 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2201 rtx stack_area
2202 = gen_rtx (MEM, save_mode,
2203 memory_address (save_mode,
2204 XEXP (args[i].stack_slot, 0)));
2205
2206 if (save_mode != BLKmode)
2207 emit_move_insn (stack_area, args[i].save_area);
2208 else
2209 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 2210 GEN_INT (args[i].size.constant),
51bbfa0c
RS
2211 PARM_BOUNDARY / BITS_PER_UNIT);
2212 }
2213
2214 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2215 stack_usage_map = initial_stack_usage_map;
2216 }
2217#endif
2218
59257ff7
RK
2219 /* If this was alloca, record the new stack level for nonlocal gotos.
2220 Check for the handler slots since we might not have a save area
0f41302f 2221 for non-local gotos. */
59257ff7
RK
2222
2223 if (may_be_alloca && nonlocal_goto_handler_slot != 0)
e5d70561 2224 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
2225
2226 pop_temp_slots ();
2227
2228 return target;
2229}
2230\f
322e3e34
RK
2231/* Output a library call to function FUN (a SYMBOL_REF rtx)
2232 (emitting the queue unless NO_QUEUE is nonzero),
2233 for a value of mode OUTMODE,
2234 with NARGS different arguments, passed as alternating rtx values
2235 and machine_modes to convert them to.
2236 The rtx values should have been passed through protect_from_queue already.
2237
2238 NO_QUEUE will be true if and only if the library call is a `const' call
2239 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2240 to the variable is_const in expand_call.
2241
2242 NO_QUEUE must be true for const calls, because if it isn't, then
2243 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2244 and will be lost if the libcall sequence is optimized away.
2245
2246 NO_QUEUE must be false for non-const calls, because if it isn't, the
2247 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2248 optimized. For instance, the instruction scheduler may incorrectly
2249 move memory references across the non-const call. */
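/* An illustrative use (LIBFUN stands for a SYMBOL_REF already in hand; the
   operand rtxs are hypothetical):

	emit_library_call (libfun, 1, SImode, 2,
			   op0, SImode, op1, SImode);

   passes two SImode operands as the alternating value/mode pairs described
   above; the nonzero NO_QUEUE marks the call as `const'.  */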
2250
2251void
4f90e4a0
RK
2252emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2253 int nargs, ...))
322e3e34 2254{
4f90e4a0
RK
2255#ifndef __STDC__
2256 rtx orgfun;
2257 int no_queue;
2258 enum machine_mode outmode;
2259 int nargs;
2260#endif
322e3e34
RK
2261 va_list p;
2262 /* Total size in bytes of all the stack-parms scanned so far. */
2263 struct args_size args_size;
2264 /* Size of arguments before any adjustments (such as rounding). */
2265 struct args_size original_args_size;
2266 register int argnum;
322e3e34 2267 rtx fun;
322e3e34
RK
2268 int inc;
2269 int count;
2270 rtx argblock = 0;
2271 CUMULATIVE_ARGS args_so_far;
2272 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2273 struct args_size offset; struct args_size size; };
2274 struct arg *argvec;
2275 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2276 rtx call_fusage = 0;
322e3e34 2277
4f90e4a0
RK
2278 VA_START (p, nargs);
2279
2280#ifndef __STDC__
2281 orgfun = va_arg (p, rtx);
322e3e34
RK
2282 no_queue = va_arg (p, int);
2283 outmode = va_arg (p, enum machine_mode);
2284 nargs = va_arg (p, int);
4f90e4a0
RK
2285#endif
2286
2287 fun = orgfun;
322e3e34
RK
2288
2289 /* Copy all the libcall-arguments out of the varargs data
2290 and into a vector ARGVEC.
2291
2292 Compute how to pass each argument. We only support a very small subset
2293 of the full argument passing conventions to limit complexity here since
2294 library functions shouldn't have many args. */
2295
2296 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2297
eecb6f50 2298 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2299
2300 args_size.constant = 0;
2301 args_size.var = 0;
2302
888aa7a9
RS
2303 push_temp_slots ();
2304
322e3e34
RK
2305 for (count = 0; count < nargs; count++)
2306 {
2307 rtx val = va_arg (p, rtx);
2308 enum machine_mode mode = va_arg (p, enum machine_mode);
2309
2310 /* We cannot convert the arg value to the mode the library wants here;
2311 must do it earlier where we know the signedness of the arg. */
2312 if (mode == BLKmode
2313 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2314 abort ();
2315
2316 /* On some machines, there's no way to pass a float to a library fcn.
2317 Pass it as a double instead. */
2318#ifdef LIBGCC_NEEDS_DOUBLE
2319 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2320 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2321#endif
2322
2323 /* There's no need to call protect_from_queue, because
2324 either emit_move_insn or emit_push_insn will do that. */
2325
2326 /* Make sure it is a reasonable operand for a move or push insn. */
2327 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2328 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2329 val = force_operand (val, NULL_RTX);
2330
322e3e34
RK
2331#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2332 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2333 {
a44492f0
RK
2334 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2335 be viewed as just an efficiency improvement. */
888aa7a9
RS
2336 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2337 emit_move_insn (slot, val);
8301b6e2 2338 val = force_operand (XEXP (slot, 0), NULL_RTX);
a44492f0 2339 mode = Pmode;
888aa7a9 2340 }
322e3e34
RK
2341#endif
2342
888aa7a9
RS
2343 argvec[count].value = val;
2344 argvec[count].mode = mode;
2345
322e3e34 2346 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 2347 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
2348 abort ();
2349#ifdef FUNCTION_ARG_PARTIAL_NREGS
2350 argvec[count].partial
2351 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2352#else
2353 argvec[count].partial = 0;
2354#endif
2355
2356 locate_and_pad_parm (mode, NULL_TREE,
2357 argvec[count].reg && argvec[count].partial == 0,
2358 NULL_TREE, &args_size, &argvec[count].offset,
2359 &argvec[count].size);
2360
2361 if (argvec[count].size.var)
2362 abort ();
2363
2364#ifndef REG_PARM_STACK_SPACE
2365 if (argvec[count].partial)
2366 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2367#endif
2368
2369 if (argvec[count].reg == 0 || argvec[count].partial != 0
2370#ifdef REG_PARM_STACK_SPACE
2371 || 1
2372#endif
2373 )
2374 args_size.constant += argvec[count].size.constant;
2375
2376#ifdef ACCUMULATE_OUTGOING_ARGS
2377 /* If this arg is actually passed on the stack, it might be
2378 clobbering something we already put there (this library call might
2379 be inside the evaluation of an argument to a function whose call
2380 requires the stack). This will only occur when the library call
2381 has sufficient args to run out of argument registers. Abort in
2382 this case; if this ever occurs, code must be added to save and
2383 restore the arg slot. */
2384
2385 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2386 abort ();
2387#endif
2388
0f41302f 2389 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
2390 }
2391 va_end (p);
2392
2393 /* If this machine requires an external definition for library
2394 functions, write one out. */
2395 assemble_external_libcall (fun);
2396
2397 original_args_size = args_size;
2398#ifdef STACK_BOUNDARY
2399 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2400 / STACK_BYTES) * STACK_BYTES);
2401#endif
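  /* For example, with STACK_BYTES == 8 a 20-byte argument block is rounded
     up to 24 here; when arguments are pushed individually, the 4-byte
     difference from original_args_size is made up by the anti_adjust_stack
     calls below.  */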
2402
2403#ifdef REG_PARM_STACK_SPACE
2404 args_size.constant = MAX (args_size.constant,
2405 REG_PARM_STACK_SPACE (NULL_TREE));
2406#ifndef OUTGOING_REG_PARM_STACK_SPACE
2407 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2408#endif
2409#endif
2410
322e3e34
RK
2411 if (args_size.constant > current_function_outgoing_args_size)
2412 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2413
2414#ifdef ACCUMULATE_OUTGOING_ARGS
322e3e34
RK
2415 args_size.constant = 0;
2416#endif
2417
2418#ifndef PUSH_ROUNDING
2419 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2420#endif
2421
2422#ifdef PUSH_ARGS_REVERSED
2423#ifdef STACK_BOUNDARY
2424 /* If we push args individually in reverse order, perform stack alignment
2425 before the first push (the last arg). */
2426 if (argblock == 0)
2427 anti_adjust_stack (GEN_INT (args_size.constant
2428 - original_args_size.constant));
2429#endif
2430#endif
2431
2432#ifdef PUSH_ARGS_REVERSED
2433 inc = -1;
2434 argnum = nargs - 1;
2435#else
2436 inc = 1;
2437 argnum = 0;
2438#endif
2439
2440 /* Push the args that need to be pushed. */
2441
2442 for (count = 0; count < nargs; count++, argnum += inc)
2443 {
2444 register enum machine_mode mode = argvec[argnum].mode;
2445 register rtx val = argvec[argnum].value;
2446 rtx reg = argvec[argnum].reg;
2447 int partial = argvec[argnum].partial;
2448
2449 if (! (reg != 0 && partial == 0))
2450 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2451 argblock, GEN_INT (argvec[count].offset.constant));
2452 NO_DEFER_POP;
2453 }
2454
2455#ifndef PUSH_ARGS_REVERSED
2456#ifdef STACK_BOUNDARY
2457 /* If we pushed args in forward order, perform stack alignment
2458 after pushing the last arg. */
2459 if (argblock == 0)
2460 anti_adjust_stack (GEN_INT (args_size.constant
2461 - original_args_size.constant));
2462#endif
2463#endif
2464
2465#ifdef PUSH_ARGS_REVERSED
2466 argnum = nargs - 1;
2467#else
2468 argnum = 0;
2469#endif
2470
77cac2f2 2471 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 2472
322e3e34
RK
2473 /* Now load any reg parms into their regs. */
2474
2475 for (count = 0; count < nargs; count++, argnum += inc)
2476 {
2477 register enum machine_mode mode = argvec[argnum].mode;
2478 register rtx val = argvec[argnum].value;
2479 rtx reg = argvec[argnum].reg;
2480 int partial = argvec[argnum].partial;
2481
2482 if (reg != 0 && partial == 0)
2483 emit_move_insn (reg, val);
2484 NO_DEFER_POP;
2485 }
2486
2487 /* For version 1.37, try deleting this entirely. */
2488 if (! no_queue)
2489 emit_queue ();
2490
2491 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
2492 for (count = 0; count < nargs; count++)
2493 if (argvec[count].reg != 0)
77cac2f2 2494 use_reg (&call_fusage, argvec[count].reg);
322e3e34 2495
322e3e34
RK
2496 /* Don't allow popping to be deferred, since then
2497 cse'ing of library calls could delete a call and leave the pop. */
2498 NO_DEFER_POP;
2499
2500 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2501 will set inhibit_defer_pop to that value. */
2502
334c4f0f
RK
2503 /* The return type is needed to decide how many bytes the function pops.
2504 Signedness plays no role in that, so for simplicity, we pretend it's
2505 always signed. We also assume that the list of arguments passed has
2506 no impact, so we pretend it is unknown. */
2507
2c8da025
RK
2508 emit_call_1 (fun,
2509 get_identifier (XSTR (orgfun, 0)),
b3776927
RK
2510 build_function_type (outmode == VOIDmode ? void_type_node
2511 : type_for_mode (outmode, 0), NULL_TREE),
334c4f0f 2512 args_size.constant, 0,
322e3e34
RK
2513 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2514 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 2515 old_inhibit_defer_pop + 1, call_fusage, no_queue);
322e3e34 2516
888aa7a9
RS
2517 pop_temp_slots ();
2518
322e3e34
RK
2519 /* Now restore inhibit_defer_pop to its actual original value. */
2520 OK_DEFER_POP;
2521}
2522\f
2523/* Like emit_library_call except that an extra argument, VALUE,
2524 comes second and says where to store the result.
fac0ad80
RS
2525 (If VALUE is zero, this function chooses a convenient way
 2526    to return the value.)
322e3e34 2527
fac0ad80
RS
2528 This function returns an rtx for where the value is to be found.
2529 If VALUE is nonzero, VALUE is returned. */
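/* A sketch of a typical two-operand use (LIBFUN and the operand rtxs are
   hypothetical):

	result = emit_library_call_value (libfun, NULL_RTX, 1, mode,
					  2, op0, mode, op1, mode);

   Passing zero for VALUE lets this function choose where the result lives;
   the rtx it returns is then used as the result of the libcall.  */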
2530
2531rtx
4f90e4a0
RK
2532emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2533 enum machine_mode outmode, int nargs, ...))
322e3e34 2534{
4f90e4a0
RK
2535#ifndef __STDC__
2536 rtx orgfun;
2537 rtx value;
2538 int no_queue;
2539 enum machine_mode outmode;
2540 int nargs;
2541#endif
322e3e34
RK
2542 va_list p;
2543 /* Total size in bytes of all the stack-parms scanned so far. */
2544 struct args_size args_size;
2545 /* Size of arguments before any adjustments (such as rounding). */
2546 struct args_size original_args_size;
2547 register int argnum;
322e3e34 2548 rtx fun;
322e3e34
RK
2549 int inc;
2550 int count;
2551 rtx argblock = 0;
2552 CUMULATIVE_ARGS args_so_far;
2553 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2554 struct args_size offset; struct args_size size; };
2555 struct arg *argvec;
2556 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2557 rtx call_fusage = 0;
322e3e34 2558 rtx mem_value = 0;
fac0ad80 2559 int pcc_struct_value = 0;
4f389214 2560 int struct_value_size = 0;
d61bee95 2561 int is_const;
322e3e34 2562
4f90e4a0
RK
2563 VA_START (p, nargs);
2564
2565#ifndef __STDC__
2566 orgfun = va_arg (p, rtx);
322e3e34
RK
2567 value = va_arg (p, rtx);
2568 no_queue = va_arg (p, int);
2569 outmode = va_arg (p, enum machine_mode);
2570 nargs = va_arg (p, int);
4f90e4a0
RK
2571#endif
2572
d61bee95 2573 is_const = no_queue;
4f90e4a0 2574 fun = orgfun;
322e3e34
RK
2575
2576 /* If this kind of value comes back in memory,
2577 decide where in memory it should come back. */
fac0ad80 2578 if (aggregate_value_p (type_for_mode (outmode, 0)))
322e3e34 2579 {
fac0ad80
RS
2580#ifdef PCC_STATIC_STRUCT_RETURN
2581 rtx pointer_reg
2582 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2583 0);
2584 mem_value = gen_rtx (MEM, outmode, pointer_reg);
2585 pcc_struct_value = 1;
2586 if (value == 0)
2587 value = gen_reg_rtx (outmode);
2588#else /* not PCC_STATIC_STRUCT_RETURN */
4f389214 2589 struct_value_size = GET_MODE_SIZE (outmode);
fac0ad80 2590 if (value != 0 && GET_CODE (value) == MEM)
322e3e34
RK
2591 mem_value = value;
2592 else
2593 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
fac0ad80 2594#endif
779c643a
JW
2595
2596 /* This call returns a big structure. */
2597 is_const = 0;
322e3e34
RK
2598 }
2599
2600 /* ??? Unfinished: must pass the memory address as an argument. */
2601
2602 /* Copy all the libcall-arguments out of the varargs data
2603 and into a vector ARGVEC.
2604
2605 Compute how to pass each argument. We only support a very small subset
2606 of the full argument passing conventions to limit complexity here since
2607 library functions shouldn't have many args. */
2608
2609 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2610
eecb6f50 2611 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2612
2613 args_size.constant = 0;
2614 args_size.var = 0;
2615
2616 count = 0;
2617
888aa7a9
RS
2618 push_temp_slots ();
2619
322e3e34
RK
2620 /* If there's a structure value address to be passed,
2621 either pass it in the special place, or pass it as an extra argument. */
fac0ad80 2622 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
322e3e34
RK
2623 {
2624 rtx addr = XEXP (mem_value, 0);
fac0ad80 2625 nargs++;
322e3e34 2626
fac0ad80
RS
2627 /* Make sure it is a reasonable operand for a move or push insn. */
2628 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2629 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2630 addr = force_operand (addr, NULL_RTX);
322e3e34 2631
fac0ad80 2632 argvec[count].value = addr;
4fc3dcd5 2633 argvec[count].mode = Pmode;
fac0ad80 2634 argvec[count].partial = 0;
322e3e34 2635
4fc3dcd5 2636 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
322e3e34 2637#ifdef FUNCTION_ARG_PARTIAL_NREGS
4fc3dcd5 2638 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
fac0ad80 2639 abort ();
322e3e34
RK
2640#endif
2641
4fc3dcd5 2642 locate_and_pad_parm (Pmode, NULL_TREE,
fac0ad80
RS
2643 argvec[count].reg && argvec[count].partial == 0,
2644 NULL_TREE, &args_size, &argvec[count].offset,
2645 &argvec[count].size);
322e3e34
RK
2646
2647
fac0ad80 2648 if (argvec[count].reg == 0 || argvec[count].partial != 0
322e3e34 2649#ifdef REG_PARM_STACK_SPACE
fac0ad80 2650 || 1
322e3e34 2651#endif
fac0ad80
RS
2652 )
2653 args_size.constant += argvec[count].size.constant;
322e3e34 2654
0f41302f 2655 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
fac0ad80
RS
2656
2657 count++;
322e3e34
RK
2658 }
2659
2660 for (; count < nargs; count++)
2661 {
2662 rtx val = va_arg (p, rtx);
2663 enum machine_mode mode = va_arg (p, enum machine_mode);
2664
2665 /* We cannot convert the arg value to the mode the library wants here;
2666 must do it earlier where we know the signedness of the arg. */
2667 if (mode == BLKmode
2668 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2669 abort ();
2670
2671 /* On some machines, there's no way to pass a float to a library fcn.
2672 Pass it as a double instead. */
2673#ifdef LIBGCC_NEEDS_DOUBLE
2674 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2675 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2676#endif
2677
2678 /* There's no need to call protect_from_queue, because
2679 either emit_move_insn or emit_push_insn will do that. */
2680
2681 /* Make sure it is a reasonable operand for a move or push insn. */
2682 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2683 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2684 val = force_operand (val, NULL_RTX);
2685
322e3e34
RK
2686#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2687 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2688 {
a44492f0
RK
2689 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2690 be viewed as just an efficiency improvement. */
888aa7a9
RS
2691 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2692 emit_move_insn (slot, val);
2693 val = XEXP (slot, 0);
2694 mode = Pmode;
2695 }
322e3e34
RK
2696#endif
2697
888aa7a9
RS
2698 argvec[count].value = val;
2699 argvec[count].mode = mode;
2700
322e3e34 2701 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 2702 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
2703 abort ();
2704#ifdef FUNCTION_ARG_PARTIAL_NREGS
2705 argvec[count].partial
2706 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2707#else
2708 argvec[count].partial = 0;
2709#endif
2710
2711 locate_and_pad_parm (mode, NULL_TREE,
2712 argvec[count].reg && argvec[count].partial == 0,
2713 NULL_TREE, &args_size, &argvec[count].offset,
2714 &argvec[count].size);
2715
2716 if (argvec[count].size.var)
2717 abort ();
2718
2719#ifndef REG_PARM_STACK_SPACE
2720 if (argvec[count].partial)
2721 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2722#endif
2723
2724 if (argvec[count].reg == 0 || argvec[count].partial != 0
2725#ifdef REG_PARM_STACK_SPACE
2726 || 1
2727#endif
2728 )
2729 args_size.constant += argvec[count].size.constant;
2730
2731#ifdef ACCUMULATE_OUTGOING_ARGS
2732 /* If this arg is actually passed on the stack, it might be
2733 clobbering something we already put there (this library call might
2734 be inside the evaluation of an argument to a function whose call
2735 requires the stack). This will only occur when the library call
2736 has sufficient args to run out of argument registers. Abort in
2737 this case; if this ever occurs, code must be added to save and
2738 restore the arg slot. */
2739
2740 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2741 abort ();
2742#endif
2743
0f41302f 2744 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
2745 }
2746 va_end (p);
2747
2748 /* If this machine requires an external definition for library
2749 functions, write one out. */
2750 assemble_external_libcall (fun);
2751
2752 original_args_size = args_size;
2753#ifdef STACK_BOUNDARY
2754 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2755 / STACK_BYTES) * STACK_BYTES);
2756#endif
2757
2758#ifdef REG_PARM_STACK_SPACE
2759 args_size.constant = MAX (args_size.constant,
2760 REG_PARM_STACK_SPACE (NULL_TREE));
2761#ifndef OUTGOING_REG_PARM_STACK_SPACE
2762 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2763#endif
2764#endif
2765
322e3e34
RK
2766 if (args_size.constant > current_function_outgoing_args_size)
2767 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2768
2769#ifdef ACCUMULATE_OUTGOING_ARGS
322e3e34
RK
2770 args_size.constant = 0;
2771#endif
2772
2773#ifndef PUSH_ROUNDING
2774 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2775#endif
2776
2777#ifdef PUSH_ARGS_REVERSED
2778#ifdef STACK_BOUNDARY
2779 /* If we push args individually in reverse order, perform stack alignment
2780 before the first push (the last arg). */
2781 if (argblock == 0)
2782 anti_adjust_stack (GEN_INT (args_size.constant
2783 - original_args_size.constant));
2784#endif
2785#endif
2786
2787#ifdef PUSH_ARGS_REVERSED
2788 inc = -1;
2789 argnum = nargs - 1;
2790#else
2791 inc = 1;
2792 argnum = 0;
2793#endif
2794
2795 /* Push the args that need to be pushed. */
2796
2797 for (count = 0; count < nargs; count++, argnum += inc)
2798 {
2799 register enum machine_mode mode = argvec[argnum].mode;
2800 register rtx val = argvec[argnum].value;
2801 rtx reg = argvec[argnum].reg;
2802 int partial = argvec[argnum].partial;
2803
2804 if (! (reg != 0 && partial == 0))
2805 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2806 argblock, GEN_INT (argvec[count].offset.constant));
2807 NO_DEFER_POP;
2808 }
2809
2810#ifndef PUSH_ARGS_REVERSED
2811#ifdef STACK_BOUNDARY
2812 /* If we pushed args in forward order, perform stack alignment
2813 after pushing the last arg. */
2814 if (argblock == 0)
2815 anti_adjust_stack (GEN_INT (args_size.constant
2816 - original_args_size.constant));
2817#endif
2818#endif
2819
2820#ifdef PUSH_ARGS_REVERSED
2821 argnum = nargs - 1;
2822#else
2823 argnum = 0;
2824#endif
2825
77cac2f2 2826 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 2827
322e3e34
RK
2828 /* Now load any reg parms into their regs. */
2829
322e3e34
RK
2830 for (count = 0; count < nargs; count++, argnum += inc)
2831 {
2832 register enum machine_mode mode = argvec[argnum].mode;
2833 register rtx val = argvec[argnum].value;
2834 rtx reg = argvec[argnum].reg;
2835 int partial = argvec[argnum].partial;
2836
2837 if (reg != 0 && partial == 0)
2838 emit_move_insn (reg, val);
2839 NO_DEFER_POP;
2840 }
2841
2842#if 0
2843 /* For version 1.37, try deleting this entirely. */
2844 if (! no_queue)
2845 emit_queue ();
2846#endif
2847
2848 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
2849 for (count = 0; count < nargs; count++)
2850 if (argvec[count].reg != 0)
77cac2f2 2851 use_reg (&call_fusage, argvec[count].reg);
322e3e34 2852
fac0ad80
RS
2853 /* Pass the function the address in which to return a structure value. */
2854 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
2855 {
2856 emit_move_insn (struct_value_rtx,
2857 force_reg (Pmode,
2858 force_operand (XEXP (mem_value, 0),
2859 NULL_RTX)));
2860 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 2861 use_reg (&call_fusage, struct_value_rtx);
fac0ad80
RS
2862 }
2863
322e3e34
RK
2864 /* Don't allow popping to be deferred, since then
2865 cse'ing of library calls could delete a call and leave the pop. */
2866 NO_DEFER_POP;
2867
2868 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2869 will set inhibit_defer_pop to that value. */
334c4f0f
RK
2870 /* See the comment in emit_library_call about the function type we build
2871 and pass here. */
322e3e34 2872
2c8da025
RK
2873 emit_call_1 (fun,
2874 get_identifier (XSTR (orgfun, 0)),
334c4f0f
RK
2875 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
2876 args_size.constant, struct_value_size,
322e3e34 2877 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4d6a19ff 2878 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 2879 old_inhibit_defer_pop + 1, call_fusage, is_const);
322e3e34
RK
2880
2881 /* Now restore inhibit_defer_pop to its actual original value. */
2882 OK_DEFER_POP;
2883
888aa7a9
RS
2884 pop_temp_slots ();
2885
322e3e34
RK
2886 /* Copy the value to the right place. */
2887 if (outmode != VOIDmode)
2888 {
2889 if (mem_value)
2890 {
2891 if (value == 0)
fac0ad80 2892 value = mem_value;
322e3e34
RK
2893 if (value != mem_value)
2894 emit_move_insn (value, mem_value);
2895 }
2896 else if (value != 0)
2897 emit_move_insn (value, hard_libcall_value (outmode));
fac0ad80
RS
2898 else
2899 value = hard_libcall_value (outmode);
322e3e34 2900 }
fac0ad80
RS
2901
2902 return value;
322e3e34
RK
2903}
2904\f
51bbfa0c
RS
2905#if 0
2906/* Return an rtx which represents a suitable home on the stack
2907 given TYPE, the type of the argument looking for a home.
2908 This is called only for BLKmode arguments.
2909
2910 SIZE is the size needed for this target.
2911 ARGS_ADDR is the address of the bottom of the argument block for this call.
2912 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
2913 if this machine uses push insns. */
2914
2915static rtx
2916target_for_arg (type, size, args_addr, offset)
2917 tree type;
2918 rtx size;
2919 rtx args_addr;
2920 struct args_size offset;
2921{
2922 rtx target;
2923 rtx offset_rtx = ARGS_SIZE_RTX (offset);
2924
2925 /* We do not call memory_address if possible,
2926 because we want to address as close to the stack
2927 as possible. For non-variable sized arguments,
2928 this will be stack-pointer relative addressing. */
2929 if (GET_CODE (offset_rtx) == CONST_INT)
2930 target = plus_constant (args_addr, INTVAL (offset_rtx));
2931 else
2932 {
2933 /* I have no idea how to guarantee that this
2934 will work in the presence of register parameters. */
2935 target = gen_rtx (PLUS, Pmode, args_addr, offset_rtx);
2936 target = memory_address (QImode, target);
2937 }
2938
2939 return gen_rtx (MEM, BLKmode, target);
2940}
2941#endif
2942\f
2943/* Store a single argument for a function call
2944 into the register or memory area where it must be passed.
2945 *ARG describes the argument value and where to pass it.
2946
2947 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 2948 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
2949
2950 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
2951 so must be careful about how the stack is used.
2952
2953 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
2954 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
2955 that we need not worry about saving and restoring the stack.
2956
2957 FNDECL is the declaration of the function we are calling. */
2958
2959static void
6f90e075
JW
2960store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl,
2961 reg_parm_stack_space)
51bbfa0c
RS
2962 struct arg_data *arg;
2963 rtx argblock;
2964 int may_be_alloca;
2965 int variable_size;
2966 tree fndecl;
6f90e075 2967 int reg_parm_stack_space;
51bbfa0c
RS
2968{
2969 register tree pval = arg->tree_value;
2970 rtx reg = 0;
2971 int partial = 0;
2972 int used = 0;
2973 int i, lower_bound, upper_bound;
2974
2975 if (TREE_CODE (pval) == ERROR_MARK)
2976 return;
2977
cc79451b
RK
2978 /* Push a new temporary level for any temporaries we make for
2979 this argument. */
2980 push_temp_slots ();
2981
51bbfa0c
RS
2982#ifdef ACCUMULATE_OUTGOING_ARGS
2983 /* If this is being stored into a pre-allocated, fixed-size, stack area,
2984 save any previous data at that location. */
2985 if (argblock && ! variable_size && arg->stack)
2986 {
2987#ifdef ARGS_GROW_DOWNWARD
0f41302f
MS
2988 /* stack_slot is negative, but we want to index stack_usage_map
2989 with positive values. */
51bbfa0c
RS
2990 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2991 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
2992 else
2993 abort ();
2994
2995 lower_bound = upper_bound - arg->size.constant;
2996#else
2997 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2998 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
2999 else
3000 lower_bound = 0;
3001
3002 upper_bound = lower_bound + arg->size.constant;
3003#endif
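      /* Illustrative numbers (ARGS_GROW_DOWNWARD case): a stack slot at
	 (plus argblock -16) holding an 8-byte argument gives upper_bound 17
	 and lower_bound 9, so stack_usage_map[9] through [16] are the bytes
	 examined, saved, and later marked as in use.  */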
3004
3005 for (i = lower_bound; i < upper_bound; i++)
3006 if (stack_usage_map[i]
3007#ifdef REG_PARM_STACK_SPACE
3008 /* Don't store things in the fixed argument area at this point;
3009 it has already been saved. */
6f90e075 3010 && i > reg_parm_stack_space
51bbfa0c
RS
3011#endif
3012 )
3013 break;
3014
3015 if (i != upper_bound)
3016 {
3017 /* We need to make a save area. See what mode we can make it. */
3018 enum machine_mode save_mode
3019 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3020 rtx stack_area
3021 = gen_rtx (MEM, save_mode,
3022 memory_address (save_mode, XEXP (arg->stack_slot, 0)));
3023
3024 if (save_mode == BLKmode)
3025 {
3026 arg->save_area = assign_stack_temp (BLKmode,
6fa51029 3027 arg->size.constant, 0);
3668e76e
JL
3028 MEM_IN_STRUCT_P (arg->save_area)
3029 = AGGREGATE_TYPE_P (TREE_TYPE (arg->tree_value));
cc79451b 3030 preserve_temp_slots (arg->save_area);
51bbfa0c 3031 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 3032 GEN_INT (arg->size.constant),
51bbfa0c
RS
3033 PARM_BOUNDARY / BITS_PER_UNIT);
3034 }
3035 else
3036 {
3037 arg->save_area = gen_reg_rtx (save_mode);
3038 emit_move_insn (arg->save_area, stack_area);
3039 }
3040 }
3041 }
3042#endif
3043
3044 /* If this isn't going to be placed on both the stack and in registers,
3045 set up the register and number of words. */
3046 if (! arg->pass_on_stack)
3047 reg = arg->reg, partial = arg->partial;
3048
3049 if (reg != 0 && partial == 0)
3050 /* Being passed entirely in a register. We shouldn't be called in
3051 this case. */
3052 abort ();
3053
4ab56118
RK
3054 /* If this arg needs special alignment, don't load the registers
3055 here. */
3056 if (arg->n_aligned_regs != 0)
3057 reg = 0;
4ab56118 3058
4ab56118 3059 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
3060 it directly into its stack slot. Otherwise, we can. */
3061 if (arg->value == 0)
d64f5a78
RS
3062 {
3063#ifdef ACCUMULATE_OUTGOING_ARGS
3064 /* stack_arg_under_construction is nonzero if a function argument is
3065 being evaluated directly into the outgoing argument list and
3066 expand_call must take special action to preserve the argument list
3067 if it is called recursively.
3068
3069 For scalar function arguments stack_usage_map is sufficient to
3070 determine which stack slots must be saved and restored. Scalar
3071 arguments in general have pass_on_stack == 0.
3072
3073 If this argument is initialized by a function which takes the
3074 address of the argument (a C++ constructor or a C function
3075 returning a BLKmode structure), then stack_usage_map is
3076 insufficient and expand_call must push the stack around the
3077 function call. Such arguments have pass_on_stack == 1.
3078
3079 Note that it is always safe to set stack_arg_under_construction,
3080 but this generates suboptimal code if set when not needed. */
3081
3082 if (arg->pass_on_stack)
3083 stack_arg_under_construction++;
3084#endif
3a08477a
RK
3085 arg->value = expand_expr (pval,
3086 (partial
3087 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3088 ? NULL_RTX : arg->stack,
e5d70561 3089 VOIDmode, 0);
1efe6448
RK
3090
3091 /* If we are promoting object (or for any other reason) the mode
3092 doesn't agree, convert the mode. */
3093
7373d92d
RK
3094 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3095 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3096 arg->value, arg->unsignedp);
1efe6448 3097
d64f5a78
RS
3098#ifdef ACCUMULATE_OUTGOING_ARGS
3099 if (arg->pass_on_stack)
3100 stack_arg_under_construction--;
3101#endif
3102 }
51bbfa0c
RS
3103
3104 /* Don't allow anything left on stack from computation
3105 of argument to alloca. */
3106 if (may_be_alloca)
3107 do_pending_stack_adjust ();
3108
3109 if (arg->value == arg->stack)
3110 /* If the value is already in the stack slot, we are done. */
3111 ;
1efe6448 3112 else if (arg->mode != BLKmode)
51bbfa0c
RS
3113 {
3114 register int size;
3115
3116 /* Argument is a scalar, not entirely passed in registers.
3117 (If part is passed in registers, arg->partial says how much
3118 and emit_push_insn will take care of putting it there.)
3119
3120 Push it, and if its size is less than the
3121 amount of space allocated to it,
3122 also bump stack pointer by the additional space.
3123 Note that in C the default argument promotions
3124 will prevent such mismatches. */
3125
1efe6448 3126 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
3127 /* Compute how much space the push instruction will push.
3128 On many machines, pushing a byte will advance the stack
3129 pointer by a halfword. */
3130#ifdef PUSH_ROUNDING
3131 size = PUSH_ROUNDING (size);
3132#endif
3133 used = size;
3134
3135 /* Compute how much space the argument should get:
3136 round up to a multiple of the alignment for arguments. */
1efe6448 3137 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
3138 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3139 / (PARM_BOUNDARY / BITS_PER_UNIT))
3140 * (PARM_BOUNDARY / BITS_PER_UNIT));
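      /* For example, pushing an HImode argument whose PUSH_ROUNDING size is
	 2 with PARM_BOUNDARY == 32 gives USED == 4, and emit_push_insn below
	 is told to leave USED - SIZE == 2 bytes of padding (assuming
	 FUNCTION_ARG_PADDING asked for padding).  */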
3141
3142 /* This isn't already where we want it on the stack, so put it there.
3143 This can either be done with push or copy insns. */
ccf5d244
RK
3144 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
3145 0, partial, reg, used - size,
3146 argblock, ARGS_SIZE_RTX (arg->offset));
51bbfa0c
RS
3147 }
3148 else
3149 {
3150 /* BLKmode, at least partly to be pushed. */
3151
3152 register int excess;
3153 rtx size_rtx;
3154
3155 /* Pushing a nonscalar.
3156 If part is passed in registers, PARTIAL says how much
3157 and emit_push_insn will take care of putting it there. */
3158
3159 /* Round its size up to a multiple
3160 of the allocation unit for arguments. */
3161
3162 if (arg->size.var != 0)
3163 {
3164 excess = 0;
3165 size_rtx = ARGS_SIZE_RTX (arg->size);
3166 }
3167 else
3168 {
51bbfa0c
RS
3169 /* PUSH_ROUNDING has no effect on us, because
3170 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 3171 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 3172 + partial * UNITS_PER_WORD);
e4f93898 3173 size_rtx = expr_size (pval);
51bbfa0c
RS
3174 }
3175
1efe6448 3176 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c
RS
3177 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3178 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset));
3179 }
3180
3181
3182 /* Unless this is a partially-in-register argument, the argument is now
3183 in the stack.
3184
3185 ??? Note that this can change arg->value from arg->stack to
3186 arg->stack_slot and it matters when they are not the same.
3187 It isn't totally clear that this is correct in all cases. */
3188 if (partial == 0)
3189 arg->value = arg->stack_slot;
3190
3191 /* Once we have pushed something, pops can't safely
3192 be deferred during the rest of the arguments. */
3193 NO_DEFER_POP;
3194
3195 /* ANSI doesn't require a sequence point here,
3196 but PCC has one, so this will avoid some problems. */
3197 emit_queue ();
3198
db907e7b
RK
3199 /* Free any temporary slots made in processing this argument. Show
3200 that we might have taken the address of something and pushed that
3201 as an operand. */
3202 preserve_temp_slots (NULL_RTX);
51bbfa0c 3203 free_temp_slots ();
cc79451b 3204 pop_temp_slots ();
51bbfa0c
RS
3205
3206#ifdef ACCUMULATE_OUTGOING_ARGS
3207 /* Now mark the segment we just used. */
3208 if (argblock && ! variable_size && arg->stack)
3209 for (i = lower_bound; i < upper_bound; i++)
3210 stack_usage_map[i] = 1;
3211#endif
3212}