/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "gvarargs.h"
#include "insn-flags.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED  /* If it's last to first */
#endif

#endif

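/* For example: on a machine where the stack grows downward but argument
   addresses within a frame increase upward (STACK_GROWS_DOWNWARD defined,
   ARGS_GROW_DOWNWARD not), exactly one of the two macros is defined, so
   PUSH_ARGS_REVERSED is in effect: the last argument is pushed first and
   the first argument ends up at the lowest address, nearest the stack
   pointer, where the callee looks for it.  */
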
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

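/* A minimal illustrative sketch (guarded out, not used by the compiler):
   how a byte count is rounded up to a multiple of STACK_BYTES, mirroring
   the expression applied to args_size.constant in expand_call below.  The
   concrete numbers in the comment assume STACK_BOUNDARY == 64 and
   BITS_PER_UNIT == 8, purely as an example.  */
#if 0
static int
round_up_to_stack_bytes (int size)
{
  /* With STACK_BYTES == 8, a size of 20 becomes 24.  */
  return ((size + (STACK_BYTES - 1)) / STACK_BYTES) * STACK_BYTES;
}
#endif
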
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or an
     EXPR_LIST if the arg is to be copied into multiple different
     registers.  */
  rtx reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Non-zero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets;
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
#endif
#ifdef STRICT_ALIGNMENT
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
#endif
};

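/* A worked example of the PARTIAL and SIZE fields (assuming, purely for
   illustration, 4-byte words and a 32-bit PARM_BOUNDARY): a 12-byte
   argument whose first 8 bytes go in registers has partial == 2, and when
   REG_PARM_STACK_SPACE is not defined only the remaining 4 bytes are
   counted in size.constant, as computed in expand_call below.  */
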
#ifdef ACCUMULATE_OUTGOING_ARGS
/* A vector of one char per byte of stack space.  A byte is non-zero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;
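/* A minimal sketch of how a byte-per-byte usage map of this kind works
   (guarded out; the real map is allocated with alloca in expand_call and
   the actual saving is done by store_one_arg, and the fixed size here is
   for illustration only).  */
#if 0
static char example_map[64];

/* Mark bytes [lo, hi) of the outgoing argument area as in use.  */
static void
example_mark_used (int lo, int hi)
{
  int i;
  for (i = lo; i < hi; i++)
    example_map[i] = 1;
}

/* Return 1 if any byte in [lo, hi) is already in use, meaning a nested
   call would have to save and restore that region first.  */
static int
example_overlaps (int lo, int hi)
{
  int i;
  for (i = lo; i < hi; i++)
    if (example_map[i])
      return 1;
  return 0;
}
#endif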

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
#endif

static int calls_function PROTO((tree, int));
static int calls_function_1 PROTO((tree, int));
static void emit_call_1 PROTO((rtx, tree, int, int, rtx, rtx, int,
                               rtx, int));
static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
                                  tree, int));

/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we need only return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

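/* For example, expand_call below uses calls_function (args[i].tree_value, 1)
   to precompute a parameter that might call `alloca' while this call uses
   the stack, and calls_function (args[i].tree_value, 0) to precompute, when
   the argument block is preallocated, any parameter containing a function
   call that might store arguments on the stack.  */
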
static tree calls_function_save_exprs;

static int
calls_function (exp, which)
     tree exp;
     int which;
{
  int val;
  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}

static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  register int i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));
  int length = tree_code_length[(int) TREE_CODE (exp)];

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
      && type != 'b')
    return 0;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      if (which == 0)
        return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                   == FUNCTION_DECL)
               && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
               && (DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                   == BUILT_IN_ALLOCA))
        return 1;

      /* Third operand is RTL.  */
      length = 2;
      break;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return 0;
      if (value_member (exp, calls_function_save_exprs))
        return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
                                             calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
              && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
        register tree local;

        for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
          if (DECL_INITIAL (local) != 0
              && calls_function_1 (DECL_INITIAL (local), which))
            return 1;
      }
      {
        register tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (exp);
             subblock;
             subblock = TREE_CHAIN (subblock))
          if (calls_function_1 (subblock, which))
            return 1;
      }
      return 0;

    case METHOD_CALL_EXPR:
      length = 3;
      break;

    case WITH_CLEANUP_EXPR:
      length = 1;
      break;

    case RTL_EXPR:
      return 0;
    }

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
        && calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   USE_INSNS points to a variable holding a chain of USE insns
   to which a USE of the static chain
   register should be added, if required.  */

rtx
prepare_call_address (funexp, fndecl, use_insns)
     rtx funexp;
     tree fndecl;
     rtx *use_insns;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);
  else
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
        if (fndecl != current_function_decl)
#endif
          funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      /* Put the USE insn in the chain we were passed.  It will later be
         output immediately in front of the CALL insn.  */
      push_to_sequence (*use_insns);
      emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
      *use_insns = get_insns ();
      end_sequence ();
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FUNTYPE is the data type of the function, or, for a library call,
   the identifier for the name of the call.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   STACK_SIZE is the number of bytes of arguments on the stack,
   rounded up to STACK_BOUNDARY; zero if the size is variable.
   This is both to put into the call insn and
   to generate explicit popping code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   USE_INSNS is a chain of USE insns to be emitted immediately before
   the actual CALL insn.

   IS_CONST is true if this is a `const' call.  */

static void
emit_call_1 (funexp, funtype, stack_size, struct_value_size, next_arg_reg,
             valreg, old_inhibit_defer_pop, use_insns, is_const)
     rtx funexp;
     tree funtype;
     int stack_size;
     int struct_value_size;
     rtx next_arg_reg;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx use_insns;
     int is_const;
{
  rtx stack_size_rtx = GEN_INT (stack_size);
  rtx struct_value_size_rtx = GEN_INT (struct_value_size);
  rtx call_insn;
  int already_popped = 0;

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#ifndef ACCUMULATE_OUTGOING_ARGS
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  if (HAVE_call_pop && HAVE_call_value_pop
      && (RETURN_POPS_ARGS (funtype, stack_size) > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (RETURN_POPS_ARGS (funtype, stack_size));
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */
      if (valreg)
        pat = gen_call_value_pop (valreg,
                                  gen_rtx (MEM, FUNCTION_MODE, funexp),
                                  stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, funexp),
                            stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (gen_call_value (valreg,
                                        gen_rtx (MEM, FUNCTION_MODE, funexp),
                                        stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp),
                                  stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted and write the USE insns before it.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the USE insns before the CALL.  */
  emit_insns_before (use_insns, call_insn);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (is_const)
    CONST_CALL_P (call_insn) = 1;

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

#ifndef ACCUMULATE_OUTGOING_ARGS
  /* If returning from the subroutine does not automatically pop the args,
     we need an instruction to pop them sooner or later.
     Perhaps do it now; perhaps just record how much space to pop later.

     If returning from the subroutine does pop the args, indicate that the
     stack pointer will be changed.  */

  if (stack_size != 0 && RETURN_POPS_ARGS (funtype, stack_size) > 0)
    {
      if (!already_popped)
        emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
      stack_size -= RETURN_POPS_ARGS (funtype, stack_size);
      stack_size_rtx = GEN_INT (stack_size);
    }

  if (stack_size != 0)
    {
      if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
        pending_stack_adjust += stack_size;
      else
        adjust_stack (stack_size_rtx);
    }
#endif
}

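/* To illustrate the deferred-pop case above: with -fdefer-pop in effect and
   inhibit_defer_pop zero, two consecutive calls that each leave 16 bytes of
   arguments on the stack simply add 16 to pending_stack_adjust twice; the
   single 32-byte adjustment is emitted later, when do_pending_stack_adjust
   is called (for example before a call that might be alloca, as expand_call
   does below).  */
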
/* Generate all the code for a function call
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (exp, target, ignore)
     tree exp;
     rtx target;
     int ignore;
{
  /* List of actual parameters.  */
  tree actparms = TREE_OPERAND (exp, 1);
  /* RTX for the function to be called.  */
  rtx funexp;
  /* Tree node for the function to be called (not the address!).  */
  tree funtree;
  /* Data type of the function.  */
  tree funtype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  char *name = 0;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  int struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Count arg position in order args appear.  */
  int argpos;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */
  int current_call_is_indirect = 0;

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
  int must_preallocate = 1;
#else
#ifdef PUSH_ROUNDING
  int must_preallocate = 0;
#else
  int must_preallocate = 1;
#endif
#endif

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;
  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Nonzero if it is plausible that this is a call to alloca.  */
  int may_be_alloca;
  /* Nonzero if this is a call to setjmp or a related function.  */
  int returns_twice;
  /* Nonzero if this is a call to `longjmp'.  */
  int is_longjmp;
  /* Nonzero if this is a call to an inline function.  */
  int is_integrable = 0;
  /* Nonzero if this is a call to a `const' function.
     Note that only explicitly named functions are handled as `const' here.  */
  int is_const = 0;
  /* Nonzero if this is a call to a `volatile' function.  */
  int is_volatile = 0;
#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = -1, high_to_save;
  rtx save_area = 0;  /* Place that it is saved */
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
#endif

  rtx old_stack_level = 0;
  int old_pending_adj;
  int old_stack_arg_under_construction;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  tree old_cleanups = cleanups_this_call;

  rtx use_insns = 0;

  register tree p;
  register int i, j;

  /* See if we can find a DECL-node for the actual function.
     As a result, decide whether this is a call to an integrable function.  */

  p = TREE_OPERAND (exp, 0);
  if (TREE_CODE (p) == ADDR_EXPR)
    {
      fndecl = TREE_OPERAND (p, 0);
      if (TREE_CODE (fndecl) != FUNCTION_DECL)
        {
          /* May still be a `const' function if it is
             a call through a pointer-to-const.
             But we don't handle that.  */
          fndecl = 0;
        }
      else
        {
          if (!flag_no_inline
              && fndecl != current_function_decl
              && DECL_SAVED_INSNS (fndecl))
            is_integrable = 1;
          else if (! TREE_ADDRESSABLE (fndecl))
            {
              /* In case this function later becomes inlinable,
                 record that there was already a non-inline call to it.

                 Use abstraction instead of setting TREE_ADDRESSABLE
                 directly.  */
              if (DECL_INLINE (fndecl) && extra_warnings && warn_inline
                  && !flag_no_inline)
                warning_with_decl (fndecl, "can't inline call to `%s' which was declared inline");
              mark_addressable (fndecl);
            }

          if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
              && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
            is_const = 1;

          if (TREE_THIS_VOLATILE (fndecl))
            is_volatile = 1;
        }
    }

#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
#endif

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
          || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
          || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
          || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE))
    warning ("function call has aggregate value");

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp))
    {
      /* This call returns a big structure.  */
      is_const = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      {
        pcc_struct_value = 1;
        is_integrable = 0;  /* Easier than making that case work right.  */
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
        struct_value_size = int_size_in_bytes (TREE_TYPE (exp));

        if (struct_value_size < 0)
          abort ();

        if (target && GET_CODE (target) == MEM)
          structure_value_addr = XEXP (target, 0);
        else
          {
            /* Assign a temporary on the stack to hold the value.  */

            /* For variable-sized objects, we must be called with a target
               specified.  If we were to allocate space on the stack here,
               we would have no way of knowing when to free it.  */

            structure_value_addr
              = XEXP (assign_stack_temp (BLKmode, struct_value_size, 1), 0);
            target = 0;
          }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* If called function is inline, try to integrate it.  */

  if (is_integrable)
    {
      rtx temp;
      rtx before_call = get_last_insn ();

      temp = expand_inline_function (fndecl, actparms, target,
                                     ignore, TREE_TYPE (exp),
                                     structure_value_addr);

      /* If inlining succeeded, return.  */
      if ((HOST_WIDE_INT) temp != -1)
        {
          /* Perform all cleanups needed for the arguments of this call
             (i.e. destructors in C++).  It is ok if these destructors
             clobber RETURN_VALUE_REG, because the only time we care about
             this is when TARGET is that register.  But in C++, we take
             care to never return that register directly.  */
          expand_cleanups_to (old_cleanups);

#ifdef ACCUMULATE_OUTGOING_ARGS
          /* If the outgoing argument list must be preserved, push
             the stack before executing the inlined function if it
             makes any calls.  */

          for (i = reg_parm_stack_space - 1; i >= 0; i--)
            if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
              break;

          if (stack_arg_under_construction || i >= 0)
            {
              rtx insn = NEXT_INSN (before_call), seq;

              /* Look for a call in the inline function code.
                 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
                 nonzero then there is a call and it is not necessary
                 to scan the insns.  */

              if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
                for (; insn; insn = NEXT_INSN (insn))
                  if (GET_CODE (insn) == CALL_INSN)
                    break;

              if (insn)
                {
                  /* Reserve enough stack space so that the largest
                     argument list of any function call in the inline
                     function does not overlap the argument list being
                     evaluated.  This is usually an overestimate because
                     allocate_dynamic_stack_space reserves space for an
                     outgoing argument list in addition to the requested
                     space, but there is no way to ask for stack space such
                     that an argument list of a certain length can be
                     safely constructed.  */

                  int adjust = OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl));
#ifdef REG_PARM_STACK_SPACE
                  /* Add the stack space reserved for register arguments
                     in the inline function.  What is really needed is the
                     largest value of reg_parm_stack_space in the inline
                     function, but that is not available.  Using the current
                     value of reg_parm_stack_space is wrong, but gives
                     correct results on all supported machines.  */
                  adjust += reg_parm_stack_space;
#endif
                  start_sequence ();
                  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
                  allocate_dynamic_stack_space (GEN_INT (adjust),
                                                NULL_RTX, BITS_PER_UNIT);
                  seq = get_insns ();
                  end_sequence ();
                  emit_insns_before (seq, NEXT_INSN (before_call));
                  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
                }
            }
#endif

          /* If the result is equivalent to TARGET, return TARGET to simplify
             checks in store_expr.  They can be equivalent but not equal in the
             case of a function that returns BLKmode.  */
          if (temp != target && rtx_equal_p (temp, target))
            return target;
          return temp;
        }

      /* If inlining failed, mark FNDECL as needing to be compiled
         separately after all.  */
      mark_addressable (fndecl);
    }

  /* When calling a const function, we must pop the stack args right away,
     so that the pop is deleted or moved with the call.  */
  if (is_const)
    NO_DEFER_POP;

  function_call_count++;

  if (fndecl && DECL_NAME (fndecl))
    name = IDENTIFIER_POINTER (DECL_NAME (fndecl));

  /* On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  FUNCTION_ARG in the target
     description can look at current_call_is_indirect to determine which
     calling convention to use.  */
  current_call_is_indirect = (fndecl == 0);
#if 0
    = TREE_CODE (TREE_OPERAND (exp, 0)) == NON_LVALUE_EXPR ? 1 : 0;
#endif

#if 0
  /* Unless it's a call to a specific function that isn't alloca,
     if it has one argument, we must assume it might be alloca.  */

  may_be_alloca =
    (!(fndecl != 0 && strcmp (name, "alloca"))
     && actparms != 0
     && TREE_CHAIN (actparms) == 0);
#else
  /* We assume that alloca will always be called by name.  It
     makes no sense to pass it as a pointer-to-function to
     anything that does not understand its behavior.  */
  may_be_alloca =
    (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
               && name[0] == 'a'
               && ! strcmp (name, "alloca"))
              || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
                  && name[0] == '_'
                  && ! strcmp (name, "__builtin_alloca"))));
#endif

  /* See if this is a call to a function that can return more than once
     or a call to longjmp.  */

  returns_twice = 0;
  is_longjmp = 0;

  if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15)
    {
      char *tname = name;

      if (name[0] == '_')
        tname += ((name[1] == '_' && name[2] == 'x') ? 3 : 1);

      if (tname[0] == 's')
        {
          returns_twice
            = ((tname[1] == 'e'
                && (! strcmp (tname, "setjmp")
                    || ! strcmp (tname, "setjmp_syscall")))
               || (tname[1] == 'i'
                   && ! strcmp (tname, "sigsetjmp"))
               || (tname[1] == 'a'
                   && ! strcmp (tname, "savectx")));
          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            is_longjmp = 1;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork")))
        returns_twice = 1;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        is_longjmp = 1;
    }

  if (may_be_alloca)
    current_function_calls_alloca = 1;

  /* Don't let pending stack adjusts add up to too much.
     Also, do all pending adjustments now
     if there is any chance this might be a call to alloca.  */

  if (pending_stack_adjust >= 32
      || (pending_stack_adjust > 0 && may_be_alloca))
    do_pending_stack_adjust ();

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
  if (TREE_CODE (funtype) != POINTER_TYPE)
    abort ();
  funtype = TREE_TYPE (funtype);

  /* Push the temporary stack slot level so that we can free temporaries used
     by each of the arguments separately.  */
  push_temp_slots ();

  /* Start updating where the next arg would go.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX);

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  */
  if (structure_value_addr && struct_value_rtx == 0)
    {
#ifdef ACCUMULATE_OUTGOING_ARGS
      /* If the stack will be adjusted, make sure the structure address
         does not refer to virtual_outgoing_args_rtx.  */
      rtx temp = (stack_arg_under_construction
                  ? copy_addr_to_reg (structure_value_addr)
                  : force_reg (Pmode, structure_value_addr));
#else
      rtx temp = force_reg (Pmode, structure_value_addr);
#endif

      actparms
        = tree_cons (error_mark_node,
                     make_tree (build_pointer_type (TREE_TYPE (funtype)),
                                temp),
                     actparms);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
  num_actuals = i;

  /* Compute number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     (If no anonymous args follow, the result of list_length
     is actually one too large.)

     If SETUP_INCOMING_VARARGS is defined, this machine will be able to
     place unnamed args that were passed in registers into the stack.  So
     treat all args as named.  This allows the insns emitted for a specific
     argument list to be independent of the function declaration.

     If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
     way to pass unnamed args in registers, so we must force them into
     memory.  */
#ifndef SETUP_INCOMING_VARARGS
  if (TYPE_ARG_TYPES (funtype) != 0)
    n_named_args
      = list_length (TYPE_ARG_TYPES (funtype)) - 1
        /* Count the struct value address, if it is passed as a parm.  */
        + structure_value_addr_parm;
  else
#endif
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

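  /* For example (illustrative only): for a prototyped function
     `int f (int, int)' the argument type list is int, int, void, so
     list_length returns 3 and n_named_args is 2; for `int printf (char *, ...)'
     the list is just char *, so n_named_args is 0 and the last named
     argument is treated like the anonymous ones that follow it, as the
     comment above describes.  */
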
  /* Make a vector to hold all the information about each arg.  */
  args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
  bzero (args, num_actuals * sizeof (struct arg_data));

  args_size.constant = 0;
  args_size.var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

#ifdef PUSH_ARGS_REVERSED
  i = num_actuals - 1, inc = -1;
  /* In this case, must reverse order of args
     so that we compute and push the last arg first.  */
#else
  i = 0, inc = 1;
#endif

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || TYPE_SIZE (type) == 0)
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many words are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
           && contains_placeholder_p (TYPE_SIZE (type)))
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
          || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
                                             type, argpos < n_named_args)
#endif
          )
        {
#ifdef FUNCTION_ARG_CALLEE_COPIES
          if (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type), type,
                                          argpos < n_named_args)
              /* If it's in a register, we must make a copy of it too.  */
              /* ??? Is this a sufficient test?  Is there a better one? */
              && !(TREE_CODE (args[i].tree_value) == VAR_DECL
                   && REG_P (DECL_RTL (args[i].tree_value))))
            {
              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           args[i].tree_value);
              type = build_pointer_type (type);
            }
          else
#endif
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (TYPE_SIZE (type) == 0
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (TREE_VALUE (p));

                  if (old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
                      old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx (MEM, BLKmode,
                                  allocate_dynamic_stack_space (size_rtx,
                                                                NULL_RTX,
                                                                TYPE_ALIGN (type)));
                }
              else
                {
                  int size = int_size_in_bytes (type);
                  copy = assign_stack_temp (TYPE_MODE (type), size, 1);
                }

              store_expr (args[i].tree_value, copy, 0);

              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           make_tree (type, copy));
              type = build_pointer_type (type);
            }
        }

      mode = TYPE_MODE (type);

#ifdef PROMOTE_FUNCTION_ARGS
      /* Compute the mode to which the arg is actually to be extended.  */
      if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
          || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
          || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
          || TREE_CODE (type) == OFFSET_TYPE)
        {
          int unsignedp = TREE_UNSIGNED (type);
          PROMOTE_MODE (mode, unsignedp, type);
          args[i].unsignedp = unsignedp;
        }
#endif

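      /* For example (illustrative only): on a target whose PROMOTE_MODE
         widens sub-word integers to a full word, a `short' argument gets
         the wider word-sized mode here, and args[i].unsignedp records
         whether convert_to_mode below must zero-extend or sign-extend
         the value.  */
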
      args[i].mode = mode;
      args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
        args[i].partial
          = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
                                        argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned an (expr_list (nil) FOO), it means that
         we are to pass this arg in the register(s) designated by FOO, but
         also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == EXPR_LIST
          && XEXP (args[i].reg, 0) == 0)
        args[i].pass_on_stack = 1, args[i].reg = XEXP (args[i].reg, 1);

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        is_const = 0;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
#ifdef REG_PARM_STACK_SPACE
          || reg_parm_stack_space > 0
#endif
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             fndecl, &args_size, &args[i].offset,
                             &args[i].size);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = args_size;
#endif

#ifndef REG_PARM_STACK_SPACE
      /* If a part of the arg was put into registers,
         don't include that part in the amount pushed.  */
      if (! args[i].pass_on_stack)
        args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
                                  / (PARM_BOUNDARY / BITS_PER_UNIT)
                                  * (PARM_BOUNDARY / BITS_PER_UNIT));
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size.constant += args[i].size.constant;
      if (args[i].size.var)
        {
          ADD_PARM_SIZE (args_size, args[i].size.var);
        }

      /* Since the slot offset points to the bottom of the slot,
         we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = args_size;

      args[i].slot_offset.constant = -args_size.constant;
      if (args_size.var)
        {
          SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
        }
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
    }

#ifdef FINAL_REG_PARM_STACK_SPACE
  reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
                                                     args_size.var);
#endif

51bbfa0c
RS
1116 /* Compute the actual size of the argument block required. The variable
1117 and constant sizes must be combined, the size may have to be rounded,
1118 and there may be a minimum required size. */
1119
1120 original_args_size = args_size;
1121 if (args_size.var)
1122 {
1123 /* If this function requires a variable-sized argument list, don't try to
1124 make a cse'able block for this call. We may be able to do this
1125 eventually, but it is too complicated to keep track of what insns go
1126 in the cse'able block and which don't. */
1127
1128 is_const = 0;
1129 must_preallocate = 1;
1130
1131 args_size.var = ARGS_SIZE_TREE (args_size);
1132 args_size.constant = 0;
1133
1134#ifdef STACK_BOUNDARY
1135 if (STACK_BOUNDARY != BITS_PER_UNIT)
1136 args_size.var = round_up (args_size.var, STACK_BYTES);
1137#endif
1138
1139#ifdef REG_PARM_STACK_SPACE
6f90e075 1140 if (reg_parm_stack_space > 0)
51bbfa0c
RS
1141 {
1142 args_size.var
1143 = size_binop (MAX_EXPR, args_size.var,
1144 size_int (REG_PARM_STACK_SPACE (fndecl)));
1145
1146#ifndef OUTGOING_REG_PARM_STACK_SPACE
1147 /* The area corresponding to register parameters is not to count in
1148 the size of the block we need. So make the adjustment. */
1149 args_size.var
1150 = size_binop (MINUS_EXPR, args_size.var,
6f90e075 1151 size_int (reg_parm_stack_space));
51bbfa0c
RS
1152#endif
1153 }
1154#endif
1155 }
1156 else
1157 {
1158#ifdef STACK_BOUNDARY
1159 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1160 / STACK_BYTES) * STACK_BYTES);
1161#endif
1162
1163#ifdef REG_PARM_STACK_SPACE
1164 args_size.constant = MAX (args_size.constant,
6f90e075 1165 reg_parm_stack_space);
e1336658
JW
1166#ifdef MAYBE_REG_PARM_STACK_SPACE
1167 if (reg_parm_stack_space == 0)
1168 args_size.constant = 0;
1169#endif
51bbfa0c 1170#ifndef OUTGOING_REG_PARM_STACK_SPACE
6f90e075 1171 args_size.constant -= reg_parm_stack_space;
51bbfa0c
RS
1172#endif
1173#endif
1174 }
1175
1176 /* See if we have or want to preallocate stack space.
1177
1178 If we would have to push a partially-in-regs parm
1179 before other stack parms, preallocate stack space instead.
1180
1181 If the size of some parm is not a multiple of the required stack
1182 alignment, we must preallocate.
1183
1184 If the total size of arguments that would otherwise create a copy in
1185 a temporary (such as a CALL) is more than half the total argument list
1186 size, preallocation is faster.
1187
1188 Another reason to preallocate is if we have a machine (like the m88k)
1189 where stack alignment is required to be maintained between every
1190 pair of insns, not just when the call is made. However, we assume here
1191 that such machines either do not have push insns (and hence preallocation
1192 would occur anyway) or the problem is taken care of with
1193 PUSH_ROUNDING. */
1194
1195 if (! must_preallocate)
1196 {
1197 int partial_seen = 0;
1198 int copy_to_evaluate_size = 0;
1199
1200 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1201 {
1202 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1203 partial_seen = 1;
1204 else if (partial_seen && args[i].reg == 0)
1205 must_preallocate = 1;
1206
1207 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1208 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1209 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1210 || TREE_CODE (args[i].tree_value) == COND_EXPR
1211 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1212 copy_to_evaluate_size
1213 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1214 }
1215
c62f36cf
RS
1216 if (copy_to_evaluate_size * 2 >= args_size.constant
1217 && args_size.constant > 0)
51bbfa0c
RS
1218 must_preallocate = 1;
1219 }
1220
  /* If the structure value address will reference the stack pointer, we must
     stabilize it.  We don't need to do this if we know that we are not going
     to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
          || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
      && (args_size.var
#ifndef ACCUMULATE_OUTGOING_ARGS
          || args_size.constant
#endif
          ))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  */

  for (i = 0; i < num_actuals; i++)
    if (is_const
        || ((args_size.var != 0 || args_size.constant != 0)
            && calls_function (args[i].tree_value, 1))
        || (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
            && calls_function (args[i].tree_value, 0)))
      {
        args[i].initial_value = args[i].value
          = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

        if (GET_MODE (args[i].value) != VOIDmode
            && GET_MODE (args[i].value) != args[i].mode)
          args[i].value = convert_to_mode (args[i].mode, args[i].value,
                                           args[i].unsignedp);
        preserve_temp_slots (args[i].value);

        free_temp_slots ();

        /* ANSI doesn't require a sequence point here,
           but PCC has one, so this will avoid some problems.  */
        emit_queue ();
      }

  /* Now we are about to start emitting insns that can be deleted
     if a libcall is deleted.  */
  if (is_const)
    start_sequence ();

  /* If we have no actual push instructions, or shouldn't use them,
     make space for all args right now.  */

  if (args_size.var != 0)
    {
      if (old_stack_level == 0)
        {
          emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
          old_pending_adj = pending_stack_adjust;
          pending_stack_adjust = 0;
#ifdef ACCUMULATE_OUTGOING_ARGS
          /* stack_arg_under_construction says whether a stack arg is
             being constructed at the old stack level.  Pushing the stack
             gets a clean outgoing argument block.  */
          old_stack_arg_under_construction = stack_arg_under_construction;
          stack_arg_under_construction = 0;
#endif
        }
      argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
    }
  else if (must_preallocate)
    {
      /* Note that we must go through the motions of allocating an argument
         block even if the size is zero because we may be storing args
         in the area reserved for register arguments, which may be part of
         the stack frame.  */
      int needed = args_size.constant;

#ifdef ACCUMULATE_OUTGOING_ARGS
      /* Store the maximum argument space used.  It will be pushed by the
         prologue.

         Since the stack pointer will never be pushed, it is possible for
         the evaluation of a parm to clobber something we have already
         written to the stack.  Since most function calls on RISC machines
         do not use the stack, this is uncommon, but must work correctly.

         Therefore, we save any area of the stack that was already written
         and that we are using.  Here we set up to do this by making a new
         stack usage map from the old one.  The actual save will be done
         by store_one_arg.

         Another approach might be to try to reorder the argument
         evaluations to avoid this conflicting stack usage.  */

      if (needed > current_function_outgoing_args_size)
        current_function_outgoing_args_size = needed;

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
      /* Since we will be writing into the entire argument area, the
         map must be allocated for its entire size, not just the part that
         is the responsibility of the caller.  */
      needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                         needed + 1);
#else
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);
#endif
      stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);

      if (initial_highest_arg_in_use)
        bcopy (initial_stack_usage_map, stack_usage_map,
               initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
        bzero (&stack_usage_map[initial_highest_arg_in_use],
               highest_outgoing_arg_in_use - initial_highest_arg_in_use);
      needed = 0;

      /* The address of the outgoing argument list must not be copied to a
         register here, because argblock would be left pointing to the
         wrong place after the call to allocate_dynamic_stack_space below.  */

      argblock = virtual_outgoing_args_rtx;

#else /* not ACCUMULATE_OUTGOING_ARGS */
      if (inhibit_defer_pop == 0)
        {
          /* Try to reuse some or all of the pending_stack_adjust
             to get this space.  Maybe we can avoid any pushing.  */
          if (needed > pending_stack_adjust)
            {
              needed -= pending_stack_adjust;
              pending_stack_adjust = 0;
            }
          else
            {
              pending_stack_adjust -= needed;
              needed = 0;
            }
        }
      /* Special case this because overhead of `push_block' in this
         case is non-trivial.  */
      if (needed == 0)
        argblock = virtual_outgoing_args_rtx;
      else
        argblock = push_block (GEN_INT (needed), 0, 0);

      /* We only really need to call `copy_to_reg' in the case where push
         insns are going to be used to pass ARGBLOCK to a function
         call in ARGS.  In that case, the stack pointer changes value
         from the allocation point to the call point, and hence
         the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
         But might as well always do it.  */
      argblock = copy_to_reg (argblock);
#endif /* not ACCUMULATE_OUTGOING_ARGS */
    }


#ifdef ACCUMULATE_OUTGOING_ARGS
  /* The save/restore code in store_one_arg handles all cases except one:
     a constructor call (including a C function returning a BLKmode struct)
     to initialize an argument.  */
  if (stack_arg_under_construction)
    {
#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
      rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
#else
      rtx push_size = GEN_INT (args_size.constant);
#endif
      if (old_stack_level == 0)
        {
          emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
          old_pending_adj = pending_stack_adjust;
          pending_stack_adjust = 0;
          /* stack_arg_under_construction says whether a stack arg is
             being constructed at the old stack level.  Pushing the stack
             gets a clean outgoing argument block.  */
          old_stack_arg_under_construction = stack_arg_under_construction;
          stack_arg_under_construction = 0;
          /* Make a new map for the new argument list.  */
          stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
          bzero (stack_usage_map, highest_outgoing_arg_in_use);
          highest_outgoing_arg_in_use = 0;
        }
      allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
    }
  /* If argument evaluation might modify the stack pointer, copy the
     address of the argument list to a register.  */
  for (i = 0; i < num_actuals; i++)
    if (args[i].pass_on_stack)
      {
        argblock = copy_addr_to_reg (argblock);
        break;
      }
#endif

  /* If we preallocated stack space, compute the address of each argument.
     We need not ensure it is a valid memory address here; it will be
     validized when it is used.  */
  if (argblock)
    {
      rtx arg_reg = argblock;
      int arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
          rtx addr;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack && args[i].reg != 0)
            continue;

          if (GET_CODE (offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx (PLUS, Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack = gen_rtx (MEM, args[i].mode, addr);
          MEM_IN_STRUCT_P (args[i].stack)
            = (TREE_CODE (TREE_TYPE (args[i].tree_value)) == RECORD_TYPE
               || TREE_CODE (TREE_TYPE (args[i].tree_value)) == UNION_TYPE
               || TREE_CODE (TREE_TYPE (args[i].tree_value)) == QUAL_UNION_TYPE
               || TREE_CODE (TREE_TYPE (args[i].tree_value)) == ARRAY_TYPE);

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx (PLUS, Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack_slot = gen_rtx (MEM, args[i].mode, addr);
        }
    }

#ifdef PUSH_ARGS_REVERSED
#ifdef STACK_BOUNDARY
  /* If we push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));
#endif
#endif

  /* Don't try to defer pops if preallocating, not even from the first arg,
     since ARGBLOCK probably refers to the SP.  */
  if (argblock)
    NO_DEFER_POP;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (! TREE_USED (fndecl))
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      free_temp_slots ();  /* FUNEXP can't be BLKmode */
      emit_queue ();
    }

  /* Figure out the register where the value, if any, will come back.  */
  valreg = 0;
  if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
      && ! structure_value_addr)
    {
      if (pcc_struct_value)
        valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
                                      fndecl);
      else
        valreg = hard_function_value (TREE_TYPE (exp), fndecl);
    }

  /* Precompute all register parameters.  It isn't safe to compute anything
     once we have started filling any specific hard regs.  */
  reg_parm_seen = 0;
  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
                                         VOIDmode, 0);
            preserve_temp_slots (args[i].value);
            free_temp_slots ();

            /* ANSI doesn't require a sequence point here,
               but PCC has one, so this will avoid some problems.  */
            emit_queue ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (GET_MODE (args[i].value) != VOIDmode
            && GET_MODE (args[i].value) != args[i].mode)
          args[i].value = convert_to_mode (args[i].mode, args[i].value,
                                           args[i].unsignedp);
      }

#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.

     Here we compute the boundary of the area that needs to be saved, if any.  */

#ifdef ARGS_GROW_DOWNWARD
  for (i = 0; i < reg_parm_stack_space + 1; i++)
#else
  for (i = 0; i < reg_parm_stack_space; i++)
#endif
    {
      if (i >= highest_outgoing_arg_in_use
          || stack_usage_map[i] == 0)
        continue;

      if (low_to_save == -1)
        low_to_save = i;

      high_to_save = i;
    }

  if (low_to_save >= 0)
    {
      int num_to_save = high_to_save - low_to_save + 1;
      enum machine_mode save_mode
        = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
      rtx stack_area;

      /* If we don't have the required alignment, must do this in BLKmode.  */
      if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
                               BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
        save_mode = BLKmode;

      stack_area = gen_rtx (MEM, save_mode,
                            memory_address (save_mode,
#ifdef ARGS_GROW_DOWNWARD
                                            plus_constant (argblock,
                                                           - high_to_save)
#else
                                            plus_constant (argblock,
                                                           low_to_save)
#endif
                                            ));
      if (save_mode == BLKmode)
        {
          save_area = assign_stack_temp (BLKmode, num_to_save, 1);
          emit_block_move (validize_mem (save_area), stack_area,
                           GEN_INT (num_to_save),
                           PARM_BOUNDARY / BITS_PER_UNIT);
        }
      else
        {
          save_area = gen_reg_rtx (save_mode);
          emit_move_insn (save_area, stack_area);
        }
    }
#endif

1611 /* Now store (and compute if necessary) all non-register parms.
1612 These come before register parms, since they can require block-moves,
1613 which could clobber the registers used for register parms.
1614 Parms which have partial registers are not stored here,
1615 but we do preallocate space here if they want that. */
1616
1617 for (i = 0; i < num_actuals; i++)
1618 if (args[i].reg == 0 || args[i].pass_on_stack)
1619 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1620 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c 1621
4ab56118
RK
1622#ifdef STRICT_ALIGNMENT
1623 /* If we have a parm that is passed in registers but not in memory
1624 and whose alignment does not permit a direct copy into registers,
1625 make a group of pseudos that correspond to each register that we
1626 will later fill. */
1627
1628 for (i = 0; i < num_actuals; i++)
1629 if (args[i].reg != 0 && ! args[i].pass_on_stack
1630 && args[i].mode == BLKmode
1631 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1632 < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1633 {
1634 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
8498efd0 1635 int big_endian_correction = 0;
4ab56118
RK
1636
1637 args[i].n_aligned_regs
1638 = args[i].partial ? args[i].partial
1639 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
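	/* E.g. (values invented for this note): a 6-byte structure with no
	   partial registers and 4-byte words needs (6 + 3) / 4 = 2 pseudos.  */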
1640
1641 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1642 * args[i].n_aligned_regs);
1643
8498efd0
JW
 1644	 /* Structures smaller than a word are aligned to the least significant
1645 byte (to the right). On a BYTES_BIG_ENDIAN machine, this means we
1646 must skip the empty high order bytes when calculating the bit
1647 offset. */
1648 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1649 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
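	/* Worked example (values invented for this note): with BITS_PER_WORD
	   == 32 and a 3-byte structure, big_endian_correction = 32 - 3*8 = 8,
	   so the bit-field stores below skip the 8 unused high-order bits.  */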
1650
4ab56118
RK
1651 for (j = 0; j < args[i].n_aligned_regs; j++)
1652 {
1653 rtx reg = gen_reg_rtx (word_mode);
1654 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1655 int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1656 int bitpos;
1657
1658 args[i].aligned_regs[j] = reg;
1659
1660 /* Clobber REG and move each partword into it. Ensure we don't
1661 go past the end of the structure. Note that the loop below
1662 works because we've already verified that padding
1663 and endianness are compatible. */
1664
1665 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
1666
1667 for (bitpos = 0;
7a03f4b4 1668 bitpos < BITS_PER_WORD && bytes > 0;
4ab56118
RK
1669 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
1670 {
8498efd0 1671 int xbitpos = bitpos + big_endian_correction;
4ab56118
RK
1672
1673 store_bit_field (reg, bitsize, xbitpos, word_mode,
8498efd0 1674 extract_bit_field (word, bitsize, bitpos, 1,
4ab56118
RK
1675 NULL_RTX, word_mode,
1676 word_mode,
1677 bitsize / BITS_PER_UNIT,
1678 BITS_PER_WORD),
1679 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
1680 }
1681 }
1682 }
1683#endif
1684
51bbfa0c
RS
1685 /* Now store any partially-in-registers parm.
1686 This is the last place a block-move can happen. */
1687 if (reg_parm_seen)
1688 for (i = 0; i < num_actuals; i++)
1689 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1690 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1691 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c
RS
1692
1693#ifndef PUSH_ARGS_REVERSED
1694#ifdef STACK_BOUNDARY
1695 /* If we pushed args in forward order, perform stack alignment
1696 after pushing the last arg. */
1697 if (argblock == 0)
e5d70561
RK
1698 anti_adjust_stack (GEN_INT (args_size.constant
1699 - original_args_size.constant));
51bbfa0c
RS
1700#endif
1701#endif
1702
756e0e12
RS
1703 /* If register arguments require space on the stack and stack space
1704 was not preallocated, allocate stack space here for arguments
1705 passed in registers. */
6e716e89 1706#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
756e0e12 1707 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 1708 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
1709#endif
1710
51bbfa0c
RS
1711 /* Pass the function the address in which to return a structure value. */
1712 if (structure_value_addr && ! structure_value_addr_parm)
1713 {
1714 emit_move_insn (struct_value_rtx,
1715 force_reg (Pmode,
e5d70561
RK
1716 force_operand (structure_value_addr,
1717 NULL_RTX)));
51bbfa0c
RS
1718 if (GET_CODE (struct_value_rtx) == REG)
1719 {
1720 push_to_sequence (use_insns);
1721 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
1722 use_insns = get_insns ();
1723 end_sequence ();
1724 }
1725 }
1726
1727 /* Now do the register loads required for any wholly-register parms or any
1728 parms which are passed both on the stack and in a register. Their
1729 expressions were already evaluated.
1730
1731 Mark all register-parms as living through the call, putting these USE
1732 insns in a list headed by USE_INSNS. */
1733
1734 for (i = 0; i < num_actuals; i++)
1735 {
1736 rtx list = args[i].reg;
1737 int partial = args[i].partial;
1738
1739 while (list)
1740 {
1741 rtx reg;
1742 int nregs;
1743
1744 /* Process each register that needs to get this arg. */
1745 if (GET_CODE (list) == EXPR_LIST)
1746 reg = XEXP (list, 0), list = XEXP (list, 1);
1747 else
1748 reg = list, list = 0;
1749
 1750	  /* Set to non-zero if we must move a word at a time, even if just one
 1751	     word (e.g., partial == 1 && mode == DFmode).  Set to zero if
1752 we just use a normal move insn. */
1753 nregs = (partial ? partial
1754 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1755 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1756 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1757 : 0));
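	  /* For instance (values invented for this note): a 10-byte BLKmode
	     argument with 4-byte words gives nregs = (10 + 3) / 4 = 3, so
	     three word-sized moves are done by move_block_to_reg below.  */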
1758
1759 /* If simple case, just do move. If normal partial, store_one_arg
1760 has already loaded the register for us. In all other cases,
1761 load the register(s) from memory. */
1762
1763 if (nregs == 0)
1764 emit_move_insn (reg, args[i].value);
4ab56118
RK
1765
1766#ifdef STRICT_ALIGNMENT
1767 /* If we have pre-computed the values to put in the registers in
1768 the case of non-aligned structures, copy them in now. */
1769
1770 else if (args[i].n_aligned_regs != 0)
1771 for (j = 0; j < args[i].n_aligned_regs; j++)
1772 emit_move_insn (gen_rtx (REG, word_mode, REGNO (reg) + j),
1773 args[i].aligned_regs[j]);
1774#endif
1775
51bbfa0c
RS
1776 else if (args[i].partial == 0 || args[i].pass_on_stack)
1777 move_block_to_reg (REGNO (reg),
1778 validize_mem (args[i].value), nregs,
1efe6448 1779 args[i].mode);
51bbfa0c
RS
1780
1781 push_to_sequence (use_insns);
1782 if (nregs == 0)
1783 emit_insn (gen_rtx (USE, VOIDmode, reg));
1784 else
1785 use_regs (REGNO (reg), nregs);
1786 use_insns = get_insns ();
1787 end_sequence ();
1788
1789 /* PARTIAL referred only to the first register, so clear it for the
1790 next time. */
1791 partial = 0;
1792 }
1793 }
1794
1795 /* Perform postincrements before actually calling the function. */
1796 emit_queue ();
1797
1798 /* All arguments and registers used for the call must be set up by now! */
1799
1800 funexp = prepare_call_address (funexp, fndecl, &use_insns);
1801
1802 /* Generate the actual call instruction. */
1803 emit_call_1 (funexp, funtype, args_size.constant, struct_value_size,
1804 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
1805 valreg, old_inhibit_defer_pop, use_insns, is_const);
1806
1807 /* If call is cse'able, make appropriate pair of reg-notes around it.
1808 Test valreg so we don't crash; may safely ignore `const'
1809 if return type is void. */
1810 if (is_const && valreg != 0)
1811 {
1812 rtx note = 0;
1813 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1814 rtx insns;
1815
1816 /* Construct an "equal form" for the value which mentions all the
1817 arguments in order as well as the function name. */
1818#ifdef PUSH_ARGS_REVERSED
1819 for (i = 0; i < num_actuals; i++)
1820 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1821#else
1822 for (i = num_actuals - 1; i >= 0; i--)
1823 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1824#endif
1825 note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note);
1826
1827 insns = get_insns ();
1828 end_sequence ();
1829
1830 emit_libcall_block (insns, temp, valreg, note);
1831
1832 valreg = temp;
1833 }
1834
1835 /* For calls to `setjmp', etc., inform flow.c it should complain
1836 if nonvolatile values are live. */
1837
1838 if (returns_twice)
1839 {
1840 emit_note (name, NOTE_INSN_SETJMP);
1841 current_function_calls_setjmp = 1;
1842 }
1843
1844 if (is_longjmp)
1845 current_function_calls_longjmp = 1;
1846
1847 /* Notice functions that cannot return.
1848 If optimizing, insns emitted below will be dead.
1849 If not optimizing, they will exist, which is useful
1850 if the user uses the `return' command in the debugger. */
1851
1852 if (is_volatile || is_longjmp)
1853 emit_barrier ();
1854
51bbfa0c
RS
1855 /* If value type not void, return an rtx for the value. */
1856
1857 /* If there are cleanups to be called, don't use a hard reg as target. */
1858 if (cleanups_this_call != old_cleanups
1859 && target && REG_P (target)
1860 && REGNO (target) < FIRST_PSEUDO_REGISTER)
1861 target = 0;
1862
1863 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
1864 || ignore)
1865 {
1866 target = const0_rtx;
1867 }
1868 else if (structure_value_addr)
1869 {
1870 if (target == 0 || GET_CODE (target) != MEM)
29008b51
JW
1871 {
1872 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1873 memory_address (TYPE_MODE (TREE_TYPE (exp)),
1874 structure_value_addr));
1875 MEM_IN_STRUCT_P (target)
1876 = (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
1877 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
c1b98a95
RK
1878 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
1879 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE);
29008b51 1880 }
51bbfa0c
RS
1881 }
1882 else if (pcc_struct_value)
1883 {
1884 if (target == 0)
29008b51 1885 {
30082223
RS
 1886	   /* We used to leave the value in the location that it is
1887 returned in, but that causes problems if it is used more
1888 than once in one expression. Rather than trying to track
1889 when a copy is required, we always copy when TARGET is
1890 not specified. This calling sequence is only used on
1891 a few machines and TARGET is usually nonzero. */
1892 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
6d8b61b9
RS
1893 {
1894 target = assign_stack_temp (BLKmode,
1895 int_size_in_bytes (TREE_TYPE (exp)),
1896 0);
1897
1898 /* Save this temp slot around the pop below. */
1899 preserve_temp_slots (target);
1900 }
30082223
RS
1901 else
1902 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
29008b51 1903 }
30082223
RS
1904
1905 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
51bbfa0c
RS
1906 emit_move_insn (target, gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1907 copy_to_reg (valreg)));
1908 else
1909 emit_block_move (target, gen_rtx (MEM, BLKmode, copy_to_reg (valreg)),
1910 expr_size (exp),
1911 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
1912 }
84b55618
RK
1913 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
1914 && GET_MODE (target) == GET_MODE (valreg))
51bbfa0c
RS
1915 /* TARGET and VALREG cannot be equal at this point because the latter
1916 would not have REG_FUNCTION_VALUE_P true, while the former would if
1917 it were referring to the same register.
1918
1919 If they refer to the same register, this move will be a no-op, except
1920 when function inlining is being done. */
1921 emit_move_insn (target, valreg);
1922 else
1923 target = copy_to_reg (valreg);
1924
84b55618 1925#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
1926 /* If we promoted this return value, make the proper SUBREG. TARGET
1927 might be const0_rtx here, so be careful. */
1928 if (GET_CODE (target) == REG
1929 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 1930 {
5d2ac65e 1931 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
84b55618
RK
1932 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
1933
1934 if (TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE
1935 || TREE_CODE (TREE_TYPE (exp)) == ENUMERAL_TYPE
1936 || TREE_CODE (TREE_TYPE (exp)) == BOOLEAN_TYPE
1937 || TREE_CODE (TREE_TYPE (exp)) == CHAR_TYPE
1938 || TREE_CODE (TREE_TYPE (exp)) == REAL_TYPE
1939 || TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE
1940 || TREE_CODE (TREE_TYPE (exp)) == OFFSET_TYPE)
1941 {
1942 PROMOTE_MODE (mode, unsignedp, TREE_TYPE (exp));
1943 }
1944
5d2ac65e
RK
1945 /* If we didn't promote as expected, something is wrong. */
1946 if (mode != GET_MODE (target))
1947 abort ();
1948
84b55618
RK
1949 target = gen_rtx (SUBREG, TYPE_MODE (TREE_TYPE (exp)), target, 0);
1950 SUBREG_PROMOTED_VAR_P (target) = 1;
1951 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
1952 }
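      /* E.g. (hypothetical target): if the call returns a 16-bit integer that
	 PROMOTE_MODE widens to SImode, VALREG and TARGET arrive here in SImode
	 and the SUBREG built above presents the value as HImode again while
	 recording the promotion.  */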
1953#endif
1954
51bbfa0c
RS
1955 /* Perform all cleanups needed for the arguments of this call
1956 (i.e. destructors in C++). */
1957 expand_cleanups_to (old_cleanups);
1958
2f4aa534
RS
1959 /* If size of args is variable or this was a constructor call for a stack
1960 argument, restore saved stack-pointer value. */
51bbfa0c
RS
1961
1962 if (old_stack_level)
1963 {
e5d70561 1964 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 1965 pending_stack_adjust = old_pending_adj;
d64f5a78 1966#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
1967 stack_arg_under_construction = old_stack_arg_under_construction;
1968 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
1969 stack_usage_map = initial_stack_usage_map;
d64f5a78 1970#endif
51bbfa0c 1971 }
51bbfa0c
RS
1972#ifdef ACCUMULATE_OUTGOING_ARGS
1973 else
1974 {
1975#ifdef REG_PARM_STACK_SPACE
1976 if (save_area)
1977 {
1978 enum machine_mode save_mode = GET_MODE (save_area);
1979 rtx stack_area
1980 = gen_rtx (MEM, save_mode,
1981 memory_address (save_mode,
b94301c2
RS
1982#ifdef ARGS_GROW_DOWNWARD
1983 plus_constant (argblock, - high_to_save)
1984#else
1985 plus_constant (argblock, low_to_save)
1986#endif
1987 ));
51bbfa0c
RS
1988
1989 if (save_mode != BLKmode)
1990 emit_move_insn (stack_area, save_area);
1991 else
1992 emit_block_move (stack_area, validize_mem (save_area),
e5d70561
RK
1993 GEN_INT (high_to_save - low_to_save + 1),
1994 PARM_BOUNDARY / BITS_PER_UNIT);
51bbfa0c
RS
1995 }
1996#endif
1997
1998 /* If we saved any argument areas, restore them. */
1999 for (i = 0; i < num_actuals; i++)
2000 if (args[i].save_area)
2001 {
2002 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2003 rtx stack_area
2004 = gen_rtx (MEM, save_mode,
2005 memory_address (save_mode,
2006 XEXP (args[i].stack_slot, 0)));
2007
2008 if (save_mode != BLKmode)
2009 emit_move_insn (stack_area, args[i].save_area);
2010 else
2011 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 2012 GEN_INT (args[i].size.constant),
51bbfa0c
RS
2013 PARM_BOUNDARY / BITS_PER_UNIT);
2014 }
2015
2016 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2017 stack_usage_map = initial_stack_usage_map;
2018 }
2019#endif
2020
59257ff7
RK
2021 /* If this was alloca, record the new stack level for nonlocal gotos.
2022 Check for the handler slots since we might not have a save area
2023 for non-local gotos. */
2024
2025 if (may_be_alloca && nonlocal_goto_handler_slot != 0)
e5d70561 2026 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
2027
2028 pop_temp_slots ();
2029
2030 return target;
2031}
2032\f
322e3e34
RK
2033/* Output a library call to function FUN (a SYMBOL_REF rtx)
2034 (emitting the queue unless NO_QUEUE is nonzero),
2035 for a value of mode OUTMODE,
2036 with NARGS different arguments, passed as alternating rtx values
2037 and machine_modes to convert them to.
2038 The rtx values should have been passed through protect_from_queue already.
2039
2040 NO_QUEUE will be true if and only if the library call is a `const' call
2041 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2042 to the variable is_const in expand_call.
2043
2044 NO_QUEUE must be true for const calls, because if it isn't, then
2045 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2046 and will be lost if the libcall sequence is optimized away.
2047
2048 NO_QUEUE must be false for non-const calls, because if it isn't, the
2049 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2050 optimized. For instance, the instruction scheduler may incorrectly
2051 move memory references across the non-const call. */
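/* A hypothetical usage sketch (function name and operand rtxes invented for
   illustration; real callers are elsewhere in the compiler):

     emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__example"), 1, SImode,
			2, op0, SImode, op1, SImode);

   i.e. FUN, NO_QUEUE, OUTMODE, NARGS, then NARGS (value, mode) pairs.  */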
2052
2053void
2054emit_library_call (va_alist)
2055 va_dcl
2056{
2057 va_list p;
2058 /* Total size in bytes of all the stack-parms scanned so far. */
2059 struct args_size args_size;
2060 /* Size of arguments before any adjustments (such as rounding). */
2061 struct args_size original_args_size;
2062 register int argnum;
2063 enum machine_mode outmode;
2064 int nargs;
2065 rtx fun;
2066 rtx orgfun;
2067 int inc;
2068 int count;
2069 rtx argblock = 0;
2070 CUMULATIVE_ARGS args_so_far;
2071 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2072 struct args_size offset; struct args_size size; };
2073 struct arg *argvec;
2074 int old_inhibit_defer_pop = inhibit_defer_pop;
2075 int no_queue = 0;
2076 rtx use_insns;
efd65a8b
RS
 2077	  /* Library calls are never indirect calls.  */
2078 int current_call_is_indirect = 0;
322e3e34
RK
2079
2080 va_start (p);
2081 orgfun = fun = va_arg (p, rtx);
2082 no_queue = va_arg (p, int);
2083 outmode = va_arg (p, enum machine_mode);
2084 nargs = va_arg (p, int);
2085
2086 /* Copy all the libcall-arguments out of the varargs data
2087 and into a vector ARGVEC.
2088
2089 Compute how to pass each argument. We only support a very small subset
2090 of the full argument passing conventions to limit complexity here since
2091 library functions shouldn't have many args. */
2092
2093 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2094
2095 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2096
2097 args_size.constant = 0;
2098 args_size.var = 0;
2099
2100 for (count = 0; count < nargs; count++)
2101 {
2102 rtx val = va_arg (p, rtx);
2103 enum machine_mode mode = va_arg (p, enum machine_mode);
2104
2105 /* We cannot convert the arg value to the mode the library wants here;
2106 must do it earlier where we know the signedness of the arg. */
2107 if (mode == BLKmode
2108 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2109 abort ();
2110
2111 /* On some machines, there's no way to pass a float to a library fcn.
2112 Pass it as a double instead. */
2113#ifdef LIBGCC_NEEDS_DOUBLE
2114 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2115 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2116#endif
2117
2118 /* There's no need to call protect_from_queue, because
2119 either emit_move_insn or emit_push_insn will do that. */
2120
2121 /* Make sure it is a reasonable operand for a move or push insn. */
2122 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2123 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2124 val = force_operand (val, NULL_RTX);
2125
2126 argvec[count].value = val;
2127 argvec[count].mode = mode;
2128
2129#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2130 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2131 abort ();
2132#endif
2133
2134 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2135 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2136 abort ();
2137#ifdef FUNCTION_ARG_PARTIAL_NREGS
2138 argvec[count].partial
2139 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2140#else
2141 argvec[count].partial = 0;
2142#endif
2143
2144 locate_and_pad_parm (mode, NULL_TREE,
2145 argvec[count].reg && argvec[count].partial == 0,
2146 NULL_TREE, &args_size, &argvec[count].offset,
2147 &argvec[count].size);
2148
2149 if (argvec[count].size.var)
2150 abort ();
2151
2152#ifndef REG_PARM_STACK_SPACE
2153 if (argvec[count].partial)
2154 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2155#endif
2156
2157 if (argvec[count].reg == 0 || argvec[count].partial != 0
2158#ifdef REG_PARM_STACK_SPACE
2159 || 1
2160#endif
2161 )
2162 args_size.constant += argvec[count].size.constant;
2163
2164#ifdef ACCUMULATE_OUTGOING_ARGS
2165 /* If this arg is actually passed on the stack, it might be
2166 clobbering something we already put there (this library call might
2167 be inside the evaluation of an argument to a function whose call
2168 requires the stack). This will only occur when the library call
2169 has sufficient args to run out of argument registers. Abort in
2170 this case; if this ever occurs, code must be added to save and
2171 restore the arg slot. */
2172
2173 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2174 abort ();
2175#endif
2176
2177 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2178 }
2179 va_end (p);
2180
2181 /* If this machine requires an external definition for library
2182 functions, write one out. */
2183 assemble_external_libcall (fun);
2184
2185 original_args_size = args_size;
2186#ifdef STACK_BOUNDARY
2187 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2188 / STACK_BYTES) * STACK_BYTES);
2189#endif
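  /* E.g. (values invented for this note): with STACK_BYTES == 8, a 20-byte
     argument block is rounded up to ((20 + 7) / 8) * 8 == 24 bytes; the
     4-byte difference is what the alignment adjustments below push or pop.  */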
2190
2191#ifdef REG_PARM_STACK_SPACE
2192 args_size.constant = MAX (args_size.constant,
2193 REG_PARM_STACK_SPACE (NULL_TREE));
2194#ifndef OUTGOING_REG_PARM_STACK_SPACE
2195 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2196#endif
2197#endif
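  /* Illustrative arithmetic: if REG_PARM_STACK_SPACE (NULL_TREE) is 16 and
     only 12 bytes of stack arguments were counted, the MAX raises
     args_size.constant to 16; without OUTGOING_REG_PARM_STACK_SPACE it is
     then reduced by 16 again.  */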
2198
2199#ifdef ACCUMULATE_OUTGOING_ARGS
2200 if (args_size.constant > current_function_outgoing_args_size)
2201 current_function_outgoing_args_size = args_size.constant;
2202 args_size.constant = 0;
2203#endif
2204
2205#ifndef PUSH_ROUNDING
2206 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2207#endif
2208
2209#ifdef PUSH_ARGS_REVERSED
2210#ifdef STACK_BOUNDARY
2211 /* If we push args individually in reverse order, perform stack alignment
2212 before the first push (the last arg). */
2213 if (argblock == 0)
2214 anti_adjust_stack (GEN_INT (args_size.constant
2215 - original_args_size.constant));
2216#endif
2217#endif
2218
2219#ifdef PUSH_ARGS_REVERSED
2220 inc = -1;
2221 argnum = nargs - 1;
2222#else
2223 inc = 1;
2224 argnum = 0;
2225#endif
2226
2227 /* Push the args that need to be pushed. */
2228
2229 for (count = 0; count < nargs; count++, argnum += inc)
2230 {
2231 register enum machine_mode mode = argvec[argnum].mode;
2232 register rtx val = argvec[argnum].value;
2233 rtx reg = argvec[argnum].reg;
2234 int partial = argvec[argnum].partial;
2235
2236 if (! (reg != 0 && partial == 0))
2237 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2238 argblock, GEN_INT (argvec[count].offset.constant));
2239 NO_DEFER_POP;
2240 }
2241
2242#ifndef PUSH_ARGS_REVERSED
2243#ifdef STACK_BOUNDARY
2244 /* If we pushed args in forward order, perform stack alignment
2245 after pushing the last arg. */
2246 if (argblock == 0)
2247 anti_adjust_stack (GEN_INT (args_size.constant
2248 - original_args_size.constant));
2249#endif
2250#endif
2251
2252#ifdef PUSH_ARGS_REVERSED
2253 argnum = nargs - 1;
2254#else
2255 argnum = 0;
2256#endif
2257
2258 /* Now load any reg parms into their regs. */
2259
2260 for (count = 0; count < nargs; count++, argnum += inc)
2261 {
2262 register enum machine_mode mode = argvec[argnum].mode;
2263 register rtx val = argvec[argnum].value;
2264 rtx reg = argvec[argnum].reg;
2265 int partial = argvec[argnum].partial;
2266
2267 if (reg != 0 && partial == 0)
2268 emit_move_insn (reg, val);
2269 NO_DEFER_POP;
2270 }
2271
2272 /* For version 1.37, try deleting this entirely. */
2273 if (! no_queue)
2274 emit_queue ();
2275
2276 /* Any regs containing parms remain in use through the call. */
2277 start_sequence ();
2278 for (count = 0; count < nargs; count++)
2279 if (argvec[count].reg != 0)
2280 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2281
2282 use_insns = get_insns ();
2283 end_sequence ();
2284
2285 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2286
2287 /* Don't allow popping to be deferred, since then
2288 cse'ing of library calls could delete a call and leave the pop. */
2289 NO_DEFER_POP;
2290
2291 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2292 will set inhibit_defer_pop to that value. */
2293
2294 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2295 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2296 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2297 old_inhibit_defer_pop + 1, use_insns, no_queue);
2298
2299 /* Now restore inhibit_defer_pop to its actual original value. */
2300 OK_DEFER_POP;
2301}
2302\f
2303/* Like emit_library_call except that an extra argument, VALUE,
2304 comes second and says where to store the result.
fac0ad80
RS
2305 (If VALUE is zero, this function chooses a convenient way
 2306	 to return the value.)
322e3e34 2307
fac0ad80
RS
2308 This function returns an rtx for where the value is to be found.
2309 If VALUE is nonzero, VALUE is returned. */
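/* A hypothetical usage sketch (operand rtxes invented for illustration):

     rtx res = emit_library_call_value (gen_rtx (SYMBOL_REF, Pmode, "__example"),
					NULL_RTX, 1, DFmode,
					2, op0, DFmode, op1, DFmode);

   i.e. FUN, VALUE, NO_QUEUE, OUTMODE, NARGS, then NARGS (value, mode) pairs;
   passing NULL_RTX for VALUE lets this function choose where the result
   goes.  */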
2310
2311rtx
322e3e34
RK
2312emit_library_call_value (va_alist)
2313 va_dcl
2314{
2315 va_list p;
2316 /* Total size in bytes of all the stack-parms scanned so far. */
2317 struct args_size args_size;
2318 /* Size of arguments before any adjustments (such as rounding). */
2319 struct args_size original_args_size;
2320 register int argnum;
2321 enum machine_mode outmode;
2322 int nargs;
2323 rtx fun;
2324 rtx orgfun;
2325 int inc;
2326 int count;
2327 rtx argblock = 0;
2328 CUMULATIVE_ARGS args_so_far;
2329 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2330 struct args_size offset; struct args_size size; };
2331 struct arg *argvec;
2332 int old_inhibit_defer_pop = inhibit_defer_pop;
2333 int no_queue = 0;
2334 rtx use_insns;
2335 rtx value;
2336 rtx mem_value = 0;
fac0ad80 2337 int pcc_struct_value = 0;
efd65a8b
RS
 2338	  /* Library calls are never indirect calls.  */
2339 int current_call_is_indirect = 0;
322e3e34
RK
2340
2341 va_start (p);
2342 orgfun = fun = va_arg (p, rtx);
2343 value = va_arg (p, rtx);
2344 no_queue = va_arg (p, int);
2345 outmode = va_arg (p, enum machine_mode);
2346 nargs = va_arg (p, int);
2347
2348 /* If this kind of value comes back in memory,
2349 decide where in memory it should come back. */
fac0ad80 2350 if (aggregate_value_p (type_for_mode (outmode, 0)))
322e3e34 2351 {
fac0ad80
RS
2352#ifdef PCC_STATIC_STRUCT_RETURN
2353 rtx pointer_reg
2354 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2355 0);
2356 mem_value = gen_rtx (MEM, outmode, pointer_reg);
2357 pcc_struct_value = 1;
2358 if (value == 0)
2359 value = gen_reg_rtx (outmode);
2360#else /* not PCC_STATIC_STRUCT_RETURN */
2361 if (value != 0 && GET_CODE (value) == MEM)
322e3e34
RK
2362 mem_value = value;
2363 else
2364 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
fac0ad80 2365#endif
322e3e34
RK
2366 }
2367
2368 /* ??? Unfinished: must pass the memory address as an argument. */
2369
2370 /* Copy all the libcall-arguments out of the varargs data
2371 and into a vector ARGVEC.
2372
2373 Compute how to pass each argument. We only support a very small subset
2374 of the full argument passing conventions to limit complexity here since
2375 library functions shouldn't have many args. */
2376
2377 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2378
2379 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2380
2381 args_size.constant = 0;
2382 args_size.var = 0;
2383
2384 count = 0;
2385
2386 /* If there's a structure value address to be passed,
2387 either pass it in the special place, or pass it as an extra argument. */
fac0ad80 2388 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
322e3e34
RK
2389 {
2390 rtx addr = XEXP (mem_value, 0);
fac0ad80 2391 nargs++;
322e3e34 2392
fac0ad80
RS
2393 /* Make sure it is a reasonable operand for a move or push insn. */
2394 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2395 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2396 addr = force_operand (addr, NULL_RTX);
322e3e34 2397
fac0ad80
RS
2398 argvec[count].value = addr;
2399 argvec[count].mode = outmode;
2400 argvec[count].partial = 0;
322e3e34 2401
fac0ad80 2402 argvec[count].reg = FUNCTION_ARG (args_so_far, outmode, NULL_TREE, 1);
322e3e34 2403#ifdef FUNCTION_ARG_PARTIAL_NREGS
fac0ad80
RS
2404 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, outmode, NULL_TREE, 1))
2405 abort ();
322e3e34
RK
2406#endif
2407
fac0ad80
RS
2408 locate_and_pad_parm (outmode, NULL_TREE,
2409 argvec[count].reg && argvec[count].partial == 0,
2410 NULL_TREE, &args_size, &argvec[count].offset,
2411 &argvec[count].size);
322e3e34
RK
2412
2413
fac0ad80 2414 if (argvec[count].reg == 0 || argvec[count].partial != 0
322e3e34 2415#ifdef REG_PARM_STACK_SPACE
fac0ad80 2416 || 1
322e3e34 2417#endif
fac0ad80
RS
2418 )
2419 args_size.constant += argvec[count].size.constant;
322e3e34 2420
fac0ad80
RS
2421 FUNCTION_ARG_ADVANCE (args_so_far, outmode, (tree)0, 1);
2422
2423 count++;
322e3e34
RK
2424 }
2425
2426 for (; count < nargs; count++)
2427 {
2428 rtx val = va_arg (p, rtx);
2429 enum machine_mode mode = va_arg (p, enum machine_mode);
2430
2431 /* We cannot convert the arg value to the mode the library wants here;
2432 must do it earlier where we know the signedness of the arg. */
2433 if (mode == BLKmode
2434 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2435 abort ();
2436
2437 /* On some machines, there's no way to pass a float to a library fcn.
2438 Pass it as a double instead. */
2439#ifdef LIBGCC_NEEDS_DOUBLE
2440 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2441 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2442#endif
2443
2444 /* There's no need to call protect_from_queue, because
2445 either emit_move_insn or emit_push_insn will do that. */
2446
2447 /* Make sure it is a reasonable operand for a move or push insn. */
2448 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2449 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2450 val = force_operand (val, NULL_RTX);
2451
2452 argvec[count].value = val;
2453 argvec[count].mode = mode;
2454
2455#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2456 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2457 abort ();
2458#endif
2459
2460 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2461 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2462 abort ();
2463#ifdef FUNCTION_ARG_PARTIAL_NREGS
2464 argvec[count].partial
2465 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2466#else
2467 argvec[count].partial = 0;
2468#endif
2469
2470 locate_and_pad_parm (mode, NULL_TREE,
2471 argvec[count].reg && argvec[count].partial == 0,
2472 NULL_TREE, &args_size, &argvec[count].offset,
2473 &argvec[count].size);
2474
2475 if (argvec[count].size.var)
2476 abort ();
2477
2478#ifndef REG_PARM_STACK_SPACE
2479 if (argvec[count].partial)
2480 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2481#endif
2482
2483 if (argvec[count].reg == 0 || argvec[count].partial != 0
2484#ifdef REG_PARM_STACK_SPACE
2485 || 1
2486#endif
2487 )
2488 args_size.constant += argvec[count].size.constant;
2489
2490#ifdef ACCUMULATE_OUTGOING_ARGS
2491 /* If this arg is actually passed on the stack, it might be
2492 clobbering something we already put there (this library call might
2493 be inside the evaluation of an argument to a function whose call
2494 requires the stack). This will only occur when the library call
2495 has sufficient args to run out of argument registers. Abort in
2496 this case; if this ever occurs, code must be added to save and
2497 restore the arg slot. */
2498
2499 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2500 abort ();
2501#endif
2502
2503 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2504 }
2505 va_end (p);
2506
2507 /* If this machine requires an external definition for library
2508 functions, write one out. */
2509 assemble_external_libcall (fun);
2510
2511 original_args_size = args_size;
2512#ifdef STACK_BOUNDARY
2513 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2514 / STACK_BYTES) * STACK_BYTES);
2515#endif
2516
2517#ifdef REG_PARM_STACK_SPACE
2518 args_size.constant = MAX (args_size.constant,
2519 REG_PARM_STACK_SPACE (NULL_TREE));
2520#ifndef OUTGOING_REG_PARM_STACK_SPACE
2521 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2522#endif
2523#endif
2524
2525#ifdef ACCUMULATE_OUTGOING_ARGS
2526 if (args_size.constant > current_function_outgoing_args_size)
2527 current_function_outgoing_args_size = args_size.constant;
2528 args_size.constant = 0;
2529#endif
2530
2531#ifndef PUSH_ROUNDING
2532 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2533#endif
2534
2535#ifdef PUSH_ARGS_REVERSED
2536#ifdef STACK_BOUNDARY
2537 /* If we push args individually in reverse order, perform stack alignment
2538 before the first push (the last arg). */
2539 if (argblock == 0)
2540 anti_adjust_stack (GEN_INT (args_size.constant
2541 - original_args_size.constant));
2542#endif
2543#endif
2544
2545#ifdef PUSH_ARGS_REVERSED
2546 inc = -1;
2547 argnum = nargs - 1;
2548#else
2549 inc = 1;
2550 argnum = 0;
2551#endif
2552
2553 /* Push the args that need to be pushed. */
2554
2555 for (count = 0; count < nargs; count++, argnum += inc)
2556 {
2557 register enum machine_mode mode = argvec[argnum].mode;
2558 register rtx val = argvec[argnum].value;
2559 rtx reg = argvec[argnum].reg;
2560 int partial = argvec[argnum].partial;
2561
2562 if (! (reg != 0 && partial == 0))
2563 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2564 argblock, GEN_INT (argvec[count].offset.constant));
2565 NO_DEFER_POP;
2566 }
2567
2568#ifndef PUSH_ARGS_REVERSED
2569#ifdef STACK_BOUNDARY
2570 /* If we pushed args in forward order, perform stack alignment
2571 after pushing the last arg. */
2572 if (argblock == 0)
2573 anti_adjust_stack (GEN_INT (args_size.constant
2574 - original_args_size.constant));
2575#endif
2576#endif
2577
2578#ifdef PUSH_ARGS_REVERSED
2579 argnum = nargs - 1;
2580#else
2581 argnum = 0;
2582#endif
2583
2584 /* Now load any reg parms into their regs. */
2585
322e3e34
RK
2586 for (count = 0; count < nargs; count++, argnum += inc)
2587 {
2588 register enum machine_mode mode = argvec[argnum].mode;
2589 register rtx val = argvec[argnum].value;
2590 rtx reg = argvec[argnum].reg;
2591 int partial = argvec[argnum].partial;
2592
2593 if (reg != 0 && partial == 0)
2594 emit_move_insn (reg, val);
2595 NO_DEFER_POP;
2596 }
2597
2598#if 0
2599 /* For version 1.37, try deleting this entirely. */
2600 if (! no_queue)
2601 emit_queue ();
2602#endif
2603
2604 /* Any regs containing parms remain in use through the call. */
2605 start_sequence ();
2606 for (count = 0; count < nargs; count++)
2607 if (argvec[count].reg != 0)
2608 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2609
2610 use_insns = get_insns ();
2611 end_sequence ();
2612
fac0ad80
RS
2613 /* Pass the function the address in which to return a structure value. */
2614 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
2615 {
2616 emit_move_insn (struct_value_rtx,
2617 force_reg (Pmode,
2618 force_operand (XEXP (mem_value, 0),
2619 NULL_RTX)));
2620 if (GET_CODE (struct_value_rtx) == REG)
2621 {
2622 push_to_sequence (use_insns);
2623 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
2624 use_insns = get_insns ();
2625 end_sequence ();
2626 }
2627 }
2628
322e3e34
RK
2629 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2630
2631 /* Don't allow popping to be deferred, since then
2632 cse'ing of library calls could delete a call and leave the pop. */
2633 NO_DEFER_POP;
2634
2635 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2636 will set inhibit_defer_pop to that value. */
2637
2638 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2639 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
fac0ad80
RS
2640 (outmode != VOIDmode && mem_value == 0
2641 ? hard_libcall_value (outmode) : NULL_RTX),
322e3e34
RK
2642 old_inhibit_defer_pop + 1, use_insns, no_queue);
2643
2644 /* Now restore inhibit_defer_pop to its actual original value. */
2645 OK_DEFER_POP;
2646
2647 /* Copy the value to the right place. */
2648 if (outmode != VOIDmode)
2649 {
2650 if (mem_value)
2651 {
2652 if (value == 0)
fac0ad80 2653 value = mem_value;
322e3e34
RK
2654 if (value != mem_value)
2655 emit_move_insn (value, mem_value);
2656 }
2657 else if (value != 0)
2658 emit_move_insn (value, hard_libcall_value (outmode));
fac0ad80
RS
2659 else
2660 value = hard_libcall_value (outmode);
322e3e34 2661 }
fac0ad80
RS
2662
2663 return value;
322e3e34
RK
2664}
2665\f
51bbfa0c
RS
2666#if 0
2667/* Return an rtx which represents a suitable home on the stack
2668 given TYPE, the type of the argument looking for a home.
2669 This is called only for BLKmode arguments.
2670
2671 SIZE is the size needed for this target.
2672 ARGS_ADDR is the address of the bottom of the argument block for this call.
2673 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
2674 if this machine uses push insns. */
2675
2676static rtx
2677target_for_arg (type, size, args_addr, offset)
2678 tree type;
2679 rtx size;
2680 rtx args_addr;
2681 struct args_size offset;
2682{
2683 rtx target;
2684 rtx offset_rtx = ARGS_SIZE_RTX (offset);
2685
2686 /* We do not call memory_address if possible,
2687 because we want to address as close to the stack
2688 as possible. For non-variable sized arguments,
2689 this will be stack-pointer relative addressing. */
2690 if (GET_CODE (offset_rtx) == CONST_INT)
2691 target = plus_constant (args_addr, INTVAL (offset_rtx));
2692 else
2693 {
2694 /* I have no idea how to guarantee that this
2695 will work in the presence of register parameters. */
2696 target = gen_rtx (PLUS, Pmode, args_addr, offset_rtx);
2697 target = memory_address (QImode, target);
2698 }
2699
2700 return gen_rtx (MEM, BLKmode, target);
2701}
2702#endif
2703\f
2704/* Store a single argument for a function call
2705 into the register or memory area where it must be passed.
2706 *ARG describes the argument value and where to pass it.
2707
2708 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 2709 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
2710
2711 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
2712 so must be careful about how the stack is used.
2713
2714 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
 2715	 argument stack.  This is used, if ACCUMULATE_OUTGOING_ARGS is defined,
 2716	 to indicate that we need not worry about saving and restoring the stack.
2717
2718 FNDECL is the declaration of the function we are calling. */
2719
2720static void
6f90e075
JW
2721store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl,
2722 reg_parm_stack_space)
51bbfa0c
RS
2723 struct arg_data *arg;
2724 rtx argblock;
2725 int may_be_alloca;
2726 int variable_size;
2727 tree fndecl;
6f90e075 2728 int reg_parm_stack_space;
51bbfa0c
RS
2729{
2730 register tree pval = arg->tree_value;
2731 rtx reg = 0;
2732 int partial = 0;
2733 int used = 0;
2734 int i, lower_bound, upper_bound;
2735
2736 if (TREE_CODE (pval) == ERROR_MARK)
2737 return;
2738
2739#ifdef ACCUMULATE_OUTGOING_ARGS
2740 /* If this is being stored into a pre-allocated, fixed-size, stack area,
2741 save any previous data at that location. */
2742 if (argblock && ! variable_size && arg->stack)
2743 {
2744#ifdef ARGS_GROW_DOWNWARD
 2745	  /* stack_slot is negative, but we want to index stack_usage_map
 2746	     with positive values.  */
2747 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2748 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
2749 else
2750 abort ();
2751
2752 lower_bound = upper_bound - arg->size.constant;
2753#else
2754 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2755 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
2756 else
2757 lower_bound = 0;
2758
2759 upper_bound = lower_bound + arg->size.constant;
2760#endif
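      /* Illustrative example (values invented for this note): if the stack
	 slot address is (plus argblock 16) and the argument occupies 8 bytes,
	 then without ARGS_GROW_DOWNWARD lower_bound == 16 and upper_bound ==
	 24, so entries 16..23 of stack_usage_map are checked below.  */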
2761
2762 for (i = lower_bound; i < upper_bound; i++)
2763 if (stack_usage_map[i]
2764#ifdef REG_PARM_STACK_SPACE
2765 /* Don't store things in the fixed argument area at this point;
2766 it has already been saved. */
6f90e075 2767 && i > reg_parm_stack_space
51bbfa0c
RS
2768#endif
2769 )
2770 break;
2771
2772 if (i != upper_bound)
2773 {
2774 /* We need to make a save area. See what mode we can make it. */
2775 enum machine_mode save_mode
2776 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
2777 rtx stack_area
2778 = gen_rtx (MEM, save_mode,
2779 memory_address (save_mode, XEXP (arg->stack_slot, 0)));
2780
2781 if (save_mode == BLKmode)
2782 {
2783 arg->save_area = assign_stack_temp (BLKmode,
2784 arg->size.constant, 1);
2785 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 2786 GEN_INT (arg->size.constant),
51bbfa0c
RS
2787 PARM_BOUNDARY / BITS_PER_UNIT);
2788 }
2789 else
2790 {
2791 arg->save_area = gen_reg_rtx (save_mode);
2792 emit_move_insn (arg->save_area, stack_area);
2793 }
2794 }
2795 }
2796#endif
2797
2798 /* If this isn't going to be placed on both the stack and in registers,
2799 set up the register and number of words. */
2800 if (! arg->pass_on_stack)
2801 reg = arg->reg, partial = arg->partial;
2802
2803 if (reg != 0 && partial == 0)
2804 /* Being passed entirely in a register. We shouldn't be called in
2805 this case. */
2806 abort ();
2807
4ab56118
RK
2808#ifdef STRICT_ALIGNMENT
2809 /* If this arg needs special alignment, don't load the registers
2810 here. */
2811 if (arg->n_aligned_regs != 0)
2812 reg = 0;
2813#endif
2814
51bbfa0c
RS
2815 /* If this is being partially passed in a register, but multiple locations
2816 are specified, we assume that the one partially used is the one that is
2817 listed first. */
2818 if (reg && GET_CODE (reg) == EXPR_LIST)
2819 reg = XEXP (reg, 0);
2820
4ab56118 2821 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
2822 it directly into its stack slot. Otherwise, we can. */
2823 if (arg->value == 0)
d64f5a78
RS
2824 {
2825#ifdef ACCUMULATE_OUTGOING_ARGS
2826 /* stack_arg_under_construction is nonzero if a function argument is
2827 being evaluated directly into the outgoing argument list and
2828 expand_call must take special action to preserve the argument list
2829 if it is called recursively.
2830
2831 For scalar function arguments stack_usage_map is sufficient to
2832 determine which stack slots must be saved and restored. Scalar
2833 arguments in general have pass_on_stack == 0.
2834
2835 If this argument is initialized by a function which takes the
2836 address of the argument (a C++ constructor or a C function
2837 returning a BLKmode structure), then stack_usage_map is
2838 insufficient and expand_call must push the stack around the
2839 function call. Such arguments have pass_on_stack == 1.
2840
2841 Note that it is always safe to set stack_arg_under_construction,
2842 but this generates suboptimal code if set when not needed. */
2843
2844 if (arg->pass_on_stack)
2845 stack_arg_under_construction++;
2846#endif
3a08477a
RK
2847 arg->value = expand_expr (pval,
2848 (partial
2849 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
2850 ? NULL_RTX : arg->stack,
e5d70561 2851 VOIDmode, 0);
1efe6448
RK
2852
2853 /* If we are promoting object (or for any other reason) the mode
2854 doesn't agree, convert the mode. */
2855
2856 if (GET_MODE (arg->value) != VOIDmode
2857 && GET_MODE (arg->value) != arg->mode)
2858 arg->value = convert_to_mode (arg->mode, arg->value, arg->unsignedp);
2859
d64f5a78
RS
2860#ifdef ACCUMULATE_OUTGOING_ARGS
2861 if (arg->pass_on_stack)
2862 stack_arg_under_construction--;
2863#endif
2864 }
51bbfa0c
RS
2865
2866 /* Don't allow anything left on stack from computation
2867 of argument to alloca. */
2868 if (may_be_alloca)
2869 do_pending_stack_adjust ();
2870
2871 if (arg->value == arg->stack)
2872 /* If the value is already in the stack slot, we are done. */
2873 ;
1efe6448 2874 else if (arg->mode != BLKmode)
51bbfa0c
RS
2875 {
2876 register int size;
2877
2878 /* Argument is a scalar, not entirely passed in registers.
2879 (If part is passed in registers, arg->partial says how much
2880 and emit_push_insn will take care of putting it there.)
2881
2882 Push it, and if its size is less than the
2883 amount of space allocated to it,
2884 also bump stack pointer by the additional space.
2885 Note that in C the default argument promotions
2886 will prevent such mismatches. */
2887
1efe6448 2888 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
2889 /* Compute how much space the push instruction will push.
2890 On many machines, pushing a byte will advance the stack
2891 pointer by a halfword. */
2892#ifdef PUSH_ROUNDING
2893 size = PUSH_ROUNDING (size);
2894#endif
2895 used = size;
2896
2897 /* Compute how much space the argument should get:
2898 round up to a multiple of the alignment for arguments. */
1efe6448 2899 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
2900 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
2901 / (PARM_BOUNDARY / BITS_PER_UNIT))
2902 * (PARM_BOUNDARY / BITS_PER_UNIT));
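      /* Example with invented numbers: pushing a 1-byte scalar when
	 PARM_BOUNDARY is 32 bits and padding applies gives
	 used = ((1 + 3) / 4) * 4 == 4, so used - size == 3 extra bytes are
	 accounted for in the emit_push_insn call below.  */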
2903
2904 /* This isn't already where we want it on the stack, so put it there.
2905 This can either be done with push or copy insns. */
ccf5d244
RK
2906 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
2907 0, partial, reg, used - size,
2908 argblock, ARGS_SIZE_RTX (arg->offset));
51bbfa0c
RS
2909 }
2910 else
2911 {
2912 /* BLKmode, at least partly to be pushed. */
2913
2914 register int excess;
2915 rtx size_rtx;
2916
2917 /* Pushing a nonscalar.
2918 If part is passed in registers, PARTIAL says how much
2919 and emit_push_insn will take care of putting it there. */
2920
2921 /* Round its size up to a multiple
2922 of the allocation unit for arguments. */
2923
2924 if (arg->size.var != 0)
2925 {
2926 excess = 0;
2927 size_rtx = ARGS_SIZE_RTX (arg->size);
2928 }
2929 else
2930 {
51bbfa0c
RS
2931 /* PUSH_ROUNDING has no effect on us, because
2932 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 2933 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 2934 + partial * UNITS_PER_WORD);
e4f93898 2935 size_rtx = expr_size (pval);
51bbfa0c
RS
2936 }
2937
1efe6448 2938 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c
RS
2939 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
2940 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset));
2941 }
2942
2943
2944 /* Unless this is a partially-in-register argument, the argument is now
2945 in the stack.
2946
2947 ??? Note that this can change arg->value from arg->stack to
2948 arg->stack_slot and it matters when they are not the same.
2949 It isn't totally clear that this is correct in all cases. */
2950 if (partial == 0)
2951 arg->value = arg->stack_slot;
2952
2953 /* Once we have pushed something, pops can't safely
2954 be deferred during the rest of the arguments. */
2955 NO_DEFER_POP;
2956
2957 /* ANSI doesn't require a sequence point here,
2958 but PCC has one, so this will avoid some problems. */
2959 emit_queue ();
2960
2961 /* Free any temporary slots made in processing this argument. */
2962 free_temp_slots ();
2963
2964#ifdef ACCUMULATE_OUTGOING_ARGS
2965 /* Now mark the segment we just used. */
2966 if (argblock && ! variable_size && arg->stack)
2967 for (i = lower_bound; i < upper_bound; i++)
2968 stack_usage_map[i] = 1;
2969#endif
2970}