1/* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20#include "config.h"
21#include "rtl.h"
22#include "tree.h"
23#include "flags.h"
24#include "expr.h"
 25#include "gvarargs.h"
26#include "insn-flags.h"
27
28/* Decide whether a function's arguments should be processed
29 from first to last or from last to first.
30
31 They should if the stack and args grow in opposite directions, but
32 only if we have push insns. */
 33
 34#ifdef PUSH_ROUNDING
35
 36#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
37#define PUSH_ARGS_REVERSED /* If it's last to first */
38#endif
 39
40#endif
41
42/* Like STACK_BOUNDARY but in units of bytes, not bits. */
43#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
44
45/* Data structure and subroutines used within expand_call. */
46
47struct arg_data
48{
49 /* Tree node for this argument. */
50 tree tree_value;
51 /* Mode for value; TYPE_MODE unless promoted. */
52 enum machine_mode mode;
53 /* Current RTL value for argument, or 0 if it isn't precomputed. */
54 rtx value;
 55 /* Initially-computed RTL value for argument; only for const functions. */
56 rtx initial_value;
57 /* Register to pass this argument in, 0 if passed on stack, or an
58 EXPR_LIST if the arg is to be copied into multiple different
59 registers. */
60 rtx reg;
61 /* If REG was promoted from the actual mode of the argument expression,
62 indicates whether the promotion is sign- or zero-extended. */
63 int unsignedp;
64 /* Number of registers to use. 0 means put the whole arg in registers.
65 Also 0 if not passed in registers. */
66 int partial;
67 /* Non-zero if argument must be passed on stack.
68 Note that some arguments may be passed on the stack
69 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
70 pass_on_stack identifies arguments that *cannot* go in registers. */
71 int pass_on_stack;
72 /* Offset of this argument from beginning of stack-args. */
73 struct args_size offset;
74 /* Similar, but offset to the start of the stack slot. Different from
75 OFFSET if this arg pads downward. */
76 struct args_size slot_offset;
 77 /* Size of this argument on the stack, rounded up for any padding it gets;
 78 parts of the argument passed in registers do not count.
79 If REG_PARM_STACK_SPACE is defined, then register parms
80 are counted here as well. */
81 struct args_size size;
82 /* Location on the stack at which parameter should be stored. The store
83 has already been done if STACK == VALUE. */
84 rtx stack;
85 /* Location on the stack of the start of this argument slot. This can
86 differ from STACK if this arg pads downward. This location is known
87 to be aligned to FUNCTION_ARG_BOUNDARY. */
88 rtx stack_slot;
89#ifdef ACCUMULATE_OUTGOING_ARGS
90 /* Place that this stack area has been saved, if needed. */
91 rtx save_area;
92#endif
93#ifdef STRICT_ALIGNMENT
94 /* If an argument's alignment does not permit direct copying into registers,
95 copy in smaller-sized pieces into pseudos. These are stored in a
96 block pointed to by this field. The next field says how many
97 word-sized pseudos we made. */
98 rtx *aligned_regs;
99 int n_aligned_regs;
100#endif
101};
102
103#ifdef ACCUMULATE_OUTGOING_ARGS
104/* A vector of one char per byte of stack space. A byte is nonzero if
105 the corresponding stack location has been used.
106 This vector is used to prevent a function call within an argument from
107 clobbering any stack already set up. */
108static char *stack_usage_map;
109
110/* Size of STACK_USAGE_MAP. */
111static int highest_outgoing_arg_in_use;
112
113/* stack_arg_under_construction is nonzero when an argument may be
114 initialized with a constructor call (including a C function that
115 returns a BLKmode struct) and expand_call must take special action
116 to make sure the object being constructed does not overlap the
117 argument list for the constructor call. */
118int stack_arg_under_construction;
119#endif
120
121static int calls_function PROTO((tree, int));
122static void emit_call_1 PROTO((rtx, tree, int, int, rtx, rtx, int,
123 rtx, int));
124static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
125 tree, int));
126\f
127/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
128 `alloca'.
129
130 If WHICH is 0, return 1 if EXP contains a call to any function.
131 Actually, we only need return 1 if evaluating EXP would require pushing
132 arguments on the stack, but that is too difficult to compute, so we just
133 assume any function call might require the stack. */
134
135static int
136calls_function (exp, which)
137 tree exp;
138 int which;
139{
140 register int i;
141 int type = TREE_CODE_CLASS (TREE_CODE (exp));
142 int length = tree_code_length[(int) TREE_CODE (exp)];
143
144 /* Only expressions and references can contain calls. */
145
146 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
147 && type != 'b')
148 return 0;
149
150 switch (TREE_CODE (exp))
151 {
152 case CALL_EXPR:
153 if (which == 0)
154 return 1;
155 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
156 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
157 == FUNCTION_DECL)
158 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
159 && (DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
160 == BUILT_IN_ALLOCA))
161 return 1;
162
163 /* Third operand is RTL. */
164 length = 2;
165 break;
166
167 case SAVE_EXPR:
168 if (SAVE_EXPR_RTL (exp) != 0)
169 return 0;
170 break;
171
172 case BLOCK:
173 {
174 register tree local;
175
176 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
177 if (DECL_INITIAL (local) != 0
178 && calls_function (DECL_INITIAL (local), which))
179 return 1;
180 }
181 {
182 register tree subblock;
183
184 for (subblock = BLOCK_SUBBLOCKS (exp);
185 subblock;
186 subblock = TREE_CHAIN (subblock))
187 if (calls_function (subblock, which))
188 return 1;
189 }
190 return 0;
191
192 case METHOD_CALL_EXPR:
193 length = 3;
194 break;
195
196 case WITH_CLEANUP_EXPR:
197 length = 1;
198 break;
199
200 case RTL_EXPR:
201 return 0;
202 }
203
204 for (i = 0; i < length; i++)
205 if (TREE_OPERAND (exp, i) != 0
206 && calls_function (TREE_OPERAND (exp, i), which))
207 return 1;
208
209 return 0;
210}
211\f
212/* Force FUNEXP into a form suitable for the address of a CALL,
213 and return that as an rtx. Also load the static chain register
214 if FNDECL is a nested function.
215
216 USE_INSNS points to a variable holding a chain of USE insns
217 to which a USE of the static chain
218 register should be added, if required. */
219
220rtx
221prepare_call_address (funexp, fndecl, use_insns)
222 rtx funexp;
223 tree fndecl;
224 rtx *use_insns;
225{
226 rtx static_chain_value = 0;
227
228 funexp = protect_from_queue (funexp, 0);
229
230 if (fndecl != 0)
231 /* Get possible static chain value for nested function in C. */
232 static_chain_value = lookup_static_chain (fndecl);
233
234 /* Make a valid memory address and copy constants thru pseudo-regs,
235 but not for a constant address if -fno-function-cse. */
236 if (GET_CODE (funexp) != SYMBOL_REF)
237 funexp = memory_address (FUNCTION_MODE, funexp);
238 else
239 {
240#ifndef NO_FUNCTION_CSE
241 if (optimize && ! flag_no_function_cse)
242#ifdef NO_RECURSIVE_FUNCTION_CSE
243 if (fndecl != current_function_decl)
244#endif
245 funexp = force_reg (Pmode, funexp);
246#endif
247 }
248
249 if (static_chain_value != 0)
250 {
251 emit_move_insn (static_chain_rtx, static_chain_value);
252
253 /* Put the USE insn in the chain we were passed. It will later be
254 output immediately in front of the CALL insn. */
255 push_to_sequence (*use_insns);
256 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
257 *use_insns = get_insns ();
258 end_sequence ();
259 }
260
261 return funexp;
262}
263
264/* Generate instructions to call function FUNEXP,
265 and optionally pop the results.
266 The CALL_INSN is the first insn generated.
267
268 FUNTYPE is the data type of the function, or, for a library call,
269 the identifier for the name of the call. This is given to the
270 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
271
272 STACK_SIZE is the number of bytes of arguments on the stack,
273 rounded up to STACK_BOUNDARY; zero if the size is variable.
274 This is both to put into the call insn and
275 to generate explicit popping code if necessary.
276
277 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
278 It is zero if this call doesn't want a structure value.
279
280 NEXT_ARG_REG is the rtx that results from executing
281 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
282 just after all the args have had their registers assigned.
283 This could be whatever you like, but normally it is the first
284 arg-register beyond those used for args in this call,
285 or 0 if all the arg-registers are used in this call.
286 It is passed on to `gen_call' so you can put this info in the call insn.
287
288 VALREG is a hard register in which a value is returned,
289 or 0 if the call does not return a value.
290
291 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
292 the args to this call were processed.
293 We restore `inhibit_defer_pop' to that value.
294
295 USE_INSNS is a chain of USE insns to be emitted immediately before
296 the actual CALL insn.
297
298 IS_CONST is true if this is a `const' call. */
299
300static void
301emit_call_1 (funexp, funtype, stack_size, struct_value_size, next_arg_reg,
302 valreg, old_inhibit_defer_pop, use_insns, is_const)
303 rtx funexp;
304 tree funtype;
305 int stack_size;
306 int struct_value_size;
307 rtx next_arg_reg;
308 rtx valreg;
309 int old_inhibit_defer_pop;
310 rtx use_insns;
311 int is_const;
312{
313 rtx stack_size_rtx = GEN_INT (stack_size);
314 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
315 rtx call_insn;
316 int already_popped = 0;
317
318 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
319 and we don't want to load it into a register as an optimization,
320 because prepare_call_address already did it if it should be done. */
321 if (GET_CODE (funexp) != SYMBOL_REF)
322 funexp = memory_address (FUNCTION_MODE, funexp);
323
324#ifndef ACCUMULATE_OUTGOING_ARGS
325#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
326 if (HAVE_call_pop && HAVE_call_value_pop
327 && (RETURN_POPS_ARGS (funtype, stack_size) > 0 || stack_size == 0))
328 {
329 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (funtype, stack_size));
330 rtx pat;
331
332 /* If this subroutine pops its own args, record that in the call insn
333 if possible, for the sake of frame pointer elimination. */
334 if (valreg)
335 pat = gen_call_value_pop (valreg,
336 gen_rtx (MEM, FUNCTION_MODE, funexp),
337 stack_size_rtx, next_arg_reg, n_pop);
338 else
339 pat = gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, funexp),
340 stack_size_rtx, next_arg_reg, n_pop);
341
342 emit_call_insn (pat);
343 already_popped = 1;
344 }
345 else
346#endif
347#endif
348
349#if defined (HAVE_call) && defined (HAVE_call_value)
350 if (HAVE_call && HAVE_call_value)
351 {
352 if (valreg)
353 emit_call_insn (gen_call_value (valreg,
354 gen_rtx (MEM, FUNCTION_MODE, funexp),
355 stack_size_rtx, next_arg_reg,
356 NULL_RTX));
357 else
358 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp),
359 stack_size_rtx, next_arg_reg,
360 struct_value_size_rtx));
361 }
362 else
363#endif
364 abort ();
365
366 /* Find the CALL insn we just emitted and write the USE insns before it. */
367 for (call_insn = get_last_insn ();
368 call_insn && GET_CODE (call_insn) != CALL_INSN;
369 call_insn = PREV_INSN (call_insn))
370 ;
371
372 if (! call_insn)
373 abort ();
374
375 /* Put the USE insns before the CALL. */
376 emit_insns_before (use_insns, call_insn);
377
378 /* If this is a const call, then set the insn's unchanging bit. */
379 if (is_const)
380 CONST_CALL_P (call_insn) = 1;
381
382 /* Restore this now, so that we do defer pops for this call's args
383 if the context of the call as a whole permits. */
384 inhibit_defer_pop = old_inhibit_defer_pop;
385
386#ifndef ACCUMULATE_OUTGOING_ARGS
387 /* If returning from the subroutine does not automatically pop the args,
388 we need an instruction to pop them sooner or later.
389 Perhaps do it now; perhaps just record how much space to pop later.
390
391 If returning from the subroutine does pop the args, indicate that the
392 stack pointer will be changed. */
393
394 if (stack_size != 0 && RETURN_POPS_ARGS (funtype, stack_size) > 0)
395 {
396 if (!already_popped)
397 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
398 stack_size -= RETURN_POPS_ARGS (funtype, stack_size);
399 stack_size_rtx = GEN_INT (stack_size);
400 }
401
402 if (stack_size != 0)
403 {
404 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
405 pending_stack_adjust += stack_size;
406 else
407 adjust_stack (stack_size_rtx);
408 }
409#endif
410}
411
412/* Generate all the code for a function call
413 and return an rtx for its value.
414 Store the value in TARGET (specified as an rtx) if convenient.
415 If the value is stored in TARGET then TARGET is returned.
416 If IGNORE is nonzero, then we ignore the value of the function call. */
417
418rtx
419expand_call (exp, target, ignore)
420 tree exp;
421 rtx target;
422 int ignore;
423{
424 /* List of actual parameters. */
425 tree actparms = TREE_OPERAND (exp, 1);
426 /* RTX for the function to be called. */
427 rtx funexp;
428 /* Tree node for the function to be called (not the address!). */
429 tree funtree;
430 /* Data type of the function. */
431 tree funtype;
432 /* Declaration of the function being called,
433 or 0 if the function is computed (not known by name). */
434 tree fndecl = 0;
435 char *name = 0;
436
437 /* Register in which non-BLKmode value will be returned,
438 or 0 if no value or if value is BLKmode. */
439 rtx valreg;
440 /* Address where we should return a BLKmode value;
441 0 if value not BLKmode. */
442 rtx structure_value_addr = 0;
443 /* Nonzero if that address is being passed by treating it as
444 an extra, implicit first parameter. Otherwise,
445 it is passed by being copied directly into struct_value_rtx. */
446 int structure_value_addr_parm = 0;
447 /* Size of aggregate value wanted, or zero if none wanted
448 or if we are using the non-reentrant PCC calling convention
449 or expecting the value in registers. */
450 int struct_value_size = 0;
451 /* Nonzero if called function returns an aggregate in memory PCC style,
452 by returning the address of where to find it. */
453 int pcc_struct_value = 0;
454
455 /* Number of actual parameters in this call, including struct value addr. */
456 int num_actuals;
457 /* Number of named args. Args after this are anonymous ones
458 and they must all go on the stack. */
459 int n_named_args;
460 /* Count arg position in order args appear. */
461 int argpos;
462
463 /* Vector of information about each argument.
464 Arguments are numbered in the order they will be pushed,
465 not the order they are written. */
466 struct arg_data *args;
467
468 /* Total size in bytes of all the stack-parms scanned so far. */
469 struct args_size args_size;
470 /* Size of arguments before any adjustments (such as rounding). */
471 struct args_size original_args_size;
472 /* Data on reg parms scanned so far. */
473 CUMULATIVE_ARGS args_so_far;
474 /* Nonzero if a reg parm has been scanned. */
475 int reg_parm_seen;
476
477 /* Nonzero if we must avoid push-insns in the args for this call.
478 If stack space is allocated for register parameters, but not by the
479 caller, then it is preallocated in the fixed part of the stack frame.
480 So the entire argument block must then be preallocated (i.e., we
481 ignore PUSH_ROUNDING in that case). */
482
483#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
484 int must_preallocate = 1;
485#else
486#ifdef PUSH_ROUNDING
487 int must_preallocate = 0;
488#else
489 int must_preallocate = 1;
490#endif
491#endif
492
493 /* Size of the stack reserved for parameter registers. */
494 int reg_parm_stack_space = 0;
495
496 /* 1 if scanning parms front to back, -1 if scanning back to front. */
497 int inc;
498 /* Address of space preallocated for stack parms
499 (on machines that lack push insns), or 0 if space not preallocated. */
500 rtx argblock = 0;
501
502 /* Nonzero if it is plausible that this is a call to alloca. */
503 int may_be_alloca;
504 /* Nonzero if this is a call to setjmp or a related function. */
505 int returns_twice;
506 /* Nonzero if this is a call to `longjmp'. */
507 int is_longjmp;
508 /* Nonzero if this is a call to an inline function. */
509 int is_integrable = 0;
510 /* Nonzero if this is a call to a `const' function.
511 Note that only explicitly named functions are handled as `const' here. */
512 int is_const = 0;
513 /* Nonzero if this is a call to a `volatile' function. */
514 int is_volatile = 0;
515#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
516 /* Define the boundary of the register parm stack space that needs to be
 517 saved, if any. */
518 int low_to_save = -1, high_to_save;
519 rtx save_area = 0; /* Place that it is saved */
520#endif
521
522#ifdef ACCUMULATE_OUTGOING_ARGS
523 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
524 char *initial_stack_usage_map = stack_usage_map;
525#endif
526
527 rtx old_stack_level = 0;
528 int old_pending_adj;
529 int old_stack_arg_under_construction;
530 int old_inhibit_defer_pop = inhibit_defer_pop;
531 tree old_cleanups = cleanups_this_call;
532
533 rtx use_insns = 0;
534
535 register tree p;
536 register int i, j;
537
538 /* See if we can find a DECL-node for the actual function.
539 As a result, decide whether this is a call to an integrable function. */
540
541 p = TREE_OPERAND (exp, 0);
542 if (TREE_CODE (p) == ADDR_EXPR)
543 {
544 fndecl = TREE_OPERAND (p, 0);
545 if (TREE_CODE (fndecl) != FUNCTION_DECL)
546 {
547 /* May still be a `const' function if it is
548 a call through a pointer-to-const.
549 But we don't handle that. */
550 fndecl = 0;
551 }
552 else
553 {
554 if (!flag_no_inline
555 && fndecl != current_function_decl
556 && DECL_SAVED_INSNS (fndecl))
557 is_integrable = 1;
558 else if (! TREE_ADDRESSABLE (fndecl))
559 {
560 /* In case this function later becomes inlinable,
561 record that there was already a non-inline call to it.
562
563 Use abstraction instead of setting TREE_ADDRESSABLE
564 directly. */
565 if (DECL_INLINE (fndecl) && extra_warnings && !flag_no_inline)
566 warning_with_decl (fndecl, "can't inline call to `%s' which was declared inline");
567 mark_addressable (fndecl);
568 }
569
570 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
571 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
572 is_const = 1;
573 }
574 }
575
576 is_volatile = TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
577
578#ifdef REG_PARM_STACK_SPACE
579#ifdef MAYBE_REG_PARM_STACK_SPACE
580 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
581#else
582 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
583#endif
584#endif
585
586 /* Warn if this value is an aggregate type,
587 regardless of which calling convention we are using for it. */
588 if (warn_aggregate_return
589 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
590 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
591 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
592 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE))
593 warning ("function call has aggregate value");
594
595 /* Set up a place to return a structure. */
596
597 /* Cater to broken compilers. */
598 if (aggregate_value_p (exp))
599 {
600 /* This call returns a big structure. */
601 is_const = 0;
602
603#ifdef PCC_STATIC_STRUCT_RETURN
604 {
605 pcc_struct_value = 1;
606 is_integrable = 0; /* Easier than making that case work right. */
607 }
608#else /* not PCC_STATIC_STRUCT_RETURN */
609 {
610 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
611
612 if (struct_value_size < 0)
613 abort ();
614
615 if (target && GET_CODE (target) == MEM)
616 structure_value_addr = XEXP (target, 0);
617 else
618 {
619 /* Assign a temporary on the stack to hold the value. */
620
621 /* For variable-sized objects, we must be called with a target
622 specified. If we were to allocate space on the stack here,
623 we would have no way of knowing when to free it. */
624
625 structure_value_addr
626 = XEXP (assign_stack_temp (BLKmode, struct_value_size, 1), 0);
627 target = 0;
628 }
629 }
630#endif /* not PCC_STATIC_STRUCT_RETURN */
631 }
632
633 /* If called function is inline, try to integrate it. */
634
635 if (is_integrable)
636 {
637 rtx temp;
638 rtx before_call = get_last_insn ();
639
640 temp = expand_inline_function (fndecl, actparms, target,
641 ignore, TREE_TYPE (exp),
642 structure_value_addr);
643
644 /* If inlining succeeded, return. */
645 if ((HOST_WIDE_INT) temp != -1)
646 {
647 /* Perform all cleanups needed for the arguments of this call
648 (i.e. destructors in C++). It is ok if these destructors
649 clobber RETURN_VALUE_REG, because the only time we care about
650 this is when TARGET is that register. But in C++, we take
651 care to never return that register directly. */
652 expand_cleanups_to (old_cleanups);
653
654#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
655 /* If the outgoing argument list must be preserved, push
656 the stack before executing the inlined function if it
657 makes any calls. */
658
659 for (i = reg_parm_stack_space - 1; i >= 0; i--)
660 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
661 break;
662
663 if (stack_arg_under_construction || i >= 0)
664 {
665 rtx insn = NEXT_INSN (before_call), seq;
666
667 /* Look for a call in the inline function code.
668 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
669 nonzero then there is a call and it is not necessary
670 to scan the insns. */
671
672 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
673 for (; insn; insn = NEXT_INSN (insn))
674 if (GET_CODE (insn) == CALL_INSN)
675 break;
676
677 if (insn)
678 {
679 /* Reserve enough stack space so that the largest
680 argument list of any function call in the inline
681 function does not overlap the argument list being
682 evaluated. This is usually an overestimate because
683 allocate_dynamic_stack_space reserves space for an
684 outgoing argument list in addition to the requested
685 space, but there is no way to ask for stack space such
686 that an argument list of a certain length can be
687 safely constructed. */
688
689 int adjust = OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl));
690#ifdef REG_PARM_STACK_SPACE
691 /* Add the stack space reserved for register arguments
692 in the inline function. What is really needed is the
693 largest value of reg_parm_stack_space in the inline
694 function, but that is not available. Using the current
695 value of reg_parm_stack_space is wrong, but gives
696 correct results on all supported machines. */
697 adjust += reg_parm_stack_space;
698#endif
699 start_sequence ();
700 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
701 allocate_dynamic_stack_space (GEN_INT (adjust),
702 NULL_RTX, BITS_PER_UNIT);
703 seq = get_insns ();
704 end_sequence ();
705 emit_insns_before (seq, NEXT_INSN (before_call));
706 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
707 }
708 }
709#endif
710
711 /* If the result is equivalent to TARGET, return TARGET to simplify
712 checks in store_expr. They can be equivalent but not equal in the
713 case of a function that returns BLKmode. */
714 if (temp != target && rtx_equal_p (temp, target))
715 return target;
716 return temp;
717 }
718
719 /* If inlining failed, mark FNDECL as needing to be compiled
720 separately after all. */
721 mark_addressable (fndecl);
722 }
723
724 /* When calling a const function, we must pop the stack args right away,
725 so that the pop is deleted or moved with the call. */
726 if (is_const)
727 NO_DEFER_POP;
728
729 function_call_count++;
730
731 if (fndecl && DECL_NAME (fndecl))
732 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
733
734#if 0
735 /* Unless it's a call to a specific function that isn't alloca,
736 if it has one argument, we must assume it might be alloca. */
737
738 may_be_alloca =
739 (!(fndecl != 0 && strcmp (name, "alloca"))
740 && actparms != 0
741 && TREE_CHAIN (actparms) == 0);
742#else
743 /* We assume that alloca will always be called by name. It
744 makes no sense to pass it as a pointer-to-function to
745 anything that does not understand its behavior. */
746 may_be_alloca =
747 (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
748 && name[0] == 'a'
749 && ! strcmp (name, "alloca"))
750 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
751 && name[0] == '_'
752 && ! strcmp (name, "__builtin_alloca"))));
753#endif
754
755 /* See if this is a call to a function that can return more than once
756 or a call to longjmp. */
757
758 returns_twice = 0;
759 is_longjmp = 0;
760
761 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15)
762 {
763 char *tname = name;
764
765 if (name[0] == '_')
766 tname += ((name[1] == '_' && name[2] == 'x') ? 3 : 1);
767
768 if (tname[0] == 's')
769 {
770 returns_twice
771 = ((tname[1] == 'e'
772 && (! strcmp (tname, "setjmp")
773 || ! strcmp (tname, "setjmp_syscall")))
774 || (tname[1] == 'i'
775 && ! strcmp (tname, "sigsetjmp"))
776 || (tname[1] == 'a'
777 && ! strcmp (tname, "savectx")));
778 if (tname[1] == 'i'
779 && ! strcmp (tname, "siglongjmp"))
780 is_longjmp = 1;
781 }
782 else if ((tname[0] == 'q' && tname[1] == 's'
783 && ! strcmp (tname, "qsetjmp"))
784 || (tname[0] == 'v' && tname[1] == 'f'
785 && ! strcmp (tname, "vfork")))
786 returns_twice = 1;
787
788 else if (tname[0] == 'l' && tname[1] == 'o'
789 && ! strcmp (tname, "longjmp"))
790 is_longjmp = 1;
791 }
792
793 if (may_be_alloca)
794 current_function_calls_alloca = 1;
795
796 /* Don't let pending stack adjusts add up to too much.
797 Also, do all pending adjustments now
798 if there is any chance this might be a call to alloca. */
799
800 if (pending_stack_adjust >= 32
801 || (pending_stack_adjust > 0 && may_be_alloca))
802 do_pending_stack_adjust ();
803
804 /* Operand 0 is a pointer-to-function; get the type of the function. */
805 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
806 if (TREE_CODE (funtype) != POINTER_TYPE)
807 abort ();
808 funtype = TREE_TYPE (funtype);
809
810 /* Push the temporary stack slot level so that we can free temporaries used
811 by each of the arguments separately. */
812 push_temp_slots ();
813
814 /* Start updating where the next arg would go. */
815 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX);
816
817 /* If struct_value_rtx is 0, it means pass the address
818 as if it were an extra parameter. */
819 if (structure_value_addr && struct_value_rtx == 0)
820 {
821#ifdef ACCUMULATE_OUTGOING_ARGS
822 /* If the stack will be adjusted, make sure the structure address
823 does not refer to virtual_outgoing_args_rtx. */
824 rtx temp = (stack_arg_under_construction
825 ? copy_addr_to_reg (structure_value_addr)
826 : force_reg (Pmode, structure_value_addr));
827#else
828 rtx temp = force_reg (Pmode, structure_value_addr);
829#endif
830
831 actparms
832 = tree_cons (error_mark_node,
833 make_tree (build_pointer_type (TREE_TYPE (funtype)),
834 temp),
835 actparms);
836 structure_value_addr_parm = 1;
837 }
838
839 /* Count the arguments and set NUM_ACTUALS. */
840 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
841 num_actuals = i;
842
843 /* Compute number of named args.
844 Normally, don't include the last named arg if anonymous args follow.
845 (If no anonymous args follow, the result of list_length
846 is actually one too large.)
847
848 If SETUP_INCOMING_VARARGS is defined, this machine will be able to
849 place unnamed args that were passed in registers into the stack. So
850 treat all args as named. This allows the insns emitting for a specific
851 argument list to be independent of the function declaration.
852
853 If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
854 way to pass unnamed args in registers, so we must force them into
855 memory. */
856#ifndef SETUP_INCOMING_VARARGS
857 if (TYPE_ARG_TYPES (funtype) != 0)
858 n_named_args
859 = list_length (TYPE_ARG_TYPES (funtype)) - 1
860 /* Count the struct value address, if it is passed as a parm. */
861 + structure_value_addr_parm;
862 else
863#endif
864 /* If we know nothing, treat all args as named. */
865 n_named_args = num_actuals;
866
867 /* Make a vector to hold all the information about each arg. */
868 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
869 bzero (args, num_actuals * sizeof (struct arg_data));
870
871 args_size.constant = 0;
872 args_size.var = 0;
873
874 /* In this loop, we consider args in the order they are written.
 875 We fill up ARGS from the front or from the back if necessary
876 so that in any case the first arg to be pushed ends up at the front. */
877
878#ifdef PUSH_ARGS_REVERSED
879 i = num_actuals - 1, inc = -1;
880 /* In this case, must reverse order of args
881 so that we compute and push the last arg first. */
882#else
883 i = 0, inc = 1;
884#endif
885
886 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
887 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
888 {
889 tree type = TREE_TYPE (TREE_VALUE (p));
890 enum machine_mode mode;
891
892 args[i].tree_value = TREE_VALUE (p);
893
894 /* Replace erroneous argument with constant zero. */
895 if (type == error_mark_node || TYPE_SIZE (type) == 0)
896 args[i].tree_value = integer_zero_node, type = integer_type_node;
897
898 /* Decide where to pass this arg.
899
900 args[i].reg is nonzero if all or part is passed in registers.
901
902 args[i].partial is nonzero if part but not all is passed in registers,
903 and the exact value says how many words are passed in registers.
904
905 args[i].pass_on_stack is nonzero if the argument must at least be
906 computed on the stack. It may then be loaded back into registers
907 if args[i].reg is nonzero.
908
909 These decisions are driven by the FUNCTION_... macros and must agree
910 with those made by function.c. */
911
912#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
913 /* See if this argument should be passed by invisible reference. */
914 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type), type,
915 argpos < n_named_args))
916 {
917 /* We make a copy of the object and pass the address to the function
918 being called. */
919 rtx copy;
920
921 if (TYPE_SIZE (type) == 0
922 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
923 {
924 /* This is a variable-sized object. Make space on the stack
925 for it. */
926 rtx size_rtx = expr_size (TREE_VALUE (p));
927
928 if (old_stack_level == 0)
929 {
930 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
931 old_pending_adj = pending_stack_adjust;
932 pending_stack_adjust = 0;
933 }
934
935 copy = gen_rtx (MEM, BLKmode,
936 allocate_dynamic_stack_space (size_rtx, NULL_RTX,
937 TYPE_ALIGN (type)));
938 }
939 else
940 {
941 int size = int_size_in_bytes (type);
942 copy = assign_stack_temp (TYPE_MODE (type), size, 1);
943 }
944
945 store_expr (args[i].tree_value, copy, 0);
946
947 args[i].tree_value = build1 (ADDR_EXPR, build_pointer_type (type),
948 make_tree (type, copy));
949 type = build_pointer_type (type);
950 }
951#endif
952
953 mode = TYPE_MODE (type);
954
955#ifdef PROMOTE_FUNCTION_ARGS
956 /* Compute the mode in which the arg is actually to be extended to. */
957 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
958 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
959 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
960 || TREE_CODE (type) == OFFSET_TYPE)
961 {
962 int unsignedp = TREE_UNSIGNED (type);
963 PROMOTE_MODE (mode, unsignedp, type);
964 args[i].unsignedp = unsignedp;
965 }
966#endif
967
968 args[i].mode = mode;
969 args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
970 argpos < n_named_args);
971#ifdef FUNCTION_ARG_PARTIAL_NREGS
972 if (args[i].reg)
973 args[i].partial
974 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
975 argpos < n_named_args);
976#endif
977
978 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
979
980 /* If FUNCTION_ARG returned an (expr_list (nil) FOO), it means that
981 we are to pass this arg in the register(s) designated by FOO, but
982 also to pass it in the stack. */
983 if (args[i].reg && GET_CODE (args[i].reg) == EXPR_LIST
984 && XEXP (args[i].reg, 0) == 0)
985 args[i].pass_on_stack = 1, args[i].reg = XEXP (args[i].reg, 1);
986
987 /* If this is an addressable type, we must preallocate the stack
988 since we must evaluate the object into its final location.
989
990 If this is to be passed in both registers and the stack, it is simpler
991 to preallocate. */
992 if (TREE_ADDRESSABLE (type)
993 || (args[i].pass_on_stack && args[i].reg != 0))
994 must_preallocate = 1;
995
996 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
997 we cannot consider this function call constant. */
998 if (TREE_ADDRESSABLE (type))
999 is_const = 0;
1000
1001 /* Compute the stack-size of this argument. */
1002 if (args[i].reg == 0 || args[i].partial != 0
1003#ifdef REG_PARM_STACK_SPACE
1004 || reg_parm_stack_space > 0
1005#endif
1006 || args[i].pass_on_stack)
1007 locate_and_pad_parm (mode, type,
1008#ifdef STACK_PARMS_IN_REG_PARM_AREA
1009 1,
1010#else
1011 args[i].reg != 0,
1012#endif
1013 fndecl, &args_size, &args[i].offset,
1014 &args[i].size);
1015
1016#ifndef ARGS_GROW_DOWNWARD
1017 args[i].slot_offset = args_size;
1018#endif
1019
1020#ifndef REG_PARM_STACK_SPACE
1021 /* If a part of the arg was put into registers,
1022 don't include that part in the amount pushed. */
1023 if (! args[i].pass_on_stack)
1024 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1025 / (PARM_BOUNDARY / BITS_PER_UNIT)
1026 * (PARM_BOUNDARY / BITS_PER_UNIT));
1027#endif
1028
1029 /* Update ARGS_SIZE, the total stack space for args so far. */
1030
1031 args_size.constant += args[i].size.constant;
1032 if (args[i].size.var)
1033 {
1034 ADD_PARM_SIZE (args_size, args[i].size.var);
1035 }
1036
1037 /* Since the slot offset points to the bottom of the slot,
1038 we must record it after incrementing if the args grow down. */
1039#ifdef ARGS_GROW_DOWNWARD
1040 args[i].slot_offset = args_size;
1041
1042 args[i].slot_offset.constant = -args_size.constant;
1043 if (args_size.var)
1044 {
1045 SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
1046 }
1047#endif
1048
1049 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1050 have been used, etc. */
1051
1052 FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
1053 argpos < n_named_args);
1054 }
1055
1056#ifdef FINAL_REG_PARM_STACK_SPACE
1057 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1058 args_size.var);
1059#endif
1060
1061 /* Compute the actual size of the argument block required. The variable
1062 and constant sizes must be combined, the size may have to be rounded,
1063 and there may be a minimum required size. */
1064
1065 original_args_size = args_size;
1066 if (args_size.var)
1067 {
1068 /* If this function requires a variable-sized argument list, don't try to
1069 make a cse'able block for this call. We may be able to do this
1070 eventually, but it is too complicated to keep track of what insns go
1071 in the cse'able block and which don't. */
1072
1073 is_const = 0;
1074 must_preallocate = 1;
1075
1076 args_size.var = ARGS_SIZE_TREE (args_size);
1077 args_size.constant = 0;
1078
1079#ifdef STACK_BOUNDARY
1080 if (STACK_BOUNDARY != BITS_PER_UNIT)
1081 args_size.var = round_up (args_size.var, STACK_BYTES);
1082#endif
1083
1084#ifdef REG_PARM_STACK_SPACE
1085 if (reg_parm_stack_space > 0)
1086 {
1087 args_size.var
1088 = size_binop (MAX_EXPR, args_size.var,
1089 size_int (REG_PARM_STACK_SPACE (fndecl)));
1090
1091#ifndef OUTGOING_REG_PARM_STACK_SPACE
1092 /* The area corresponding to register parameters is not to count in
1093 the size of the block we need. So make the adjustment. */
1094 args_size.var
1095 = size_binop (MINUS_EXPR, args_size.var,
1096 size_int (reg_parm_stack_space));
1097#endif
1098 }
1099#endif
1100 }
1101 else
1102 {
1103#ifdef STACK_BOUNDARY
1104 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1105 / STACK_BYTES) * STACK_BYTES);
1106#endif
1107
1108#ifdef REG_PARM_STACK_SPACE
1109 args_size.constant = MAX (args_size.constant,
1110 reg_parm_stack_space);
1111#ifdef MAYBE_REG_PARM_STACK_SPACE
1112 if (reg_parm_stack_space == 0)
1113 args_size.constant = 0;
1114#endif
1115#ifndef OUTGOING_REG_PARM_STACK_SPACE
1116 args_size.constant -= reg_parm_stack_space;
1117#endif
1118#endif
1119 }
1120
1121 /* See if we have or want to preallocate stack space.
1122
1123 If we would have to push a partially-in-regs parm
1124 before other stack parms, preallocate stack space instead.
1125
1126 If the size of some parm is not a multiple of the required stack
1127 alignment, we must preallocate.
1128
1129 If the total size of arguments that would otherwise create a copy in
1130 a temporary (such as a CALL) is more than half the total argument list
1131 size, preallocation is faster.
1132
1133 Another reason to preallocate is if we have a machine (like the m88k)
1134 where stack alignment is required to be maintained between every
1135 pair of insns, not just when the call is made. However, we assume here
1136 that such machines either do not have push insns (and hence preallocation
1137 would occur anyway) or the problem is taken care of with
1138 PUSH_ROUNDING. */
1139
1140 if (! must_preallocate)
1141 {
1142 int partial_seen = 0;
1143 int copy_to_evaluate_size = 0;
1144
1145 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1146 {
1147 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1148 partial_seen = 1;
1149 else if (partial_seen && args[i].reg == 0)
1150 must_preallocate = 1;
1151
1152 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1153 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1154 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1155 || TREE_CODE (args[i].tree_value) == COND_EXPR
1156 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1157 copy_to_evaluate_size
1158 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1159 }
1160
1161 if (copy_to_evaluate_size * 2 >= args_size.constant
1162 && args_size.constant > 0)
1163 must_preallocate = 1;
1164 }
1165
1166 /* If the structure value address will reference the stack pointer, we must
1167 stabilize it. We don't need to do this if we know that we are not going
1168 to adjust the stack pointer in processing this call. */
1169
1170 if (structure_value_addr
1171 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1172 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1173 && (args_size.var
1174#ifndef ACCUMULATE_OUTGOING_ARGS
1175 || args_size.constant
1176#endif
1177 ))
1178 structure_value_addr = copy_to_reg (structure_value_addr);
1179
1180 /* If this function call is cse'able, precompute all the parameters.
1181 Note that if the parameter is constructed into a temporary, this will
1182 cause an additional copy because the parameter will be constructed
1183 into a temporary location and then copied into the outgoing arguments.
1184 If a parameter contains a call to alloca and this function uses the
1185 stack, precompute the parameter. */
1186
1187 /* If we preallocated the stack space, and some arguments must be passed
1188 on the stack, then we must precompute any parameter which contains a
1189 function call which will store arguments on the stack.
1190 Otherwise, evaluating the parameter may clobber previous parameters
1191 which have already been stored into the stack. */
1192
1193 for (i = 0; i < num_actuals; i++)
1194 if (is_const
1195 || ((args_size.var != 0 || args_size.constant != 0)
1196 && calls_function (args[i].tree_value, 1))
1197 || (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
1198 && calls_function (args[i].tree_value, 0)))
1199 {
1200 args[i].initial_value = args[i].value
1201 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1202
1203 if (GET_MODE (args[i].value ) != VOIDmode
1204 && GET_MODE (args[i].value) != args[i].mode)
1205 args[i].value = convert_to_mode (args[i].mode, args[i].value,
1206 args[i].unsignedp);
1207 preserve_temp_slots (args[i].value);
1208
1209 free_temp_slots ();
1210
1211 /* ANSI doesn't require a sequence point here,
1212 but PCC has one, so this will avoid some problems. */
1213 emit_queue ();
1214 }
1215
1216 /* Now we are about to start emitting insns that can be deleted
1217 if a libcall is deleted. */
1218 if (is_const)
1219 start_sequence ();
1220
1221 /* If we have no actual push instructions, or shouldn't use them,
1222 make space for all args right now. */
1223
1224 if (args_size.var != 0)
1225 {
1226 if (old_stack_level == 0)
1227 {
1228 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1229 old_pending_adj = pending_stack_adjust;
1230 pending_stack_adjust = 0;
1231#ifdef ACCUMULATE_OUTGOING_ARGS
1232 /* stack_arg_under_construction says whether a stack arg is
1233 being constructed at the old stack level. Pushing the stack
1234 gets a clean outgoing argument block. */
1235 old_stack_arg_under_construction = stack_arg_under_construction;
1236 stack_arg_under_construction = 0;
1237#endif
1238 }
1239 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
1240 }
1241 else if (must_preallocate)
1242 {
1243 /* Note that we must go through the motions of allocating an argument
1244 block even if the size is zero because we may be storing args
1245 in the area reserved for register arguments, which may be part of
1246 the stack frame. */
1247 int needed = args_size.constant;
1248
1249#ifdef ACCUMULATE_OUTGOING_ARGS
1250 /* Store the maximum argument space used. It will be pushed by the
1251 prologue.
1252
1253 Since the stack pointer will never be pushed, it is possible for
1254 the evaluation of a parm to clobber something we have already
1255 written to the stack. Since most function calls on RISC machines
1256 do not use the stack, this is uncommon, but must work correctly.
1257
1258 Therefore, we save any area of the stack that was already written
1259 and that we are using. Here we set up to do this by making a new
1260 stack usage map from the old one. The actual save will be done
1261 by store_one_arg.
1262
1263 Another approach might be to try to reorder the argument
1264 evaluations to avoid this conflicting stack usage. */
1265
1266 if (needed > current_function_outgoing_args_size)
1267 current_function_outgoing_args_size = needed;
1268
1269#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1270 /* Since we will be writing into the entire argument area, the
1271 map must be allocated for its entire size, not just the part that
1272 is the responsibility of the caller. */
1273 needed += reg_parm_stack_space;
1274#endif
1275
1276#ifdef ARGS_GROW_DOWNWARD
1277 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1278 needed + 1);
1279#else
1280 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);
1281#endif
1282 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
1283
1284 if (initial_highest_arg_in_use)
1285 bcopy (initial_stack_usage_map, stack_usage_map,
1286 initial_highest_arg_in_use);
1287
1288 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
1289 bzero (&stack_usage_map[initial_highest_arg_in_use],
1290 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
1291 needed = 0;
1292
1293 /* The address of the outgoing argument list must not be copied to a
1294 register here, because argblock would be left pointing to the
1295 wrong place after the call to allocate_dynamic_stack_space below. */
1296
1297 argblock = virtual_outgoing_args_rtx;
1298
1299#else /* not ACCUMULATE_OUTGOING_ARGS */
1300 if (inhibit_defer_pop == 0)
1301 {
1302 /* Try to reuse some or all of the pending_stack_adjust
1303 to get this space. Maybe we can avoid any pushing. */
1304 if (needed > pending_stack_adjust)
1305 {
1306 needed -= pending_stack_adjust;
1307 pending_stack_adjust = 0;
1308 }
1309 else
1310 {
1311 pending_stack_adjust -= needed;
1312 needed = 0;
1313 }
1314 }
1315 /* Special case this because overhead of `push_block' in this
1316 case is non-trivial. */
1317 if (needed == 0)
1318 argblock = virtual_outgoing_args_rtx;
1319 else
1320 argblock = push_block (GEN_INT (needed), 0, 0);
1321
1322 /* We only really need to call `copy_to_reg' in the case where push
1323 insns are going to be used to pass ARGBLOCK to a function
1324 call in ARGS. In that case, the stack pointer changes value
1325 from the allocation point to the call point, and hence
1326 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
1327 But might as well always do it. */
1328 argblock = copy_to_reg (argblock);
1329#endif /* not ACCUMULATE_OUTGOING_ARGS */
1330 }
1331
1332
1333#ifdef ACCUMULATE_OUTGOING_ARGS
1334 /* The save/restore code in store_one_arg handles all cases except one:
1335 a constructor call (including a C function returning a BLKmode struct)
1336 to initialize an argument. */
1337 if (stack_arg_under_construction)
1338 {
1339#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1340 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
1341#else
1342 rtx push_size = GEN_INT (args_size.constant);
1343#endif
1344 if (old_stack_level == 0)
1345 {
1346 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1347 old_pending_adj = pending_stack_adjust;
1348 pending_stack_adjust = 0;
1349 /* stack_arg_under_construction says whether a stack arg is
1350 being constructed at the old stack level. Pushing the stack
1351 gets a clean outgoing argument block. */
1352 old_stack_arg_under_construction = stack_arg_under_construction;
1353 stack_arg_under_construction = 0;
1354 /* Make a new map for the new argument list. */
1355 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
1356 bzero (stack_usage_map, highest_outgoing_arg_in_use);
1357 highest_outgoing_arg_in_use = 0;
1358 }
1359 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
1360 }
1361 /* If argument evaluation might modify the stack pointer, copy the
1362 address of the argument list to a register. */
1363 for (i = 0; i < num_actuals; i++)
1364 if (args[i].pass_on_stack)
1365 {
1366 argblock = copy_addr_to_reg (argblock);
1367 break;
1368 }
1369#endif
1370
1371
1372 /* If we preallocated stack space, compute the address of each argument.
1373 We need not ensure it is a valid memory address here; it will be
1374 validized when it is used. */
1375 if (argblock)
1376 {
1377 rtx arg_reg = argblock;
1378 int arg_offset = 0;
1379
1380 if (GET_CODE (argblock) == PLUS)
1381 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1382
1383 for (i = 0; i < num_actuals; i++)
1384 {
1385 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1386 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1387 rtx addr;
1388
1389 /* Skip this parm if it will not be passed on the stack. */
1390 if (! args[i].pass_on_stack && args[i].reg != 0)
1391 continue;
1392
1393 if (GET_CODE (offset) == CONST_INT)
1394 addr = plus_constant (arg_reg, INTVAL (offset));
1395 else
1396 addr = gen_rtx (PLUS, Pmode, arg_reg, offset);
1397
1398 addr = plus_constant (addr, arg_offset);
1399 args[i].stack = gen_rtx (MEM, args[i].mode, addr);
1400
1401 if (GET_CODE (slot_offset) == CONST_INT)
1402 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1403 else
1404 addr = gen_rtx (PLUS, Pmode, arg_reg, slot_offset);
1405
1406 addr = plus_constant (addr, arg_offset);
1407 args[i].stack_slot = gen_rtx (MEM, args[i].mode, addr);
1408 }
1409 }
1410
1411#ifdef PUSH_ARGS_REVERSED
1412#ifdef STACK_BOUNDARY
1413 /* If we push args individually in reverse order, perform stack alignment
1414 before the first push (the last arg). */
1415 if (argblock == 0)
1416 anti_adjust_stack (GEN_INT (args_size.constant
1417 - original_args_size.constant));
1418#endif
1419#endif
1420
1421 /* Don't try to defer pops if preallocating, not even from the first arg,
1422 since ARGBLOCK probably refers to the SP. */
1423 if (argblock)
1424 NO_DEFER_POP;
1425
1426 /* Get the function to call, in the form of RTL. */
1427 if (fndecl)
1428 /* Get a SYMBOL_REF rtx for the function address. */
1429 funexp = XEXP (DECL_RTL (fndecl), 0);
1430 else
1431 /* Generate an rtx (probably a pseudo-register) for the address. */
1432 {
1433 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1434 free_temp_slots (); /* FUNEXP can't be BLKmode */
1435 emit_queue ();
1436 }
1437
1438 /* Figure out the register where the value, if any, will come back. */
1439 valreg = 0;
1440 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1441 && ! structure_value_addr)
1442 {
1443 if (pcc_struct_value)
1444 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1445 fndecl);
1446 else
1447 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1448 }
1449
1450 /* Precompute all register parameters. It isn't safe to compute anything
1451 once we have started filling any specific hard regs. */
1452 reg_parm_seen = 0;
1453 for (i = 0; i < num_actuals; i++)
1454 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1455 {
1456 reg_parm_seen = 1;
1457
1458 if (args[i].value == 0)
1459 {
1460 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1461 VOIDmode, 0);
1462 preserve_temp_slots (args[i].value);
1463 free_temp_slots ();
1464
1465 /* ANSI doesn't require a sequence point here,
1466 but PCC has one, so this will avoid some problems. */
1467 emit_queue ();
1468 }
1469
1470 /* If we are to promote the function arg to a wider mode,
1471 do it now. */
1472
1473 if (GET_MODE (args[i].value) != VOIDmode
1474 && GET_MODE (args[i].value) != args[i].mode)
1475 args[i].value = convert_to_mode (args[i].mode, args[i].value,
1476 args[i].unsignedp);
1477 }
1478
1479#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1480 /* The argument list is the property of the called routine and it
1481 may clobber it. If the fixed area has been used for previous
1482 parameters, we must save and restore it.
1483
 1484 Here we compute the boundary of the area that needs to be saved, if any. */
1485
1486#ifdef ARGS_GROW_DOWNWARD
1487 for (i = 0; i < reg_parm_stack_space + 1; i++)
1488#else
1489 for (i = 0; i < reg_parm_stack_space; i++)
1490#endif
1491 {
1492 if (i >= highest_outgoing_arg_in_use
1493 || stack_usage_map[i] == 0)
1494 continue;
1495
1496 if (low_to_save == -1)
1497 low_to_save = i;
1498
1499 high_to_save = i;
1500 }
1501
1502 if (low_to_save >= 0)
1503 {
1504 int num_to_save = high_to_save - low_to_save + 1;
1505 enum machine_mode save_mode
1506 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1507 rtx stack_area;
1508
1509 /* If we don't have the required alignment, must do this in BLKmode. */
1510 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1511 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1512 save_mode = BLKmode;
1513
1514 stack_area = gen_rtx (MEM, save_mode,
1515 memory_address (save_mode,
1516
1517#ifdef ARGS_GROW_DOWNWARD
1518 plus_constant (argblock,
1519 - high_to_save)
1520#else
1521 plus_constant (argblock,
1522 low_to_save)
1523#endif
1524 ));
1525 if (save_mode == BLKmode)
1526 {
1527 save_area = assign_stack_temp (BLKmode, num_to_save, 1);
1528 emit_block_move (validize_mem (save_area), stack_area,
1529 GEN_INT (num_to_save),
1530 PARM_BOUNDARY / BITS_PER_UNIT);
1531 }
1532 else
1533 {
1534 save_area = gen_reg_rtx (save_mode);
1535 emit_move_insn (save_area, stack_area);
1536 }
1537 }
1538#endif
1539
1540
1541 /* Now store (and compute if necessary) all non-register parms.
1542 These come before register parms, since they can require block-moves,
1543 which could clobber the registers used for register parms.
1544 Parms which have partial registers are not stored here,
1545 but we do preallocate space here if they want that. */
1546
1547 for (i = 0; i < num_actuals; i++)
1548 if (args[i].reg == 0 || args[i].pass_on_stack)
1549 store_one_arg (&args[i], argblock, may_be_alloca,
1550 args_size.var != 0, fndecl, reg_parm_stack_space);
1551
1552#ifdef STRICT_ALIGNMENT
1553 /* If we have a parm that is passed in registers but not in memory
1554 and whose alignment does not permit a direct copy into registers,
1555 make a group of pseudos that correspond to each register that we
1556 will later fill. */
1557
1558 for (i = 0; i < num_actuals; i++)
1559 if (args[i].reg != 0 && ! args[i].pass_on_stack
1560 && args[i].mode == BLKmode
1561 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1562 < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1563 {
1564 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1565
1566 args[i].n_aligned_regs
1567 = args[i].partial ? args[i].partial
1568 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1569
1570 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1571 * args[i].n_aligned_regs);
1572
1573 for (j = 0; j < args[i].n_aligned_regs; j++)
1574 {
1575 rtx reg = gen_reg_rtx (word_mode);
1576 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1577 int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1578 int bitpos;
1579
1580 args[i].aligned_regs[j] = reg;
1581
1582 /* Clobber REG and move each partword into it. Ensure we don't
1583 go past the end of the structure. Note that the loop below
1584 works because we've already verified that padding
1585 and endianness are compatible. */
1586
1587 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
1588
1589 for (bitpos = 0;
7a03f4b4 1590 bitpos < BITS_PER_WORD && bytes > 0;
4ab56118
RK
1591 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
1592 {
1593 int xbitpos = (BYTES_BIG_ENDIAN
fd328506 1594 ? BITS_PER_WORD - bitpos - bitsize
4ab56118
RK
1595 : bitpos);
1596
1597 store_bit_field (reg, bitsize, xbitpos, word_mode,
1598 extract_bit_field (word, bitsize, xbitpos, 1,
1599 NULL_RTX, word_mode,
1600 word_mode,
1601 bitsize / BITS_PER_UNIT,
1602 BITS_PER_WORD),
1603 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
1604 }
1605 }
1606 }
1607#endif
1608
51bbfa0c
RS
1609 /* Now store any partially-in-registers parm.
1610 This is the last place a block-move can happen. */
1611 if (reg_parm_seen)
1612 for (i = 0; i < num_actuals; i++)
1613 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1614 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1615 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c
RS
1616
1617#ifndef PUSH_ARGS_REVERSED
1618#ifdef STACK_BOUNDARY
1619 /* If we pushed args in forward order, perform stack alignment
1620 after pushing the last arg. */
1621 if (argblock == 0)
e5d70561
RK
1622 anti_adjust_stack (GEN_INT (args_size.constant
1623 - original_args_size.constant));
51bbfa0c
RS
1624#endif
1625#endif
1626
756e0e12
RS
1627 /* If register arguments require space on the stack and stack space
1628 was not preallocated, allocate stack space here for arguments
1629 passed in registers. */
1630#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
1631 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 1632 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
1633#endif
1634
51bbfa0c
RS
1635 /* Pass the function the address in which to return a structure value. */
1636 if (structure_value_addr && ! structure_value_addr_parm)
1637 {
1638 emit_move_insn (struct_value_rtx,
1639 force_reg (Pmode,
e5d70561
RK
1640 force_operand (structure_value_addr,
1641 NULL_RTX)));
51bbfa0c
RS
1642 if (GET_CODE (struct_value_rtx) == REG)
1643 {
1644 push_to_sequence (use_insns);
1645 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
1646 use_insns = get_insns ();
1647 end_sequence ();
1648 }
1649 }
1650
1651 /* Now do the register loads required for any wholly-register parms or any
1652 parms which are passed both on the stack and in a register. Their
1653 expressions were already evaluated.
1654
1655 Mark all register-parms as living through the call, putting these USE
1656 insns in a list headed by USE_INSNS. */
1657
1658 for (i = 0; i < num_actuals; i++)
1659 {
1660 rtx list = args[i].reg;
1661 int partial = args[i].partial;
1662
1663 while (list)
1664 {
1665 rtx reg;
1666 int nregs;
1667
1668 /* Process each register that needs to get this arg. */
1669 if (GET_CODE (list) == EXPR_LIST)
1670 reg = XEXP (list, 0), list = XEXP (list, 1);
1671 else
1672 reg = list, list = 0;
1673
1674 /* Set to non-zero if must move a word at a time, even if just one
1675 word (e.g., partial == 1 && mode == DFmode). Set to zero if
1676 we just use a normal move insn. */
1677 nregs = (partial ? partial
1678 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1679 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1680 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1681 : 0));
1682
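	  /* For example (figures are illustrative only): a 10-byte BLKmode
	     argument with UNITS_PER_WORD == 4 gives nregs == 3, so
	     move_block_to_reg below copies three words; a DFmode argument
	     passed entirely in registers gives nregs == 0 and is handled
	     by a single emit_move_insn.  */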
1683 /* If simple case, just do move. If normal partial, store_one_arg
1684 has already loaded the register for us. In all other cases,
1685 load the register(s) from memory. */
1686
1687 if (nregs == 0)
1688 emit_move_insn (reg, args[i].value);
4ab56118
RK
1689
1690#ifdef STRICT_ALIGNMENT
1691 /* If we have pre-computed the values to put in the registers in
1692 the case of non-aligned structures, copy them in now. */
1693
1694 else if (args[i].n_aligned_regs != 0)
1695 for (j = 0; j < args[i].n_aligned_regs; j++)
1696 emit_move_insn (gen_rtx (REG, word_mode, REGNO (reg) + j),
1697 args[i].aligned_regs[j]);
1698#endif
1699
51bbfa0c
RS
1700 else if (args[i].partial == 0 || args[i].pass_on_stack)
1701 move_block_to_reg (REGNO (reg),
1702 validize_mem (args[i].value), nregs,
1efe6448 1703 args[i].mode);
51bbfa0c
RS
1704
1705 push_to_sequence (use_insns);
1706 if (nregs == 0)
1707 emit_insn (gen_rtx (USE, VOIDmode, reg));
1708 else
1709 use_regs (REGNO (reg), nregs);
1710 use_insns = get_insns ();
1711 end_sequence ();
1712
1713 /* PARTIAL referred only to the first register, so clear it for the
1714 next time. */
1715 partial = 0;
1716 }
1717 }
1718
1719 /* Perform postincrements before actually calling the function. */
1720 emit_queue ();
1721
1722 /* All arguments and registers used for the call must be set up by now! */
1723
1724 funexp = prepare_call_address (funexp, fndecl, &use_insns);
1725
1726 /* Generate the actual call instruction. */
1727 emit_call_1 (funexp, funtype, args_size.constant, struct_value_size,
1728 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
1729 valreg, old_inhibit_defer_pop, use_insns, is_const);
1730
1731 /* If call is cse'able, make appropriate pair of reg-notes around it.
1732 Test valreg so we don't crash; may safely ignore `const'
1733 if return type is void. */
1734 if (is_const && valreg != 0)
1735 {
1736 rtx note = 0;
1737 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1738 rtx insns;
1739
1740 /* Construct an "equal form" for the value which mentions all the
1741 arguments in order as well as the function name. */
1742#ifdef PUSH_ARGS_REVERSED
1743 for (i = 0; i < num_actuals; i++)
1744 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1745#else
1746 for (i = num_actuals - 1; i >= 0; i--)
1747 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1748#endif
1749 note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note);
1750
1751 insns = get_insns ();
1752 end_sequence ();
1753
1754 emit_libcall_block (insns, temp, valreg, note);
1755
1756 valreg = temp;
1757 }
1758
1759 /* For calls to `setjmp', etc., inform flow.c it should complain
1760 if nonvolatile values are live. */
1761
1762 if (returns_twice)
1763 {
1764 emit_note (name, NOTE_INSN_SETJMP);
1765 current_function_calls_setjmp = 1;
1766 }
1767
1768 if (is_longjmp)
1769 current_function_calls_longjmp = 1;
1770
1771 /* Notice functions that cannot return.
1772 If optimizing, insns emitted below will be dead.
1773 If not optimizing, they will exist, which is useful
1774 if the user uses the `return' command in the debugger. */
1775
1776 if (is_volatile || is_longjmp)
1777 emit_barrier ();
1778
51bbfa0c
RS
1779 /* If value type not void, return an rtx for the value. */
1780
1781 /* If there are cleanups to be called, don't use a hard reg as target. */
1782 if (cleanups_this_call != old_cleanups
1783 && target && REG_P (target)
1784 && REGNO (target) < FIRST_PSEUDO_REGISTER)
1785 target = 0;
1786
1787 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
1788 || ignore)
1789 {
1790 target = const0_rtx;
1791 }
1792 else if (structure_value_addr)
1793 {
1794 if (target == 0 || GET_CODE (target) != MEM)
29008b51
JW
1795 {
1796 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1797 memory_address (TYPE_MODE (TREE_TYPE (exp)),
1798 structure_value_addr));
1799 MEM_IN_STRUCT_P (target)
1800 = (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
1801 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
c1b98a95
RK
1802 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
1803 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE);
29008b51 1804 }
51bbfa0c
RS
1805 }
1806 else if (pcc_struct_value)
1807 {
1808 if (target == 0)
29008b51
JW
1809 {
1810 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1811 copy_to_reg (valreg));
1812 MEM_IN_STRUCT_P (target)
1813 = (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
1814 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
c1b98a95
RK
1815 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
1816 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE);
29008b51 1817 }
51bbfa0c
RS
1818 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1819 emit_move_insn (target, gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1820 copy_to_reg (valreg)));
1821 else
1822 emit_block_move (target, gen_rtx (MEM, BLKmode, copy_to_reg (valreg)),
1823 expr_size (exp),
1824 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
1825 }
84b55618
RK
1826 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
1827 && GET_MODE (target) == GET_MODE (valreg))
51bbfa0c
RS
1828 /* TARGET and VALREG cannot be equal at this point because the latter
1829 would not have REG_FUNCTION_VALUE_P true, while the former would if
1830 it were referring to the same register.
1831
1832 If they refer to the same register, this move will be a no-op, except
1833 when function inlining is being done. */
1834 emit_move_insn (target, valreg);
1835 else
1836 target = copy_to_reg (valreg);
1837
84b55618 1838#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
1839 /* If we promoted this return value, make the proper SUBREG. TARGET
1840 might be const0_rtx here, so be careful. */
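  /* For instance, on a hypothetical target whose PROMOTE_MODE widens
     HImode to SImode, a function declared to return a `short' hands its
     value back in an SImode register; the code below rewraps it as
     (subreg:HI (reg:SI ...) 0) and records how it was extended.  */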
1841 if (GET_CODE (target) == REG
1842 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 1843 {
5d2ac65e 1844 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
84b55618
RK
1845 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
1846
1847 if (TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE
1848 || TREE_CODE (TREE_TYPE (exp)) == ENUMERAL_TYPE
1849 || TREE_CODE (TREE_TYPE (exp)) == BOOLEAN_TYPE
1850 || TREE_CODE (TREE_TYPE (exp)) == CHAR_TYPE
1851 || TREE_CODE (TREE_TYPE (exp)) == REAL_TYPE
1852 || TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE
1853 || TREE_CODE (TREE_TYPE (exp)) == OFFSET_TYPE)
1854 {
1855 PROMOTE_MODE (mode, unsignedp, TREE_TYPE (exp));
1856 }
1857
5d2ac65e
RK
1858 /* If we didn't promote as expected, something is wrong. */
1859 if (mode != GET_MODE (target))
1860 abort ();
1861
84b55618
RK
1862 target = gen_rtx (SUBREG, TYPE_MODE (TREE_TYPE (exp)), target, 0);
1863 SUBREG_PROMOTED_VAR_P (target) = 1;
1864 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
1865 }
1866#endif
1867
51bbfa0c
RS
1868 /* Perform all cleanups needed for the arguments of this call
1869 (i.e. destructors in C++). */
1870 expand_cleanups_to (old_cleanups);
1871
2f4aa534
RS
1872 /* If size of args is variable or this was a constructor call for a stack
1873 argument, restore saved stack-pointer value. */
51bbfa0c
RS
1874
1875 if (old_stack_level)
1876 {
e5d70561 1877 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 1878 pending_stack_adjust = old_pending_adj;
d64f5a78 1879#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
1880 stack_arg_under_construction = old_stack_arg_under_construction;
1881 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
1882 stack_usage_map = initial_stack_usage_map;
d64f5a78 1883#endif
51bbfa0c 1884 }
51bbfa0c
RS
1885#ifdef ACCUMULATE_OUTGOING_ARGS
1886 else
1887 {
1888#ifdef REG_PARM_STACK_SPACE
1889 if (save_area)
1890 {
1891 enum machine_mode save_mode = GET_MODE (save_area);
1892 rtx stack_area
1893 = gen_rtx (MEM, save_mode,
1894 memory_address (save_mode,
b94301c2
RS
1895#ifdef ARGS_GROW_DOWNWARD
1896 plus_constant (argblock, - high_to_save)
1897#else
1898 plus_constant (argblock, low_to_save)
1899#endif
1900 ));
51bbfa0c
RS
1901
1902 if (save_mode != BLKmode)
1903 emit_move_insn (stack_area, save_area);
1904 else
1905 emit_block_move (stack_area, validize_mem (save_area),
e5d70561
RK
1906 GEN_INT (high_to_save - low_to_save + 1),
1907 PARM_BOUNDARY / BITS_PER_UNIT);
51bbfa0c
RS
1908 }
1909#endif
1910
1911 /* If we saved any argument areas, restore them. */
1912 for (i = 0; i < num_actuals; i++)
1913 if (args[i].save_area)
1914 {
1915 enum machine_mode save_mode = GET_MODE (args[i].save_area);
1916 rtx stack_area
1917 = gen_rtx (MEM, save_mode,
1918 memory_address (save_mode,
1919 XEXP (args[i].stack_slot, 0)));
1920
1921 if (save_mode != BLKmode)
1922 emit_move_insn (stack_area, args[i].save_area);
1923 else
1924 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 1925 GEN_INT (args[i].size.constant),
51bbfa0c
RS
1926 PARM_BOUNDARY / BITS_PER_UNIT);
1927 }
1928
1929 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
1930 stack_usage_map = initial_stack_usage_map;
1931 }
1932#endif
1933
59257ff7
RK
1934 /* If this was alloca, record the new stack level for nonlocal gotos.
1935 Check for the handler slots since we might not have a save area
1936 for non-local gotos. */
1937
1938 if (may_be_alloca && nonlocal_goto_handler_slot != 0)
e5d70561 1939 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
1940
1941 pop_temp_slots ();
1942
1943 return target;
1944}
1945\f
322e3e34
RK
1946/* Output a library call to function FUN (a SYMBOL_REF rtx)
1947 (emitting the queue unless NO_QUEUE is nonzero),
1948 for a value of mode OUTMODE,
1949 with NARGS different arguments, passed as alternating rtx values
1950 and machine_modes to convert them to.
1951 The rtx values should have been passed through protect_from_queue already.
1952
1953 NO_QUEUE will be true if and only if the library call is a `const' call
1954 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1955 to the variable is_const in expand_call.
1956
1957 NO_QUEUE must be true for const calls, because if it isn't, then
1958 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
1959 and will be lost if the libcall sequence is optimized away.
1960
1961 NO_QUEUE must be false for non-const calls, because if it isn't, the
1962 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
1963 optimized. For instance, the instruction scheduler may incorrectly
1964 move memory references across the non-const call. */
1965
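/* A typical invocation looks like this (the libcall name and the operand
   rtxen are purely illustrative):

	emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__example2"),
			   1, SImode, 2,
			   op0, SImode,
			   op1, SImode);

   i.e. FUN, NO_QUEUE, OUTMODE and NARGS, followed by NARGS pairs of
   an rtx value and the machine_mode to pass it in.  */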
1966void
1967emit_library_call (va_alist)
1968 va_dcl
1969{
1970 va_list p;
1971 /* Total size in bytes of all the stack-parms scanned so far. */
1972 struct args_size args_size;
1973 /* Size of arguments before any adjustments (such as rounding). */
1974 struct args_size original_args_size;
1975 register int argnum;
1976 enum machine_mode outmode;
1977 int nargs;
1978 rtx fun;
1979 rtx orgfun;
1980 int inc;
1981 int count;
1982 rtx argblock = 0;
1983 CUMULATIVE_ARGS args_so_far;
1984 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1985 struct args_size offset; struct args_size size; };
1986 struct arg *argvec;
1987 int old_inhibit_defer_pop = inhibit_defer_pop;
1988 int no_queue = 0;
1989 rtx use_insns;
1990
1991 va_start (p);
1992 orgfun = fun = va_arg (p, rtx);
1993 no_queue = va_arg (p, int);
1994 outmode = va_arg (p, enum machine_mode);
1995 nargs = va_arg (p, int);
1996
1997 /* Copy all the libcall-arguments out of the varargs data
1998 and into a vector ARGVEC.
1999
2000 Compute how to pass each argument. We only support a very small subset
2001 of the full argument passing conventions to limit complexity here since
2002 library functions shouldn't have many args. */
2003
2004 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2005
2006 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2007
2008 args_size.constant = 0;
2009 args_size.var = 0;
2010
2011 for (count = 0; count < nargs; count++)
2012 {
2013 rtx val = va_arg (p, rtx);
2014 enum machine_mode mode = va_arg (p, enum machine_mode);
2015
2016 /* We cannot convert the arg value to the mode the library wants here;
2017 must do it earlier where we know the signedness of the arg. */
2018 if (mode == BLKmode
2019 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2020 abort ();
2021
2022 /* On some machines, there's no way to pass a float to a library fcn.
2023 Pass it as a double instead. */
2024#ifdef LIBGCC_NEEDS_DOUBLE
2025 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2026 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2027#endif
2028
2029 /* There's no need to call protect_from_queue, because
2030 either emit_move_insn or emit_push_insn will do that. */
2031
2032 /* Make sure it is a reasonable operand for a move or push insn. */
2033 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2034 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2035 val = force_operand (val, NULL_RTX);
2036
2037 argvec[count].value = val;
2038 argvec[count].mode = mode;
2039
2040#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2041 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2042 abort ();
2043#endif
2044
2045 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2046 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2047 abort ();
2048#ifdef FUNCTION_ARG_PARTIAL_NREGS
2049 argvec[count].partial
2050 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2051#else
2052 argvec[count].partial = 0;
2053#endif
2054
2055 locate_and_pad_parm (mode, NULL_TREE,
2056 argvec[count].reg && argvec[count].partial == 0,
2057 NULL_TREE, &args_size, &argvec[count].offset,
2058 &argvec[count].size);
2059
2060 if (argvec[count].size.var)
2061 abort ();
2062
2063#ifndef REG_PARM_STACK_SPACE
2064 if (argvec[count].partial)
2065 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2066#endif
2067
2068 if (argvec[count].reg == 0 || argvec[count].partial != 0
2069#ifdef REG_PARM_STACK_SPACE
2070 || 1
2071#endif
2072 )
2073 args_size.constant += argvec[count].size.constant;
2074
2075#ifdef ACCUMULATE_OUTGOING_ARGS
2076 /* If this arg is actually passed on the stack, it might be
2077 clobbering something we already put there (this library call might
2078 be inside the evaluation of an argument to a function whose call
2079 requires the stack). This will only occur when the library call
2080 has sufficient args to run out of argument registers. Abort in
2081 this case; if this ever occurs, code must be added to save and
2082 restore the arg slot. */
2083
2084 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2085 abort ();
2086#endif
2087
2088 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2089 }
2090 va_end (p);
2091
2092 /* If this machine requires an external definition for library
2093 functions, write one out. */
2094 assemble_external_libcall (fun);
2095
2096 original_args_size = args_size;
2097#ifdef STACK_BOUNDARY
2098 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2099 / STACK_BYTES) * STACK_BYTES);
2100#endif
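 /* E.g. with STACK_BYTES == 8 and 20 bytes of arguments, this rounds
    args_size.constant up to 24, keeping the stack aligned to
    STACK_BOUNDARY across the call.  */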
2101
2102#ifdef REG_PARM_STACK_SPACE
2103 args_size.constant = MAX (args_size.constant,
2104 REG_PARM_STACK_SPACE (NULL_TREE));
2105#ifndef OUTGOING_REG_PARM_STACK_SPACE
2106 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2107#endif
2108#endif
2109
2110#ifdef ACCUMULATE_OUTGOING_ARGS
2111 if (args_size.constant > current_function_outgoing_args_size)
2112 current_function_outgoing_args_size = args_size.constant;
2113 args_size.constant = 0;
2114#endif
2115
2116#ifndef PUSH_ROUNDING
2117 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2118#endif
2119
2120#ifdef PUSH_ARGS_REVERSED
2121#ifdef STACK_BOUNDARY
2122 /* If we push args individually in reverse order, perform stack alignment
2123 before the first push (the last arg). */
2124 if (argblock == 0)
2125 anti_adjust_stack (GEN_INT (args_size.constant
2126 - original_args_size.constant));
2127#endif
2128#endif
2129
2130#ifdef PUSH_ARGS_REVERSED
2131 inc = -1;
2132 argnum = nargs - 1;
2133#else
2134 inc = 1;
2135 argnum = 0;
2136#endif
2137
2138 /* Push the args that need to be pushed. */
2139
2140 for (count = 0; count < nargs; count++, argnum += inc)
2141 {
2142 register enum machine_mode mode = argvec[argnum].mode;
2143 register rtx val = argvec[argnum].value;
2144 rtx reg = argvec[argnum].reg;
2145 int partial = argvec[argnum].partial;
2146
2147 if (! (reg != 0 && partial == 0))
2148 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2149 argblock, GEN_INT (argvec[argnum].offset.constant));
2150 NO_DEFER_POP;
2151 }
2152
2153#ifndef PUSH_ARGS_REVERSED
2154#ifdef STACK_BOUNDARY
2155 /* If we pushed args in forward order, perform stack alignment
2156 after pushing the last arg. */
2157 if (argblock == 0)
2158 anti_adjust_stack (GEN_INT (args_size.constant
2159 - original_args_size.constant));
2160#endif
2161#endif
2162
2163#ifdef PUSH_ARGS_REVERSED
2164 argnum = nargs - 1;
2165#else
2166 argnum = 0;
2167#endif
2168
2169 /* Now load any reg parms into their regs. */
2170
2171 for (count = 0; count < nargs; count++, argnum += inc)
2172 {
2173 register enum machine_mode mode = argvec[argnum].mode;
2174 register rtx val = argvec[argnum].value;
2175 rtx reg = argvec[argnum].reg;
2176 int partial = argvec[argnum].partial;
2177
2178 if (reg != 0 && partial == 0)
2179 emit_move_insn (reg, val);
2180 NO_DEFER_POP;
2181 }
2182
2183 /* For version 1.37, try deleting this entirely. */
2184 if (! no_queue)
2185 emit_queue ();
2186
2187 /* Any regs containing parms remain in use through the call. */
2188 start_sequence ();
2189 for (count = 0; count < nargs; count++)
2190 if (argvec[count].reg != 0)
2191 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2192
2193 use_insns = get_insns ();
2194 end_sequence ();
2195
2196 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2197
2198 /* Don't allow popping to be deferred, since then
2199 cse'ing of library calls could delete a call and leave the pop. */
2200 NO_DEFER_POP;
2201
2202 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2203 will set inhibit_defer_pop to that value. */
2204
2205 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2206 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2207 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2208 old_inhibit_defer_pop + 1, use_insns, no_queue);
2209
2210 /* Now restore inhibit_defer_pop to its actual original value. */
2211 OK_DEFER_POP;
2212}
2213\f
2214/* Like emit_library_call except that an extra argument, VALUE,
2215 comes second and says where to store the result.
2216 (If VALUE is zero, the result comes in the function value register.) */
2217
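/* Illustrative call (the libcall name and operands are hypothetical);
   note that VALUE comes right after the function:

	emit_library_call_value (gen_rtx (SYMBOL_REF, Pmode, "__example2"),
				 target, 1, SImode, 2,
				 op0, SImode,
				 op1, SImode);  */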
2218void
2219emit_library_call_value (va_alist)
2220 va_dcl
2221{
2222 va_list p;
2223 /* Total size in bytes of all the stack-parms scanned so far. */
2224 struct args_size args_size;
2225 /* Size of arguments before any adjustments (such as rounding). */
2226 struct args_size original_args_size;
2227 register int argnum;
2228 enum machine_mode outmode;
2229 int nargs;
2230 rtx fun;
2231 rtx orgfun;
2232 int inc;
2233 int count;
2234 rtx argblock = 0;
2235 CUMULATIVE_ARGS args_so_far;
2236 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2237 struct args_size offset; struct args_size size; };
2238 struct arg *argvec;
2239 int old_inhibit_defer_pop = inhibit_defer_pop;
2240 int no_queue = 0;
2241 rtx use_insns;
2242 rtx value;
2243 rtx mem_value = 0;
2244
2245 va_start (p);
2246 orgfun = fun = va_arg (p, rtx);
2247 value = va_arg (p, rtx);
2248 no_queue = va_arg (p, int);
2249 outmode = va_arg (p, enum machine_mode);
2250 nargs = va_arg (p, int);
2251
2252 /* If this kind of value comes back in memory,
2253 decide where in memory it should come back. */
2254 if (RETURN_IN_MEMORY (type_for_mode (outmode, 0)))
2255 {
2256 if (GET_CODE (value) == MEM)
2257 mem_value = value;
2258 else
2259 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2260 }
2261
2262 /* ??? Unfinished: must pass the memory address as an argument. */
2263
2264 /* Copy all the libcall-arguments out of the varargs data
2265 and into a vector ARGVEC.
2266
2267 Compute how to pass each argument. We only support a very small subset
2268 of the full argument passing conventions to limit complexity here since
2269 library functions shouldn't have many args. */
2270
2271 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2272
2273 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2274
2275 args_size.constant = 0;
2276 args_size.var = 0;
2277
2278 count = 0;
2279
2280 /* If there's a structure value address to be passed,
2281 either pass it in the special place, or pass it as an extra argument. */
2282 if (mem_value)
2283 {
2284 rtx addr = XEXP (mem_value, 0);
2285
2286 if (! struct_value_rtx)
2287 {
2288 nargs++;
2289
2290 /* Make sure it is a reasonable operand for a move or push insn. */
2291 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2292 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2293 addr = force_operand (addr, NULL_RTX);
2294
2295 argvec[count].value = addr;
2296 argvec[count].mode = outmode;
2297 argvec[count].partial = 0;
2298
2299 argvec[count].reg = FUNCTION_ARG (args_so_far, outmode, NULL_TREE, 1);
2300#ifdef FUNCTION_ARG_PARTIAL_NREGS
2301 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, outmode, NULL_TREE, 1))
2302 abort ();
2303#endif
2304
2305 locate_and_pad_parm (outmode, NULL_TREE,
2306 argvec[count].reg && argvec[count].partial == 0,
2307 NULL_TREE, &args_size, &argvec[count].offset,
2308 &argvec[count].size);
2309
2310
2311 if (argvec[count].reg == 0 || argvec[count].partial != 0
2312#ifdef REG_PARM_STACK_SPACE
2313 || 1
2314#endif
2315 )
2316 args_size.constant += argvec[count].size.constant;
2317
2318 FUNCTION_ARG_ADVANCE (args_so_far, outmode, (tree)0, 1);
2319 }
2320 }
2321
2322 for (; count < nargs; count++)
2323 {
2324 rtx val = va_arg (p, rtx);
2325 enum machine_mode mode = va_arg (p, enum machine_mode);
2326
2327 /* We cannot convert the arg value to the mode the library wants here;
2328 must do it earlier where we know the signedness of the arg. */
2329 if (mode == BLKmode
2330 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2331 abort ();
2332
2333 /* On some machines, there's no way to pass a float to a library fcn.
2334 Pass it as a double instead. */
2335#ifdef LIBGCC_NEEDS_DOUBLE
2336 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2337 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2338#endif
2339
2340 /* There's no need to call protect_from_queue, because
2341 either emit_move_insn or emit_push_insn will do that. */
2342
2343 /* Make sure it is a reasonable operand for a move or push insn. */
2344 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2345 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2346 val = force_operand (val, NULL_RTX);
2347
2348 argvec[count].value = val;
2349 argvec[count].mode = mode;
2350
2351#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2352 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2353 abort ();
2354#endif
2355
2356 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2357 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2358 abort ();
2359#ifdef FUNCTION_ARG_PARTIAL_NREGS
2360 argvec[count].partial
2361 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2362#else
2363 argvec[count].partial = 0;
2364#endif
2365
2366 locate_and_pad_parm (mode, NULL_TREE,
2367 argvec[count].reg && argvec[count].partial == 0,
2368 NULL_TREE, &args_size, &argvec[count].offset,
2369 &argvec[count].size);
2370
2371 if (argvec[count].size.var)
2372 abort ();
2373
2374#ifndef REG_PARM_STACK_SPACE
2375 if (argvec[count].partial)
2376 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2377#endif
2378
2379 if (argvec[count].reg == 0 || argvec[count].partial != 0
2380#ifdef REG_PARM_STACK_SPACE
2381 || 1
2382#endif
2383 )
2384 args_size.constant += argvec[count].size.constant;
2385
2386#ifdef ACCUMULATE_OUTGOING_ARGS
2387 /* If this arg is actually passed on the stack, it might be
2388 clobbering something we already put there (this library call might
2389 be inside the evaluation of an argument to a function whose call
2390 requires the stack). This will only occur when the library call
2391 has sufficient args to run out of argument registers. Abort in
2392 this case; if this ever occurs, code must be added to save and
2393 restore the arg slot. */
2394
2395 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2396 abort ();
2397#endif
2398
2399 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2400 }
2401 va_end (p);
2402
2403 /* If this machine requires an external definition for library
2404 functions, write one out. */
2405 assemble_external_libcall (fun);
2406
2407 original_args_size = args_size;
2408#ifdef STACK_BOUNDARY
2409 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2410 / STACK_BYTES) * STACK_BYTES);
2411#endif
2412
2413#ifdef REG_PARM_STACK_SPACE
2414 args_size.constant = MAX (args_size.constant,
2415 REG_PARM_STACK_SPACE (NULL_TREE));
2416#ifndef OUTGOING_REG_PARM_STACK_SPACE
2417 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2418#endif
2419#endif
2420
2421#ifdef ACCUMULATE_OUTGOING_ARGS
2422 if (args_size.constant > current_function_outgoing_args_size)
2423 current_function_outgoing_args_size = args_size.constant;
2424 args_size.constant = 0;
2425#endif
2426
2427#ifndef PUSH_ROUNDING
2428 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2429#endif
2430
2431#ifdef PUSH_ARGS_REVERSED
2432#ifdef STACK_BOUNDARY
2433 /* If we push args individually in reverse order, perform stack alignment
2434 before the first push (the last arg). */
2435 if (argblock == 0)
2436 anti_adjust_stack (GEN_INT (args_size.constant
2437 - original_args_size.constant));
2438#endif
2439#endif
2440
2441#ifdef PUSH_ARGS_REVERSED
2442 inc = -1;
2443 argnum = nargs - 1;
2444#else
2445 inc = 1;
2446 argnum = 0;
2447#endif
2448
2449 /* Push the args that need to be pushed. */
2450
2451 for (count = 0; count < nargs; count++, argnum += inc)
2452 {
2453 register enum machine_mode mode = argvec[argnum].mode;
2454 register rtx val = argvec[argnum].value;
2455 rtx reg = argvec[argnum].reg;
2456 int partial = argvec[argnum].partial;
2457
2458 if (! (reg != 0 && partial == 0))
2459 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2460 argblock, GEN_INT (argvec[argnum].offset.constant));
2461 NO_DEFER_POP;
2462 }
2463
2464#ifndef PUSH_ARGS_REVERSED
2465#ifdef STACK_BOUNDARY
2466 /* If we pushed args in forward order, perform stack alignment
2467 after pushing the last arg. */
2468 if (argblock == 0)
2469 anti_adjust_stack (GEN_INT (args_size.constant
2470 - original_args_size.constant));
2471#endif
2472#endif
2473
2474#ifdef PUSH_ARGS_REVERSED
2475 argnum = nargs - 1;
2476#else
2477 argnum = 0;
2478#endif
2479
2480 /* Now load any reg parms into their regs. */
2481
2482 if (mem_value != 0 && struct_value_rtx != 0)
2483 emit_move_insn (struct_value_rtx, XEXP (mem_value, 0));
2484
2485 for (count = 0; count < nargs; count++, argnum += inc)
2486 {
2487 register enum machine_mode mode = argvec[argnum].mode;
2488 register rtx val = argvec[argnum].value;
2489 rtx reg = argvec[argnum].reg;
2490 int partial = argvec[argnum].partial;
2491
2492 if (reg != 0 && partial == 0)
2493 emit_move_insn (reg, val);
2494 NO_DEFER_POP;
2495 }
2496
2497#if 0
2498 /* For version 1.37, try deleting this entirely. */
2499 if (! no_queue)
2500 emit_queue ();
2501#endif
2502
2503 /* Any regs containing parms remain in use through the call. */
2504 start_sequence ();
2505 for (count = 0; count < nargs; count++)
2506 if (argvec[count].reg != 0)
2507 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2508
2509 use_insns = get_insns ();
2510 end_sequence ();
2511
2512 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2513
2514 /* Don't allow popping to be deferred, since then
2515 cse'ing of library calls could delete a call and leave the pop. */
2516 NO_DEFER_POP;
2517
2518 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2519 will set inhibit_defer_pop to that value. */
2520
2521 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2522 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2523 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2524 old_inhibit_defer_pop + 1, use_insns, no_queue);
2525
2526 /* Now restore inhibit_defer_pop to its actual original value. */
2527 OK_DEFER_POP;
2528
2529 /* Copy the value to the right place. */
2530 if (outmode != VOIDmode)
2531 {
2532 if (mem_value)
2533 {
2534 if (value == 0)
2535 value = hard_libcall_value (outmode);
2536 if (value != mem_value)
2537 emit_move_insn (value, mem_value);
2538 }
2539 else if (value != 0)
2540 emit_move_insn (value, hard_libcall_value (outmode));
2541 }
2542}
2543\f
51bbfa0c
RS
2544#if 0
2545/* Return an rtx which represents a suitable home on the stack
2546 given TYPE, the type of the argument looking for a home.
2547 This is called only for BLKmode arguments.
2548
2549 SIZE is the size needed for this target.
2550 ARGS_ADDR is the address of the bottom of the argument block for this call.
2551 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
2552 if this machine uses push insns. */
2553
2554static rtx
2555target_for_arg (type, size, args_addr, offset)
2556 tree type;
2557 rtx size;
2558 rtx args_addr;
2559 struct args_size offset;
2560{
2561 rtx target;
2562 rtx offset_rtx = ARGS_SIZE_RTX (offset);
2563
2564 /* We do not call memory_address if possible,
2565 because we want to address as close to the stack
2566 as possible. For non-variable sized arguments,
2567 this will be stack-pointer relative addressing. */
2568 if (GET_CODE (offset_rtx) == CONST_INT)
2569 target = plus_constant (args_addr, INTVAL (offset_rtx));
2570 else
2571 {
2572 /* I have no idea how to guarantee that this
2573 will work in the presence of register parameters. */
2574 target = gen_rtx (PLUS, Pmode, args_addr, offset_rtx);
2575 target = memory_address (QImode, target);
2576 }
2577
2578 return gen_rtx (MEM, BLKmode, target);
2579}
2580#endif
2581\f
2582/* Store a single argument for a function call
2583 into the register or memory area where it must be passed.
2584 *ARG describes the argument value and where to pass it.
2585
2586 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 2587 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
2588
2589 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
2590 so must be careful about how the stack is used.
2591
2592 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
2593 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
2594 that we need not worry about saving and restoring the stack.
2595
2596 FNDECL is the declaration of the function we are calling. */
2597
2598static void
6f90e075
JW
2599store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl,
2600 reg_parm_stack_space)
51bbfa0c
RS
2601 struct arg_data *arg;
2602 rtx argblock;
2603 int may_be_alloca;
2604 int variable_size;
2605 tree fndecl;
6f90e075 2606 int reg_parm_stack_space;
51bbfa0c
RS
2607{
2608 register tree pval = arg->tree_value;
2609 rtx reg = 0;
2610 int partial = 0;
2611 int used = 0;
2612 int i, lower_bound, upper_bound;
2613
2614 if (TREE_CODE (pval) == ERROR_MARK)
2615 return;
2616
2617#ifdef ACCUMULATE_OUTGOING_ARGS
2618 /* If this is being stored into a pre-allocated, fixed-size, stack area,
2619 save any previous data at that location. */
2620 if (argblock && ! variable_size && arg->stack)
2621 {
2622#ifdef ARGS_GROW_DOWNWARD
2623 /* stack_slot is negative, but we want to index stack_usage_map */
2624 /* with positive values. */
2625 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2626 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
2627 else
2628 abort ();
2629
2630 lower_bound = upper_bound - arg->size.constant;
2631#else
2632 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2633 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
2634 else
2635 lower_bound = 0;
2636
2637 upper_bound = lower_bound + arg->size.constant;
2638#endif
2639
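      /* Illustrative case (args grow upward): if the slot address is
	 (plus argblock 16) and arg->size.constant is 8, the loop below
	 checks stack_usage_map bytes 16 through 23 for a prior use.  */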
2640 for (i = lower_bound; i < upper_bound; i++)
2641 if (stack_usage_map[i]
2642#ifdef REG_PARM_STACK_SPACE
2643 /* Don't store things in the fixed argument area at this point;
2644 it has already been saved. */
6f90e075 2645 && i > reg_parm_stack_space
51bbfa0c
RS
2646#endif
2647 )
2648 break;
2649
2650 if (i != upper_bound)
2651 {
2652 /* We need to make a save area. See what mode we can make it. */
2653 enum machine_mode save_mode
2654 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
2655 rtx stack_area
2656 = gen_rtx (MEM, save_mode,
2657 memory_address (save_mode, XEXP (arg->stack_slot, 0)));
2658
2659 if (save_mode == BLKmode)
2660 {
2661 arg->save_area = assign_stack_temp (BLKmode,
2662 arg->size.constant, 1);
2663 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 2664 GEN_INT (arg->size.constant),
51bbfa0c
RS
2665 PARM_BOUNDARY / BITS_PER_UNIT);
2666 }
2667 else
2668 {
2669 arg->save_area = gen_reg_rtx (save_mode);
2670 emit_move_insn (arg->save_area, stack_area);
2671 }
2672 }
2673 }
2674#endif
2675
2676 /* If this isn't going to be placed on both the stack and in registers,
2677 set up the register and number of words. */
2678 if (! arg->pass_on_stack)
2679 reg = arg->reg, partial = arg->partial;
2680
2681 if (reg != 0 && partial == 0)
2682 /* Being passed entirely in a register. We shouldn't be called in
2683 this case. */
2684 abort ();
2685
4ab56118
RK
2686#ifdef STRICT_ALIGNMENT
2687 /* If this arg needs special alignment, don't load the registers
2688 here. */
2689 if (arg->n_aligned_regs != 0)
2690 reg = 0;
2691#endif
2692
51bbfa0c
RS
2693 /* If this is being partially passed in a register, but multiple locations
2694 are specified, we assume that the one partially used is the one that is
2695 listed first. */
2696 if (reg && GET_CODE (reg) == EXPR_LIST)
2697 reg = XEXP (reg, 0);
2698
4ab56118 2699 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
2700 it directly into its stack slot. Otherwise, we can. */
2701 if (arg->value == 0)
d64f5a78
RS
2702 {
2703#ifdef ACCUMULATE_OUTGOING_ARGS
2704 /* stack_arg_under_construction is nonzero if a function argument is
2705 being evaluated directly into the outgoing argument list and
2706 expand_call must take special action to preserve the argument list
2707 if it is called recursively.
2708
2709 For scalar function arguments stack_usage_map is sufficient to
2710 determine which stack slots must be saved and restored. Scalar
2711 arguments in general have pass_on_stack == 0.
2712
2713 If this argument is initialized by a function which takes the
2714 address of the argument (a C++ constructor or a C function
2715 returning a BLKmode structure), then stack_usage_map is
2716 insufficient and expand_call must push the stack around the
2717 function call. Such arguments have pass_on_stack == 1.
2718
2719 Note that it is always safe to set stack_arg_under_construction,
2720 but this generates suboptimal code if set when not needed. */
2721
2722 if (arg->pass_on_stack)
2723 stack_arg_under_construction++;
2724#endif
e5d70561
RK
2725 arg->value = expand_expr (pval, partial ? NULL_RTX : arg->stack,
2726 VOIDmode, 0);
1efe6448
RK
2727
2728 /* If we are promoting the object (or if for any other reason the mode
2729 doesn't agree), convert it to the proper mode now. */
2730
2731 if (GET_MODE (arg->value) != VOIDmode
2732 && GET_MODE (arg->value) != arg->mode)
2733 arg->value = convert_to_mode (arg->mode, arg->value, arg->unsignedp);
2734
d64f5a78
RS
2735#ifdef ACCUMULATE_OUTGOING_ARGS
2736 if (arg->pass_on_stack)
2737 stack_arg_under_construction--;
2738#endif
2739 }
51bbfa0c
RS
2740
2741 /* Don't allow anything left on stack from computation
2742 of argument to alloca. */
2743 if (may_be_alloca)
2744 do_pending_stack_adjust ();
2745
2746 if (arg->value == arg->stack)
2747 /* If the value is already in the stack slot, we are done. */
2748 ;
1efe6448 2749 else if (arg->mode != BLKmode)
51bbfa0c
RS
2750 {
2751 register int size;
2752
2753 /* Argument is a scalar, not entirely passed in registers.
2754 (If part is passed in registers, arg->partial says how much
2755 and emit_push_insn will take care of putting it there.)
2756
2757 Push it, and if its size is less than the
2758 amount of space allocated to it,
2759 also bump stack pointer by the additional space.
2760 Note that in C the default argument promotions
2761 will prevent such mismatches. */
2762
1efe6448 2763 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
2764 /* Compute how much space the push instruction will push.
2765 On many machines, pushing a byte will advance the stack
2766 pointer by a halfword. */
2767#ifdef PUSH_ROUNDING
2768 size = PUSH_ROUNDING (size);
2769#endif
2770 used = size;
2771
2772 /* Compute how much space the argument should get:
2773 round up to a multiple of the alignment for arguments. */
1efe6448 2774 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
2775 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
2776 / (PARM_BOUNDARY / BITS_PER_UNIT))
2777 * (PARM_BOUNDARY / BITS_PER_UNIT));
2778
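      /* E.g. with PARM_BOUNDARY == 32 (4 bytes), a 1-byte scalar that the
	 target pads gets used == 4; the extra 3 bytes (used - size) are
	 handed to emit_push_insn below so the stack pointer advances a
	 full slot.  */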
2779 /* This isn't already where we want it on the stack, so put it there.
2780 This can either be done with push or copy insns. */
ccf5d244
RK
2781 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
2782 0, partial, reg, used - size,
2783 argblock, ARGS_SIZE_RTX (arg->offset));
51bbfa0c
RS
2784 }
2785 else
2786 {
2787 /* BLKmode, at least partly to be pushed. */
2788
2789 register int excess;
2790 rtx size_rtx;
2791
2792 /* Pushing a nonscalar.
2793 If part is passed in registers, PARTIAL says how much
2794 and emit_push_insn will take care of putting it there. */
2795
2796 /* Round its size up to a multiple
2797 of the allocation unit for arguments. */
2798
2799 if (arg->size.var != 0)
2800 {
2801 excess = 0;
2802 size_rtx = ARGS_SIZE_RTX (arg->size);
2803 }
2804 else
2805 {
51bbfa0c
RS
2806 /* PUSH_ROUNDING has no effect on us, because
2807 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 2808 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 2809 + partial * UNITS_PER_WORD);
e4f93898 2810 size_rtx = expr_size (pval);
51bbfa0c
RS
2811 }
2812
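      /* Illustrative figures: a 10-byte structure whose stack slot was
	 rounded up to 12 bytes, with no partial registers, gives
	 excess == 2; emit_push_insn uses that to advance the stack
	 pointer past the padding after pushing the data.  */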
1efe6448 2813 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c
RS
2814 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
2815 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset));
2816 }
2817
2818
2819 /* Unless this is a partially-in-register argument, the argument is now
2820 in the stack.
2821
2822 ??? Note that this can change arg->value from arg->stack to
2823 arg->stack_slot and it matters when they are not the same.
2824 It isn't totally clear that this is correct in all cases. */
2825 if (partial == 0)
2826 arg->value = arg->stack_slot;
2827
2828 /* Once we have pushed something, pops can't safely
2829 be deferred during the rest of the arguments. */
2830 NO_DEFER_POP;
2831
2832 /* ANSI doesn't require a sequence point here,
2833 but PCC has one, so this will avoid some problems. */
2834 emit_queue ();
2835
2836 /* Free any temporary slots made in processing this argument. */
2837 free_temp_slots ();
2838
2839#ifdef ACCUMULATE_OUTGOING_ARGS
2840 /* Now mark the segment we just used. */
2841 if (argblock && ! variable_size && arg->stack)
2842 for (i = lower_bound; i < upper_bound; i++)
2843 stack_usage_map[i] = 1;
2844#endif
2845}