gcc.gnu.org Git - gcc.git/blame - gcc/calls.c
51bbfa0c 1/* Convert function calls to rtl insns, for GNU C compiler.
ebef2728 2 Copyright (C) 1989, 1992, 1993, 1994 Free Software Foundation, Inc.
51bbfa0c
RS
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20#include "config.h"
21#include "rtl.h"
22#include "tree.h"
23#include "flags.h"
24#include "expr.h"
4f90e4a0 25#ifdef __STDC__
04fe4385 26#include <stdarg.h>
4f90e4a0 27#else
04fe4385 28#include <varargs.h>
4f90e4a0 29#endif
51bbfa0c
RS
30#include "insn-flags.h"
31
32/* Decide whether a function's arguments should be processed
bbc8a071
RK
33 from first to last or from last to first.
34
35 They should if the stack and args grow in opposite directions, but
36 only if we have push insns. */
51bbfa0c 37
51bbfa0c 38#ifdef PUSH_ROUNDING
bbc8a071 39
40083ddf 40#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
51bbfa0c
RS
41#define PUSH_ARGS_REVERSED /* If it's last to first */
42#endif
bbc8a071 43
51bbfa0c
RS
44#endif
45
46/* Like STACK_BOUNDARY but in units of bytes, not bits. */
47#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
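/* A minimal illustrative sketch of the rounding this macro enables:
   bumping a byte count up to the next multiple of STACK_BYTES, the same
   arithmetic later applied to args_size.constant in expand_call.  The
   helper name is hypothetical and not used elsewhere in this file.  */
#if 0
static int
round_to_stack_bytes (size)
     int size;
{
  return ((size + (STACK_BYTES - 1)) / STACK_BYTES) * STACK_BYTES;
}
#endif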
48
49/* Data structure and subroutines used within expand_call. */
50
51struct arg_data
52{
53 /* Tree node for this argument. */
54 tree tree_value;
1efe6448
RK
55 /* Mode for value; TYPE_MODE unless promoted. */
56 enum machine_mode mode;
51bbfa0c
RS
57 /* Current RTL value for argument, or 0 if it isn't precomputed. */
58 rtx value;
 59 /* Initially-computed RTL value for argument; only for const functions. */
60 rtx initial_value;
61 /* Register to pass this argument in, 0 if passed on stack, or an
62 EXPR_LIST if the arg is to be copied into multiple different
63 registers. */
64 rtx reg;
84b55618
RK
65 /* If REG was promoted from the actual mode of the argument expression,
66 indicates whether the promotion is sign- or zero-extended. */
67 int unsignedp;
51bbfa0c
RS
68 /* Number of registers to use. 0 means put the whole arg in registers.
69 Also 0 if not passed in registers. */
70 int partial;
d64f5a78
RS
71 /* Non-zero if argument must be passed on stack.
72 Note that some arguments may be passed on the stack
73 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
74 pass_on_stack identifies arguments that *cannot* go in registers. */
51bbfa0c
RS
75 int pass_on_stack;
76 /* Offset of this argument from beginning of stack-args. */
77 struct args_size offset;
78 /* Similar, but offset to the start of the stack slot. Different from
79 OFFSET if this arg pads downward. */
80 struct args_size slot_offset;
 81 /* Size of this argument on the stack, rounded up for any padding it gets;
 82 parts of the argument passed in registers do not count.
83 If REG_PARM_STACK_SPACE is defined, then register parms
84 are counted here as well. */
85 struct args_size size;
86 /* Location on the stack at which parameter should be stored. The store
87 has already been done if STACK == VALUE. */
88 rtx stack;
89 /* Location on the stack of the start of this argument slot. This can
90 differ from STACK if this arg pads downward. This location is known
91 to be aligned to FUNCTION_ARG_BOUNDARY. */
92 rtx stack_slot;
93#ifdef ACCUMULATE_OUTGOING_ARGS
94 /* Place that this stack area has been saved, if needed. */
95 rtx save_area;
96#endif
4ab56118
RK
97#ifdef STRICT_ALIGNMENT
98 /* If an argument's alignment does not permit direct copying into registers,
99 copy in smaller-sized pieces into pseudos. These are stored in a
100 block pointed to by this field. The next field says how many
101 word-sized pseudos we made. */
102 rtx *aligned_regs;
103 int n_aligned_regs;
104#endif
51bbfa0c
RS
105};
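/* An illustrative sketch of the shapes the REG field above can take.
   The register number and mode here are hypothetical; the EXPR_LIST form
   whose first operand is zero is the "pass in this register and also on
   the stack" convention that expand_call handles below.  */
#if 0
  args[i].reg = 0;					/* entirely on the stack */
  args[i].reg = gen_rtx (REG, SImode, 3);		/* entirely in register 3 */
  args[i].reg = gen_rtx (EXPR_LIST, VOIDmode, NULL_RTX,
			 gen_rtx (REG, SImode, 3));	/* register 3 and the stack */
#endif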
106
107#ifdef ACCUMULATE_OUTGOING_ARGS
b94301c2 108/* A vector of one char per byte of stack space. A byte is non-zero if
51bbfa0c
RS
109 the corresponding stack location has been used.
110 This vector is used to prevent a function call within an argument from
111 clobbering any stack already set up. */
112static char *stack_usage_map;
113
114/* Size of STACK_USAGE_MAP. */
115static int highest_outgoing_arg_in_use;
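/* A minimal sketch of how this map is used (LOW and HIGH are hypothetical
   byte offsets into the outgoing argument area): a byte already marked
   non-zero is in use and must be saved before being overwritten, and the
   bytes written for the current argument are then marked in use.  */
#if 0
  for (i = low; i < high; i++)
    if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
      break;			/* this byte is already occupied */
  for (i = low; i < high; i++)
    stack_usage_map[i] = 1;	/* now occupied by the current argument */
#endif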
2f4aa534
RS
116
117/* stack_arg_under_construction is nonzero when an argument may be
118 initialized with a constructor call (including a C function that
119 returns a BLKmode struct) and expand_call must take special action
120 to make sure the object being constructed does not overlap the
121 argument list for the constructor call. */
122int stack_arg_under_construction;
51bbfa0c
RS
123#endif
124
322e3e34 125static int calls_function PROTO((tree, int));
9f4d9f6c 126static int calls_function_1 PROTO((tree, int));
322e3e34
RK
127static void emit_call_1 PROTO((rtx, tree, int, int, rtx, rtx, int,
128 rtx, int));
129static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
130 tree, int));
51bbfa0c 131\f
1ce0cb53
JW
132/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
133 `alloca'.
134
135 If WHICH is 0, return 1 if EXP contains a call to any function.
 136 Actually, we need only return 1 if evaluating EXP would require pushing
137 arguments on the stack, but that is too difficult to compute, so we just
138 assume any function call might require the stack. */
51bbfa0c 139
1c8d7aef
RS
140static tree calls_function_save_exprs;
141
51bbfa0c 142static int
1ce0cb53 143calls_function (exp, which)
51bbfa0c 144 tree exp;
1ce0cb53 145 int which;
1c8d7aef
RS
146{
147 int val;
148 calls_function_save_exprs = 0;
149 val = calls_function_1 (exp, which);
150 calls_function_save_exprs = 0;
151 return val;
152}
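/* A short usage sketch, assuming an ARGS vector as built in expand_call
   below: a WHICH of 1 asks only whether the expression may call alloca,
   a WHICH of 0 whether it may call any function at all.  */
#if 0
  if (calls_function (args[i].tree_value, 1))
    /* the argument may call alloca */ ;
  if (calls_function (args[i].tree_value, 0))
    /* the argument may call some function that pushes arguments */ ;
#endif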
153
154static int
155calls_function_1 (exp, which)
156 tree exp;
157 int which;
51bbfa0c
RS
158{
159 register int i;
0207efa2
RK
160 enum tree_code code = TREE_CODE (exp);
161 int type = TREE_CODE_CLASS (code);
162 int length = tree_code_length[(int) code];
51bbfa0c 163
0207efa2
RK
 164 /* If this code is language-specific, we don't know what it will do. */
165 if ((int) code >= NUM_TREE_CODES)
166 return 1;
51bbfa0c 167
0207efa2 168 /* Only expressions and references can contain calls. */
3b59a331
RS
169 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
170 && type != 'b')
51bbfa0c
RS
171 return 0;
172
0207efa2 173 switch (code)
51bbfa0c
RS
174 {
175 case CALL_EXPR:
1ce0cb53
JW
176 if (which == 0)
177 return 1;
178 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
179 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
0207efa2
RK
180 == FUNCTION_DECL))
181 {
182 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
183
184 if ((DECL_BUILT_IN (fndecl)
185 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
186 || (DECL_SAVED_INSNS (fndecl)
187 && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
188 & FUNCTION_FLAGS_CALLS_ALLOCA)))
189 return 1;
190 }
51bbfa0c
RS
191
192 /* Third operand is RTL. */
193 length = 2;
194 break;
195
196 case SAVE_EXPR:
197 if (SAVE_EXPR_RTL (exp) != 0)
198 return 0;
1c8d7aef
RS
199 if (value_member (exp, calls_function_save_exprs))
200 return 0;
201 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
202 calls_function_save_exprs);
203 return (TREE_OPERAND (exp, 0) != 0
204 && calls_function_1 (TREE_OPERAND (exp, 0), which));
51bbfa0c
RS
205
206 case BLOCK:
ef03bc85
CH
207 {
208 register tree local;
209
210 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
1ce0cb53 211 if (DECL_INITIAL (local) != 0
1c8d7aef 212 && calls_function_1 (DECL_INITIAL (local), which))
ef03bc85
CH
213 return 1;
214 }
215 {
216 register tree subblock;
217
218 for (subblock = BLOCK_SUBBLOCKS (exp);
219 subblock;
220 subblock = TREE_CHAIN (subblock))
1c8d7aef 221 if (calls_function_1 (subblock, which))
ef03bc85
CH
222 return 1;
223 }
224 return 0;
51bbfa0c
RS
225
226 case METHOD_CALL_EXPR:
227 length = 3;
228 break;
229
230 case WITH_CLEANUP_EXPR:
231 length = 1;
232 break;
233
234 case RTL_EXPR:
235 return 0;
236 }
237
238 for (i = 0; i < length; i++)
239 if (TREE_OPERAND (exp, i) != 0
1c8d7aef 240 && calls_function_1 (TREE_OPERAND (exp, i), which))
51bbfa0c
RS
241 return 1;
242
243 return 0;
244}
245\f
246/* Force FUNEXP into a form suitable for the address of a CALL,
247 and return that as an rtx. Also load the static chain register
248 if FNDECL is a nested function.
249
77cac2f2
RK
250 CALL_FUSAGE points to a variable holding the prospective
251 CALL_INSN_FUNCTION_USAGE information. */
51bbfa0c 252
03dacb02 253rtx
77cac2f2 254prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
51bbfa0c
RS
255 rtx funexp;
256 tree fndecl;
77cac2f2 257 rtx *call_fusage;
01368078 258 int reg_parm_seen;
51bbfa0c
RS
259{
260 rtx static_chain_value = 0;
261
262 funexp = protect_from_queue (funexp, 0);
263
264 if (fndecl != 0)
265 /* Get possible static chain value for nested function in C. */
266 static_chain_value = lookup_static_chain (fndecl);
267
268 /* Make a valid memory address and copy constants thru pseudo-regs,
269 but not for a constant address if -fno-function-cse. */
270 if (GET_CODE (funexp) != SYMBOL_REF)
01368078
RK
271 funexp =
272#ifdef SMALL_REGISTER_CLASSES
273 /* If we are using registers for parameters, force the
274 function address into a register now. */
275 reg_parm_seen ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
276 :
277#endif
278 memory_address (FUNCTION_MODE, funexp);
51bbfa0c
RS
279 else
280 {
281#ifndef NO_FUNCTION_CSE
282 if (optimize && ! flag_no_function_cse)
283#ifdef NO_RECURSIVE_FUNCTION_CSE
284 if (fndecl != current_function_decl)
285#endif
286 funexp = force_reg (Pmode, funexp);
287#endif
288 }
289
290 if (static_chain_value != 0)
291 {
292 emit_move_insn (static_chain_rtx, static_chain_value);
293
77cac2f2 294 use_reg (call_fusage, static_chain_rtx);
51bbfa0c
RS
295 }
296
297 return funexp;
298}
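/* A hypothetical call-site sketch: expand_call hands this routine the
   function address, the FUNCTION_DECL (or 0 for an indirect call), and a
   pointer to the CALL_INSN_FUNCTION_USAGE list it is accumulating, then
   uses the returned address in the call pattern it emits.  */
#if 0
  funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
#endif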
299
300/* Generate instructions to call function FUNEXP,
301 and optionally pop the results.
302 The CALL_INSN is the first insn generated.
303
304 FUNTYPE is the data type of the function, or, for a library call,
305 the identifier for the name of the call. This is given to the
306 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
307
308 STACK_SIZE is the number of bytes of arguments on the stack,
309 rounded up to STACK_BOUNDARY; zero if the size is variable.
310 This is both to put into the call insn and
311 to generate explicit popping code if necessary.
312
313 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
314 It is zero if this call doesn't want a structure value.
315
316 NEXT_ARG_REG is the rtx that results from executing
317 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
318 just after all the args have had their registers assigned.
319 This could be whatever you like, but normally it is the first
320 arg-register beyond those used for args in this call,
321 or 0 if all the arg-registers are used in this call.
322 It is passed on to `gen_call' so you can put this info in the call insn.
323
324 VALREG is a hard register in which a value is returned,
325 or 0 if the call does not return a value.
326
327 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
328 the args to this call were processed.
329 We restore `inhibit_defer_pop' to that value.
330
94b25f81
RK
331 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
332 denote registers used by the called function.
51bbfa0c
RS
333
334 IS_CONST is true if this is a `const' call. */
335
322e3e34 336static void
51bbfa0c 337emit_call_1 (funexp, funtype, stack_size, struct_value_size, next_arg_reg,
77cac2f2 338 valreg, old_inhibit_defer_pop, call_fusage, is_const)
51bbfa0c
RS
339 rtx funexp;
340 tree funtype;
341 int stack_size;
342 int struct_value_size;
343 rtx next_arg_reg;
344 rtx valreg;
345 int old_inhibit_defer_pop;
77cac2f2 346 rtx call_fusage;
51bbfa0c
RS
347 int is_const;
348{
e5d70561
RK
349 rtx stack_size_rtx = GEN_INT (stack_size);
350 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
51bbfa0c
RS
351 rtx call_insn;
352 int already_popped = 0;
353
354 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
355 and we don't want to load it into a register as an optimization,
356 because prepare_call_address already did it if it should be done. */
357 if (GET_CODE (funexp) != SYMBOL_REF)
358 funexp = memory_address (FUNCTION_MODE, funexp);
359
360#ifndef ACCUMULATE_OUTGOING_ARGS
361#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
362 if (HAVE_call_pop && HAVE_call_value_pop
363 && (RETURN_POPS_ARGS (funtype, stack_size) > 0 || stack_size == 0))
364 {
e5d70561 365 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (funtype, stack_size));
51bbfa0c
RS
366 rtx pat;
367
368 /* If this subroutine pops its own args, record that in the call insn
369 if possible, for the sake of frame pointer elimination. */
370 if (valreg)
371 pat = gen_call_value_pop (valreg,
372 gen_rtx (MEM, FUNCTION_MODE, funexp),
373 stack_size_rtx, next_arg_reg, n_pop);
374 else
375 pat = gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, funexp),
376 stack_size_rtx, next_arg_reg, n_pop);
377
378 emit_call_insn (pat);
379 already_popped = 1;
380 }
381 else
382#endif
383#endif
384
385#if defined (HAVE_call) && defined (HAVE_call_value)
386 if (HAVE_call && HAVE_call_value)
387 {
388 if (valreg)
389 emit_call_insn (gen_call_value (valreg,
390 gen_rtx (MEM, FUNCTION_MODE, funexp),
e992302c
BK
391 stack_size_rtx, next_arg_reg,
392 NULL_RTX));
51bbfa0c
RS
393 else
394 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp),
395 stack_size_rtx, next_arg_reg,
396 struct_value_size_rtx));
397 }
398 else
399#endif
400 abort ();
401
77cac2f2 402 /* Find the CALL insn we just emitted. */
51bbfa0c
RS
403 for (call_insn = get_last_insn ();
404 call_insn && GET_CODE (call_insn) != CALL_INSN;
405 call_insn = PREV_INSN (call_insn))
406 ;
407
408 if (! call_insn)
409 abort ();
410
e59e60a7
RK
411 /* Put the register usage information on the CALL. If there is already
412 some usage information, put ours at the end. */
413 if (CALL_INSN_FUNCTION_USAGE (call_insn))
414 {
415 rtx link;
416
417 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
418 link = XEXP (link, 1))
419 ;
420
421 XEXP (link, 1) = call_fusage;
422 }
423 else
424 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
51bbfa0c
RS
425
426 /* If this is a const call, then set the insn's unchanging bit. */
427 if (is_const)
428 CONST_CALL_P (call_insn) = 1;
429
b1e64e0d
RS
430 /* Restore this now, so that we do defer pops for this call's args
431 if the context of the call as a whole permits. */
432 inhibit_defer_pop = old_inhibit_defer_pop;
433
51bbfa0c
RS
434#ifndef ACCUMULATE_OUTGOING_ARGS
435 /* If returning from the subroutine does not automatically pop the args,
436 we need an instruction to pop them sooner or later.
437 Perhaps do it now; perhaps just record how much space to pop later.
438
439 If returning from the subroutine does pop the args, indicate that the
440 stack pointer will be changed. */
441
442 if (stack_size != 0 && RETURN_POPS_ARGS (funtype, stack_size) > 0)
443 {
444 if (!already_popped)
77cac2f2 445 CALL_INSN_FUNCTION_USAGE (call_insn) =
96fd013f 446 gen_rtx (EXPR_LIST, VOIDmode,
984d9166 447 gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx),
77cac2f2 448 CALL_INSN_FUNCTION_USAGE (call_insn));
51bbfa0c 449 stack_size -= RETURN_POPS_ARGS (funtype, stack_size);
e5d70561 450 stack_size_rtx = GEN_INT (stack_size);
51bbfa0c
RS
451 }
452
453 if (stack_size != 0)
454 {
70a73141 455 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
51bbfa0c
RS
456 pending_stack_adjust += stack_size;
457 else
458 adjust_stack (stack_size_rtx);
459 }
460#endif
461}
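/* An illustrative sketch of the RETURN_POPS_ARGS contract assumed above:
   given the function type (or, for a library call, its identifier) and
   the byte size of the pushed arguments, it yields the number of bytes
   the callee itself pops on return.  These two definitions are
   hypothetical and not taken from any particular target description.  */
#if 0
#define RETURN_POPS_ARGS(FUNTYPE, SIZE) 0	/* caller pops all arguments */
#define RETURN_POPS_ARGS(FUNTYPE, SIZE) (SIZE)	/* callee pops all arguments */
#endif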
462
463/* Generate all the code for a function call
464 and return an rtx for its value.
465 Store the value in TARGET (specified as an rtx) if convenient.
466 If the value is stored in TARGET then TARGET is returned.
467 If IGNORE is nonzero, then we ignore the value of the function call. */
468
469rtx
8129842c 470expand_call (exp, target, ignore)
51bbfa0c
RS
471 tree exp;
472 rtx target;
473 int ignore;
51bbfa0c
RS
474{
475 /* List of actual parameters. */
476 tree actparms = TREE_OPERAND (exp, 1);
477 /* RTX for the function to be called. */
478 rtx funexp;
479 /* Tree node for the function to be called (not the address!). */
480 tree funtree;
481 /* Data type of the function. */
482 tree funtype;
483 /* Declaration of the function being called,
484 or 0 if the function is computed (not known by name). */
485 tree fndecl = 0;
486 char *name = 0;
487
488 /* Register in which non-BLKmode value will be returned,
489 or 0 if no value or if value is BLKmode. */
490 rtx valreg;
491 /* Address where we should return a BLKmode value;
492 0 if value not BLKmode. */
493 rtx structure_value_addr = 0;
494 /* Nonzero if that address is being passed by treating it as
495 an extra, implicit first parameter. Otherwise,
496 it is passed by being copied directly into struct_value_rtx. */
497 int structure_value_addr_parm = 0;
498 /* Size of aggregate value wanted, or zero if none wanted
499 or if we are using the non-reentrant PCC calling convention
500 or expecting the value in registers. */
501 int struct_value_size = 0;
502 /* Nonzero if called function returns an aggregate in memory PCC style,
503 by returning the address of where to find it. */
504 int pcc_struct_value = 0;
505
506 /* Number of actual parameters in this call, including struct value addr. */
507 int num_actuals;
508 /* Number of named args. Args after this are anonymous ones
509 and they must all go on the stack. */
510 int n_named_args;
511 /* Count arg position in order args appear. */
512 int argpos;
513
514 /* Vector of information about each argument.
515 Arguments are numbered in the order they will be pushed,
516 not the order they are written. */
517 struct arg_data *args;
518
519 /* Total size in bytes of all the stack-parms scanned so far. */
520 struct args_size args_size;
521 /* Size of arguments before any adjustments (such as rounding). */
522 struct args_size original_args_size;
523 /* Data on reg parms scanned so far. */
524 CUMULATIVE_ARGS args_so_far;
525 /* Nonzero if a reg parm has been scanned. */
526 int reg_parm_seen;
efd65a8b
RS
527 /* Nonzero if this is an indirect function call. */
528 int current_call_is_indirect = 0;
51bbfa0c
RS
529
530 /* Nonzero if we must avoid push-insns in the args for this call.
531 If stack space is allocated for register parameters, but not by the
532 caller, then it is preallocated in the fixed part of the stack frame.
533 So the entire argument block must then be preallocated (i.e., we
534 ignore PUSH_ROUNDING in that case). */
535
536#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
537 int must_preallocate = 1;
538#else
539#ifdef PUSH_ROUNDING
540 int must_preallocate = 0;
541#else
542 int must_preallocate = 1;
543#endif
544#endif
545
f72aed24 546 /* Size of the stack reserved for parameter registers. */
6f90e075
JW
547 int reg_parm_stack_space = 0;
548
51bbfa0c
RS
549 /* 1 if scanning parms front to back, -1 if scanning back to front. */
550 int inc;
551 /* Address of space preallocated for stack parms
552 (on machines that lack push insns), or 0 if space not preallocated. */
553 rtx argblock = 0;
554
555 /* Nonzero if it is plausible that this is a call to alloca. */
556 int may_be_alloca;
557 /* Nonzero if this is a call to setjmp or a related function. */
558 int returns_twice;
559 /* Nonzero if this is a call to `longjmp'. */
560 int is_longjmp;
561 /* Nonzero if this is a call to an inline function. */
562 int is_integrable = 0;
51bbfa0c
RS
563 /* Nonzero if this is a call to a `const' function.
564 Note that only explicitly named functions are handled as `const' here. */
565 int is_const = 0;
566 /* Nonzero if this is a call to a `volatile' function. */
567 int is_volatile = 0;
568#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
569 /* Define the boundary of the register parm stack space that needs to be
 570 saved, if any. */
571 int low_to_save = -1, high_to_save;
572 rtx save_area = 0; /* Place that it is saved */
573#endif
574
575#ifdef ACCUMULATE_OUTGOING_ARGS
576 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
577 char *initial_stack_usage_map = stack_usage_map;
578#endif
579
580 rtx old_stack_level = 0;
79be3418 581 int old_pending_adj = 0;
2f4aa534 582 int old_stack_arg_under_construction;
51bbfa0c
RS
583 int old_inhibit_defer_pop = inhibit_defer_pop;
584 tree old_cleanups = cleanups_this_call;
77cac2f2 585 rtx call_fusage = 0;
51bbfa0c 586 register tree p;
4ab56118 587 register int i, j;
51bbfa0c
RS
588
589 /* See if we can find a DECL-node for the actual function.
590 As a result, decide whether this is a call to an integrable function. */
591
592 p = TREE_OPERAND (exp, 0);
593 if (TREE_CODE (p) == ADDR_EXPR)
594 {
595 fndecl = TREE_OPERAND (p, 0);
596 if (TREE_CODE (fndecl) != FUNCTION_DECL)
fdff8c6d 597 fndecl = 0;
51bbfa0c
RS
598 else
599 {
600 if (!flag_no_inline
601 && fndecl != current_function_decl
aa10adff
RK
602 && DECL_INLINE (fndecl)
603 && DECL_SAVED_INSNS (fndecl))
51bbfa0c
RS
604 is_integrable = 1;
605 else if (! TREE_ADDRESSABLE (fndecl))
606 {
13d39dbc 607 /* In case this function later becomes inlinable,
51bbfa0c
RS
608 record that there was already a non-inline call to it.
609
610 Use abstraction instead of setting TREE_ADDRESSABLE
611 directly. */
0481a55e
RK
612 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline)
613 warning_with_decl (fndecl, "can't inline call to `%s'");
51bbfa0c
RS
614 mark_addressable (fndecl);
615 }
616
d45cf215
RS
617 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
618 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
51bbfa0c 619 is_const = 1;
5e24110e
RS
620
621 if (TREE_THIS_VOLATILE (fndecl))
622 is_volatile = 1;
51bbfa0c
RS
623 }
624 }
625
fdff8c6d
RK
 626 /* If we don't have a specific function to call, see if we have a
627 constant or `noreturn' function from the type. */
628 if (fndecl == 0)
629 {
630 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
631 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
632 }
633
6f90e075
JW
634#ifdef REG_PARM_STACK_SPACE
635#ifdef MAYBE_REG_PARM_STACK_SPACE
636 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
637#else
638 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
639#endif
640#endif
641
51bbfa0c
RS
642 /* Warn if this value is an aggregate type,
643 regardless of which calling convention we are using for it. */
05e3bdb9 644 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
51bbfa0c
RS
645 warning ("function call has aggregate value");
646
647 /* Set up a place to return a structure. */
648
649 /* Cater to broken compilers. */
650 if (aggregate_value_p (exp))
651 {
652 /* This call returns a big structure. */
653 is_const = 0;
654
655#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a
RS
656 {
657 pcc_struct_value = 1;
0dd532dc
JW
658 /* Easier than making that case work right. */
659 if (is_integrable)
660 {
661 /* In case this is a static function, note that it has been
662 used. */
663 if (! TREE_ADDRESSABLE (fndecl))
664 mark_addressable (fndecl);
665 is_integrable = 0;
666 }
9e7b1d0a
RS
667 }
668#else /* not PCC_STATIC_STRUCT_RETURN */
669 {
670 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
51bbfa0c 671
9e7b1d0a
RS
672 if (target && GET_CODE (target) == MEM)
673 structure_value_addr = XEXP (target, 0);
674 else
675 {
676 /* Assign a temporary on the stack to hold the value. */
51bbfa0c 677
9e7b1d0a
RS
678 /* For variable-sized objects, we must be called with a target
679 specified. If we were to allocate space on the stack here,
680 we would have no way of knowing when to free it. */
51bbfa0c 681
002bdd6c
RK
682 if (struct_value_size < 0)
683 abort ();
684
9e7b1d0a
RS
685 structure_value_addr
686 = XEXP (assign_stack_temp (BLKmode, struct_value_size, 1), 0);
687 target = 0;
688 }
689 }
690#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
691 }
692
693 /* If called function is inline, try to integrate it. */
694
695 if (is_integrable)
696 {
697 rtx temp;
2f4aa534 698 rtx before_call = get_last_insn ();
51bbfa0c
RS
699
700 temp = expand_inline_function (fndecl, actparms, target,
701 ignore, TREE_TYPE (exp),
702 structure_value_addr);
703
704 /* If inlining succeeded, return. */
854e97f0 705 if ((HOST_WIDE_INT) temp != -1)
51bbfa0c 706 {
ef5057f8
MS
707 if (flag_short_temps)
708 {
709 /* Perform all cleanups needed for the arguments of this
710 call (i.e. destructors in C++). It is ok if these
711 destructors clobber RETURN_VALUE_REG, because the
712 only time we care about this is when TARGET is that
713 register. But in C++, we take care to never return
714 that register directly. */
715 expand_cleanups_to (old_cleanups);
716 }
d64f5a78
RS
717
718#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
719 /* If the outgoing argument list must be preserved, push
720 the stack before executing the inlined function if it
721 makes any calls. */
722
723 for (i = reg_parm_stack_space - 1; i >= 0; i--)
724 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
725 break;
726
727 if (stack_arg_under_construction || i >= 0)
728 {
d64f5a78 729 rtx insn = NEXT_INSN (before_call), seq;
2f4aa534 730
d64f5a78
RS
731 /* Look for a call in the inline function code.
732 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
733 nonzero then there is a call and it is not necessary
734 to scan the insns. */
735
736 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
737 for (; insn; insn = NEXT_INSN (insn))
738 if (GET_CODE (insn) == CALL_INSN)
739 break;
2f4aa534
RS
740
741 if (insn)
742 {
d64f5a78
RS
743 /* Reserve enough stack space so that the largest
744 argument list of any function call in the inline
745 function does not overlap the argument list being
746 evaluated. This is usually an overestimate because
747 allocate_dynamic_stack_space reserves space for an
748 outgoing argument list in addition to the requested
749 space, but there is no way to ask for stack space such
750 that an argument list of a certain length can be
751 safely constructed. */
752
753 int adjust = OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl));
754#ifdef REG_PARM_STACK_SPACE
755 /* Add the stack space reserved for register arguments
756 in the inline function. What is really needed is the
757 largest value of reg_parm_stack_space in the inline
758 function, but that is not available. Using the current
759 value of reg_parm_stack_space is wrong, but gives
760 correct results on all supported machines. */
761 adjust += reg_parm_stack_space;
762#endif
2f4aa534 763 start_sequence ();
ccf5d244 764 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
e5d70561
RK
765 allocate_dynamic_stack_space (GEN_INT (adjust),
766 NULL_RTX, BITS_PER_UNIT);
2f4aa534
RS
767 seq = get_insns ();
768 end_sequence ();
769 emit_insns_before (seq, NEXT_INSN (before_call));
e5d70561 770 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2f4aa534
RS
771 }
772 }
d64f5a78 773#endif
51bbfa0c
RS
774
775 /* If the result is equivalent to TARGET, return TARGET to simplify
776 checks in store_expr. They can be equivalent but not equal in the
777 case of a function that returns BLKmode. */
778 if (temp != target && rtx_equal_p (temp, target))
779 return target;
780 return temp;
781 }
782
783 /* If inlining failed, mark FNDECL as needing to be compiled
0481a55e
RK
784 separately after all. If function was declared inline,
785 give a warning. */
786 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
787 && ! TREE_ADDRESSABLE (fndecl))
788 warning_with_decl (fndecl, "can't inline call to `%s'");
51bbfa0c
RS
789 mark_addressable (fndecl);
790 }
791
792 /* When calling a const function, we must pop the stack args right away,
793 so that the pop is deleted or moved with the call. */
794 if (is_const)
795 NO_DEFER_POP;
796
797 function_call_count++;
798
799 if (fndecl && DECL_NAME (fndecl))
800 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
801
efd65a8b
RS
802 /* On some machines (such as the PA) indirect calls have a different
803 calling convention than normal calls. FUNCTION_ARG in the target
804 description can look at current_call_is_indirect to determine which
805 calling convention to use. */
806 current_call_is_indirect = (fndecl == 0);
807#if 0
808 = TREE_CODE (TREE_OPERAND (exp, 0)) == NON_LVALUE_EXPR ? 1 : 0;
809#endif
810
51bbfa0c
RS
811#if 0
812 /* Unless it's a call to a specific function that isn't alloca,
813 if it has one argument, we must assume it might be alloca. */
814
815 may_be_alloca =
816 (!(fndecl != 0 && strcmp (name, "alloca"))
817 && actparms != 0
818 && TREE_CHAIN (actparms) == 0);
819#else
820 /* We assume that alloca will always be called by name. It
821 makes no sense to pass it as a pointer-to-function to
822 anything that does not understand its behavior. */
823 may_be_alloca =
824 (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
825 && name[0] == 'a'
826 && ! strcmp (name, "alloca"))
827 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
828 && name[0] == '_'
829 && ! strcmp (name, "__builtin_alloca"))));
830#endif
831
832 /* See if this is a call to a function that can return more than once
833 or a call to longjmp. */
834
835 returns_twice = 0;
836 is_longjmp = 0;
837
838 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15)
839 {
840 char *tname = name;
841
8d515633 842 /* Disregard prefix _, __ or __x. */
51bbfa0c 843 if (name[0] == '_')
8d515633
RS
844 {
845 if (name[1] == '_' && name[2] == 'x')
846 tname += 3;
847 else if (name[1] == '_')
848 tname += 2;
849 else
850 tname += 1;
851 }
51bbfa0c
RS
852
853 if (tname[0] == 's')
854 {
855 returns_twice
856 = ((tname[1] == 'e'
857 && (! strcmp (tname, "setjmp")
858 || ! strcmp (tname, "setjmp_syscall")))
859 || (tname[1] == 'i'
860 && ! strcmp (tname, "sigsetjmp"))
861 || (tname[1] == 'a'
862 && ! strcmp (tname, "savectx")));
863 if (tname[1] == 'i'
864 && ! strcmp (tname, "siglongjmp"))
865 is_longjmp = 1;
866 }
867 else if ((tname[0] == 'q' && tname[1] == 's'
868 && ! strcmp (tname, "qsetjmp"))
869 || (tname[0] == 'v' && tname[1] == 'f'
870 && ! strcmp (tname, "vfork")))
871 returns_twice = 1;
872
873 else if (tname[0] == 'l' && tname[1] == 'o'
874 && ! strcmp (tname, "longjmp"))
875 is_longjmp = 1;
876 }
877
51bbfa0c
RS
878 if (may_be_alloca)
879 current_function_calls_alloca = 1;
880
881 /* Don't let pending stack adjusts add up to too much.
882 Also, do all pending adjustments now
883 if there is any chance this might be a call to alloca. */
884
885 if (pending_stack_adjust >= 32
886 || (pending_stack_adjust > 0 && may_be_alloca))
887 do_pending_stack_adjust ();
888
889 /* Operand 0 is a pointer-to-function; get the type of the function. */
890 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
891 if (TREE_CODE (funtype) != POINTER_TYPE)
892 abort ();
893 funtype = TREE_TYPE (funtype);
894
cc79451b
RK
895 /* Push the temporary stack slot level so that we can free any temporaries
896 we make. */
51bbfa0c
RS
897 push_temp_slots ();
898
899 /* Start updating where the next arg would go. */
85ec8ec4 900 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX);
51bbfa0c
RS
901
902 /* If struct_value_rtx is 0, it means pass the address
903 as if it were an extra parameter. */
904 if (structure_value_addr && struct_value_rtx == 0)
905 {
5582b006
RK
906 /* If structure_value_addr is a REG other than
 907 virtual_outgoing_args_rtx, we can always use it. If it
908 is not a REG, we must always copy it into a register.
909 If it is virtual_outgoing_args_rtx, we must copy it to another
910 register in some cases. */
911 rtx temp = (GET_CODE (structure_value_addr) != REG
d64f5a78 912#ifdef ACCUMULATE_OUTGOING_ARGS
5582b006
RK
913 || (stack_arg_under_construction
914 && structure_value_addr == virtual_outgoing_args_rtx)
d64f5a78 915#endif
5582b006
RK
916 ? copy_addr_to_reg (structure_value_addr)
917 : structure_value_addr);
d64f5a78 918
51bbfa0c
RS
919 actparms
920 = tree_cons (error_mark_node,
921 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2f4aa534 922 temp),
51bbfa0c
RS
923 actparms);
924 structure_value_addr_parm = 1;
925 }
926
927 /* Count the arguments and set NUM_ACTUALS. */
928 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
929 num_actuals = i;
930
931 /* Compute number of named args.
932 Normally, don't include the last named arg if anonymous args follow.
933 (If no anonymous args follow, the result of list_length
934 is actually one too large.)
935
936 If SETUP_INCOMING_VARARGS is defined, this machine will be able to
937 place unnamed args that were passed in registers into the stack. So
 938 treat all args as named. This allows the insns emitted for a specific
d45cf215 939 argument list to be independent of the function declaration.
51bbfa0c
RS
940
941 If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
942 way to pass unnamed args in registers, so we must force them into
943 memory. */
944#ifndef SETUP_INCOMING_VARARGS
945 if (TYPE_ARG_TYPES (funtype) != 0)
946 n_named_args
947 = list_length (TYPE_ARG_TYPES (funtype)) - 1
948 /* Count the struct value address, if it is passed as a parm. */
949 + structure_value_addr_parm;
950 else
951#endif
952 /* If we know nothing, treat all args as named. */
953 n_named_args = num_actuals;
954
955 /* Make a vector to hold all the information about each arg. */
956 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
4c9a05bc 957 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
51bbfa0c
RS
958
959 args_size.constant = 0;
960 args_size.var = 0;
961
962 /* In this loop, we consider args in the order they are written.
 963 We fill up ARGS from the front or from the back if necessary
964 so that in any case the first arg to be pushed ends up at the front. */
965
966#ifdef PUSH_ARGS_REVERSED
967 i = num_actuals - 1, inc = -1;
968 /* In this case, must reverse order of args
969 so that we compute and push the last arg first. */
970#else
971 i = 0, inc = 1;
972#endif
973
974 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
975 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
976 {
977 tree type = TREE_TYPE (TREE_VALUE (p));
321e0bba 978 int unsignedp;
84b55618 979 enum machine_mode mode;
51bbfa0c
RS
980
981 args[i].tree_value = TREE_VALUE (p);
982
983 /* Replace erroneous argument with constant zero. */
984 if (type == error_mark_node || TYPE_SIZE (type) == 0)
985 args[i].tree_value = integer_zero_node, type = integer_type_node;
986
5c1c34d3
RK
987 /* If TYPE is a transparent union, pass things the way we would
988 pass the first field of the union. We have already verified that
989 the modes are the same. */
990 if (TYPE_TRANSPARENT_UNION (type))
991 type = TREE_TYPE (TYPE_FIELDS (type));
992
51bbfa0c
RS
993 /* Decide where to pass this arg.
994
995 args[i].reg is nonzero if all or part is passed in registers.
996
997 args[i].partial is nonzero if part but not all is passed in registers,
998 and the exact value says how many words are passed in registers.
999
1000 args[i].pass_on_stack is nonzero if the argument must at least be
1001 computed on the stack. It may then be loaded back into registers
1002 if args[i].reg is nonzero.
1003
1004 These decisions are driven by the FUNCTION_... macros and must agree
1005 with those made by function.c. */
1006
51bbfa0c 1007 /* See if this argument should be passed by invisible reference. */
7ef1fbd7
RK
1008 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1009 && contains_placeholder_p (TYPE_SIZE (type)))
4ac3d994 1010 || TYPE_NEEDS_CONSTRUCTING (type)
7ef1fbd7
RK
1011#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1012 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
1013 type, argpos < n_named_args)
1014#endif
1015 )
51bbfa0c 1016 {
5e0de251
DE
1017#ifdef FUNCTION_ARG_CALLEE_COPIES
1018 if (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type), type,
1019 argpos < n_named_args)
1020 /* If it's in a register, we must make a copy of it too. */
1021 /* ??? Is this a sufficient test? Is there a better one? */
1022 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1023 && REG_P (DECL_RTL (args[i].tree_value))))
51bbfa0c 1024 {
5e0de251
DE
1025 args[i].tree_value = build1 (ADDR_EXPR,
1026 build_pointer_type (type),
1027 args[i].tree_value);
1028 type = build_pointer_type (type);
51bbfa0c
RS
1029 }
1030 else
5e0de251 1031#endif
82c0ff02 1032 {
5e0de251
DE
1033 /* We make a copy of the object and pass the address to the
1034 function being called. */
1035 rtx copy;
51bbfa0c 1036
5e0de251
DE
1037 if (TYPE_SIZE (type) == 0
1038 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1039 {
1040 /* This is a variable-sized object. Make space on the stack
1041 for it. */
1042 rtx size_rtx = expr_size (TREE_VALUE (p));
1043
1044 if (old_stack_level == 0)
1045 {
1046 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1047 old_pending_adj = pending_stack_adjust;
1048 pending_stack_adjust = 0;
1049 }
1050
1051 copy = gen_rtx (MEM, BLKmode,
1052 allocate_dynamic_stack_space (size_rtx,
1053 NULL_RTX,
1054 TYPE_ALIGN (type)));
1055 }
1056 else
1057 {
1058 int size = int_size_in_bytes (type);
1059 copy = assign_stack_temp (TYPE_MODE (type), size, 1);
1060 }
51bbfa0c 1061
05e3bdb9 1062 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
6e87e69e 1063
5e0de251
DE
1064 store_expr (args[i].tree_value, copy, 0);
1065
1066 args[i].tree_value = build1 (ADDR_EXPR,
1067 build_pointer_type (type),
1068 make_tree (type, copy));
1069 type = build_pointer_type (type);
1070 }
51bbfa0c 1071 }
51bbfa0c 1072
84b55618 1073 mode = TYPE_MODE (type);
321e0bba 1074 unsignedp = TREE_UNSIGNED (type);
84b55618
RK
1075
1076#ifdef PROMOTE_FUNCTION_ARGS
321e0bba 1077 mode = promote_mode (type, mode, &unsignedp, 1);
84b55618
RK
1078#endif
1079
321e0bba 1080 args[i].unsignedp = unsignedp;
1efe6448 1081 args[i].mode = mode;
84b55618 1082 args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
51bbfa0c
RS
1083 argpos < n_named_args);
1084#ifdef FUNCTION_ARG_PARTIAL_NREGS
1085 if (args[i].reg)
1086 args[i].partial
84b55618 1087 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
51bbfa0c
RS
1088 argpos < n_named_args);
1089#endif
1090
84b55618 1091 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
51bbfa0c
RS
1092
1093 /* If FUNCTION_ARG returned an (expr_list (nil) FOO), it means that
1094 we are to pass this arg in the register(s) designated by FOO, but
1095 also to pass it in the stack. */
1096 if (args[i].reg && GET_CODE (args[i].reg) == EXPR_LIST
1097 && XEXP (args[i].reg, 0) == 0)
1098 args[i].pass_on_stack = 1, args[i].reg = XEXP (args[i].reg, 1);
1099
1100 /* If this is an addressable type, we must preallocate the stack
1101 since we must evaluate the object into its final location.
1102
1103 If this is to be passed in both registers and the stack, it is simpler
1104 to preallocate. */
1105 if (TREE_ADDRESSABLE (type)
1106 || (args[i].pass_on_stack && args[i].reg != 0))
1107 must_preallocate = 1;
1108
1109 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1110 we cannot consider this function call constant. */
1111 if (TREE_ADDRESSABLE (type))
1112 is_const = 0;
1113
1114 /* Compute the stack-size of this argument. */
1115 if (args[i].reg == 0 || args[i].partial != 0
1116#ifdef REG_PARM_STACK_SPACE
6f90e075 1117 || reg_parm_stack_space > 0
51bbfa0c
RS
1118#endif
1119 || args[i].pass_on_stack)
1efe6448 1120 locate_and_pad_parm (mode, type,
51bbfa0c
RS
1121#ifdef STACK_PARMS_IN_REG_PARM_AREA
1122 1,
1123#else
1124 args[i].reg != 0,
1125#endif
1126 fndecl, &args_size, &args[i].offset,
1127 &args[i].size);
1128
1129#ifndef ARGS_GROW_DOWNWARD
1130 args[i].slot_offset = args_size;
1131#endif
1132
1133#ifndef REG_PARM_STACK_SPACE
1134 /* If a part of the arg was put into registers,
1135 don't include that part in the amount pushed. */
1136 if (! args[i].pass_on_stack)
1137 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1138 / (PARM_BOUNDARY / BITS_PER_UNIT)
1139 * (PARM_BOUNDARY / BITS_PER_UNIT));
1140#endif
1141
1142 /* Update ARGS_SIZE, the total stack space for args so far. */
1143
1144 args_size.constant += args[i].size.constant;
1145 if (args[i].size.var)
1146 {
1147 ADD_PARM_SIZE (args_size, args[i].size.var);
1148 }
1149
1150 /* Since the slot offset points to the bottom of the slot,
1151 we must record it after incrementing if the args grow down. */
1152#ifdef ARGS_GROW_DOWNWARD
1153 args[i].slot_offset = args_size;
1154
1155 args[i].slot_offset.constant = -args_size.constant;
1156 if (args_size.var)
1157 {
1158 SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
1159 }
1160#endif
1161
1162 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1163 have been used, etc. */
1164
1165 FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
1166 argpos < n_named_args);
1167 }
1168
6f90e075
JW
1169#ifdef FINAL_REG_PARM_STACK_SPACE
1170 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1171 args_size.var);
1172#endif
1173
51bbfa0c
RS
1174 /* Compute the actual size of the argument block required. The variable
1175 and constant sizes must be combined, the size may have to be rounded,
1176 and there may be a minimum required size. */
1177
1178 original_args_size = args_size;
1179 if (args_size.var)
1180 {
1181 /* If this function requires a variable-sized argument list, don't try to
1182 make a cse'able block for this call. We may be able to do this
1183 eventually, but it is too complicated to keep track of what insns go
1184 in the cse'able block and which don't. */
1185
1186 is_const = 0;
1187 must_preallocate = 1;
1188
1189 args_size.var = ARGS_SIZE_TREE (args_size);
1190 args_size.constant = 0;
1191
1192#ifdef STACK_BOUNDARY
1193 if (STACK_BOUNDARY != BITS_PER_UNIT)
1194 args_size.var = round_up (args_size.var, STACK_BYTES);
1195#endif
1196
1197#ifdef REG_PARM_STACK_SPACE
6f90e075 1198 if (reg_parm_stack_space > 0)
51bbfa0c
RS
1199 {
1200 args_size.var
1201 = size_binop (MAX_EXPR, args_size.var,
1202 size_int (REG_PARM_STACK_SPACE (fndecl)));
1203
1204#ifndef OUTGOING_REG_PARM_STACK_SPACE
1205 /* The area corresponding to register parameters is not to count in
1206 the size of the block we need. So make the adjustment. */
1207 args_size.var
1208 = size_binop (MINUS_EXPR, args_size.var,
6f90e075 1209 size_int (reg_parm_stack_space));
51bbfa0c
RS
1210#endif
1211 }
1212#endif
1213 }
1214 else
1215 {
1216#ifdef STACK_BOUNDARY
1217 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1218 / STACK_BYTES) * STACK_BYTES);
1219#endif
1220
1221#ifdef REG_PARM_STACK_SPACE
1222 args_size.constant = MAX (args_size.constant,
6f90e075 1223 reg_parm_stack_space);
e1336658
JW
1224#ifdef MAYBE_REG_PARM_STACK_SPACE
1225 if (reg_parm_stack_space == 0)
1226 args_size.constant = 0;
1227#endif
51bbfa0c 1228#ifndef OUTGOING_REG_PARM_STACK_SPACE
6f90e075 1229 args_size.constant -= reg_parm_stack_space;
51bbfa0c
RS
1230#endif
1231#endif
1232 }
1233
1234 /* See if we have or want to preallocate stack space.
1235
1236 If we would have to push a partially-in-regs parm
1237 before other stack parms, preallocate stack space instead.
1238
1239 If the size of some parm is not a multiple of the required stack
1240 alignment, we must preallocate.
1241
1242 If the total size of arguments that would otherwise create a copy in
1243 a temporary (such as a CALL) is more than half the total argument list
1244 size, preallocation is faster.
1245
1246 Another reason to preallocate is if we have a machine (like the m88k)
1247 where stack alignment is required to be maintained between every
1248 pair of insns, not just when the call is made. However, we assume here
1249 that such machines either do not have push insns (and hence preallocation
1250 would occur anyway) or the problem is taken care of with
1251 PUSH_ROUNDING. */
1252
1253 if (! must_preallocate)
1254 {
1255 int partial_seen = 0;
1256 int copy_to_evaluate_size = 0;
1257
1258 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1259 {
1260 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1261 partial_seen = 1;
1262 else if (partial_seen && args[i].reg == 0)
1263 must_preallocate = 1;
1264
1265 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1266 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1267 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1268 || TREE_CODE (args[i].tree_value) == COND_EXPR
1269 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1270 copy_to_evaluate_size
1271 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1272 }
1273
c62f36cf
RS
1274 if (copy_to_evaluate_size * 2 >= args_size.constant
1275 && args_size.constant > 0)
51bbfa0c
RS
1276 must_preallocate = 1;
1277 }
1278
1279 /* If the structure value address will reference the stack pointer, we must
1280 stabilize it. We don't need to do this if we know that we are not going
1281 to adjust the stack pointer in processing this call. */
1282
1283 if (structure_value_addr
1284 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1285 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1286 && (args_size.var
1287#ifndef ACCUMULATE_OUTGOING_ARGS
1288 || args_size.constant
1289#endif
1290 ))
1291 structure_value_addr = copy_to_reg (structure_value_addr);
1292
1293 /* If this function call is cse'able, precompute all the parameters.
1294 Note that if the parameter is constructed into a temporary, this will
1295 cause an additional copy because the parameter will be constructed
1296 into a temporary location and then copied into the outgoing arguments.
1297 If a parameter contains a call to alloca and this function uses the
1298 stack, precompute the parameter. */
1299
1ce0cb53
JW
1300 /* If we preallocated the stack space, and some arguments must be passed
1301 on the stack, then we must precompute any parameter which contains a
1302 function call which will store arguments on the stack.
1303 Otherwise, evaluating the parameter may clobber previous parameters
1304 which have already been stored into the stack. */
1305
51bbfa0c
RS
1306 for (i = 0; i < num_actuals; i++)
1307 if (is_const
1308 || ((args_size.var != 0 || args_size.constant != 0)
1ce0cb53
JW
1309 && calls_function (args[i].tree_value, 1))
1310 || (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
1311 && calls_function (args[i].tree_value, 0)))
51bbfa0c 1312 {
cc79451b
RK
1313 push_temp_slots ();
1314
51bbfa0c 1315 args[i].initial_value = args[i].value
e5d70561 1316 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1efe6448 1317
7373d92d
RK
1318 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1319 args[i].value
1320 = convert_modes (args[i].mode,
1321 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1322 args[i].value, args[i].unsignedp);
1323
51bbfa0c 1324 preserve_temp_slots (args[i].value);
cc79451b 1325 pop_temp_slots ();
51bbfa0c
RS
1326
1327 /* ANSI doesn't require a sequence point here,
1328 but PCC has one, so this will avoid some problems. */
1329 emit_queue ();
1330 }
1331
1332 /* Now we are about to start emitting insns that can be deleted
1333 if a libcall is deleted. */
1334 if (is_const)
1335 start_sequence ();
1336
1337 /* If we have no actual push instructions, or shouldn't use them,
1338 make space for all args right now. */
1339
1340 if (args_size.var != 0)
1341 {
1342 if (old_stack_level == 0)
1343 {
e5d70561 1344 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
51bbfa0c
RS
1345 old_pending_adj = pending_stack_adjust;
1346 pending_stack_adjust = 0;
d64f5a78 1347#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
1348 /* stack_arg_under_construction says whether a stack arg is
1349 being constructed at the old stack level. Pushing the stack
1350 gets a clean outgoing argument block. */
1351 old_stack_arg_under_construction = stack_arg_under_construction;
1352 stack_arg_under_construction = 0;
d64f5a78 1353#endif
51bbfa0c
RS
1354 }
1355 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
1356 }
26a258fe 1357 else
51bbfa0c
RS
1358 {
1359 /* Note that we must go through the motions of allocating an argument
1360 block even if the size is zero because we may be storing args
1361 in the area reserved for register arguments, which may be part of
1362 the stack frame. */
26a258fe 1363
51bbfa0c
RS
1364 int needed = args_size.constant;
1365
51bbfa0c 1366 /* Store the maximum argument space used. It will be pushed by the
26a258fe 1367 prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow checking). */
51bbfa0c
RS
1368
1369 if (needed > current_function_outgoing_args_size)
1370 current_function_outgoing_args_size = needed;
1371
26a258fe
PB
1372 if (must_preallocate)
1373 {
1374#ifdef ACCUMULATE_OUTGOING_ARGS
1375 /* Since the stack pointer will never be pushed, it is possible for
1376 the evaluation of a parm to clobber something we have already
1377 written to the stack. Since most function calls on RISC machines
1378 do not use the stack, this is uncommon, but must work correctly.
1379
1380 Therefore, we save any area of the stack that was already written
1381 and that we are using. Here we set up to do this by making a new
1382 stack usage map from the old one. The actual save will be done
1383 by store_one_arg.
1384
1385 Another approach might be to try to reorder the argument
1386 evaluations to avoid this conflicting stack usage. */
1387
51bbfa0c 1388#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
26a258fe
PB
1389 /* Since we will be writing into the entire argument area, the
1390 map must be allocated for its entire size, not just the part that
1391 is the responsibility of the caller. */
1392 needed += reg_parm_stack_space;
51bbfa0c
RS
1393#endif
1394
1395#ifdef ARGS_GROW_DOWNWARD
26a258fe
PB
1396 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1397 needed + 1);
51bbfa0c 1398#else
26a258fe
PB
1399 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1400 needed);
51bbfa0c 1401#endif
26a258fe 1402 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
51bbfa0c 1403
26a258fe
PB
1404 if (initial_highest_arg_in_use)
1405 bcopy (initial_stack_usage_map, stack_usage_map,
1406 initial_highest_arg_in_use);
51bbfa0c 1407
26a258fe
PB
1408 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
1409 bzero (&stack_usage_map[initial_highest_arg_in_use],
1410 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
1411 needed = 0;
2f4aa534 1412
26a258fe
PB
1413 /* The address of the outgoing argument list must not be copied to a
1414 register here, because argblock would be left pointing to the
1415 wrong place after the call to allocate_dynamic_stack_space below.
1416 */
2f4aa534 1417
26a258fe 1418 argblock = virtual_outgoing_args_rtx;
2f4aa534 1419
51bbfa0c 1420#else /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 1421 if (inhibit_defer_pop == 0)
51bbfa0c 1422 {
26a258fe
PB
1423 /* Try to reuse some or all of the pending_stack_adjust
1424 to get this space. Maybe we can avoid any pushing. */
1425 if (needed > pending_stack_adjust)
1426 {
1427 needed -= pending_stack_adjust;
1428 pending_stack_adjust = 0;
1429 }
1430 else
1431 {
1432 pending_stack_adjust -= needed;
1433 needed = 0;
1434 }
51bbfa0c 1435 }
26a258fe
PB
1436 /* Special case this because overhead of `push_block' in this
1437 case is non-trivial. */
1438 if (needed == 0)
1439 argblock = virtual_outgoing_args_rtx;
51bbfa0c 1440 else
26a258fe
PB
1441 argblock = push_block (GEN_INT (needed), 0, 0);
1442
1443 /* We only really need to call `copy_to_reg' in the case where push
1444 insns are going to be used to pass ARGBLOCK to a function
1445 call in ARGS. In that case, the stack pointer changes value
1446 from the allocation point to the call point, and hence
1447 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
1448 But might as well always do it. */
1449 argblock = copy_to_reg (argblock);
51bbfa0c 1450#endif /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 1451 }
51bbfa0c
RS
1452 }
1453
bfbf933a
RS
1454#ifdef ACCUMULATE_OUTGOING_ARGS
1455 /* The save/restore code in store_one_arg handles all cases except one:
1456 a constructor call (including a C function returning a BLKmode struct)
1457 to initialize an argument. */
1458 if (stack_arg_under_construction)
1459 {
1460#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
e5d70561 1461 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
bfbf933a 1462#else
e5d70561 1463 rtx push_size = GEN_INT (args_size.constant);
bfbf933a
RS
1464#endif
1465 if (old_stack_level == 0)
1466 {
e5d70561 1467 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bfbf933a
RS
1468 old_pending_adj = pending_stack_adjust;
1469 pending_stack_adjust = 0;
1470 /* stack_arg_under_construction says whether a stack arg is
1471 being constructed at the old stack level. Pushing the stack
1472 gets a clean outgoing argument block. */
1473 old_stack_arg_under_construction = stack_arg_under_construction;
1474 stack_arg_under_construction = 0;
1475 /* Make a new map for the new argument list. */
1476 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
1477 bzero (stack_usage_map, highest_outgoing_arg_in_use);
1478 highest_outgoing_arg_in_use = 0;
1479 }
e5d70561 1480 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
bfbf933a
RS
1481 }
1482 /* If argument evaluation might modify the stack pointer, copy the
1483 address of the argument list to a register. */
1484 for (i = 0; i < num_actuals; i++)
1485 if (args[i].pass_on_stack)
1486 {
1487 argblock = copy_addr_to_reg (argblock);
1488 break;
1489 }
1490#endif
1491
1492
51bbfa0c
RS
1493 /* If we preallocated stack space, compute the address of each argument.
1494 We need not ensure it is a valid memory address here; it will be
1495 validized when it is used. */
1496 if (argblock)
1497 {
1498 rtx arg_reg = argblock;
1499 int arg_offset = 0;
1500
1501 if (GET_CODE (argblock) == PLUS)
1502 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1503
1504 for (i = 0; i < num_actuals; i++)
1505 {
1506 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1507 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1508 rtx addr;
1509
1510 /* Skip this parm if it will not be passed on the stack. */
1511 if (! args[i].pass_on_stack && args[i].reg != 0)
1512 continue;
1513
1514 if (GET_CODE (offset) == CONST_INT)
1515 addr = plus_constant (arg_reg, INTVAL (offset));
1516 else
1517 addr = gen_rtx (PLUS, Pmode, arg_reg, offset);
1518
1519 addr = plus_constant (addr, arg_offset);
1efe6448 1520 args[i].stack = gen_rtx (MEM, args[i].mode, addr);
0c0600d5 1521 MEM_IN_STRUCT_P (args[i].stack)
05e3bdb9 1522 = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));
51bbfa0c
RS
1523
1524 if (GET_CODE (slot_offset) == CONST_INT)
1525 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1526 else
1527 addr = gen_rtx (PLUS, Pmode, arg_reg, slot_offset);
1528
1529 addr = plus_constant (addr, arg_offset);
1efe6448 1530 args[i].stack_slot = gen_rtx (MEM, args[i].mode, addr);
51bbfa0c
RS
1531 }
1532 }
1533
1534#ifdef PUSH_ARGS_REVERSED
1535#ifdef STACK_BOUNDARY
1536 /* If we push args individually in reverse order, perform stack alignment
1537 before the first push (the last arg). */
1538 if (argblock == 0)
e5d70561
RK
1539 anti_adjust_stack (GEN_INT (args_size.constant
1540 - original_args_size.constant));
51bbfa0c
RS
1541#endif
1542#endif
1543
1544 /* Don't try to defer pops if preallocating, not even from the first arg,
1545 since ARGBLOCK probably refers to the SP. */
1546 if (argblock)
1547 NO_DEFER_POP;
1548
1549 /* Get the function to call, in the form of RTL. */
1550 if (fndecl)
ef5d30c9
RK
1551 {
1552 /* If this is the first use of the function, see if we need to
1553 make an external definition for it. */
1554 if (! TREE_USED (fndecl))
1555 {
1556 assemble_external (fndecl);
1557 TREE_USED (fndecl) = 1;
1558 }
1559
1560 /* Get a SYMBOL_REF rtx for the function address. */
1561 funexp = XEXP (DECL_RTL (fndecl), 0);
1562 }
51bbfa0c
RS
1563 else
1564 /* Generate an rtx (probably a pseudo-register) for the address. */
1565 {
cc79451b 1566 push_temp_slots ();
e5d70561 1567 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
cc79451b 1568 pop_temp_slots (); /* FUNEXP can't be BLKmode */
51bbfa0c
RS
1569 emit_queue ();
1570 }
1571
1572 /* Figure out the register where the value, if any, will come back. */
1573 valreg = 0;
1574 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1575 && ! structure_value_addr)
1576 {
1577 if (pcc_struct_value)
1578 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1579 fndecl);
1580 else
1581 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1582 }
1583
1584 /* Precompute all register parameters. It isn't safe to compute anything
1585 once we have started filling any specific hard regs. */
1586 reg_parm_seen = 0;
1587 for (i = 0; i < num_actuals; i++)
1588 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1589 {
1590 reg_parm_seen = 1;
1591
1592 if (args[i].value == 0)
1593 {
cc79451b 1594 push_temp_slots ();
e5d70561
RK
1595 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1596 VOIDmode, 0);
51bbfa0c 1597 preserve_temp_slots (args[i].value);
cc79451b 1598 pop_temp_slots ();
51bbfa0c
RS
1599
1600 /* ANSI doesn't require a sequence point here,
1601 but PCC has one, so this will avoid some problems. */
1602 emit_queue ();
1603 }
84b55618
RK
1604
1605 /* If we are to promote the function arg to a wider mode,
1606 do it now. */
84b55618 1607
843fec55
RK
1608 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
1609 args[i].value
1610 = convert_modes (args[i].mode,
1611 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1612 args[i].value, args[i].unsignedp);
ebef2728
RK
1613
1614 /* If the value is expensive, and we are inside an appropriately
1615 short loop, put the value into a pseudo and then put the pseudo
01368078
RK
1616 into the hard reg.
1617
1618 For small register classes, also do this if this call uses
1619 register parameters. This is to avoid reload conflicts while
1620	 loading the parameter registers. */
ebef2728
RK
1621
1622 if ((! (GET_CODE (args[i].value) == REG
1623 || (GET_CODE (args[i].value) == SUBREG
1624 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
1625 && args[i].mode != BLKmode
1626 && rtx_cost (args[i].value, SET) > 2
01368078 1627#ifdef SMALL_REGISTER_CLASSES
4f48d56a 1628 && (reg_parm_seen || preserve_subexpressions_p ())
01368078 1629#else
4f48d56a 1630 && preserve_subexpressions_p ()
01368078 1631#endif
4f48d56a 1632 )
ebef2728 1633 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
51bbfa0c
RS
1634 }
1635
1636#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1637 /* The argument list is the property of the called routine and it
1638 may clobber it. If the fixed area has been used for previous
1639 parameters, we must save and restore it.
1640
1641      Here we compute the boundary of the area that needs to be saved, if any. */
1642
b94301c2
RS
1643#ifdef ARGS_GROW_DOWNWARD
1644 for (i = 0; i < reg_parm_stack_space + 1; i++)
1645#else
6f90e075 1646 for (i = 0; i < reg_parm_stack_space; i++)
b94301c2 1647#endif
51bbfa0c
RS
1648 {
1649 if (i >= highest_outgoing_arg_in_use
1650 || stack_usage_map[i] == 0)
1651 continue;
1652
1653 if (low_to_save == -1)
1654 low_to_save = i;
1655
1656 high_to_save = i;
1657 }
1658
1659 if (low_to_save >= 0)
1660 {
1661 int num_to_save = high_to_save - low_to_save + 1;
1662 enum machine_mode save_mode
1663 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1664 rtx stack_area;
1665
1666 /* If we don't have the required alignment, must do this in BLKmode. */
1667 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1668 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1669 save_mode = BLKmode;
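      /* Illustration (assumed target values): with num_to_save == 4,
	 mode_for_size typically yields SImode.  If low_to_save is 6 and the
	 MIN (...) expression above is 4, then 6 & 3 == 2, so the area is not
	 SImode-aligned and the save must be done as a BLKmode block move.  */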
1670
1671 stack_area = gen_rtx (MEM, save_mode,
1672 memory_address (save_mode,
b94301c2
RS
1673
1674#ifdef ARGS_GROW_DOWNWARD
1675 plus_constant (argblock,
1676 - high_to_save)
1677#else
51bbfa0c 1678 plus_constant (argblock,
b94301c2
RS
1679 low_to_save)
1680#endif
1681 ));
51bbfa0c
RS
1682 if (save_mode == BLKmode)
1683 {
1684 save_area = assign_stack_temp (BLKmode, num_to_save, 1);
1685 emit_block_move (validize_mem (save_area), stack_area,
e5d70561 1686 GEN_INT (num_to_save),
51bbfa0c
RS
1687 PARM_BOUNDARY / BITS_PER_UNIT);
1688 }
1689 else
1690 {
1691 save_area = gen_reg_rtx (save_mode);
1692 emit_move_insn (save_area, stack_area);
1693 }
1694 }
1695#endif
1696
1697
1698 /* Now store (and compute if necessary) all non-register parms.
1699 These come before register parms, since they can require block-moves,
1700 which could clobber the registers used for register parms.
1701 Parms which have partial registers are not stored here,
1702 but we do preallocate space here if they want that. */
1703
1704 for (i = 0; i < num_actuals; i++)
1705 if (args[i].reg == 0 || args[i].pass_on_stack)
1706 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1707 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c 1708
4ab56118
RK
1709#ifdef STRICT_ALIGNMENT
1710 /* If we have a parm that is passed in registers but not in memory
1711 and whose alignment does not permit a direct copy into registers,
1712 make a group of pseudos that correspond to each register that we
1713 will later fill. */
1714
1715 for (i = 0; i < num_actuals; i++)
1716 if (args[i].reg != 0 && ! args[i].pass_on_stack
1717 && args[i].mode == BLKmode
1718 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1719 < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1720 {
1721 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
8498efd0 1722 int big_endian_correction = 0;
4ab56118
RK
1723
1724 args[i].n_aligned_regs
1725 = args[i].partial ? args[i].partial
1726 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1727
1728 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1729 * args[i].n_aligned_regs);
1730
8498efd0
JW
1731	/* Structures smaller than a word are aligned to the least significant
1732 byte (to the right). On a BYTES_BIG_ENDIAN machine, this means we
1733 must skip the empty high order bytes when calculating the bit
1734 offset. */
1735 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1736 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
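	  /* Example (assuming BITS_PER_WORD == 32): a 3-byte structure gives
	     big_endian_correction == 32 - 24 == 8, so the first piece is
	     stored at bit offset 8 rather than 0, skipping the unused
	     high-order byte of the word.  */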
1737
4ab56118
RK
1738 for (j = 0; j < args[i].n_aligned_regs; j++)
1739 {
1740 rtx reg = gen_reg_rtx (word_mode);
1741 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1742 int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1743 int bitpos;
1744
1745 args[i].aligned_regs[j] = reg;
1746
1747 /* Clobber REG and move each partword into it. Ensure we don't
1748 go past the end of the structure. Note that the loop below
1749 works because we've already verified that padding
1750 and endianness are compatible. */
1751
1752 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
1753
1754 for (bitpos = 0;
7a03f4b4 1755 bitpos < BITS_PER_WORD && bytes > 0;
4ab56118
RK
1756 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
1757 {
8498efd0 1758 int xbitpos = bitpos + big_endian_correction;
4ab56118
RK
1759
1760 store_bit_field (reg, bitsize, xbitpos, word_mode,
8498efd0 1761 extract_bit_field (word, bitsize, bitpos, 1,
4ab56118
RK
1762 NULL_RTX, word_mode,
1763 word_mode,
1764 bitsize / BITS_PER_UNIT,
1765 BITS_PER_WORD),
1766 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
1767 }
1768 }
1769 }
1770#endif
1771
51bbfa0c
RS
1772 /* Now store any partially-in-registers parm.
1773 This is the last place a block-move can happen. */
1774 if (reg_parm_seen)
1775 for (i = 0; i < num_actuals; i++)
1776 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1777 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1778 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c
RS
1779
1780#ifndef PUSH_ARGS_REVERSED
1781#ifdef STACK_BOUNDARY
1782 /* If we pushed args in forward order, perform stack alignment
1783 after pushing the last arg. */
1784 if (argblock == 0)
e5d70561
RK
1785 anti_adjust_stack (GEN_INT (args_size.constant
1786 - original_args_size.constant));
51bbfa0c
RS
1787#endif
1788#endif
1789
756e0e12
RS
1790 /* If register arguments require space on the stack and stack space
1791 was not preallocated, allocate stack space here for arguments
1792 passed in registers. */
6e716e89 1793#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
756e0e12 1794 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 1795 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
1796#endif
1797
51bbfa0c
RS
1798 /* Pass the function the address in which to return a structure value. */
1799 if (structure_value_addr && ! structure_value_addr_parm)
1800 {
1801 emit_move_insn (struct_value_rtx,
1802 force_reg (Pmode,
e5d70561
RK
1803 force_operand (structure_value_addr,
1804 NULL_RTX)));
51bbfa0c 1805 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 1806 use_reg (&call_fusage, struct_value_rtx);
51bbfa0c
RS
1807 }
1808
77cac2f2 1809 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
8b0f9101 1810
51bbfa0c
RS
1811 /* Now do the register loads required for any wholly-register parms or any
1812 parms which are passed both on the stack and in a register. Their
1813 expressions were already evaluated.
1814
1815 Mark all register-parms as living through the call, putting these USE
77cac2f2 1816 insns in the CALL_INSN_FUNCTION_USAGE field. */
51bbfa0c
RS
1817
1818 for (i = 0; i < num_actuals; i++)
1819 {
1820 rtx list = args[i].reg;
1821 int partial = args[i].partial;
1822
1823 while (list)
1824 {
1825 rtx reg;
1826 int nregs;
1827
1828 /* Process each register that needs to get this arg. */
1829 if (GET_CODE (list) == EXPR_LIST)
1830 reg = XEXP (list, 0), list = XEXP (list, 1);
1831 else
1832 reg = list, list = 0;
1833
6b972c4f
JW
1834	  /* Set to non-negative if we must move a word at a time, even if just
1835	     one word (e.g., partial == 1 && mode == DFmode).  Set to -1 if
1836	     we just use a normal move insn.  This value can be zero if the
1837	     argument is a zero-size structure with no fields. */
51bbfa0c
RS
1838 nregs = (partial ? partial
1839 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
6b972c4f
JW
1840 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1841 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1842 : -1));
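	  /* For example (assumed sizes): partial == 1 with DFmode gives
	     nregs == 1, so one word is moved explicitly; a 10-byte BLKmode
	     argument with UNITS_PER_WORD == 4 gives nregs == 3; an ordinary
	     SImode scalar gives nregs == -1 and is handled by the plain
	     emit_move_insn below.  */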
51bbfa0c
RS
1843
1844 /* If simple case, just do move. If normal partial, store_one_arg
1845 has already loaded the register for us. In all other cases,
1846 load the register(s) from memory. */
1847
6b972c4f 1848 if (nregs == -1)
51bbfa0c 1849 emit_move_insn (reg, args[i].value);
4ab56118
RK
1850
1851#ifdef STRICT_ALIGNMENT
1852 /* If we have pre-computed the values to put in the registers in
1853 the case of non-aligned structures, copy them in now. */
1854
1855 else if (args[i].n_aligned_regs != 0)
1856 for (j = 0; j < args[i].n_aligned_regs; j++)
1857 emit_move_insn (gen_rtx (REG, word_mode, REGNO (reg) + j),
1858 args[i].aligned_regs[j]);
1859#endif
1860
51bbfa0c 1861 else if (args[i].partial == 0 || args[i].pass_on_stack)
6b972c4f
JW
1862 move_block_to_reg (REGNO (reg),
1863 validize_mem (args[i].value), nregs,
1864 args[i].mode);
0304dfbb
DE
1865
1866 if (nregs == -1)
1867 use_reg (&call_fusage, reg);
1868 else
1869 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
51bbfa0c
RS
1870
1871 /* PARTIAL referred only to the first register, so clear it for the
1872 next time. */
1873 partial = 0;
1874 }
1875 }
1876
1877 /* Perform postincrements before actually calling the function. */
1878 emit_queue ();
1879
1880 /* All arguments and registers used for the call must be set up by now! */
1881
51bbfa0c
RS
1882 /* Generate the actual call instruction. */
1883 emit_call_1 (funexp, funtype, args_size.constant, struct_value_size,
1884 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
77cac2f2 1885 valreg, old_inhibit_defer_pop, call_fusage, is_const);
51bbfa0c
RS
1886
1887 /* If call is cse'able, make appropriate pair of reg-notes around it.
1888 Test valreg so we don't crash; may safely ignore `const'
1889 if return type is void. */
1890 if (is_const && valreg != 0)
1891 {
1892 rtx note = 0;
1893 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1894 rtx insns;
1895
1896 /* Construct an "equal form" for the value which mentions all the
1897 arguments in order as well as the function name. */
1898#ifdef PUSH_ARGS_REVERSED
1899 for (i = 0; i < num_actuals; i++)
1900 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1901#else
1902 for (i = num_actuals - 1; i >= 0; i--)
1903 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1904#endif
1905 note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note);
1906
1907 insns = get_insns ();
1908 end_sequence ();
1909
1910 emit_libcall_block (insns, temp, valreg, note);
1911
1912 valreg = temp;
1913 }
4f48d56a
RK
1914 else if (is_const)
1915 {
1916 /* Otherwise, just write out the sequence without a note. */
1917 rtx insns = get_insns ();
1918
1919 end_sequence ();
1920 emit_insns (insns);
1921 }
51bbfa0c
RS
1922
1923 /* For calls to `setjmp', etc., inform flow.c it should complain
1924 if nonvolatile values are live. */
1925
1926 if (returns_twice)
1927 {
1928 emit_note (name, NOTE_INSN_SETJMP);
1929 current_function_calls_setjmp = 1;
1930 }
1931
1932 if (is_longjmp)
1933 current_function_calls_longjmp = 1;
1934
1935 /* Notice functions that cannot return.
1936 If optimizing, insns emitted below will be dead.
1937 If not optimizing, they will exist, which is useful
1938 if the user uses the `return' command in the debugger. */
1939
1940 if (is_volatile || is_longjmp)
1941 emit_barrier ();
1942
51bbfa0c
RS
1943 /* If value type not void, return an rtx for the value. */
1944
1945 /* If there are cleanups to be called, don't use a hard reg as target. */
1946 if (cleanups_this_call != old_cleanups
1947 && target && REG_P (target)
1948 && REGNO (target) < FIRST_PSEUDO_REGISTER)
1949 target = 0;
1950
1951 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
1952 || ignore)
1953 {
1954 target = const0_rtx;
1955 }
1956 else if (structure_value_addr)
1957 {
1958 if (target == 0 || GET_CODE (target) != MEM)
29008b51
JW
1959 {
1960 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1961 memory_address (TYPE_MODE (TREE_TYPE (exp)),
1962 structure_value_addr));
05e3bdb9 1963 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
29008b51 1964 }
51bbfa0c
RS
1965 }
1966 else if (pcc_struct_value)
1967 {
1968 if (target == 0)
29008b51 1969 {
30082223
RS
1970	      /* We used to leave the value in the location that it is
1971 returned in, but that causes problems if it is used more
1972 than once in one expression. Rather than trying to track
1973 when a copy is required, we always copy when TARGET is
1974 not specified. This calling sequence is only used on
1975 a few machines and TARGET is usually nonzero. */
1976 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
6d8b61b9
RS
1977 {
1978 target = assign_stack_temp (BLKmode,
1979 int_size_in_bytes (TREE_TYPE (exp)),
1980 0);
1981
05e3bdb9 1982 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
3b780899 1983
6d8b61b9
RS
1984 /* Save this temp slot around the pop below. */
1985 preserve_temp_slots (target);
1986 }
30082223
RS
1987 else
1988 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
29008b51 1989 }
30082223
RS
1990
1991 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
51bbfa0c
RS
1992 emit_move_insn (target, gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1993 copy_to_reg (valreg)));
1994 else
1995 emit_block_move (target, gen_rtx (MEM, BLKmode, copy_to_reg (valreg)),
1996 expr_size (exp),
1997 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
1998 }
84b55618
RK
1999 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2000 && GET_MODE (target) == GET_MODE (valreg))
51bbfa0c
RS
2001 /* TARGET and VALREG cannot be equal at this point because the latter
2002 would not have REG_FUNCTION_VALUE_P true, while the former would if
2003 it were referring to the same register.
2004
2005 If they refer to the same register, this move will be a no-op, except
2006 when function inlining is being done. */
2007 emit_move_insn (target, valreg);
766b19fb
JL
2008 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2009 {
2010 /* Some machines (the PA for example) want to return all small
2011 structures in registers regardless of the structure's alignment.
2012
2013 Deal with them explicitly by copying from the return registers
2014 into the target MEM locations. */
2015 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2016 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2017 int i;
2018 enum machine_mode tmpmode;
2019
2020 if (target == 0)
2021 target = assign_stack_temp (BLKmode, bytes, 0);
2022 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2023
2024 /* We could probably emit more efficient code for machines
2025 which do not use strict alignment, but it doesn't seem
2026 worth the effort at the current time. */
2027 for (i = 0; i < n_regs; i++)
2028 {
2029 rtx src = operand_subword_force (valreg, i, BLKmode);
2030 rtx dst = operand_subword (target, i, 1, BLKmode);
2031 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (exp)), BITS_PER_WORD);
2032 int bitpos, big_endian_correction = 0;
2033
2034 /* Should never happen. */
2035 if (src == NULL || dst == NULL)
2036 abort ();
2037
2038 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
2039 big_endian_correction
2040 = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
2041
2042 for (bitpos = 0;
2043 bitpos < BITS_PER_WORD && bytes > 0;
2044 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
2045 {
2046 int xbitpos = bitpos + big_endian_correction;
2047
2048 store_bit_field (dst, bitsize, xbitpos, word_mode,
2049 extract_bit_field (src, bitsize, bitpos, 1,
2050 NULL_RTX, word_mode,
2051 word_mode,
2052 bitsize / BITS_PER_UNIT,
2053 BITS_PER_WORD),
2054 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2055 }
2056 }
2057 }
51bbfa0c
RS
2058 else
2059 target = copy_to_reg (valreg);
2060
84b55618 2061#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
2062 /* If we promoted this return value, make the proper SUBREG. TARGET
2063 might be const0_rtx here, so be careful. */
2064 if (GET_CODE (target) == REG
766b19fb 2065 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
5d2ac65e 2066 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 2067 {
321e0bba
RK
2068 tree type = TREE_TYPE (exp);
2069 int unsignedp = TREE_UNSIGNED (type);
84b55618 2070
321e0bba
RK
2071 /* If we don't promote as expected, something is wrong. */
2072 if (GET_MODE (target)
2073 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
5d2ac65e
RK
2074 abort ();
2075
321e0bba 2076 target = gen_rtx (SUBREG, TYPE_MODE (type), target, 0);
84b55618
RK
2077 SUBREG_PROMOTED_VAR_P (target) = 1;
2078 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2079 }
2080#endif
2081
5dab5552
MS
2082 if (flag_short_temps)
2083 {
2084 /* Perform all cleanups needed for the arguments of this call
2085 (i.e. destructors in C++). */
2086 expand_cleanups_to (old_cleanups);
2087 }
51bbfa0c 2088
2f4aa534
RS
2089 /* If size of args is variable or this was a constructor call for a stack
2090 argument, restore saved stack-pointer value. */
51bbfa0c
RS
2091
2092 if (old_stack_level)
2093 {
e5d70561 2094 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 2095 pending_stack_adjust = old_pending_adj;
d64f5a78 2096#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2097 stack_arg_under_construction = old_stack_arg_under_construction;
2098 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2099 stack_usage_map = initial_stack_usage_map;
d64f5a78 2100#endif
51bbfa0c 2101 }
51bbfa0c
RS
2102#ifdef ACCUMULATE_OUTGOING_ARGS
2103 else
2104 {
2105#ifdef REG_PARM_STACK_SPACE
2106 if (save_area)
2107 {
2108 enum machine_mode save_mode = GET_MODE (save_area);
2109 rtx stack_area
2110 = gen_rtx (MEM, save_mode,
2111 memory_address (save_mode,
b94301c2
RS
2112#ifdef ARGS_GROW_DOWNWARD
2113 plus_constant (argblock, - high_to_save)
2114#else
2115 plus_constant (argblock, low_to_save)
2116#endif
2117 ));
51bbfa0c
RS
2118
2119 if (save_mode != BLKmode)
2120 emit_move_insn (stack_area, save_area);
2121 else
2122 emit_block_move (stack_area, validize_mem (save_area),
e5d70561
RK
2123 GEN_INT (high_to_save - low_to_save + 1),
2124 PARM_BOUNDARY / BITS_PER_UNIT);
51bbfa0c
RS
2125 }
2126#endif
2127
2128 /* If we saved any argument areas, restore them. */
2129 for (i = 0; i < num_actuals; i++)
2130 if (args[i].save_area)
2131 {
2132 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2133 rtx stack_area
2134 = gen_rtx (MEM, save_mode,
2135 memory_address (save_mode,
2136 XEXP (args[i].stack_slot, 0)));
2137
2138 if (save_mode != BLKmode)
2139 emit_move_insn (stack_area, args[i].save_area);
2140 else
2141 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 2142 GEN_INT (args[i].size.constant),
51bbfa0c
RS
2143 PARM_BOUNDARY / BITS_PER_UNIT);
2144 }
2145
2146 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2147 stack_usage_map = initial_stack_usage_map;
2148 }
2149#endif
2150
59257ff7
RK
2151 /* If this was alloca, record the new stack level for nonlocal gotos.
2152 Check for the handler slots since we might not have a save area
2153 for non-local gotos. */
2154
2155 if (may_be_alloca && nonlocal_goto_handler_slot != 0)
e5d70561 2156 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
2157
2158 pop_temp_slots ();
2159
2160 return target;
2161}
2162\f
322e3e34
RK
2163/* Output a library call to function FUN (a SYMBOL_REF rtx)
2164 (emitting the queue unless NO_QUEUE is nonzero),
2165 for a value of mode OUTMODE,
2166 with NARGS different arguments, passed as alternating rtx values
2167 and machine_modes to convert them to.
2168 The rtx values should have been passed through protect_from_queue already.
2169
2170 NO_QUEUE will be true if and only if the library call is a `const' call
2171 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2172 to the variable is_const in expand_call.
2173
2174 NO_QUEUE must be true for const calls, because if it isn't, then
2175 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2176 and will be lost if the libcall sequence is optimized away.
2177
2178 NO_QUEUE must be false for non-const calls, because if it isn't, the
2179 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2180 optimized. For instance, the instruction scheduler may incorrectly
2181 move memory references across the non-const call. */
2182
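/* Illustrative use (a sketch only; the libcall name "__foo" and the operand
   rtxes OP0 and OP1 are hypothetical, not taken from this file):

     emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__foo"),
			1, VOIDmode, 2, op0, SImode, op1, Pmode);

   passes OP0 and OP1 as two arguments in the given modes and, via NO_QUEUE,
   marks the call as `const'.  */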
2183void
4f90e4a0
RK
2184emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2185 int nargs, ...))
322e3e34 2186{
4f90e4a0
RK
2187#ifndef __STDC__
2188 rtx orgfun;
2189 int no_queue;
2190 enum machine_mode outmode;
2191 int nargs;
2192#endif
322e3e34
RK
2193 va_list p;
2194 /* Total size in bytes of all the stack-parms scanned so far. */
2195 struct args_size args_size;
2196 /* Size of arguments before any adjustments (such as rounding). */
2197 struct args_size original_args_size;
2198 register int argnum;
322e3e34 2199 rtx fun;
322e3e34
RK
2200 int inc;
2201 int count;
2202 rtx argblock = 0;
2203 CUMULATIVE_ARGS args_so_far;
2204 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2205 struct args_size offset; struct args_size size; };
2206 struct arg *argvec;
2207 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2208 rtx call_fusage = 0;
efd65a8b
RS
2209 /* library calls are never indirect calls. */
2210 int current_call_is_indirect = 0;
322e3e34 2211
4f90e4a0
RK
2212 VA_START (p, nargs);
2213
2214#ifndef __STDC__
2215 orgfun = va_arg (p, rtx);
322e3e34
RK
2216 no_queue = va_arg (p, int);
2217 outmode = va_arg (p, enum machine_mode);
2218 nargs = va_arg (p, int);
4f90e4a0
RK
2219#endif
2220
2221 fun = orgfun;
322e3e34
RK
2222
2223 /* Copy all the libcall-arguments out of the varargs data
2224 and into a vector ARGVEC.
2225
2226 Compute how to pass each argument. We only support a very small subset
2227 of the full argument passing conventions to limit complexity here since
2228 library functions shouldn't have many args. */
2229
2230 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2231
2232 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2233
2234 args_size.constant = 0;
2235 args_size.var = 0;
2236
888aa7a9
RS
2237 push_temp_slots ();
2238
322e3e34
RK
2239 for (count = 0; count < nargs; count++)
2240 {
2241 rtx val = va_arg (p, rtx);
2242 enum machine_mode mode = va_arg (p, enum machine_mode);
2243
2244 /* We cannot convert the arg value to the mode the library wants here;
2245 must do it earlier where we know the signedness of the arg. */
2246 if (mode == BLKmode
2247 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2248 abort ();
2249
2250 /* On some machines, there's no way to pass a float to a library fcn.
2251 Pass it as a double instead. */
2252#ifdef LIBGCC_NEEDS_DOUBLE
2253 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2254 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2255#endif
2256
2257 /* There's no need to call protect_from_queue, because
2258 either emit_move_insn or emit_push_insn will do that. */
2259
2260 /* Make sure it is a reasonable operand for a move or push insn. */
2261 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2262 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2263 val = force_operand (val, NULL_RTX);
2264
322e3e34
RK
2265#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2266 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2267 {
a44492f0
RK
2268 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2269 be viewed as just an efficiency improvement. */
888aa7a9
RS
2270 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2271 emit_move_insn (slot, val);
8301b6e2 2272 val = force_operand (XEXP (slot, 0), NULL_RTX);
a44492f0 2273 mode = Pmode;
888aa7a9 2274 }
322e3e34
RK
2275#endif
2276
888aa7a9
RS
2277 argvec[count].value = val;
2278 argvec[count].mode = mode;
2279
322e3e34
RK
2280 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2281 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2282 abort ();
2283#ifdef FUNCTION_ARG_PARTIAL_NREGS
2284 argvec[count].partial
2285 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2286#else
2287 argvec[count].partial = 0;
2288#endif
2289
2290 locate_and_pad_parm (mode, NULL_TREE,
2291 argvec[count].reg && argvec[count].partial == 0,
2292 NULL_TREE, &args_size, &argvec[count].offset,
2293 &argvec[count].size);
2294
2295 if (argvec[count].size.var)
2296 abort ();
2297
2298#ifndef REG_PARM_STACK_SPACE
2299 if (argvec[count].partial)
2300 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2301#endif
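      /* Example (assumed values): for an 8-byte argument with partial == 1
	 and UNITS_PER_WORD == 4, one word travels in a register, so the
	 stack size recorded for the argument drops from 8 to 4 bytes.  */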
2302
2303 if (argvec[count].reg == 0 || argvec[count].partial != 0
2304#ifdef REG_PARM_STACK_SPACE
2305 || 1
2306#endif
2307 )
2308 args_size.constant += argvec[count].size.constant;
2309
2310#ifdef ACCUMULATE_OUTGOING_ARGS
2311 /* If this arg is actually passed on the stack, it might be
2312 clobbering something we already put there (this library call might
2313 be inside the evaluation of an argument to a function whose call
2314 requires the stack). This will only occur when the library call
2315 has sufficient args to run out of argument registers. Abort in
2316 this case; if this ever occurs, code must be added to save and
2317 restore the arg slot. */
2318
2319 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2320 abort ();
2321#endif
2322
2323 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2324 }
2325 va_end (p);
2326
2327 /* If this machine requires an external definition for library
2328 functions, write one out. */
2329 assemble_external_libcall (fun);
2330
2331 original_args_size = args_size;
2332#ifdef STACK_BOUNDARY
2333 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2334 / STACK_BYTES) * STACK_BYTES);
2335#endif
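  /* Worked example (assuming STACK_BYTES == 8): a raw size of 20 bytes
     becomes ((20 + 7) / 8) * 8 == 24, i.e. the next multiple of the
     stack boundary.  */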
2336
2337#ifdef REG_PARM_STACK_SPACE
2338 args_size.constant = MAX (args_size.constant,
2339 REG_PARM_STACK_SPACE (NULL_TREE));
2340#ifndef OUTGOING_REG_PARM_STACK_SPACE
2341 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2342#endif
2343#endif
2344
322e3e34
RK
2345 if (args_size.constant > current_function_outgoing_args_size)
2346 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2347
2348#ifdef ACCUMULATE_OUTGOING_ARGS
322e3e34
RK
2349 args_size.constant = 0;
2350#endif
2351
2352#ifndef PUSH_ROUNDING
2353 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2354#endif
2355
2356#ifdef PUSH_ARGS_REVERSED
2357#ifdef STACK_BOUNDARY
2358 /* If we push args individually in reverse order, perform stack alignment
2359 before the first push (the last arg). */
2360 if (argblock == 0)
2361 anti_adjust_stack (GEN_INT (args_size.constant
2362 - original_args_size.constant));
2363#endif
2364#endif
2365
2366#ifdef PUSH_ARGS_REVERSED
2367 inc = -1;
2368 argnum = nargs - 1;
2369#else
2370 inc = 1;
2371 argnum = 0;
2372#endif
2373
2374 /* Push the args that need to be pushed. */
2375
2376 for (count = 0; count < nargs; count++, argnum += inc)
2377 {
2378 register enum machine_mode mode = argvec[argnum].mode;
2379 register rtx val = argvec[argnum].value;
2380 rtx reg = argvec[argnum].reg;
2381 int partial = argvec[argnum].partial;
2382
2383 if (! (reg != 0 && partial == 0))
2384 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2385 argblock, GEN_INT (argvec[count].offset.constant));
2386 NO_DEFER_POP;
2387 }
2388
2389#ifndef PUSH_ARGS_REVERSED
2390#ifdef STACK_BOUNDARY
2391 /* If we pushed args in forward order, perform stack alignment
2392 after pushing the last arg. */
2393 if (argblock == 0)
2394 anti_adjust_stack (GEN_INT (args_size.constant
2395 - original_args_size.constant));
2396#endif
2397#endif
2398
2399#ifdef PUSH_ARGS_REVERSED
2400 argnum = nargs - 1;
2401#else
2402 argnum = 0;
2403#endif
2404
77cac2f2 2405 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 2406
322e3e34
RK
2407 /* Now load any reg parms into their regs. */
2408
2409 for (count = 0; count < nargs; count++, argnum += inc)
2410 {
2411 register enum machine_mode mode = argvec[argnum].mode;
2412 register rtx val = argvec[argnum].value;
2413 rtx reg = argvec[argnum].reg;
2414 int partial = argvec[argnum].partial;
2415
2416 if (reg != 0 && partial == 0)
2417 emit_move_insn (reg, val);
2418 NO_DEFER_POP;
2419 }
2420
2421 /* For version 1.37, try deleting this entirely. */
2422 if (! no_queue)
2423 emit_queue ();
2424
2425 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
2426 for (count = 0; count < nargs; count++)
2427 if (argvec[count].reg != 0)
77cac2f2 2428 use_reg (&call_fusage, argvec[count].reg);
322e3e34 2429
322e3e34
RK
2430 /* Don't allow popping to be deferred, since then
2431 cse'ing of library calls could delete a call and leave the pop. */
2432 NO_DEFER_POP;
2433
2434 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2435 will set inhibit_defer_pop to that value. */
2436
2437 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2438 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2439 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 2440 old_inhibit_defer_pop + 1, call_fusage, no_queue);
322e3e34 2441
888aa7a9
RS
2442 pop_temp_slots ();
2443
322e3e34
RK
2444 /* Now restore inhibit_defer_pop to its actual original value. */
2445 OK_DEFER_POP;
2446}
2447\f
2448/* Like emit_library_call except that an extra argument, VALUE,
2449 comes second and says where to store the result.
fac0ad80
RS
2450 (If VALUE is zero, this function chooses a convenient way
2451   to return the value.)
322e3e34 2452
fac0ad80
RS
2453 This function returns an rtx for where the value is to be found.
2454 If VALUE is nonzero, VALUE is returned. */
2455
2456rtx
4f90e4a0
RK
2457emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2458 enum machine_mode outmode, int nargs, ...))
322e3e34 2459{
4f90e4a0
RK
2460#ifndef __STDC__
2461 rtx orgfun;
2462 rtx value;
2463 int no_queue;
2464 enum machine_mode outmode;
2465 int nargs;
2466#endif
322e3e34
RK
2467 va_list p;
2468 /* Total size in bytes of all the stack-parms scanned so far. */
2469 struct args_size args_size;
2470 /* Size of arguments before any adjustments (such as rounding). */
2471 struct args_size original_args_size;
2472 register int argnum;
322e3e34 2473 rtx fun;
322e3e34
RK
2474 int inc;
2475 int count;
2476 rtx argblock = 0;
2477 CUMULATIVE_ARGS args_so_far;
2478 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2479 struct args_size offset; struct args_size size; };
2480 struct arg *argvec;
2481 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2482 rtx call_fusage = 0;
322e3e34 2483 rtx mem_value = 0;
fac0ad80 2484 int pcc_struct_value = 0;
4f389214 2485 int struct_value_size = 0;
efd65a8b
RS
2486 /* library calls are never indirect calls. */
2487 int current_call_is_indirect = 0;
d61bee95 2488 int is_const;
322e3e34 2489
4f90e4a0
RK
2490 VA_START (p, nargs);
2491
2492#ifndef __STDC__
2493 orgfun = va_arg (p, rtx);
322e3e34
RK
2494 value = va_arg (p, rtx);
2495 no_queue = va_arg (p, int);
2496 outmode = va_arg (p, enum machine_mode);
2497 nargs = va_arg (p, int);
4f90e4a0
RK
2498#endif
2499
d61bee95 2500 is_const = no_queue;
4f90e4a0 2501 fun = orgfun;
322e3e34
RK
2502
2503 /* If this kind of value comes back in memory,
2504 decide where in memory it should come back. */
fac0ad80 2505 if (aggregate_value_p (type_for_mode (outmode, 0)))
322e3e34 2506 {
fac0ad80
RS
2507#ifdef PCC_STATIC_STRUCT_RETURN
2508 rtx pointer_reg
2509 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2510 0);
2511 mem_value = gen_rtx (MEM, outmode, pointer_reg);
2512 pcc_struct_value = 1;
2513 if (value == 0)
2514 value = gen_reg_rtx (outmode);
2515#else /* not PCC_STATIC_STRUCT_RETURN */
4f389214 2516 struct_value_size = GET_MODE_SIZE (outmode);
fac0ad80 2517 if (value != 0 && GET_CODE (value) == MEM)
322e3e34
RK
2518 mem_value = value;
2519 else
2520 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
fac0ad80 2521#endif
779c643a
JW
2522
2523 /* This call returns a big structure. */
2524 is_const = 0;
322e3e34
RK
2525 }
2526
2527 /* ??? Unfinished: must pass the memory address as an argument. */
2528
2529 /* Copy all the libcall-arguments out of the varargs data
2530 and into a vector ARGVEC.
2531
2532 Compute how to pass each argument. We only support a very small subset
2533 of the full argument passing conventions to limit complexity here since
2534 library functions shouldn't have many args. */
2535
2536 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2537
2538 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2539
2540 args_size.constant = 0;
2541 args_size.var = 0;
2542
2543 count = 0;
2544
888aa7a9
RS
2545 push_temp_slots ();
2546
322e3e34
RK
2547 /* If there's a structure value address to be passed,
2548 either pass it in the special place, or pass it as an extra argument. */
fac0ad80 2549 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
322e3e34
RK
2550 {
2551 rtx addr = XEXP (mem_value, 0);
fac0ad80 2552 nargs++;
322e3e34 2553
fac0ad80
RS
2554 /* Make sure it is a reasonable operand for a move or push insn. */
2555 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2556 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2557 addr = force_operand (addr, NULL_RTX);
322e3e34 2558
fac0ad80 2559 argvec[count].value = addr;
4fc3dcd5 2560 argvec[count].mode = Pmode;
fac0ad80 2561 argvec[count].partial = 0;
322e3e34 2562
4fc3dcd5 2563 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
322e3e34 2564#ifdef FUNCTION_ARG_PARTIAL_NREGS
4fc3dcd5 2565 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
fac0ad80 2566 abort ();
322e3e34
RK
2567#endif
2568
4fc3dcd5 2569 locate_and_pad_parm (Pmode, NULL_TREE,
fac0ad80
RS
2570 argvec[count].reg && argvec[count].partial == 0,
2571 NULL_TREE, &args_size, &argvec[count].offset,
2572 &argvec[count].size);
322e3e34
RK
2573
2574
fac0ad80 2575 if (argvec[count].reg == 0 || argvec[count].partial != 0
322e3e34 2576#ifdef REG_PARM_STACK_SPACE
fac0ad80 2577 || 1
322e3e34 2578#endif
fac0ad80
RS
2579 )
2580 args_size.constant += argvec[count].size.constant;
322e3e34 2581
4fc3dcd5 2582 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree)0, 1);
fac0ad80
RS
2583
2584 count++;
322e3e34
RK
2585 }
2586
2587 for (; count < nargs; count++)
2588 {
2589 rtx val = va_arg (p, rtx);
2590 enum machine_mode mode = va_arg (p, enum machine_mode);
2591
2592 /* We cannot convert the arg value to the mode the library wants here;
2593 must do it earlier where we know the signedness of the arg. */
2594 if (mode == BLKmode
2595 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2596 abort ();
2597
2598 /* On some machines, there's no way to pass a float to a library fcn.
2599 Pass it as a double instead. */
2600#ifdef LIBGCC_NEEDS_DOUBLE
2601 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2602 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2603#endif
2604
2605 /* There's no need to call protect_from_queue, because
2606 either emit_move_insn or emit_push_insn will do that. */
2607
2608 /* Make sure it is a reasonable operand for a move or push insn. */
2609 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2610 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2611 val = force_operand (val, NULL_RTX);
2612
322e3e34
RK
2613#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2614 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2615 {
a44492f0
RK
2616 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2617 be viewed as just an efficiency improvement. */
888aa7a9
RS
2618 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2619 emit_move_insn (slot, val);
2620 val = XEXP (slot, 0);
2621 mode = Pmode;
2622 }
322e3e34
RK
2623#endif
2624
888aa7a9
RS
2625 argvec[count].value = val;
2626 argvec[count].mode = mode;
2627
322e3e34
RK
2628 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2629 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2630 abort ();
2631#ifdef FUNCTION_ARG_PARTIAL_NREGS
2632 argvec[count].partial
2633 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2634#else
2635 argvec[count].partial = 0;
2636#endif
2637
2638 locate_and_pad_parm (mode, NULL_TREE,
2639 argvec[count].reg && argvec[count].partial == 0,
2640 NULL_TREE, &args_size, &argvec[count].offset,
2641 &argvec[count].size);
2642
2643 if (argvec[count].size.var)
2644 abort ();
2645
2646#ifndef REG_PARM_STACK_SPACE
2647 if (argvec[count].partial)
2648 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2649#endif
2650
2651 if (argvec[count].reg == 0 || argvec[count].partial != 0
2652#ifdef REG_PARM_STACK_SPACE
2653 || 1
2654#endif
2655 )
2656 args_size.constant += argvec[count].size.constant;
2657
2658#ifdef ACCUMULATE_OUTGOING_ARGS
2659 /* If this arg is actually passed on the stack, it might be
2660 clobbering something we already put there (this library call might
2661 be inside the evaluation of an argument to a function whose call
2662 requires the stack). This will only occur when the library call
2663 has sufficient args to run out of argument registers. Abort in
2664 this case; if this ever occurs, code must be added to save and
2665 restore the arg slot. */
2666
2667 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2668 abort ();
2669#endif
2670
2671 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2672 }
2673 va_end (p);
2674
2675 /* If this machine requires an external definition for library
2676 functions, write one out. */
2677 assemble_external_libcall (fun);
2678
2679 original_args_size = args_size;
2680#ifdef STACK_BOUNDARY
2681 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2682 / STACK_BYTES) * STACK_BYTES);
2683#endif
2684
2685#ifdef REG_PARM_STACK_SPACE
2686 args_size.constant = MAX (args_size.constant,
2687 REG_PARM_STACK_SPACE (NULL_TREE));
2688#ifndef OUTGOING_REG_PARM_STACK_SPACE
2689 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2690#endif
2691#endif
2692
322e3e34
RK
2693 if (args_size.constant > current_function_outgoing_args_size)
2694 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2695
2696#ifdef ACCUMULATE_OUTGOING_ARGS
322e3e34
RK
2697 args_size.constant = 0;
2698#endif
2699
2700#ifndef PUSH_ROUNDING
2701 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2702#endif
2703
2704#ifdef PUSH_ARGS_REVERSED
2705#ifdef STACK_BOUNDARY
2706 /* If we push args individually in reverse order, perform stack alignment
2707 before the first push (the last arg). */
2708 if (argblock == 0)
2709 anti_adjust_stack (GEN_INT (args_size.constant
2710 - original_args_size.constant));
2711#endif
2712#endif
2713
2714#ifdef PUSH_ARGS_REVERSED
2715 inc = -1;
2716 argnum = nargs - 1;
2717#else
2718 inc = 1;
2719 argnum = 0;
2720#endif
2721
2722 /* Push the args that need to be pushed. */
2723
2724 for (count = 0; count < nargs; count++, argnum += inc)
2725 {
2726 register enum machine_mode mode = argvec[argnum].mode;
2727 register rtx val = argvec[argnum].value;
2728 rtx reg = argvec[argnum].reg;
2729 int partial = argvec[argnum].partial;
2730
2731 if (! (reg != 0 && partial == 0))
2732 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2733 argblock, GEN_INT (argvec[count].offset.constant));
2734 NO_DEFER_POP;
2735 }
2736
2737#ifndef PUSH_ARGS_REVERSED
2738#ifdef STACK_BOUNDARY
2739 /* If we pushed args in forward order, perform stack alignment
2740 after pushing the last arg. */
2741 if (argblock == 0)
2742 anti_adjust_stack (GEN_INT (args_size.constant
2743 - original_args_size.constant));
2744#endif
2745#endif
2746
2747#ifdef PUSH_ARGS_REVERSED
2748 argnum = nargs - 1;
2749#else
2750 argnum = 0;
2751#endif
2752
77cac2f2 2753 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 2754
322e3e34
RK
2755 /* Now load any reg parms into their regs. */
2756
322e3e34
RK
2757 for (count = 0; count < nargs; count++, argnum += inc)
2758 {
2759 register enum machine_mode mode = argvec[argnum].mode;
2760 register rtx val = argvec[argnum].value;
2761 rtx reg = argvec[argnum].reg;
2762 int partial = argvec[argnum].partial;
2763
2764 if (reg != 0 && partial == 0)
2765 emit_move_insn (reg, val);
2766 NO_DEFER_POP;
2767 }
2768
2769#if 0
2770 /* For version 1.37, try deleting this entirely. */
2771 if (! no_queue)
2772 emit_queue ();
2773#endif
2774
2775 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
2776 for (count = 0; count < nargs; count++)
2777 if (argvec[count].reg != 0)
77cac2f2 2778 use_reg (&call_fusage, argvec[count].reg);
322e3e34 2779
fac0ad80
RS
2780 /* Pass the function the address in which to return a structure value. */
2781 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
2782 {
2783 emit_move_insn (struct_value_rtx,
2784 force_reg (Pmode,
2785 force_operand (XEXP (mem_value, 0),
2786 NULL_RTX)));
2787 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 2788 use_reg (&call_fusage, struct_value_rtx);
fac0ad80
RS
2789 }
2790
322e3e34
RK
2791 /* Don't allow popping to be deferred, since then
2792 cse'ing of library calls could delete a call and leave the pop. */
2793 NO_DEFER_POP;
2794
2795 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2796 will set inhibit_defer_pop to that value. */
2797
4f389214
RS
2798 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant,
2799 struct_value_size,
322e3e34 2800 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
fac0ad80
RS
2801 (outmode != VOIDmode && mem_value == 0
2802 ? hard_libcall_value (outmode) : NULL_RTX),
77cac2f2 2803 old_inhibit_defer_pop + 1, call_fusage, is_const);
322e3e34
RK
2804
2805 /* Now restore inhibit_defer_pop to its actual original value. */
2806 OK_DEFER_POP;
2807
888aa7a9
RS
2808 pop_temp_slots ();
2809
322e3e34
RK
2810 /* Copy the value to the right place. */
2811 if (outmode != VOIDmode)
2812 {
2813 if (mem_value)
2814 {
2815 if (value == 0)
fac0ad80 2816 value = mem_value;
322e3e34
RK
2817 if (value != mem_value)
2818 emit_move_insn (value, mem_value);
2819 }
2820 else if (value != 0)
2821 emit_move_insn (value, hard_libcall_value (outmode));
fac0ad80
RS
2822 else
2823 value = hard_libcall_value (outmode);
322e3e34 2824 }
fac0ad80
RS
2825
2826 return value;
322e3e34
RK
2827}
2828\f
51bbfa0c
RS
2829#if 0
2830/* Return an rtx which represents a suitable home on the stack
2831 given TYPE, the type of the argument looking for a home.
2832 This is called only for BLKmode arguments.
2833
2834 SIZE is the size needed for this target.
2835 ARGS_ADDR is the address of the bottom of the argument block for this call.
2836 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
2837 if this machine uses push insns. */
2838
2839static rtx
2840target_for_arg (type, size, args_addr, offset)
2841 tree type;
2842 rtx size;
2843 rtx args_addr;
2844 struct args_size offset;
2845{
2846 rtx target;
2847 rtx offset_rtx = ARGS_SIZE_RTX (offset);
2848
2849  /* We avoid calling memory_address when possible,
2850 because we want to address as close to the stack
2851 as possible. For non-variable sized arguments,
2852 this will be stack-pointer relative addressing. */
2853 if (GET_CODE (offset_rtx) == CONST_INT)
2854 target = plus_constant (args_addr, INTVAL (offset_rtx));
2855 else
2856 {
2857 /* I have no idea how to guarantee that this
2858 will work in the presence of register parameters. */
2859 target = gen_rtx (PLUS, Pmode, args_addr, offset_rtx);
2860 target = memory_address (QImode, target);
2861 }
2862
2863 return gen_rtx (MEM, BLKmode, target);
2864}
2865#endif
2866\f
2867/* Store a single argument for a function call
2868 into the register or memory area where it must be passed.
2869 *ARG describes the argument value and where to pass it.
2870
2871 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 2872 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
2873
2874 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
2875 so must be careful about how the stack is used.
2876
2877 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
2878 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
2879 that we need not worry about saving and restoring the stack.
2880
2881 FNDECL is the declaration of the function we are calling. */
2882
2883static void
6f90e075
JW
2884store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl,
2885 reg_parm_stack_space)
51bbfa0c
RS
2886 struct arg_data *arg;
2887 rtx argblock;
2888 int may_be_alloca;
2889 int variable_size;
2890 tree fndecl;
6f90e075 2891 int reg_parm_stack_space;
51bbfa0c
RS
2892{
2893 register tree pval = arg->tree_value;
2894 rtx reg = 0;
2895 int partial = 0;
2896 int used = 0;
2897 int i, lower_bound, upper_bound;
2898
2899 if (TREE_CODE (pval) == ERROR_MARK)
2900 return;
2901
cc79451b
RK
2902 /* Push a new temporary level for any temporaries we make for
2903 this argument. */
2904 push_temp_slots ();
2905
51bbfa0c
RS
2906#ifdef ACCUMULATE_OUTGOING_ARGS
2907 /* If this is being stored into a pre-allocated, fixed-size, stack area,
2908 save any previous data at that location. */
2909 if (argblock && ! variable_size && arg->stack)
2910 {
2911#ifdef ARGS_GROW_DOWNWARD
2912      /* stack_slot is negative, but we want to index stack_usage_map
2913	 with positive values.  */
2914 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2915 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
2916 else
2917 abort ();
2918
2919 lower_bound = upper_bound - arg->size.constant;
2920#else
2921 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2922 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
2923 else
2924 lower_bound = 0;
2925
2926 upper_bound = lower_bound + arg->size.constant;
2927#endif
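      /* Illustration for the upward-growing case (assumed values): if the
	 stack slot address is (plus argblock 16) and arg->size.constant is 8,
	 then lower_bound == 16 and upper_bound == 24, so bytes 16..23 of
	 stack_usage_map are checked below for a previously stored argument.  */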
2928
2929 for (i = lower_bound; i < upper_bound; i++)
2930 if (stack_usage_map[i]
2931#ifdef REG_PARM_STACK_SPACE
2932 /* Don't store things in the fixed argument area at this point;
2933 it has already been saved. */
6f90e075 2934 && i > reg_parm_stack_space
51bbfa0c
RS
2935#endif
2936 )
2937 break;
2938
2939 if (i != upper_bound)
2940 {
2941 /* We need to make a save area. See what mode we can make it. */
2942 enum machine_mode save_mode
2943 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
2944 rtx stack_area
2945 = gen_rtx (MEM, save_mode,
2946 memory_address (save_mode, XEXP (arg->stack_slot, 0)));
2947
2948 if (save_mode == BLKmode)
2949 {
2950 arg->save_area = assign_stack_temp (BLKmode,
2951 arg->size.constant, 1);
cc79451b 2952 preserve_temp_slots (arg->save_area);
51bbfa0c 2953 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 2954 GEN_INT (arg->size.constant),
51bbfa0c
RS
2955 PARM_BOUNDARY / BITS_PER_UNIT);
2956 }
2957 else
2958 {
2959 arg->save_area = gen_reg_rtx (save_mode);
2960 emit_move_insn (arg->save_area, stack_area);
2961 }
2962 }
2963 }
2964#endif
2965
2966 /* If this isn't going to be placed on both the stack and in registers,
2967 set up the register and number of words. */
2968 if (! arg->pass_on_stack)
2969 reg = arg->reg, partial = arg->partial;
2970
2971 if (reg != 0 && partial == 0)
2972 /* Being passed entirely in a register. We shouldn't be called in
2973 this case. */
2974 abort ();
2975
4ab56118
RK
2976#ifdef STRICT_ALIGNMENT
2977 /* If this arg needs special alignment, don't load the registers
2978 here. */
2979 if (arg->n_aligned_regs != 0)
2980 reg = 0;
2981#endif
2982
51bbfa0c
RS
2983 /* If this is being partially passed in a register, but multiple locations
2984 are specified, we assume that the one partially used is the one that is
2985 listed first. */
2986 if (reg && GET_CODE (reg) == EXPR_LIST)
2987 reg = XEXP (reg, 0);
2988
4ab56118 2989 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
2990 it directly into its stack slot. Otherwise, we can. */
2991 if (arg->value == 0)
d64f5a78
RS
2992 {
2993#ifdef ACCUMULATE_OUTGOING_ARGS
2994 /* stack_arg_under_construction is nonzero if a function argument is
2995 being evaluated directly into the outgoing argument list and
2996 expand_call must take special action to preserve the argument list
2997 if it is called recursively.
2998
2999 For scalar function arguments stack_usage_map is sufficient to
3000 determine which stack slots must be saved and restored. Scalar
3001 arguments in general have pass_on_stack == 0.
3002
3003 If this argument is initialized by a function which takes the
3004 address of the argument (a C++ constructor or a C function
3005 returning a BLKmode structure), then stack_usage_map is
3006 insufficient and expand_call must push the stack around the
3007 function call. Such arguments have pass_on_stack == 1.
3008
3009 Note that it is always safe to set stack_arg_under_construction,
3010 but this generates suboptimal code if set when not needed. */
3011
3012 if (arg->pass_on_stack)
3013 stack_arg_under_construction++;
3014#endif
3a08477a
RK
3015 arg->value = expand_expr (pval,
3016 (partial
3017 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3018 ? NULL_RTX : arg->stack,
e5d70561 3019 VOIDmode, 0);
1efe6448
RK
3020
3021 /* If we are promoting object (or for any other reason) the mode
3022 doesn't agree, convert the mode. */
3023
7373d92d
RK
3024 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3025 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3026 arg->value, arg->unsignedp);
1efe6448 3027
d64f5a78
RS
3028#ifdef ACCUMULATE_OUTGOING_ARGS
3029 if (arg->pass_on_stack)
3030 stack_arg_under_construction--;
3031#endif
3032 }
51bbfa0c
RS
3033
3034 /* Don't allow anything left on stack from computation
3035 of argument to alloca. */
3036 if (may_be_alloca)
3037 do_pending_stack_adjust ();
3038
3039 if (arg->value == arg->stack)
3040 /* If the value is already in the stack slot, we are done. */
3041 ;
1efe6448 3042 else if (arg->mode != BLKmode)
51bbfa0c
RS
3043 {
3044 register int size;
3045
3046 /* Argument is a scalar, not entirely passed in registers.
3047 (If part is passed in registers, arg->partial says how much
3048 and emit_push_insn will take care of putting it there.)
3049
3050 Push it, and if its size is less than the
3051 amount of space allocated to it,
3052	 also bump the stack pointer by the additional space.
3053 Note that in C the default argument promotions
3054 will prevent such mismatches. */
3055
1efe6448 3056 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
3057 /* Compute how much space the push instruction will push.
3058 On many machines, pushing a byte will advance the stack
3059 pointer by a halfword. */
3060#ifdef PUSH_ROUNDING
3061 size = PUSH_ROUNDING (size);
3062#endif
3063 used = size;
3064
3065 /* Compute how much space the argument should get:
3066 round up to a multiple of the alignment for arguments. */
1efe6448 3067 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
3068 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3069 / (PARM_BOUNDARY / BITS_PER_UNIT))
3070 * (PARM_BOUNDARY / BITS_PER_UNIT));
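      /* For instance (assuming PARM_BOUNDARY == 32, i.e. 4 bytes): a 2-byte
	 scalar is rounded so that used == 4, and emit_push_insn below is told
	 to account for used - size == 2 bytes of padding.  */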
3071
3072 /* This isn't already where we want it on the stack, so put it there.
3073 This can either be done with push or copy insns. */
ccf5d244
RK
3074 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
3075 0, partial, reg, used - size,
3076 argblock, ARGS_SIZE_RTX (arg->offset));
51bbfa0c
RS
3077 }
3078 else
3079 {
3080 /* BLKmode, at least partly to be pushed. */
3081
3082 register int excess;
3083 rtx size_rtx;
3084
3085 /* Pushing a nonscalar.
3086 If part is passed in registers, PARTIAL says how much
3087 and emit_push_insn will take care of putting it there. */
3088
3089 /* Round its size up to a multiple
3090 of the allocation unit for arguments. */
3091
3092 if (arg->size.var != 0)
3093 {
3094 excess = 0;
3095 size_rtx = ARGS_SIZE_RTX (arg->size);
3096 }
3097 else
3098 {
51bbfa0c
RS
3099 /* PUSH_ROUNDING has no effect on us, because
3100 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 3101 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 3102 + partial * UNITS_PER_WORD);
e4f93898 3103 size_rtx = expr_size (pval);
51bbfa0c
RS
3104 }
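      /* Example (partial == 0, assumed sizes): a 10-byte structure whose
	 stack slot was padded to 12 bytes gives excess == 2, telling
	 emit_push_insn to account for 2 bytes of padding beyond the data
	 actually copied.  */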
3105
1efe6448 3106 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c
RS
3107 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3108 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset));
3109 }
3110
3111
3112 /* Unless this is a partially-in-register argument, the argument is now
3113 in the stack.
3114
3115 ??? Note that this can change arg->value from arg->stack to
3116 arg->stack_slot and it matters when they are not the same.
3117 It isn't totally clear that this is correct in all cases. */
3118 if (partial == 0)
3119 arg->value = arg->stack_slot;
3120
3121 /* Once we have pushed something, pops can't safely
3122 be deferred during the rest of the arguments. */
3123 NO_DEFER_POP;
3124
3125 /* ANSI doesn't require a sequence point here,
3126 but PCC has one, so this will avoid some problems. */
3127 emit_queue ();
3128
db907e7b
RK
3129 /* Free any temporary slots made in processing this argument. Show
3130 that we might have taken the address of something and pushed that
3131 as an operand. */
3132 preserve_temp_slots (NULL_RTX);
51bbfa0c 3133 free_temp_slots ();
cc79451b 3134 pop_temp_slots ();
51bbfa0c
RS
3135
3136#ifdef ACCUMULATE_OUTGOING_ARGS
3137 /* Now mark the segment we just used. */
3138 if (argblock && ! variable_size && arg->stack)
3139 for (i = lower_bound; i < upper_bound; i++)
3140 stack_usage_map[i] = 1;
3141#endif
3142}