1/* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING. If not, write to
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
24#include "rtl.h"
25#include "tree.h"
26#include "flags.h"
27#include "expr.h"
28#include "function.h"
29#include "regs.h"
30#include "insn-flags.h"
31#include "toplev.h"
32#include "output.h"
33#include "tm_p.h"
34
35#ifndef ACCUMULATE_OUTGOING_ARGS
36#define ACCUMULATE_OUTGOING_ARGS 0
37#endif
38
39/* Supply a default definition for PUSH_ARGS. */
40#ifndef PUSH_ARGS
41#ifdef PUSH_ROUNDING
42#define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
43#else
44#define PUSH_ARGS 0
45#endif
46#endif
47
48#if !defined FUNCTION_OK_FOR_SIBCALL
49#define FUNCTION_OK_FOR_SIBCALL(DECL) 1
50#endif
51
52#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
53#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
54#endif
55
56/* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
58
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
61
62#ifdef PUSH_ROUNDING
63
64#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
65#define PUSH_ARGS_REVERSED PUSH_ARGS
66#endif
67
68#endif
69
70#ifndef PUSH_ARGS_REVERSED
71#define PUSH_ARGS_REVERSED 0
72#endif
73
74/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
75#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
76
77/* Data structure and subroutines used within expand_call. */
78
79struct arg_data
80{
81 /* Tree node for this argument. */
82 tree tree_value;
83 /* Mode for value; TYPE_MODE unless promoted. */
84 enum machine_mode mode;
85 /* Current RTL value for argument, or 0 if it isn't precomputed. */
86 rtx value;
87 /* Initially-computed RTL value for argument; only for const functions. */
88 rtx initial_value;
89 /* Register to pass this argument in, 0 if passed on stack, or a
90 PARALLEL if the arg is to be copied into multiple non-contiguous
91 registers. */
92 rtx reg;
93 /* If REG was promoted from the actual mode of the argument expression,
94 indicates whether the promotion is sign- or zero-extended. */
95 int unsignedp;
96 /* Number of registers to use. 0 means put the whole arg in registers.
97 Also 0 if not passed in registers. */
98 int partial;
99 /* Non-zero if argument must be passed on stack.
100 Note that some arguments may be passed on the stack
101 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
102 pass_on_stack identifies arguments that *cannot* go in registers. */
103 int pass_on_stack;
104 /* Offset of this argument from beginning of stack-args. */
105 struct args_size offset;
106 /* Similar, but offset to the start of the stack slot. Different from
107 OFFSET if this arg pads downward. */
108 struct args_size slot_offset;
109 /* Size of this argument on the stack, rounded up for any padding it gets;
110 parts of the argument passed in registers do not count.
111 If REG_PARM_STACK_SPACE is defined, then register parms
112 are counted here as well. */
113 struct args_size size;
114 /* Location on the stack at which parameter should be stored. The store
115 has already been done if STACK == VALUE. */
116 rtx stack;
117 /* Location on the stack of the start of this argument slot. This can
118 differ from STACK if this arg pads downward. This location is known
119 to be aligned to FUNCTION_ARG_BOUNDARY. */
120 rtx stack_slot;
121 /* Place that this stack area has been saved, if needed. */
122 rtx save_area;
123 /* If an argument's alignment does not permit direct copying into registers,
124 copy in smaller-sized pieces into pseudos. These are stored in a
125 block pointed to by this field. The next field says how many
126 word-sized pseudos we made. */
127 rtx *aligned_regs;
128 int n_aligned_regs;
129 /* The amount that the stack pointer needs to be adjusted to
130 force alignment for the next argument. */
131 struct args_size alignment_pad;
132};
133
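/* A minimal sketch (assumed, not verbatim from this file) of how expand_call
   sets up one of these records per actual argument before the helpers below
   fill them in; NUM_ACTUALS counts the call's actual arguments:

     struct arg_data *args
       = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
     bzero ((char *) args, num_actuals * sizeof (struct arg_data));

   initialize_argument_information then records, for each args[i], where the
   argument lives (reg, partial, pass_on_stack) and how much stack it needs.  */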
134/* A vector of one char per byte of stack space. A byte is non-zero if
135 the corresponding stack location has been used.
136 This vector is used to prevent a function call within an argument from
137 clobbering any stack already set up. */
138static char *stack_usage_map;
139
140/* Size of STACK_USAGE_MAP. */
141static int highest_outgoing_arg_in_use;
142
143/* stack_arg_under_construction is nonzero when an argument may be
144 initialized with a constructor call (including a C function that
145 returns a BLKmode struct) and expand_call must take special action
146 to make sure the object being constructed does not overlap the
147 argument list for the constructor call. */
148int stack_arg_under_construction;
149
150static int calls_function PARAMS ((tree, int));
151static int calls_function_1 PARAMS ((tree, int));
152
153/* Nonzero if this is a call to a `const' function. */
154#define ECF_CONST 1
155/* Nonzero if this is a call to a `volatile' function. */
156#define ECF_NORETURN 2
157/* Nonzero if this is a call to malloc or a related function. */
158#define ECF_MALLOC 4
159/* Nonzero if it is plausible that this is a call to alloca. */
160#define ECF_MAY_BE_ALLOCA 8
161/* Nonzero if this is a call to a function that won't throw an exception. */
162#define ECF_NOTHROW 16
163/* Nonzero if this is a call to setjmp or a related function. */
164#define ECF_RETURNS_TWICE 32
165/* Nonzero if this is a call to `longjmp'. */
166#define ECF_LONGJMP 64
167/* Nonzero if this is a syscall that makes a new process in the image of
168 the current one. */
169#define ECF_FORK_OR_EXEC 128
170#define ECF_SIBCALL 256
171/* Nonzero if this is a call to a "pure" function (like const function,
172 but may read memory). */
173#define ECF_PURE 512
174
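/* The ECF_* values above are independent bits, so a call is described by
   OR-ing them together and queried with a bitwise AND.  A minimal sketch,
   assuming FNDECL is the called function's FUNCTION_DECL:

     int flags = flags_from_decl_or_type (fndecl);
     flags |= special_function_p (fndecl, flags);

     if (flags & (ECF_CONST | ECF_PURE))
       ...the call has no side effects that must be preserved...
     if (flags & ECF_MAY_BE_ALLOCA)
       ...evaluating the call may allocate stack space...  */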
175static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
176 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
177 rtx, int, rtx, int));
178static void precompute_register_parameters PARAMS ((int,
179 struct arg_data *,
180 int *));
181static void store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
182 int));
183static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
184 int));
185static int finalize_must_preallocate PARAMS ((int, int,
186 struct arg_data *,
187 struct args_size *));
188static void precompute_arguments PARAMS ((int, int,
189 struct arg_data *));
190static int compute_argument_block_size PARAMS ((int,
191 struct args_size *,
192 int));
193static void initialize_argument_information PARAMS ((int,
194 struct arg_data *,
195 struct args_size *,
196 int, tree, tree,
197 CUMULATIVE_ARGS *,
198 int, rtx *, int *,
199 int *, int *));
200static void compute_argument_addresses PARAMS ((struct arg_data *,
201 rtx, int));
202static rtx rtx_for_function_call PARAMS ((tree, tree));
203static void load_register_parameters PARAMS ((struct arg_data *,
204 int, rtx *));
205static int libfunc_nothrow PARAMS ((rtx));
206static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx, int,
207 enum machine_mode,
208 int, va_list));
209static int special_function_p PARAMS ((tree, int));
210static int flags_from_decl_or_type PARAMS ((tree));
211static rtx try_to_integrate PARAMS ((tree, tree, rtx,
212 int, tree, rtx));
213
214#ifdef REG_PARM_STACK_SPACE
215static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
216static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
217#endif
218\f
219/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
220 `alloca'.
221
222 If WHICH is 0, return 1 if EXP contains a call to any function.
223 Actually, we need only return 1 if evaluating EXP would require pushing
224 arguments on the stack, but that is too difficult to compute, so we just
225 assume any function call might require the stack. */
226
227static tree calls_function_save_exprs;
228
229static int
230calls_function (exp, which)
231 tree exp;
232 int which;
233{
234 int val;
235 calls_function_save_exprs = 0;
236 val = calls_function_1 (exp, which);
237 calls_function_save_exprs = 0;
238 return val;
239}
240
241static int
242calls_function_1 (exp, which)
243 tree exp;
244 int which;
245{
246 register int i;
247 enum tree_code code = TREE_CODE (exp);
248 int type = TREE_CODE_CLASS (code);
249 int length = tree_code_length[(int) code];
250
251 /* If this code is language-specific, we don't know what it will do. */
252 if ((int) code >= NUM_TREE_CODES)
253 return 1;
254
255 /* Only expressions and references can contain calls. */
256 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
257 && type != 'b')
258 return 0;
259
260 switch (code)
261 {
262 case CALL_EXPR:
263 if (which == 0)
264 return 1;
265 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
266 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
267 == FUNCTION_DECL))
268 {
269 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
270
271 if ((DECL_BUILT_IN (fndecl)
272 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
273 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
274 || (DECL_SAVED_INSNS (fndecl)
275 && DECL_SAVED_INSNS (fndecl)->calls_alloca))
276 return 1;
277 }
278
279 /* Third operand is RTL. */
280 length = 2;
281 break;
282
283 case SAVE_EXPR:
284 if (SAVE_EXPR_RTL (exp) != 0)
285 return 0;
286 if (value_member (exp, calls_function_save_exprs))
287 return 0;
288 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
289 calls_function_save_exprs);
290 return (TREE_OPERAND (exp, 0) != 0
291 && calls_function_1 (TREE_OPERAND (exp, 0), which));
292
293 case BLOCK:
294 {
295 register tree local;
296
297 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
298 if (DECL_INITIAL (local) != 0
299 && calls_function_1 (DECL_INITIAL (local), which))
300 return 1;
301 }
302 {
303 register tree subblock;
304
305 for (subblock = BLOCK_SUBBLOCKS (exp);
306 subblock;
307 subblock = TREE_CHAIN (subblock))
308 if (calls_function_1 (subblock, which))
309 return 1;
310 }
311 return 0;
312
313 case METHOD_CALL_EXPR:
314 length = 3;
315 break;
316
317 case WITH_CLEANUP_EXPR:
318 length = 1;
319 break;
320
321 case RTL_EXPR:
322 return 0;
323
324 default:
325 break;
326 }
327
328 for (i = 0; i < length; i++)
329 if (TREE_OPERAND (exp, i) != 0
330 && calls_function_1 (TREE_OPERAND (exp, i), which))
331 return 1;
332
333 return 0;
334}
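/* A minimal usage sketch for the two helpers above (EXP is any expression
   tree; the callers shown are hypothetical):

     if (calls_function (exp, 0))
       ...evaluating EXP may involve some function call, so any stack
          arguments already laid out must be protected...

     if (calls_function (exp, 1))
       ...evaluating EXP may call the built-in alloca...  */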
335\f
336/* Force FUNEXP into a form suitable for the address of a CALL,
337 and return that as an rtx. Also load the static chain register
338 if FNDECL is a nested function.
339
340 CALL_FUSAGE points to a variable holding the prospective
341 CALL_INSN_FUNCTION_USAGE information. */
342
343rtx
344prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
345 rtx funexp;
346 tree fndecl;
347 rtx *call_fusage;
348 int reg_parm_seen;
349{
350 rtx static_chain_value = 0;
351
352 funexp = protect_from_queue (funexp, 0);
353
354 if (fndecl != 0)
355 /* Get possible static chain value for nested function in C. */
356 static_chain_value = lookup_static_chain (fndecl);
357
358 /* Make a valid memory address and copy constants thru pseudo-regs,
359 but not for a constant address if -fno-function-cse. */
360 if (GET_CODE (funexp) != SYMBOL_REF)
361 /* If we are using registers for parameters, force the
362 function address into a register now. */
363 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
364 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
365 : memory_address (FUNCTION_MODE, funexp));
366 else
367 {
368#ifndef NO_FUNCTION_CSE
369 if (optimize && ! flag_no_function_cse)
370#ifdef NO_RECURSIVE_FUNCTION_CSE
371 if (fndecl != current_function_decl)
372#endif
373 funexp = force_reg (Pmode, funexp);
374#endif
375 }
376
377 if (static_chain_value != 0)
378 {
379 emit_move_insn (static_chain_rtx, static_chain_value);
380
381 if (GET_CODE (static_chain_rtx) == REG)
382 use_reg (call_fusage, static_chain_rtx);
383 }
384
385 return funexp;
386}
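/* A sketch of the intended calling pattern (roughly how expand_call later in
   this file uses it; the surrounding context is omitted):

     rtx call_fusage = NULL_RTX;
     funexp = prepare_call_address (funexp, fndecl, &call_fusage,
                                    reg_parm_seen);

   The returned address is what ultimately goes into the CALL_INSN built by
   emit_call_1 below, and CALL_FUSAGE is attached to that insn.  */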
387
388/* Generate instructions to call function FUNEXP,
389 and optionally pop the results.
390 The CALL_INSN is the first insn generated.
391
392 FNDECL is the declaration node of the function. This is given to the
393 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
394
395 FUNTYPE is the data type of the function. This is given to the macro
396 RETURN_POPS_ARGS to determine whether this function pops its own args.
397 We used to allow an identifier for library functions, but that doesn't
398 work when the return type is an aggregate type and the calling convention
399 says that the pointer to this aggregate is to be popped by the callee.
400
401 STACK_SIZE is the number of bytes of arguments on the stack,
402 ROUNDED_STACK_SIZE is that number rounded up to
403 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
404 both to put into the call insn and to generate explicit popping
405 code if necessary.
406
407 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
408 It is zero if this call doesn't want a structure value.
409
410 NEXT_ARG_REG is the rtx that results from executing
411 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
412 just after all the args have had their registers assigned.
413 This could be whatever you like, but normally it is the first
414 arg-register beyond those used for args in this call,
415 or 0 if all the arg-registers are used in this call.
416 It is passed on to `gen_call' so you can put this info in the call insn.
417
418 VALREG is a hard register in which a value is returned,
419 or 0 if the call does not return a value.
420
421 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
422 the args to this call were processed.
423 We restore `inhibit_defer_pop' to that value.
424
425 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
426 denote registers used by the called function. */
427
428static void
429emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
430 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
431 call_fusage, ecf_flags)
432 rtx funexp;
433 tree fndecl ATTRIBUTE_UNUSED;
434 tree funtype ATTRIBUTE_UNUSED;
435 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
436 HOST_WIDE_INT rounded_stack_size;
437 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
438 rtx next_arg_reg;
439 rtx valreg;
440 int old_inhibit_defer_pop;
441 rtx call_fusage;
442 int ecf_flags;
443{
444 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
445#if defined (HAVE_call) && defined (HAVE_call_value)
446 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
447#endif
448 rtx call_insn;
449 int already_popped = 0;
450 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
451
452 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
453 and we don't want to load it into a register as an optimization,
454 because prepare_call_address already did it if it should be done. */
455 if (GET_CODE (funexp) != SYMBOL_REF)
456 funexp = memory_address (FUNCTION_MODE, funexp);
457
458#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
459 if ((ecf_flags & ECF_SIBCALL)
460 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
461 && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
462 || stack_size == 0))
463 {
464 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
465 rtx pat;
466
467 /* If this subroutine pops its own args, record that in the call insn
468 if possible, for the sake of frame pointer elimination. */
469
470 if (valreg)
471 pat = gen_sibcall_value_pop (valreg,
472 gen_rtx_MEM (FUNCTION_MODE, funexp),
473 rounded_stack_size_rtx, next_arg_reg,
474 n_pop);
475 else
476 pat = gen_sibcall_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
477 rounded_stack_size_rtx, next_arg_reg, n_pop);
478
479 emit_call_insn (pat);
480 already_popped = 1;
481 }
482 else
483#endif
484
485#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
486/* If the target has "call" or "call_value" insns, then prefer them
487 if no arguments are actually popped. If the target does not have
488 "call" or "call_value" insns, then we must use the popping versions
489 even if the call has no arguments to pop. */
490#if defined (HAVE_call) && defined (HAVE_call_value)
491 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
492 && n_popped > 0)
493#else
494 if (HAVE_call_pop && HAVE_call_value_pop)
495#endif
496 {
497 rtx n_pop = GEN_INT (n_popped);
498 rtx pat;
499
500 /* If this subroutine pops its own args, record that in the call insn
501 if possible, for the sake of frame pointer elimination. */
502
503 if (valreg)
504 pat = gen_call_value_pop (valreg,
505 gen_rtx_MEM (FUNCTION_MODE, funexp),
506 rounded_stack_size_rtx, next_arg_reg, n_pop);
507 else
508 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
509 rounded_stack_size_rtx, next_arg_reg, n_pop);
510
511 emit_call_insn (pat);
512 already_popped = 1;
513 }
514 else
515#endif
516
517#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
518 if ((ecf_flags & ECF_SIBCALL)
519 && HAVE_sibcall && HAVE_sibcall_value)
520 {
521 if (valreg)
522 emit_call_insn (gen_sibcall_value (valreg,
523 gen_rtx_MEM (FUNCTION_MODE, funexp),
524 rounded_stack_size_rtx,
525 next_arg_reg, NULL_RTX));
526 else
527 emit_call_insn (gen_sibcall (gen_rtx_MEM (FUNCTION_MODE, funexp),
528 rounded_stack_size_rtx, next_arg_reg,
529 struct_value_size_rtx));
530 }
531 else
532#endif
533
534#if defined (HAVE_call) && defined (HAVE_call_value)
535 if (HAVE_call && HAVE_call_value)
536 {
537 if (valreg)
538 emit_call_insn (gen_call_value (valreg,
539 gen_rtx_MEM (FUNCTION_MODE, funexp),
540 rounded_stack_size_rtx, next_arg_reg,
541 NULL_RTX));
542 else
543 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
544 rounded_stack_size_rtx, next_arg_reg,
545 struct_value_size_rtx));
546 }
547 else
548#endif
549 abort ();
550
551 /* Find the CALL insn we just emitted. */
552 for (call_insn = get_last_insn ();
553 call_insn && GET_CODE (call_insn) != CALL_INSN;
554 call_insn = PREV_INSN (call_insn))
555 ;
556
557 if (! call_insn)
558 abort ();
559
560 /* Mark memory as used for "pure" function call. */
561 if (ecf_flags & ECF_PURE)
562 {
563 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
564 gen_rtx_USE (VOIDmode,
565 gen_rtx_MEM (BLKmode,
566 gen_rtx_SCRATCH (VOIDmode))), call_fusage);
567 }
568
569 /* Put the register usage information on the CALL. If there is already
570 some usage information, put ours at the end. */
571 if (CALL_INSN_FUNCTION_USAGE (call_insn))
572 {
573 rtx link;
574
575 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
576 link = XEXP (link, 1))
577 ;
578
579 XEXP (link, 1) = call_fusage;
580 }
581 else
582 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
583
584 /* If this is a const call, then set the insn's unchanging bit. */
585 if (ecf_flags & (ECF_CONST | ECF_PURE))
586 CONST_CALL_P (call_insn) = 1;
587
588 /* If this call can't throw, attach a REG_EH_REGION reg note to that
589 effect. */
590 if (ecf_flags & ECF_NOTHROW)
591 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
592 REG_NOTES (call_insn));
593
594 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
595
596 /* Restore this now, so that we do defer pops for this call's args
597 if the context of the call as a whole permits. */
598 inhibit_defer_pop = old_inhibit_defer_pop;
599
600 if (n_popped > 0)
601 {
602 if (!already_popped)
603 CALL_INSN_FUNCTION_USAGE (call_insn)
604 = gen_rtx_EXPR_LIST (VOIDmode,
605 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
606 CALL_INSN_FUNCTION_USAGE (call_insn));
607 rounded_stack_size -= n_popped;
608 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
609 stack_pointer_delta -= n_popped;
610 }
611
612 if (!ACCUMULATE_OUTGOING_ARGS)
613 {
614 /* If returning from the subroutine does not automatically pop the args,
615 we need an instruction to pop them sooner or later.
616 Perhaps do it now; perhaps just record how much space to pop later.
617
618 If returning from the subroutine does pop the args, indicate that the
619 stack pointer will be changed. */
620
621 if (rounded_stack_size != 0)
622 {
623 if (flag_defer_pop && inhibit_defer_pop == 0
624 && !(ecf_flags & (ECF_CONST | ECF_PURE)))
625 pending_stack_adjust += rounded_stack_size;
626 else
627 adjust_stack (rounded_stack_size_rtx);
628 }
629 }
630 /* When we accumulate outgoing args, we must avoid any stack manipulations.
631 Restore the stack pointer to its original value now. Usually
632 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
633 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
634 popping variants of functions exist as well.
635
636 ??? We may optimize similar to defer_pop above, but it is
637 probably not worthwhile.
638
639 ??? It will be worthwhile to enable combine_stack_adjustments even for
640 such machines. */
641 else if (n_popped)
642 anti_adjust_stack (GEN_INT (n_popped));
643}
644
645/* Determine if the function identified by NAME and FNDECL is one with
646 special properties we wish to know about.
647
648 For example, if the function might return more than one time (setjmp), then
649 set RETURNS_TWICE to a nonzero value.
650
651 Similarly set LONGJMP if the function is in the longjmp family.
652
653 Set MALLOC for any of the standard memory allocation functions which
654 allocate from the heap.
655
656 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
657 space from the stack such as alloca. */
658
659static int
660special_function_p (fndecl, flags)
661 tree fndecl;
662 int flags;
663{
664 if (! (flags & ECF_MALLOC)
665 && fndecl && DECL_NAME (fndecl)
666 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
667 /* Exclude functions not at the file scope, or not `extern',
668 since they are not the magic functions we would otherwise
669 think they are. */
670 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
671 {
672 char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
673 char *tname = name;
674
675 /* We assume that alloca will always be called by name. It
676 makes no sense to pass it as a pointer-to-function to
677 anything that does not understand its behavior. */
678 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
679 && name[0] == 'a'
680 && ! strcmp (name, "alloca"))
681 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
682 && name[0] == '_'
683 && ! strcmp (name, "__builtin_alloca"))))
684 flags |= ECF_MAY_BE_ALLOCA;
685
686 /* Disregard prefix _, __ or __x. */
687 if (name[0] == '_')
688 {
689 if (name[1] == '_' && name[2] == 'x')
690 tname += 3;
691 else if (name[1] == '_')
692 tname += 2;
693 else
694 tname += 1;
695 }
696
697 if (tname[0] == 's')
698 {
699 if ((tname[1] == 'e'
700 && (! strcmp (tname, "setjmp")
701 || ! strcmp (tname, "setjmp_syscall")))
702 || (tname[1] == 'i'
703 && ! strcmp (tname, "sigsetjmp"))
704 || (tname[1] == 'a'
705 && ! strcmp (tname, "savectx")))
706 flags |= ECF_RETURNS_TWICE;
707
708 if (tname[1] == 'i'
709 && ! strcmp (tname, "siglongjmp"))
710 flags |= ECF_LONGJMP;
711 }
712 else if ((tname[0] == 'q' && tname[1] == 's'
713 && ! strcmp (tname, "qsetjmp"))
714 || (tname[0] == 'v' && tname[1] == 'f'
715 && ! strcmp (tname, "vfork")))
716 flags |= ECF_RETURNS_TWICE;
717
718 else if (tname[0] == 'l' && tname[1] == 'o'
719 && ! strcmp (tname, "longjmp"))
720 flags |= ECF_LONGJMP;
721
722 else if ((tname[0] == 'f' && tname[1] == 'o'
723 && ! strcmp (tname, "fork"))
724 /* Linux specific: __clone. Check NAME to insist on the
725 leading underscores, to avoid polluting the ISO / POSIX
726 namespace. */
727 || (name[0] == '_' && name[1] == '_'
728 && ! strcmp (tname, "clone"))
729 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
730 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
731 && (tname[5] == '\0'
732 || ((tname[5] == 'p' || tname[5] == 'e')
733 && tname[6] == '\0'))))
734 flags |= ECF_FORK_OR_EXEC;
735
736 /* Do not add any more malloc-like functions to this list,
737 instead mark them as malloc functions using the malloc attribute.
738 Note, realloc is not suitable for attribute malloc since
739 it may return the same address across multiple calls.
740 C++ operator new is not suitable because it is not required
741 to return a unique pointer; indeed, the standard placement new
742 just returns its argument. */
743 else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
744 && (! strcmp (tname, "malloc")
745 || ! strcmp (tname, "calloc")
746 || ! strcmp (tname, "strdup")))
747 flags |= ECF_MALLOC;
748 }
749 return flags;
750}
751
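/* Minimal usage sketch: the return value is the FLAGS argument with any
   detected ECF_* bits added, so a caller simply merges it back:

     flags = special_function_p (fndecl, flags);
     if (flags & ECF_RETURNS_TWICE)
       ...the callee may return more than once, like setjmp...  */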
752/* Return nonzero when FNDECL represents a call to setjmp or a related function that may return more than once. */
753int
754setjmp_call_p (fndecl)
755 tree fndecl;
756{
757 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
758}
759
760/* Detect flags (function attributes) from the function decl or type node. */
761static int
762flags_from_decl_or_type (exp)
763 tree exp;
764{
765 int flags = 0;
766 /* ??? We can't set IS_MALLOC for function types? */
767 if (DECL_P (exp))
768 {
769 /* The function exp may have the `malloc' attribute. */
770 if (DECL_P (exp) && DECL_IS_MALLOC (exp))
771 flags |= ECF_MALLOC;
772
773 /* The function exp may have the `pure' attribute. */
774 if (DECL_P (exp) && DECL_IS_PURE (exp))
775 flags |= ECF_PURE;
776
777 if (TREE_NOTHROW (exp))
778 flags |= ECF_NOTHROW;
779 }
780
781 if (TREE_READONLY (exp) && !TREE_THIS_VOLATILE (exp))
782 flags |= ECF_CONST;
783
784 if (TREE_THIS_VOLATILE (exp))
785 flags |= ECF_NORETURN;
786
787 return flags;
788}
789
790
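/* The decl bits tested above are normally set from GNU C attributes on the
   declaration.  A sketch of source-level declarations that would yield the
   corresponding flags (the function names are hypothetical):

     void *xmalloc (unsigned long) __attribute__ ((malloc));      ECF_MALLOC
     int popcount (unsigned long) __attribute__ ((pure));         ECF_PURE
     int square (int) __attribute__ ((const));                    ECF_CONST
     void fatal (const char *) __attribute__ ((noreturn));        ECF_NORETURN  */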
791/* Precompute all register parameters as described by ARGS, storing values
792 into fields within the ARGS array.
793
794 NUM_ACTUALS indicates the total number of elements in the ARGS array.
795
796 Set REG_PARM_SEEN if we encounter a register parameter. */
797
798static void
799precompute_register_parameters (num_actuals, args, reg_parm_seen)
800 int num_actuals;
801 struct arg_data *args;
802 int *reg_parm_seen;
803{
804 int i;
805
806 *reg_parm_seen = 0;
807
808 for (i = 0; i < num_actuals; i++)
809 if (args[i].reg != 0 && ! args[i].pass_on_stack)
810 {
811 *reg_parm_seen = 1;
812
813 if (args[i].value == 0)
814 {
815 push_temp_slots ();
816 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
817 VOIDmode, 0);
818 preserve_temp_slots (args[i].value);
819 pop_temp_slots ();
820
821 /* ANSI doesn't require a sequence point here,
822 but PCC has one, so this will avoid some problems. */
823 emit_queue ();
824 }
825
826 /* If we are to promote the function arg to a wider mode,
827 do it now. */
828
829 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
830 args[i].value
831 = convert_modes (args[i].mode,
832 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
833 args[i].value, args[i].unsignedp);
834
835 /* If the value is expensive, and we are inside an appropriately
836 short loop, put the value into a pseudo and then put the pseudo
837 into the hard reg.
838
839 For small register classes, also do this if this call uses
840 register parameters. This is to avoid reload conflicts while
841 loading the parameters registers. */
842
843 if ((! (GET_CODE (args[i].value) == REG
844 || (GET_CODE (args[i].value) == SUBREG
845 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
846 && args[i].mode != BLKmode
847 && rtx_cost (args[i].value, SET) > 2
848 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
849 || preserve_subexpressions_p ()))
850 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
851 }
852}
853
854#ifdef REG_PARM_STACK_SPACE
855
856 /* The argument list is the property of the called routine and it
857 may clobber it. If the fixed area has been used for previous
858 parameters, we must save and restore it. */
859static rtx
860save_fixed_argument_area (reg_parm_stack_space, argblock,
861 low_to_save, high_to_save)
862 int reg_parm_stack_space;
863 rtx argblock;
864 int *low_to_save;
865 int *high_to_save;
866{
867 int i;
868 rtx save_area = NULL_RTX;
869
870 /* Compute the boundary of the area that needs to be saved, if any. */
871#ifdef ARGS_GROW_DOWNWARD
872 for (i = 0; i < reg_parm_stack_space + 1; i++)
873#else
874 for (i = 0; i < reg_parm_stack_space; i++)
875#endif
876 {
877 if (i >= highest_outgoing_arg_in_use
878 || stack_usage_map[i] == 0)
879 continue;
880
881 if (*low_to_save == -1)
882 *low_to_save = i;
883
884 *high_to_save = i;
885 }
886
887 if (*low_to_save >= 0)
888 {
889 int num_to_save = *high_to_save - *low_to_save + 1;
890 enum machine_mode save_mode
891 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
892 rtx stack_area;
893
894 /* If we don't have the required alignment, must do this in BLKmode. */
895 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
896 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
897 save_mode = BLKmode;
898
899#ifdef ARGS_GROW_DOWNWARD
900 stack_area = gen_rtx_MEM (save_mode,
901 memory_address (save_mode,
902 plus_constant (argblock,
903 - *high_to_save)));
904#else
905 stack_area = gen_rtx_MEM (save_mode,
906 memory_address (save_mode,
907 plus_constant (argblock,
908 *low_to_save)));
909#endif
910 if (save_mode == BLKmode)
911 {
912 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
913 /* Cannot use emit_block_move here because it can be done by a
914 library call which in turn gets into this place again and deadly
915 infinite recursion happens. */
916 move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
917 PARM_BOUNDARY);
918 }
919 else
920 {
921 save_area = gen_reg_rtx (save_mode);
922 emit_move_insn (save_area, stack_area);
923 }
924 }
925 return save_area;
926}
927
928static void
929restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
930 rtx save_area;
931 rtx argblock;
932 int high_to_save;
933 int low_to_save;
934{
935 enum machine_mode save_mode = GET_MODE (save_area);
936#ifdef ARGS_GROW_DOWNWARD
937 rtx stack_area
938 = gen_rtx_MEM (save_mode,
939 memory_address (save_mode,
940 plus_constant (argblock,
941 - high_to_save)));
942#else
943 rtx stack_area
944 = gen_rtx_MEM (save_mode,
945 memory_address (save_mode,
946 plus_constant (argblock,
947 low_to_save)));
948#endif
949
950 if (save_mode != BLKmode)
951 emit_move_insn (stack_area, save_area);
952 else
953 /* Cannot use emit_block_move here because it can be done by a library
954 call which in turn gets into this place again and deadly infinite
955 recursion happens. */
956 move_by_pieces (stack_area, validize_mem (save_area),
957 high_to_save - low_to_save + 1, PARM_BOUNDARY);
958}
959#endif
960
961/* If any elements in ARGS refer to parameters that are to be passed in
962 registers, but not in memory, and whose alignment does not permit a
963 direct copy into registers, copy the values into a group of pseudos
964 which we will later copy into the appropriate hard registers.
965
966 Pseudos for each unaligned argument will be stored into the array
967 args[argnum].aligned_regs. The caller is responsible for deallocating
968 the aligned_regs array if it is nonzero. */
969
970static void
971store_unaligned_arguments_into_pseudos (args, num_actuals)
972 struct arg_data *args;
973 int num_actuals;
974{
975 int i, j;
976
977 for (i = 0; i < num_actuals; i++)
978 if (args[i].reg != 0 && ! args[i].pass_on_stack
979 && args[i].mode == BLKmode
980 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
981 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
982 {
983 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
984 int big_endian_correction = 0;
985
986 args[i].n_aligned_regs
987 = args[i].partial ? args[i].partial
988 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
989
990 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
991 * args[i].n_aligned_regs);
992
993 /* Structures smaller than a word are aligned to the least
994 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
995 this means we must skip the empty high order bytes when
996 calculating the bit offset. */
997 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
998 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
999
1000 for (j = 0; j < args[i].n_aligned_regs; j++)
1001 {
1002 rtx reg = gen_reg_rtx (word_mode);
1003 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1004 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1005 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1006
1007 args[i].aligned_regs[j] = reg;
1008
1009 /* There is no need to restrict this code to loading items
1010 in TYPE_ALIGN sized hunks. The bitfield instructions can
1011 load up entire word sized registers efficiently.
1012
1013 ??? This may not be needed anymore.
1014 We used to emit a clobber here but that doesn't let later
1015 passes optimize the instructions we emit. By storing 0 into
1016 the register later passes know the first AND to zero out the
1017 bitfield being set in the register is unnecessary. The store
1018 of 0 will be deleted as will at least the first AND. */
1019
1020 emit_move_insn (reg, const0_rtx);
1021
1022 bytes -= bitsize / BITS_PER_UNIT;
1023 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
1024 extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1025 word_mode, word_mode, bitalign,
1026 BITS_PER_WORD),
1027 bitalign, BITS_PER_WORD);
1028 }
1029 }
1030}
1031
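/* Worked example for the big-endian correction above, with assumed values:
   a 3-byte BLKmode argument on a 32-bit BYTES_BIG_ENDIAN target gives

     bytes = 3, BITS_PER_WORD = 32
     big_endian_correction = 32 - (3 * 8) = 8

   so store_bit_field places the 24 significant bits at bit offset 8 of the
   word-mode pseudo, skipping the empty high-order byte.  */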
1032/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1033 ACTPARMS.
1034
1035 NUM_ACTUALS is the total number of parameters.
1036
1037 N_NAMED_ARGS is the total number of named arguments.
1038
1039 FNDECL is the tree code for the target of this call (if known)
1040
1041 ARGS_SO_FAR holds state needed by the target to know where to place
1042 the next argument.
1043
1044 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1045 for arguments which are passed in registers.
1046
1047 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1048 and may be modified by this routine.
1049
1050 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1051 flags which may be modified by this routine. */
1052
1053static void
1054initialize_argument_information (num_actuals, args, args_size, n_named_args,
1055 actparms, fndecl, args_so_far,
1056 reg_parm_stack_space, old_stack_level,
1057 old_pending_adj, must_preallocate,
1058 ecf_flags)
1059 int num_actuals ATTRIBUTE_UNUSED;
1060 struct arg_data *args;
1061 struct args_size *args_size;
1062 int n_named_args ATTRIBUTE_UNUSED;
1063 tree actparms;
1064 tree fndecl;
1065 CUMULATIVE_ARGS *args_so_far;
1066 int reg_parm_stack_space;
1067 rtx *old_stack_level;
1068 int *old_pending_adj;
1069 int *must_preallocate;
1070 int *ecf_flags;
1071{
1072 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1073 int inc;
1074
1075 /* Count arg position in order args appear. */
1076 int argpos;
1077
1078 struct args_size alignment_pad;
1079 int i;
1080 tree p;
1081
1082 args_size->constant = 0;
1083 args_size->var = 0;
1084
1085 /* In this loop, we consider args in the order they are written.
1086 We fill up ARGS from the front or from the back if necessary
1087 so that in any case the first arg to be pushed ends up at the front. */
1088
1089 if (PUSH_ARGS_REVERSED)
1090 {
1091 i = num_actuals - 1, inc = -1;
1092 /* In this case, must reverse order of args
1093 so that we compute and push the last arg first. */
1094 }
1095 else
1096 {
1097 i = 0, inc = 1;
1098 }
1099
1100 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1101 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1102 {
1103 tree type = TREE_TYPE (TREE_VALUE (p));
1104 int unsignedp;
1105 enum machine_mode mode;
1106
1107 args[i].tree_value = TREE_VALUE (p);
1108
1109 /* Replace erroneous argument with constant zero. */
1110 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1111 args[i].tree_value = integer_zero_node, type = integer_type_node;
1112
1113 /* If TYPE is a transparent union, pass things the way we would
1114 pass the first field of the union. We have already verified that
1115 the modes are the same. */
1116 if (TYPE_TRANSPARENT_UNION (type))
1117 type = TREE_TYPE (TYPE_FIELDS (type));
1118
1119 /* Decide where to pass this arg.
1120
1121 args[i].reg is nonzero if all or part is passed in registers.
1122
1123 args[i].partial is nonzero if part but not all is passed in registers,
1124 and the exact value says how many words are passed in registers.
1125
1126 args[i].pass_on_stack is nonzero if the argument must at least be
1127 computed on the stack. It may then be loaded back into registers
1128 if args[i].reg is nonzero.
1129
1130 These decisions are driven by the FUNCTION_... macros and must agree
1131 with those made by function.c. */
1132
1133 /* See if this argument should be passed by invisible reference. */
1134 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1135 && contains_placeholder_p (TYPE_SIZE (type)))
1136 || TREE_ADDRESSABLE (type)
1137#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1138 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1139 type, argpos < n_named_args)
1140#endif
1141 )
1142 {
1143 /* If we're compiling a thunk, pass through invisible
1144 references instead of making a copy. */
1145 if (current_function_is_thunk
1146#ifdef FUNCTION_ARG_CALLEE_COPIES
1147 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1148 type, argpos < n_named_args)
1149 /* If it's in a register, we must make a copy of it too. */
1150 /* ??? Is this a sufficient test? Is there a better one? */
1151 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1152 && REG_P (DECL_RTL (args[i].tree_value)))
1153 && ! TREE_ADDRESSABLE (type))
1154#endif
1155 )
1156 {
1157 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1158 new object from the argument. If we are passing by
1159 invisible reference, the callee will do that for us, so we
1160 can strip off the TARGET_EXPR. This is not always safe,
1161 but it is safe in the only case where this is a useful
1162 optimization; namely, when the argument is a plain object.
1163 In that case, the frontend is just asking the backend to
1164 make a bitwise copy of the argument. */
1165
1166 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1167 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1168 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1169 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1170
1171 args[i].tree_value = build1 (ADDR_EXPR,
1172 build_pointer_type (type),
1173 args[i].tree_value);
1174 type = build_pointer_type (type);
1175 }
1176 else
1177 {
1178 /* We make a copy of the object and pass the address to the
1179 function being called. */
1180 rtx copy;
1181
1182 if (!COMPLETE_TYPE_P (type)
1183 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1184 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1185 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1186 STACK_CHECK_MAX_VAR_SIZE))))
1187 {
1188 /* This is a variable-sized object. Make space on the stack
1189 for it. */
1190 rtx size_rtx = expr_size (TREE_VALUE (p));
1191
1192 if (*old_stack_level == 0)
1193 {
1194 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1195 *old_pending_adj = pending_stack_adjust;
1196 pending_stack_adjust = 0;
1197 }
1198
1199 copy = gen_rtx_MEM (BLKmode,
1200 allocate_dynamic_stack_space (size_rtx,
1201 NULL_RTX,
1202 TYPE_ALIGN (type)));
1203 }
1204 else
1205 {
1206 int size = int_size_in_bytes (type);
1207 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1208 }
1209
1210 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1211
1212 store_expr (args[i].tree_value, copy, 0);
1213 *ecf_flags &= ~(ECF_CONST | ECF_PURE);
1214
1215 args[i].tree_value = build1 (ADDR_EXPR,
1216 build_pointer_type (type),
1217 make_tree (type, copy));
1218 type = build_pointer_type (type);
1219 }
1220 }
1221
1222 mode = TYPE_MODE (type);
1223 unsignedp = TREE_UNSIGNED (type);
1224
1225#ifdef PROMOTE_FUNCTION_ARGS
1226 mode = promote_mode (type, mode, &unsignedp, 1);
1227#endif
1228
1229 args[i].unsignedp = unsignedp;
1230 args[i].mode = mode;
1231
1232#ifdef FUNCTION_INCOMING_ARG
1233 /* If this is a sibling call and the machine has register windows, the
1234 register window has to be unwound before calling the routine, so
1235 arguments have to go into the incoming registers. */
1236 if (*ecf_flags & ECF_SIBCALL)
1237 args[i].reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1238 argpos < n_named_args);
1239 else
1240#endif
1241 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1242 argpos < n_named_args);
1243
1244#ifdef FUNCTION_ARG_PARTIAL_NREGS
1245 if (args[i].reg)
1246 args[i].partial
1247 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1248 argpos < n_named_args);
1249#endif
1250
1251 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1252
1253 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1254 it means that we are to pass this arg in the register(s) designated
1255 by the PARALLEL, but also to pass it in the stack. */
1256 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1257 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1258 args[i].pass_on_stack = 1;
1259
1260 /* If this is an addressable type, we must preallocate the stack
1261 since we must evaluate the object into its final location.
1262
1263 If this is to be passed in both registers and the stack, it is simpler
1264 to preallocate. */
1265 if (TREE_ADDRESSABLE (type)
1266 || (args[i].pass_on_stack && args[i].reg != 0))
1267 *must_preallocate = 1;
1268
1269 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1270 we cannot consider this function call constant. */
1271 if (TREE_ADDRESSABLE (type))
1272 *ecf_flags &= ~(ECF_CONST | ECF_PURE);
1273
1274 /* Compute the stack-size of this argument. */
1275 if (args[i].reg == 0 || args[i].partial != 0
1276 || reg_parm_stack_space > 0
1277 || args[i].pass_on_stack)
1278 locate_and_pad_parm (mode, type,
1279#ifdef STACK_PARMS_IN_REG_PARM_AREA
1280 1,
1281#else
1282 args[i].reg != 0,
1283#endif
1284 fndecl, args_size, &args[i].offset,
1285 &args[i].size, &alignment_pad);
1286
1287#ifndef ARGS_GROW_DOWNWARD
1288 args[i].slot_offset = *args_size;
1289#endif
1290
1291 args[i].alignment_pad = alignment_pad;
1292
1293 /* If a part of the arg was put into registers,
1294 don't include that part in the amount pushed. */
1295 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1296 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1297 / (PARM_BOUNDARY / BITS_PER_UNIT)
1298 * (PARM_BOUNDARY / BITS_PER_UNIT));
1299
1300 /* Update ARGS_SIZE, the total stack space for args so far. */
1301
1302 args_size->constant += args[i].size.constant;
1303 if (args[i].size.var)
1304 {
1305 ADD_PARM_SIZE (*args_size, args[i].size.var);
1306 }
1307
1308 /* Since the slot offset points to the bottom of the slot,
1309 we must record it after incrementing if the args grow down. */
1310#ifdef ARGS_GROW_DOWNWARD
1311 args[i].slot_offset = *args_size;
1312
1313 args[i].slot_offset.constant = -args_size->constant;
1314 if (args_size->var)
1315 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1316#endif
1317
1318 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1319 have been used, etc. */
1320
1321 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1322 argpos < n_named_args);
1323 }
1324}
1325
1326/* Update ARGS_SIZE to contain the total size for the argument block.
1327 Return the original constant component of the argument block's size.
1328
1329 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1330 for arguments passed in registers. */
1331
1332static int
1333compute_argument_block_size (reg_parm_stack_space, args_size,
1334 preferred_stack_boundary)
1335 int reg_parm_stack_space;
1336 struct args_size *args_size;
1337 int preferred_stack_boundary ATTRIBUTE_UNUSED;
1338{
1339 int unadjusted_args_size = args_size->constant;
1340
1341 /* For accumulate outgoing args mode we don't need to align, since the frame
1342 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1343 backends from generating misaligned frame sizes. */
1344#ifdef STACK_BOUNDARY
1345 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1346 preferred_stack_boundary = STACK_BOUNDARY;
1347#endif
1348
1349 /* Compute the actual size of the argument block required. The variable
1350 and constant sizes must be combined, the size may have to be rounded,
1351 and there may be a minimum required size. */
1352
1353 if (args_size->var)
1354 {
1355 args_size->var = ARGS_SIZE_TREE (*args_size);
1356 args_size->constant = 0;
1357
1358#ifdef PREFERRED_STACK_BOUNDARY
1359 preferred_stack_boundary /= BITS_PER_UNIT;
1360 if (preferred_stack_boundary > 1)
1361 {
1362 /* We don't handle this case yet. To handle it correctly we have
1363 to add the delta, round and subtract the delta.
1364 Currently no machine description requires this support. */
1365 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1366 abort();
1367 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1368 }
1369#endif
1370
1371 if (reg_parm_stack_space > 0)
1372 {
1373 args_size->var
1374 = size_binop (MAX_EXPR, args_size->var,
1375 ssize_int (reg_parm_stack_space));
1376
1377#ifndef OUTGOING_REG_PARM_STACK_SPACE
1378 /* The area corresponding to register parameters is not to count in
1379 the size of the block we need. So make the adjustment. */
1380 args_size->var
1381 = size_binop (MINUS_EXPR, args_size->var,
1382 ssize_int (reg_parm_stack_space));
1383#endif
1384 }
1385 }
1386 else
1387 {
1388#ifdef PREFERRED_STACK_BOUNDARY
1389 preferred_stack_boundary /= BITS_PER_UNIT;
1390 if (preferred_stack_boundary < 1)
1391 preferred_stack_boundary = 1;
1392 args_size->constant = (((args_size->constant
1393 + stack_pointer_delta
1394 + preferred_stack_boundary - 1)
1395 / preferred_stack_boundary
1396 * preferred_stack_boundary)
1397 - stack_pointer_delta);
1398#endif
1399
1400 args_size->constant = MAX (args_size->constant,
1401 reg_parm_stack_space);
1402
1403#ifdef MAYBE_REG_PARM_STACK_SPACE
1404 if (reg_parm_stack_space == 0)
1405 args_size->constant = 0;
1406#endif
1407
1408#ifndef OUTGOING_REG_PARM_STACK_SPACE
1409 args_size->constant -= reg_parm_stack_space;
1410#endif
1411 }
1412 return unadjusted_args_size;
1413}
1414
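/* Worked example for the constant-size rounding above, with assumed values:
   args_size->constant = 20, stack_pointer_delta = 4 and a preferred stack
   boundary of 16 bytes (128 bits):

     ((20 + 4 + 16 - 1) / 16) * 16 - 4  =  32 - 4  =  28

   so 28 bytes of new argument space, on top of the 4 bytes already adjusted,
   leave the stack pointer on a 16-byte boundary for the call.  */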
1415/* Precompute parameters as needed for a function call.
1416
1417 FLAGS is a mask of ECF_* constants.
1418
1419 NUM_ACTUALS is the number of arguments.
1420
1421 ARGS is an array containing information for each argument; this routine
1422 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1423 */
1424
1425static void
1426precompute_arguments (flags, num_actuals, args)
1427 int flags;
1428 int num_actuals;
1429 struct arg_data *args;
1430{
1431 int i;
1432
1433 /* If this function call is cse'able, precompute all the parameters.
1434 Note that if the parameter is constructed into a temporary, this will
1435 cause an additional copy because the parameter will be constructed
1436 into a temporary location and then copied into the outgoing arguments.
1437 If a parameter contains a call to alloca and this function uses the
1438 stack, precompute the parameter. */
1439
1440 /* If we preallocated the stack space, and some arguments must be passed
1441 on the stack, then we must precompute any parameter which contains a
1442 function call which will store arguments on the stack.
1443 Otherwise, evaluating the parameter may clobber previous parameters
1444 which have already been stored into the stack. (We have code to avoid
1445 such a case by saving the outgoing stack arguments, but it results in
1446 worse code.) */
1447
1448 for (i = 0; i < num_actuals; i++)
1449 if ((flags & (ECF_CONST | ECF_PURE))
1450 || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
1451 {
1452 /* If this is an addressable type, we cannot pre-evaluate it. */
1453 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1454 abort ();
1455
1456 push_temp_slots ();
1457
1458 args[i].value
1459 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1460
1461 preserve_temp_slots (args[i].value);
1462 pop_temp_slots ();
1463
1464 /* ANSI doesn't require a sequence point here,
1465 but PCC has one, so this will avoid some problems. */
1466 emit_queue ();
1467
1468 args[i].initial_value = args[i].value
1469 = protect_from_queue (args[i].value, 0);
1470
1471 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1472 {
1473 args[i].value
1474 = convert_modes (args[i].mode,
1475 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1476 args[i].value, args[i].unsignedp);
1477#ifdef PROMOTE_FOR_CALL_ONLY
1478 /* CSE will replace this only if it contains args[i].value
1479 pseudo, so convert it down to the declared mode using
1480 a SUBREG. */
1481 if (GET_CODE (args[i].value) == REG
1482 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1483 {
1484 args[i].initial_value
1485 = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1486 args[i].value, 0);
1487 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1488 SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
1489 = args[i].unsignedp;
1490 }
1491#endif
1492 }
1493 }
1494}
1495
1496/* Given the current state of MUST_PREALLOCATE and information about
1497 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1498 compute and return the final value for MUST_PREALLOCATE. */
1499
1500static int
1501finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1502 int must_preallocate;
1503 int num_actuals;
1504 struct arg_data *args;
1505 struct args_size *args_size;
1506{
1507 /* See if we have or want to preallocate stack space.
1508
1509 If we would have to push a partially-in-regs parm
1510 before other stack parms, preallocate stack space instead.
1511
1512 If the size of some parm is not a multiple of the required stack
1513 alignment, we must preallocate.
1514
1515 If the total size of arguments that would otherwise create a copy in
1516 a temporary (such as a CALL) is more than half the total argument list
1517 size, preallocation is faster.
1518
1519 Another reason to preallocate is if we have a machine (like the m88k)
1520 where stack alignment is required to be maintained between every
1521 pair of insns, not just when the call is made. However, we assume here
1522 that such machines either do not have push insns (and hence preallocation
1523 would occur anyway) or the problem is taken care of with
1524 PUSH_ROUNDING. */
1525
1526 if (! must_preallocate)
1527 {
1528 int partial_seen = 0;
1529 int copy_to_evaluate_size = 0;
1530 int i;
1531
1532 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1533 {
1534 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1535 partial_seen = 1;
1536 else if (partial_seen && args[i].reg == 0)
1537 must_preallocate = 1;
1538
1539 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1540 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1541 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1542 || TREE_CODE (args[i].tree_value) == COND_EXPR
1543 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1544 copy_to_evaluate_size
1545 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1546 }
1547
1548 if (copy_to_evaluate_size * 2 >= args_size->constant
1549 && args_size->constant > 0)
1550 must_preallocate = 1;
1551 }
1552 return must_preallocate;
1553}
1554
1555/* If we preallocated stack space, compute the address of each argument
1556 and store it into the ARGS array.
1557
1558 We need not ensure it is a valid memory address here; it will be
1559 validized when it is used.
1560
1561 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1562
1563static void
1564compute_argument_addresses (args, argblock, num_actuals)
1565 struct arg_data *args;
1566 rtx argblock;
1567 int num_actuals;
1568{
1569 if (argblock)
1570 {
1571 rtx arg_reg = argblock;
1572 int i, arg_offset = 0;
1573
1574 if (GET_CODE (argblock) == PLUS)
1575 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1576
1577 for (i = 0; i < num_actuals; i++)
1578 {
1579 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1580 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1581 rtx addr;
1582
1583 /* Skip this parm if it will not be passed on the stack. */
1584 if (! args[i].pass_on_stack && args[i].reg != 0)
1585 continue;
1586
1587 if (GET_CODE (offset) == CONST_INT)
1588 addr = plus_constant (arg_reg, INTVAL (offset));
1589 else
1590 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1591
1592 addr = plus_constant (addr, arg_offset);
1593 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1594 MEM_SET_IN_STRUCT_P
1595 (args[i].stack,
1596 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1597
1598 if (GET_CODE (slot_offset) == CONST_INT)
1599 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1600 else
1601 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1602
1603 addr = plus_constant (addr, arg_offset);
1604 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1605 }
1606 }
1607}
1608
1609/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1610 in a call instruction.
1611
1612 FNDECL is the tree node for the target function. For an indirect call
1613 FNDECL will be NULL_TREE.
1614
1615 EXP is the CALL_EXPR for this call. */
1616
1617static rtx
1618rtx_for_function_call (fndecl, exp)
1619 tree fndecl;
1620 tree exp;
1621{
1622 rtx funexp;
1623
1624 /* Get the function to call, in the form of RTL. */
1625 if (fndecl)
1626 {
1627 /* If this is the first use of the function, see if we need to
1628 make an external definition for it. */
1629 if (! TREE_USED (fndecl))
1630 {
1631 assemble_external (fndecl);
1632 TREE_USED (fndecl) = 1;
1633 }
1634
1635 /* Get a SYMBOL_REF rtx for the function address. */
1636 funexp = XEXP (DECL_RTL (fndecl), 0);
1637 }
1638 else
1639 /* Generate an rtx (probably a pseudo-register) for the address. */
1640 {
91ab1046 1641 rtx funaddr;
a45bdd02 1642 push_temp_slots ();
91ab1046
DT
1643 funaddr = funexp =
1644 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
a45bdd02
JL
1645 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1646
1647 /* Check the function is executable. */
1648 if (current_function_check_memory_usage)
91ab1046
DT
1649 {
1650#ifdef POINTERS_EXTEND_UNSIGNED
1651 /* It might be OK to convert funexp in place, but there's
1652 a lot going on between here and when it happens naturally
1653 that this seems safer. */
1654 funaddr = convert_memory_address (Pmode, funexp);
1655#endif
1656 emit_library_call (chkr_check_exec_libfunc, 1,
1657 VOIDmode, 1,
1658 funaddr, Pmode);
1659 }
a45bdd02
JL
1660 emit_queue ();
1661 }
1662 return funexp;
1663}
1664
21a3b983
JL
1665/* Do the register loads required for any wholly-register parms or any
1666 parms which are passed both on the stack and in a register. Their
1667 expressions were already evaluated.
1668
1669 Mark all register-parms as living through the call, putting these USE
1670 insns in the CALL_INSN_FUNCTION_USAGE field. */
1671
1672static void
1673load_register_parameters (args, num_actuals, call_fusage)
1674 struct arg_data *args;
1675 int num_actuals;
1676 rtx *call_fusage;
1677{
1678 int i, j;
1679
1680#ifdef LOAD_ARGS_REVERSED
1681 for (i = num_actuals - 1; i >= 0; i--)
1682#else
1683 for (i = 0; i < num_actuals; i++)
1684#endif
1685 {
1686 rtx reg = args[i].reg;
1687 int partial = args[i].partial;
1688 int nregs;
1689
1690 if (reg)
1691 {
 1692 /* Set to non-negative if we must move a word at a time, even if just
 1693 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1694 we just use a normal move insn. This value can be zero if the
1695 argument is a zero size structure with no fields. */
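 /* For example (illustrative numbers only): a 10-byte BLKmode argument
    on a target with 4-byte words gives nregs = (10 + 3) / 4 = 3, so
    three word-sized moves are used below; partial == 0 with a non-BLK
    mode gives nregs == -1 and a single move insn.  */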
1696 nregs = (partial ? partial
1697 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1698 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1699 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1700 : -1));
1701
1702 /* Handle calls that pass values in multiple non-contiguous
1703 locations. The Irix 6 ABI has examples of this. */
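 /* For instance, a small structure that the Irix 6 ABI passes partly in
    a floating-point register and partly in an integer register shows up
    here as a PARALLEL of (register, byte offset) pairs, and
    emit_group_load copies each piece out of args[i].value.  */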
1704
1705 if (GET_CODE (reg) == PARALLEL)
19caa751
RK
1706 emit_group_load (reg, args[i].value,
1707 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1708 TYPE_ALIGN (TREE_TYPE (args[i].tree_value)));
21a3b983
JL
1709
1710 /* If simple case, just do move. If normal partial, store_one_arg
1711 has already loaded the register for us. In all other cases,
1712 load the register(s) from memory. */
1713
1714 else if (nregs == -1)
1715 emit_move_insn (reg, args[i].value);
1716
1717 /* If we have pre-computed the values to put in the registers in
1718 the case of non-aligned structures, copy them in now. */
1719
1720 else if (args[i].n_aligned_regs != 0)
1721 for (j = 0; j < args[i].n_aligned_regs; j++)
1722 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1723 args[i].aligned_regs[j]);
1724
1725 else if (partial == 0 || args[i].pass_on_stack)
1726 move_block_to_reg (REGNO (reg),
1727 validize_mem (args[i].value), nregs,
1728 args[i].mode);
1729
1730 /* Handle calls that pass values in multiple non-contiguous
1731 locations. The Irix 6 ABI has examples of this. */
1732 if (GET_CODE (reg) == PARALLEL)
1733 use_group_regs (call_fusage, reg);
1734 else if (nregs == -1)
1735 use_reg (call_fusage, reg);
1736 else
1737 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1738 }
1739 }
1740}
1741
f2d33f13
JH
1742/* Try to integrate the function. See expand_inline_function for documentation
1743 about the parameters. */
1744
1745static rtx
1746try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1747 tree fndecl;
1748 tree actparms;
1749 rtx target;
1750 int ignore;
1751 tree type;
1752 rtx structure_value_addr;
1753{
1754 rtx temp;
1755 rtx before_call;
1756 int i;
1757 rtx old_stack_level = 0;
7657ad0a 1758 int reg_parm_stack_space = 0;
f2d33f13
JH
1759
1760#ifdef REG_PARM_STACK_SPACE
1761#ifdef MAYBE_REG_PARM_STACK_SPACE
1762 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1763#else
1764 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1765#endif
1766#endif
1767
1768 before_call = get_last_insn ();
1769
1770 temp = expand_inline_function (fndecl, actparms, target,
1771 ignore, type,
1772 structure_value_addr);
1773
1774 /* If inlining succeeded, return. */
1775 if (temp != (rtx) (HOST_WIDE_INT) - 1)
1776 {
1777 if (ACCUMULATE_OUTGOING_ARGS)
1778 {
1779 /* If the outgoing argument list must be preserved, push
1780 the stack before executing the inlined function if it
1781 makes any calls. */
1782
1783 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1784 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1785 break;
1786
1787 if (stack_arg_under_construction || i >= 0)
1788 {
1789 rtx first_insn
1790 = before_call ? NEXT_INSN (before_call) : get_insns ();
1791 rtx insn = NULL_RTX, seq;
1792
1793 /* Look for a call in the inline function code.
1794 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1795 nonzero then there is a call and it is not necessary
1796 to scan the insns. */
1797
1798 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1799 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1800 if (GET_CODE (insn) == CALL_INSN)
1801 break;
1802
1803 if (insn)
1804 {
1805 /* Reserve enough stack space so that the largest
1806 argument list of any function call in the inline
1807 function does not overlap the argument list being
1808 evaluated. This is usually an overestimate because
1809 allocate_dynamic_stack_space reserves space for an
1810 outgoing argument list in addition to the requested
1811 space, but there is no way to ask for stack space such
1812 that an argument list of a certain length can be
1813 safely constructed.
1814
1815 Add the stack space reserved for register arguments, if
1816 any, in the inline function. What is really needed is the
1817 largest value of reg_parm_stack_space in the inline
1818 function, but that is not available. Using the current
1819 value of reg_parm_stack_space is wrong, but gives
1820 correct results on all supported machines. */
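 /* With made-up numbers: if the inlined body's largest call needs
    32 bytes of outgoing arguments and 16 bytes are reserved for
    register parms, ADJUST below is 48 and that much space is reserved
    around the inlined sequence.  */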
1821
1822 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1823 + reg_parm_stack_space);
1824
1825 start_sequence ();
1826 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1827 allocate_dynamic_stack_space (GEN_INT (adjust),
1828 NULL_RTX, BITS_PER_UNIT);
1829 seq = get_insns ();
1830 end_sequence ();
1831 emit_insns_before (seq, first_insn);
1832 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1833 }
1834 }
1835 }
1836
1837 /* If the result is equivalent to TARGET, return TARGET to simplify
1838 checks in store_expr. They can be equivalent but not equal in the
1839 case of a function that returns BLKmode. */
1840 if (temp != target && rtx_equal_p (temp, target))
1841 return target;
1842 return temp;
1843 }
1844
1845 /* If inlining failed, mark FNDECL as needing to be compiled
1846 separately after all. If function was declared inline,
1847 give a warning. */
1848 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1849 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1850 {
1851 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1852 warning ("called from here");
1853 }
1854 mark_addressable (fndecl);
1855 return (rtx) (HOST_WIDE_INT) - 1;
1856}
1857
51bbfa0c
RS
1858/* Generate all the code for a function call
1859 and return an rtx for its value.
1860 Store the value in TARGET (specified as an rtx) if convenient.
1861 If the value is stored in TARGET then TARGET is returned.
1862 If IGNORE is nonzero, then we ignore the value of the function call. */
1863
1864rtx
8129842c 1865expand_call (exp, target, ignore)
51bbfa0c
RS
1866 tree exp;
1867 rtx target;
1868 int ignore;
51bbfa0c 1869{
0a1c58a2
JL
1870 /* Nonzero if we are currently expanding a call. */
1871 static int currently_expanding_call = 0;
1872
51bbfa0c
RS
1873 /* List of actual parameters. */
1874 tree actparms = TREE_OPERAND (exp, 1);
1875 /* RTX for the function to be called. */
1876 rtx funexp;
0a1c58a2
JL
1877 /* Sequence of insns to perform a tail recursive "call". */
1878 rtx tail_recursion_insns = NULL_RTX;
1879 /* Sequence of insns to perform a normal "call". */
1880 rtx normal_call_insns = NULL_RTX;
 1881 /* Sequence of insns to perform a tail (sibling) "call". */
1882 rtx tail_call_insns = NULL_RTX;
51bbfa0c
RS
1883 /* Data type of the function. */
1884 tree funtype;
1885 /* Declaration of the function being called,
1886 or 0 if the function is computed (not known by name). */
1887 tree fndecl = 0;
1888 char *name = 0;
0a1c58a2 1889 rtx insn;
194c7c45 1890 int try_tail_call;
0a1c58a2 1891 int pass;
51bbfa0c
RS
1892
1893 /* Register in which non-BLKmode value will be returned,
1894 or 0 if no value or if value is BLKmode. */
1895 rtx valreg;
1896 /* Address where we should return a BLKmode value;
1897 0 if value not BLKmode. */
1898 rtx structure_value_addr = 0;
1899 /* Nonzero if that address is being passed by treating it as
1900 an extra, implicit first parameter. Otherwise,
1901 it is passed by being copied directly into struct_value_rtx. */
1902 int structure_value_addr_parm = 0;
1903 /* Size of aggregate value wanted, or zero if none wanted
1904 or if we are using the non-reentrant PCC calling convention
1905 or expecting the value in registers. */
e5e809f4 1906 HOST_WIDE_INT struct_value_size = 0;
51bbfa0c
RS
1907 /* Nonzero if called function returns an aggregate in memory PCC style,
1908 by returning the address of where to find it. */
1909 int pcc_struct_value = 0;
1910
1911 /* Number of actual parameters in this call, including struct value addr. */
1912 int num_actuals;
1913 /* Number of named args. Args after this are anonymous ones
1914 and they must all go on the stack. */
1915 int n_named_args;
51bbfa0c
RS
1916
1917 /* Vector of information about each argument.
1918 Arguments are numbered in the order they will be pushed,
1919 not the order they are written. */
1920 struct arg_data *args;
1921
1922 /* Total size in bytes of all the stack-parms scanned so far. */
1923 struct args_size args_size;
1924 /* Size of arguments before any adjustments (such as rounding). */
599f37b6 1925 int unadjusted_args_size;
51bbfa0c
RS
1926 /* Data on reg parms scanned so far. */
1927 CUMULATIVE_ARGS args_so_far;
1928 /* Nonzero if a reg parm has been scanned. */
1929 int reg_parm_seen;
efd65a8b 1930 /* Nonzero if this is an indirect function call. */
51bbfa0c
RS
1931
1932 /* Nonzero if we must avoid push-insns in the args for this call.
1933 If stack space is allocated for register parameters, but not by the
1934 caller, then it is preallocated in the fixed part of the stack frame.
1935 So the entire argument block must then be preallocated (i.e., we
1936 ignore PUSH_ROUNDING in that case). */
1937
f73ad30e 1938 int must_preallocate = !PUSH_ARGS;
51bbfa0c 1939
f72aed24 1940 /* Size of the stack reserved for parameter registers. */
6f90e075
JW
1941 int reg_parm_stack_space = 0;
1942
51bbfa0c
RS
1943 /* Address of space preallocated for stack parms
1944 (on machines that lack push insns), or 0 if space not preallocated. */
1945 rtx argblock = 0;
1946
f2d33f13
JH
1947 /* Mask of ECF_ flags. */
1948 int flags = 0;
51bbfa0c
RS
1949 /* Nonzero if this is a call to an inline function. */
1950 int is_integrable = 0;
f73ad30e 1951#ifdef REG_PARM_STACK_SPACE
51bbfa0c
RS
1952 /* Define the boundary of the register parm stack space that needs to be
 1953 saved, if any. */
1954 int low_to_save = -1, high_to_save;
1955 rtx save_area = 0; /* Place that it is saved */
1956#endif
1957
51bbfa0c
RS
1958 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1959 char *initial_stack_usage_map = stack_usage_map;
a544cfd2 1960 int old_stack_arg_under_construction = 0;
51bbfa0c
RS
1961
1962 rtx old_stack_level = 0;
79be3418 1963 int old_pending_adj = 0;
51bbfa0c 1964 int old_inhibit_defer_pop = inhibit_defer_pop;
1503a7ec 1965 int old_stack_allocated;
0a1c58a2 1966 rtx call_fusage;
51bbfa0c 1967 register tree p;
21a3b983 1968 register int i;
0a1c58a2 1969 int preferred_stack_boundary;
51bbfa0c 1970
7815214e
RK
1971 /* The value of the function call can be put in a hard register. But
1972 if -fcheck-memory-usage, code which invokes functions (and thus
1973 damages some hard registers) can be inserted before using the value.
1974 So, target is always a pseudo-register in that case. */
7d384cc0 1975 if (current_function_check_memory_usage)
7815214e
RK
1976 target = 0;
1977
f2d33f13
JH
1978 /* See if this is "nothrow" function call. */
1979 if (TREE_NOTHROW (exp))
1980 flags |= ECF_NOTHROW;
1981
51bbfa0c
RS
1982 /* See if we can find a DECL-node for the actual function.
1983 As a result, decide whether this is a call to an integrable function. */
1984
1985 p = TREE_OPERAND (exp, 0);
1986 if (TREE_CODE (p) == ADDR_EXPR)
1987 {
1988 fndecl = TREE_OPERAND (p, 0);
1989 if (TREE_CODE (fndecl) != FUNCTION_DECL)
fdff8c6d 1990 fndecl = 0;
51bbfa0c
RS
1991 else
1992 {
1993 if (!flag_no_inline
1994 && fndecl != current_function_decl
aa10adff 1995 && DECL_INLINE (fndecl)
1cf4f698 1996 && DECL_SAVED_INSNS (fndecl)
49ad7cfa 1997 && DECL_SAVED_INSNS (fndecl)->inlinable)
51bbfa0c
RS
1998 is_integrable = 1;
1999 else if (! TREE_ADDRESSABLE (fndecl))
2000 {
13d39dbc 2001 /* In case this function later becomes inlinable,
51bbfa0c
RS
2002 record that there was already a non-inline call to it.
2003
2004 Use abstraction instead of setting TREE_ADDRESSABLE
2005 directly. */
da8c1713
RK
2006 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2007 && optimize > 0)
1907795e
JM
2008 {
2009 warning_with_decl (fndecl, "can't inline call to `%s'");
2010 warning ("called from here");
2011 }
51bbfa0c
RS
2012 mark_addressable (fndecl);
2013 }
2014
f2d33f13 2015 flags |= flags_from_decl_or_type (fndecl);
51bbfa0c
RS
2016 }
2017 }
2018
fdff8c6d 2019 /* If we don't have a specific function to call, see if we have any
f2d33f13 2020 attributes set in the type. */
fdff8c6d 2021 if (fndecl == 0)
cc2a3e63 2022 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
fdff8c6d 2023
6f90e075
JW
2024#ifdef REG_PARM_STACK_SPACE
2025#ifdef MAYBE_REG_PARM_STACK_SPACE
2026 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2027#else
2028 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2029#endif
2030#endif
2031
f73ad30e
JH
2032#ifndef OUTGOING_REG_PARM_STACK_SPACE
2033 if (reg_parm_stack_space > 0 && PUSH_ARGS)
e5e809f4
JL
2034 must_preallocate = 1;
2035#endif
2036
51bbfa0c
RS
2037 /* Warn if this value is an aggregate type,
2038 regardless of which calling convention we are using for it. */
05e3bdb9 2039 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
51bbfa0c
RS
2040 warning ("function call has aggregate value");
2041
2042 /* Set up a place to return a structure. */
2043
2044 /* Cater to broken compilers. */
2045 if (aggregate_value_p (exp))
2046 {
2047 /* This call returns a big structure. */
2a8f6b90 2048 flags &= ~(ECF_CONST | ECF_PURE);
51bbfa0c
RS
2049
2050#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a
RS
2051 {
2052 pcc_struct_value = 1;
0dd532dc
JW
2053 /* Easier than making that case work right. */
2054 if (is_integrable)
2055 {
2056 /* In case this is a static function, note that it has been
2057 used. */
2058 if (! TREE_ADDRESSABLE (fndecl))
2059 mark_addressable (fndecl);
2060 is_integrable = 0;
2061 }
9e7b1d0a
RS
2062 }
2063#else /* not PCC_STATIC_STRUCT_RETURN */
2064 {
2065 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
51bbfa0c 2066
9e7b1d0a
RS
2067 if (target && GET_CODE (target) == MEM)
2068 structure_value_addr = XEXP (target, 0);
2069 else
2070 {
e9a25f70
JL
2071 /* Assign a temporary to hold the value. */
2072 tree d;
51bbfa0c 2073
9e7b1d0a
RS
2074 /* For variable-sized objects, we must be called with a target
2075 specified. If we were to allocate space on the stack here,
2076 we would have no way of knowing when to free it. */
51bbfa0c 2077
002bdd6c
RK
2078 if (struct_value_size < 0)
2079 abort ();
2080
e9a25f70
JL
2081 /* This DECL is just something to feed to mark_addressable;
2082 it doesn't get pushed. */
2083 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
2084 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
2085 mark_addressable (d);
14a774a9 2086 mark_temp_addr_taken (DECL_RTL (d));
e9a25f70 2087 structure_value_addr = XEXP (DECL_RTL (d), 0);
e5e809f4 2088 TREE_USED (d) = 1;
9e7b1d0a
RS
2089 target = 0;
2090 }
2091 }
2092#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
2093 }
2094
2095 /* If called function is inline, try to integrate it. */
2096
2097 if (is_integrable)
2098 {
f2d33f13
JH
2099 rtx temp = try_to_integrate (fndecl, actparms, target,
2100 ignore, TREE_TYPE (exp),
2101 structure_value_addr);
2102 if (temp != (rtx) (HOST_WIDE_INT) - 1)
2103 return temp;
51bbfa0c
RS
2104 }
2105
0a1c58a2
JL
2106 currently_expanding_call++;
2107
194c7c45
RH
 2108 /* Tail calls can make things harder to debug, and we've traditionally
2109 pushed these optimizations into -O2. Don't try if we're already
2110 expanding a call, as that means we're an argument. Similarly, if
 2111 there are pending loops or cleanups we know there's code to follow
2112 the call. */
0a1c58a2 2113
194c7c45 2114 try_tail_call = 0;
db8cb48e 2115 if (flag_optimize_sibling_calls
0a1c58a2 2116 && currently_expanding_call == 1
e245d3af
RH
2117 && stmt_loop_nest_empty ()
2118 && ! any_pending_cleanups (1))
0a1c58a2 2119 {
194c7c45
RH
2120 tree new_actparms = NULL_TREE;
2121
2122 /* Ok, we're going to give the tail call the old college try.
2123 This means we're going to evaluate the function arguments
2124 up to three times. There are two degrees of badness we can
2125 encounter, those that can be unsaved and those that can't.
2126 (See unsafe_for_reeval commentary for details.)
2127
2128 Generate a new argument list. Pass safe arguments through
2129 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2130 For hard badness, evaluate them now and put their resulting
2131 rtx in a temporary VAR_DECL. */
2132
0a1c58a2 2133 for (p = actparms; p; p = TREE_CHAIN (p))
194c7c45
RH
2134 switch (unsafe_for_reeval (TREE_VALUE (p)))
2135 {
2136 case 0: /* Safe. */
2137 new_actparms = tree_cons (TREE_PURPOSE (p), TREE_VALUE (p),
2138 new_actparms);
2139 break;
0a1c58a2 2140
194c7c45
RH
2141 case 1: /* Mildly unsafe. */
2142 new_actparms = tree_cons (TREE_PURPOSE (p),
2143 unsave_expr (TREE_VALUE (p)),
2144 new_actparms);
2145 break;
0a1c58a2 2146
194c7c45 2147 case 2: /* Wildly unsafe. */
0a1c58a2 2148 {
194c7c45
RH
2149 tree var = build_decl (VAR_DECL, NULL_TREE,
2150 TREE_TYPE (TREE_VALUE (p)));
2151 DECL_RTL (var) = expand_expr (TREE_VALUE (p), NULL_RTX,
2152 VOIDmode, EXPAND_NORMAL);
2153 new_actparms = tree_cons (TREE_PURPOSE (p), var, new_actparms);
0a1c58a2 2154 }
194c7c45 2155 break;
0a1c58a2 2156
194c7c45
RH
2157 default:
2158 abort ();
2159 }
2160
2161 /* We built the new argument chain backwards. */
2162 actparms = nreverse (new_actparms);
2163
2164 /* Expanding one of those dangerous arguments could have added
2165 cleanups, but otherwise give it a whirl. */
2166 try_tail_call = ! any_pending_cleanups (1);
0a1c58a2
JL
2167 }
2168
2169 /* Generate a tail recursion sequence when calling ourselves. */
2170
194c7c45 2171 if (try_tail_call
0a1c58a2
JL
2172 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
2173 && TREE_OPERAND (TREE_OPERAND (exp, 0), 0) == current_function_decl)
2174 {
2175 /* We want to emit any pending stack adjustments before the tail
2176 recursion "call". That way we know any adjustment after the tail
2177 recursion call can be ignored if we indeed use the tail recursion
2178 call expansion. */
2179 int save_pending_stack_adjust = pending_stack_adjust;
1503a7ec 2180 int save_stack_pointer_delta = stack_pointer_delta;
0a1c58a2
JL
2181
2182 /* Use a new sequence to hold any RTL we generate. We do not even
2183 know if we will use this RTL yet. The final decision can not be
2184 made until after RTL generation for the entire function is
2185 complete. */
b06775f9 2186 start_sequence ();
0a1c58a2
JL
2187
2188 /* Emit the pending stack adjustments before we expand any arguments. */
2189 do_pending_stack_adjust ();
2190
b06775f9
RH
2191 if (optimize_tail_recursion (actparms, get_last_insn ()))
2192 tail_recursion_insns = get_insns ();
0a1c58a2
JL
2193 end_sequence ();
2194
0a1c58a2
JL
2195 /* Restore the original pending stack adjustment for the sibling and
2196 normal call cases below. */
2197 pending_stack_adjust = save_pending_stack_adjust;
1503a7ec 2198 stack_pointer_delta = save_stack_pointer_delta;
0a1c58a2
JL
2199 }
2200
51bbfa0c
RS
2201 function_call_count++;
2202
2203 if (fndecl && DECL_NAME (fndecl))
2204 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
2205
0a1c58a2
JL
2206#ifdef PREFERRED_STACK_BOUNDARY
2207 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2208#else
2209 preferred_stack_boundary = STACK_BOUNDARY;
2210#endif
2211
c2f8b491
JH
2212 /* Ensure current function's preferred stack boundary is at least
2213 what we need. We don't have to increase alignment for recursive
2214 functions. */
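 /* For example, if PREFERRED_STACK_BOUNDARY is 128 bits but this
    function has so far only required 64, the assignment below raises
    cfun->preferred_stack_boundary to 128; a self-recursive call is
    left alone.  */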
2215 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2216 && fndecl != current_function_decl)
2217 cfun->preferred_stack_boundary = preferred_stack_boundary;
2218
51bbfa0c 2219 /* See if this is a call to a function that can return more than once
20efdf74 2220 or a call to longjmp or malloc. */
f2d33f13 2221 flags |= special_function_p (fndecl, flags);
51bbfa0c 2222
f2d33f13 2223 if (flags & ECF_MAY_BE_ALLOCA)
51bbfa0c
RS
2224 current_function_calls_alloca = 1;
2225
39842893
JL
2226 /* Operand 0 is a pointer-to-function; get the type of the function. */
2227 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
2228 if (! POINTER_TYPE_P (funtype))
2229 abort ();
2230 funtype = TREE_TYPE (funtype);
2231
0a1c58a2
JL
2232 /* We want to make two insn chains; one for a sibling call, the other
2233 for a normal call. We will select one of the two chains after
2234 initial RTL generation is complete. */
2235 for (pass = 0; pass < 2; pass++)
2236 {
2237 int sibcall_failure = 0;
 2238 /* We want to emit any pending stack adjustments before the tail
2239 recursion "call". That way we know any adjustment after the tail
2240 recursion call can be ignored if we indeed use the tail recursion
2241 call expansion. */
2242 int save_pending_stack_adjust;
1503a7ec 2243 int save_stack_pointer_delta;
0a1c58a2 2244 rtx insns;
7d167afd 2245 rtx before_call, next_arg_reg;
39842893 2246
0a1c58a2
JL
2247 if (pass == 0)
2248 {
2249 /* Various reasons we can not use a sibling call. */
194c7c45 2250 if (! try_tail_call
0a1c58a2
JL
2251#ifdef HAVE_sibcall_epilogue
2252 || ! HAVE_sibcall_epilogue
2253#else
2254 || 1
2255#endif
2256 /* The structure value address is used and modified in the
2257 loop below. It does not seem worth the effort to save and
2258 restore it as a state variable since few optimizable
2259 sibling calls will return a structure. */
2260 || structure_value_addr != NULL_RTX
2261 /* If the register holding the address is a callee saved
2262 register, then we lose. We have no way to prevent that,
2263 so we only allow calls to named functions. */
194c7c45
RH
2264 /* ??? This could be done by having the insn constraints
2265 use a register class that is all call-clobbered. Any
2266 reload insns generated to fix things up would appear
2267 before the sibcall_epilogue. */
0a1c58a2
JL
2268 || fndecl == NULL_TREE
2269 || ! FUNCTION_OK_FOR_SIBCALL (fndecl))
2270 continue;
51bbfa0c 2271
1c81f9fe
JM
2272 /* Emit any queued insns now; otherwise they would end up in
2273 only one of the alternates. */
2274 emit_queue ();
2275
e245d3af
RH
2276 /* We know at this point that there are not currently any
2277 pending cleanups. If, however, in the process of evaluating
2278 the arguments we were to create some, we'll need to be
2279 able to get rid of them. */
2280 expand_start_target_temps ();
2281
0a1c58a2
JL
2282 /* State variables we need to save and restore between
2283 iterations. */
2284 save_pending_stack_adjust = pending_stack_adjust;
1503a7ec 2285 save_stack_pointer_delta = stack_pointer_delta;
0a1c58a2 2286 }
f2d33f13
JH
2287 if (pass)
2288 flags &= ~ECF_SIBCALL;
2289 else
2290 flags |= ECF_SIBCALL;
51bbfa0c 2291
0a1c58a2 2292 /* Other state variables that we must reinitialize each time
f2d33f13 2293 through the loop (that are not initialized by the loop itself). */
0a1c58a2
JL
2294 argblock = 0;
2295 call_fusage = 0;
fa76d9e0 2296
0a1c58a2 2297 /* Start a new sequence for the normal call case.
51bbfa0c 2298
0a1c58a2
JL
2299 From this point on, if the sibling call fails, we want to set
2300 sibcall_failure instead of continuing the loop. */
2301 start_sequence ();
eecb6f50 2302
0a1c58a2
JL
2303 /* When calling a const function, we must pop the stack args right away,
2304 so that the pop is deleted or moved with the call. */
2a8f6b90 2305 if (flags & (ECF_CONST | ECF_PURE))
0a1c58a2 2306 NO_DEFER_POP;
51bbfa0c 2307
0a1c58a2
JL
2308 /* Don't let pending stack adjusts add up to too much.
2309 Also, do all pending adjustments now if there is any chance
2310 this might be a call to alloca or if we are expanding a sibling
2311 call sequence. */
2312 if (pending_stack_adjust >= 32
f2d33f13 2313 || (pending_stack_adjust > 0 && (flags & ECF_MAY_BE_ALLOCA))
0a1c58a2
JL
2314 || pass == 0)
2315 do_pending_stack_adjust ();
51bbfa0c 2316
f2d33f13 2317 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
0a1c58a2
JL
2318 {
2319 /* A fork duplicates the profile information, and an exec discards
2320 it. We can't rely on fork/exec to be paired. So write out the
2321 profile information we have gathered so far, and clear it. */
2322 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2323 is subject to race conditions, just as with multithreaded
2324 programs. */
2325
2326 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
2327 VOIDmode, 0);
2328 }
2329
2330 /* Push the temporary stack slot level so that we can free any
2331 temporaries we make. */
2332 push_temp_slots ();
51bbfa0c 2333
0a1c58a2 2334 /* Start updating where the next arg would go.
51bbfa0c 2335
0a1c58a2
JL
2336 On some machines (such as the PA) indirect calls have a different
2337 calling convention than normal calls. The last argument in
2338 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2339 or not. */
2340 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2341
2342 /* If struct_value_rtx is 0, it means pass the address
2343 as if it were an extra parameter. */
2344 if (structure_value_addr && struct_value_rtx == 0)
2345 {
2346 /* If structure_value_addr is a REG other than
 2347 virtual_outgoing_args_rtx, we can always use it. If it
2348 is not a REG, we must always copy it into a register.
2349 If it is virtual_outgoing_args_rtx, we must copy it to another
2350 register in some cases. */
2351 rtx temp = (GET_CODE (structure_value_addr) != REG
f73ad30e
JH
2352 || (ACCUMULATE_OUTGOING_ARGS
2353 && stack_arg_under_construction
0a1c58a2 2354 && structure_value_addr == virtual_outgoing_args_rtx)
0a1c58a2
JL
2355 ? copy_addr_to_reg (structure_value_addr)
2356 : structure_value_addr);
2357
2358 actparms
2359 = tree_cons (error_mark_node,
2360 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2361 temp),
2362 actparms);
2363 structure_value_addr_parm = 1;
2364 }
2365
2366 /* Count the arguments and set NUM_ACTUALS. */
2367 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
2368 num_actuals = i;
2369
2370 /* Compute number of named args.
2371 Normally, don't include the last named arg if anonymous args follow.
2372 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2373 (If no anonymous args follow, the result of list_length is actually
2374 one too large. This is harmless.)
2375
2376 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2377 zero, this machine will be able to place unnamed args that were
2378 passed in registers into the stack. So treat all args as named.
2379 This allows the insns emitting for a specific argument list to be
2380 independent of the function declaration.
2381
2382 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2383 reliable way to pass unnamed args in registers, so we must force
2384 them into memory. */
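 /* Illustration: for a prototype such as `int f (int, int, ...)'
    TYPE_ARG_TYPES lists the two named types, so with
    STRICT_ARGUMENT_NAMING zero n_named_args is 2 - 1 = 1; for a
    non-variadic prototype the trailing void node makes list_length one
    larger, and the subtraction cancels it.  */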
2385
2386 if ((STRICT_ARGUMENT_NAMING
2387 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2388 && TYPE_ARG_TYPES (funtype) != 0)
2389 n_named_args
2390 = (list_length (TYPE_ARG_TYPES (funtype))
2391 /* Don't include the last named arg. */
2392 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2393 /* Count the struct value address, if it is passed as a parm. */
2394 + structure_value_addr_parm);
2395 else
2396 /* If we know nothing, treat all args as named. */
2397 n_named_args = num_actuals;
2398
2399 /* Make a vector to hold all the information about each arg. */
2400 args = (struct arg_data *) alloca (num_actuals
2401 * sizeof (struct arg_data));
2402 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
2403
 2404 /* Build up entries in the ARGS array, compute the size of the arguments
2405 into ARGS_SIZE, etc. */
2406 initialize_argument_information (num_actuals, args, &args_size,
2407 n_named_args, actparms, fndecl,
2408 &args_so_far, reg_parm_stack_space,
2409 &old_stack_level, &old_pending_adj,
f2d33f13 2410 &must_preallocate, &flags);
51bbfa0c 2411
6f90e075 2412#ifdef FINAL_REG_PARM_STACK_SPACE
0a1c58a2
JL
2413 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2414 args_size.var);
6f90e075
JW
2415#endif
2416
0a1c58a2
JL
2417 if (args_size.var)
2418 {
2419 /* If this function requires a variable-sized argument list, don't
2420 try to make a cse'able block for this call. We may be able to
2421 do this eventually, but it is too complicated to keep track of
2422 what insns go in the cse'able block and which don't.
51bbfa0c 2423
0a1c58a2 2424 Also do not make a sibling call. */
e5e809f4 2425
2a8f6b90 2426 flags &= ~(ECF_CONST | ECF_PURE);
0a1c58a2
JL
2427 must_preallocate = 1;
2428 sibcall_failure = 1;
2429 }
2430
7d167afd
JJ
2431 if (args_size.constant > current_function_args_size)
2432 {
2433 /* If this function requires more stack slots than the current
2434 function, we cannot change it into a sibling call. */
2435 sibcall_failure = 1;
2436 }
2437
0a1c58a2
JL
2438 /* Compute the actual size of the argument block required. The variable
2439 and constant sizes must be combined, the size may have to be rounded,
2440 and there may be a minimum required size. When generating a sibcall
2441 pattern, do not round up, since we'll be re-using whatever space our
2442 caller provided. */
2443 unadjusted_args_size
2444 = compute_argument_block_size (reg_parm_stack_space, &args_size,
2445 (pass == 0 ? 0
2446 : preferred_stack_boundary));
2447
2448 /* If the callee pops its own arguments, then it must pop exactly
2449 the same number of arguments as the current function. */
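 /* E.g. under a callee-pop convention, if the current function's
    callers expect it to pop 8 bytes of arguments but the function being
    called would pop 12, the frames do not match and the sibling call
    must be abandoned.  */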
2450 if (RETURN_POPS_ARGS (fndecl, funtype, unadjusted_args_size)
2451 != RETURN_POPS_ARGS (current_function_decl,
2452 TREE_TYPE (current_function_decl),
2453 current_function_args_size))
2454 sibcall_failure = 1;
2455
2456 /* Now make final decision about preallocating stack space. */
2457 must_preallocate = finalize_must_preallocate (must_preallocate,
2458 num_actuals, args,
2459 &args_size);
2460
2461 /* If the structure value address will reference the stack pointer, we
2462 must stabilize it. We don't need to do this if we know that we are
2463 not going to adjust the stack pointer in processing this call. */
2464
2465 if (structure_value_addr
2466 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2467 || reg_mentioned_p (virtual_outgoing_args_rtx,
2468 structure_value_addr))
2469 && (args_size.var
f73ad30e 2470 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)
0a1c58a2
JL
2471 ))
2472 structure_value_addr = copy_to_reg (structure_value_addr);
51bbfa0c 2473
0a1c58a2 2474 /* Precompute any arguments as needed. */
40d6e956 2475 precompute_arguments (flags, num_actuals, args);
51bbfa0c 2476
0a1c58a2
JL
2477 /* Now we are about to start emitting insns that can be deleted
2478 if a libcall is deleted. */
2a8f6b90 2479 if (flags & (ECF_CONST | ECF_PURE | ECF_MALLOC))
0a1c58a2 2480 start_sequence ();
51bbfa0c 2481
1503a7ec
JH
2482 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2483
0a1c58a2
JL
2484 /* If we have no actual push instructions, or shouldn't use them,
2485 make space for all args right now. */
51bbfa0c 2486
0a1c58a2 2487 if (args_size.var != 0)
51bbfa0c 2488 {
0a1c58a2
JL
2489 if (old_stack_level == 0)
2490 {
2491 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2492 old_pending_adj = pending_stack_adjust;
2493 pending_stack_adjust = 0;
0a1c58a2
JL
2494 /* stack_arg_under_construction says whether a stack arg is
2495 being constructed at the old stack level. Pushing the stack
2496 gets a clean outgoing argument block. */
2497 old_stack_arg_under_construction = stack_arg_under_construction;
2498 stack_arg_under_construction = 0;
0a1c58a2
JL
2499 }
2500 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
51bbfa0c 2501 }
0a1c58a2
JL
2502 else
2503 {
2504 /* Note that we must go through the motions of allocating an argument
2505 block even if the size is zero because we may be storing args
2506 in the area reserved for register arguments, which may be part of
2507 the stack frame. */
26a258fe 2508
0a1c58a2 2509 int needed = args_size.constant;
51bbfa0c 2510
0a1c58a2
JL
2511 /* Store the maximum argument space used. It will be pushed by
2512 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2513 checking). */
51bbfa0c 2514
0a1c58a2
JL
2515 if (needed > current_function_outgoing_args_size)
2516 current_function_outgoing_args_size = needed;
51bbfa0c 2517
0a1c58a2
JL
2518 if (must_preallocate)
2519 {
f73ad30e
JH
2520 if (ACCUMULATE_OUTGOING_ARGS)
2521 {
2522 /* Since the stack pointer will never be pushed, it is possible
2523 for the evaluation of a parm to clobber something we have
2524 already written to the stack. Since most function calls on
2525 RISC machines do not use the stack, this is uncommon, but
2526 must work correctly.
26a258fe 2527
f73ad30e
JH
2528 Therefore, we save any area of the stack that was already
2529 written and that we are using. Here we set up to do this by
2530 making a new stack usage map from the old one. The actual
2531 save will be done by store_one_arg.
26a258fe 2532
f73ad30e
JH
2533 Another approach might be to try to reorder the argument
2534 evaluations to avoid this conflicting stack usage. */
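 /* (Each byte of the outgoing argument area has one entry in
    stack_usage_map; a nonzero entry means that byte already holds a
    value which store_one_arg must save before reusing the slot.)  */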
26a258fe 2535
e5e809f4 2536#ifndef OUTGOING_REG_PARM_STACK_SPACE
f73ad30e
JH
2537 /* Since we will be writing into the entire argument area, the
2538 map must be allocated for its entire size, not just the part
2539 that is the responsibility of the caller. */
2540 needed += reg_parm_stack_space;
51bbfa0c
RS
2541#endif
2542
2543#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
2544 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2545 needed + 1);
51bbfa0c 2546#else
f73ad30e
JH
2547 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2548 needed);
51bbfa0c 2549#endif
f73ad30e 2550 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
51bbfa0c 2551
f73ad30e
JH
2552 if (initial_highest_arg_in_use)
2553 bcopy (initial_stack_usage_map, stack_usage_map,
2554 initial_highest_arg_in_use);
2f4aa534 2555
f73ad30e
JH
2556 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2557 bzero (&stack_usage_map[initial_highest_arg_in_use],
2558 (highest_outgoing_arg_in_use
2559 - initial_highest_arg_in_use));
2560 needed = 0;
2f4aa534 2561
f73ad30e
JH
2562 /* The address of the outgoing argument list must not be copied
2563 to a register here, because argblock would be left pointing
2564 to the wrong place after the call to
2565 allocate_dynamic_stack_space below. */
2f4aa534 2566
f73ad30e
JH
2567 argblock = virtual_outgoing_args_rtx;
2568 }
2569 else
26a258fe 2570 {
f73ad30e 2571 if (inhibit_defer_pop == 0)
0a1c58a2 2572 {
f73ad30e
JH
2573 /* Try to reuse some or all of the pending_stack_adjust
2574 to get this space. Maybe we can avoid any pushing. */
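 /* Worked example with illustrative numbers: with 24 bytes of pops
    pending and 16 bytes of argument space needed, the else branch keeps
    16 of those bytes, leaving pending_stack_adjust == 8 and needed == 0,
    so nothing is pushed.  */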
2575 if (needed > pending_stack_adjust)
2576 {
2577 needed -= pending_stack_adjust;
2578 pending_stack_adjust = 0;
2579 }
2580 else
2581 {
2582 pending_stack_adjust -= needed;
2583 needed = 0;
2584 }
0a1c58a2 2585 }
f73ad30e
JH
2586 /* Special case this because overhead of `push_block' in this
2587 case is non-trivial. */
2588 if (needed == 0)
2589 argblock = virtual_outgoing_args_rtx;
0a1c58a2 2590 else
f73ad30e
JH
2591 argblock = push_block (GEN_INT (needed), 0, 0);
2592
2593 /* We only really need to call `copy_to_reg' in the case where
2594 push insns are going to be used to pass ARGBLOCK to a function
2595 call in ARGS. In that case, the stack pointer changes value
2596 from the allocation point to the call point, and hence
2597 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2598 But might as well always do it. */
2599 argblock = copy_to_reg (argblock);
26a258fe 2600 }
0a1c58a2
JL
2601 }
2602 }
2603
2604 /* The argument block when performing a sibling call is the
2605 incoming argument block. */
2606 if (pass == 0)
2607 {
2608 rtx temp = plus_constant (arg_pointer_rtx,
2609 FIRST_PARM_OFFSET (current_function_decl));
2610 argblock = force_reg (Pmode, force_operand (temp, NULL_RTX));
26a258fe 2611 }
51bbfa0c 2612
f73ad30e 2613 if (ACCUMULATE_OUTGOING_ARGS)
0a1c58a2 2614 {
f73ad30e
JH
2615 /* The save/restore code in store_one_arg handles all cases except one:
2616 a constructor call (including a C function returning a BLKmode struct)
2617 to initialize an argument. */
2618 if (stack_arg_under_construction)
2619 {
e5e809f4 2620#ifndef OUTGOING_REG_PARM_STACK_SPACE
f73ad30e 2621 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
bfbf933a 2622#else
f73ad30e 2623 rtx push_size = GEN_INT (args_size.constant);
bfbf933a 2624#endif
f73ad30e
JH
2625 if (old_stack_level == 0)
2626 {
2627 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2628 old_pending_adj = pending_stack_adjust;
2629 pending_stack_adjust = 0;
2630 /* stack_arg_under_construction says whether a stack arg is
2631 being constructed at the old stack level. Pushing the stack
2632 gets a clean outgoing argument block. */
2633 old_stack_arg_under_construction = stack_arg_under_construction;
2634 stack_arg_under_construction = 0;
2635 /* Make a new map for the new argument list. */
2636 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2637 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2638 highest_outgoing_arg_in_use = 0;
2639 }
2640 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
0a1c58a2 2641 }
f73ad30e
JH
2642 /* If argument evaluation might modify the stack pointer, copy the
2643 address of the argument list to a register. */
2644 for (i = 0; i < num_actuals; i++)
2645 if (args[i].pass_on_stack)
2646 {
2647 argblock = copy_addr_to_reg (argblock);
2648 break;
2649 }
bfbf933a 2650 }
bfbf933a 2651
0a1c58a2 2652 compute_argument_addresses (args, argblock, num_actuals);
bfbf933a 2653
c795bca9 2654#ifdef PREFERRED_STACK_BOUNDARY
0a1c58a2
JL
2655 /* If we push args individually in reverse order, perform stack alignment
2656 before the first push (the last arg). */
f73ad30e
JH
2657 if (PUSH_ARGS_REVERSED && argblock == 0
2658 && args_size.constant != unadjusted_args_size)
4e217aed 2659 {
0a1c58a2
JL
2660 /* When the stack adjustment is pending, we get better code
2661 by combining the adjustments. */
2a8f6b90 2662 if (pending_stack_adjust && ! (flags & (ECF_CONST | ECF_PURE))
0a1c58a2
JL
2663 && ! inhibit_defer_pop)
2664 {
1503a7ec 2665 int adjust;
0a1c58a2
JL
2666 args_size.constant = (unadjusted_args_size
2667 + ((pending_stack_adjust
2668 + args_size.constant
0a1c58a2
JL
2669 - unadjusted_args_size)
2670 % (preferred_stack_boundary
2671 / BITS_PER_UNIT)));
1503a7ec
JH
2672 adjust = (pending_stack_adjust - args_size.constant
2673 + unadjusted_args_size);
2674 adjust_stack (GEN_INT (adjust));
2675 pending_stack_adjust = 0;
0a1c58a2
JL
2676 }
2677 else if (argblock == 0)
2678 anti_adjust_stack (GEN_INT (args_size.constant
2679 - unadjusted_args_size));
0a1c58a2 2680 }
ebcd0b57
JH
2681 /* Now that the stack is properly aligned, pops can't safely
2682 be deferred during the evaluation of the arguments. */
2683 NO_DEFER_POP;
51bbfa0c
RS
2684#endif
2685
0a1c58a2
JL
2686 /* Don't try to defer pops if preallocating, not even from the first arg,
2687 since ARGBLOCK probably refers to the SP. */
2688 if (argblock)
2689 NO_DEFER_POP;
51bbfa0c 2690
0a1c58a2 2691 funexp = rtx_for_function_call (fndecl, exp);
51bbfa0c 2692
0a1c58a2
JL
2693 /* Figure out the register where the value, if any, will come back. */
2694 valreg = 0;
2695 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2696 && ! structure_value_addr)
2697 {
2698 if (pcc_struct_value)
2699 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
7d167afd 2700 fndecl, (pass == 0));
0a1c58a2 2701 else
7d167afd 2702 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
0a1c58a2 2703 }
51bbfa0c 2704
0a1c58a2
JL
2705 /* Precompute all register parameters. It isn't safe to compute anything
2706 once we have started filling any specific hard regs. */
2707 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
51bbfa0c 2708
f73ad30e 2709#ifdef REG_PARM_STACK_SPACE
0a1c58a2
JL
2710 /* Save the fixed argument area if it's part of the caller's frame and
2711 is clobbered by argument setup for this call. */
f73ad30e
JH
2712 if (ACCUMULATE_OUTGOING_ARGS)
2713 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2714 &low_to_save, &high_to_save);
b94301c2 2715#endif
51bbfa0c 2716
0a1c58a2
JL
2717 /* Now store (and compute if necessary) all non-register parms.
2718 These come before register parms, since they can require block-moves,
2719 which could clobber the registers used for register parms.
2720 Parms which have partial registers are not stored here,
2721 but we do preallocate space here if they want that. */
51bbfa0c 2722
0a1c58a2
JL
2723 for (i = 0; i < num_actuals; i++)
2724 if (args[i].reg == 0 || args[i].pass_on_stack)
f2d33f13 2725 store_one_arg (&args[i], argblock, flags & ECF_MAY_BE_ALLOCA,
0a1c58a2
JL
2726 args_size.var != 0, reg_parm_stack_space);
2727
2728 /* If we have a parm that is passed in registers but not in memory
2729 and whose alignment does not permit a direct copy into registers,
2730 make a group of pseudos that correspond to each register that we
2731 will later fill. */
2732 if (STRICT_ALIGNMENT)
2733 store_unaligned_arguments_into_pseudos (args, num_actuals);
2734
2735 /* Now store any partially-in-registers parm.
2736 This is the last place a block-move can happen. */
2737 if (reg_parm_seen)
2738 for (i = 0; i < num_actuals; i++)
2739 if (args[i].partial != 0 && ! args[i].pass_on_stack)
f2d33f13 2740 store_one_arg (&args[i], argblock, flags & ECF_MAY_BE_ALLOCA,
0a1c58a2 2741 args_size.var != 0, reg_parm_stack_space);
51bbfa0c 2742
c795bca9 2743#ifdef PREFERRED_STACK_BOUNDARY
0a1c58a2
JL
2744 /* If we pushed args in forward order, perform stack alignment
2745 after pushing the last arg. */
f73ad30e 2746 if (!PUSH_ARGS_REVERSED && argblock == 0)
0a1c58a2
JL
2747 anti_adjust_stack (GEN_INT (args_size.constant
2748 - unadjusted_args_size));
51bbfa0c
RS
2749#endif
2750
0a1c58a2
JL
2751 /* If register arguments require space on the stack and stack space
2752 was not preallocated, allocate stack space here for arguments
2753 passed in registers. */
f73ad30e
JH
2754#ifdef OUTGOING_REG_PARM_STACK_SPACE
2755 if (!ACCUMULATE_OUTGOING_ARGS
1dfb49b9 2756 && must_preallocate == 0 && reg_parm_stack_space > 0)
0a1c58a2 2757 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
2758#endif
2759
0a1c58a2
JL
2760 /* Pass the function the address in which to return a
2761 structure value. */
2762 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2763 {
2764 emit_move_insn (struct_value_rtx,
2765 force_reg (Pmode,
2766 force_operand (structure_value_addr,
2767 NULL_RTX)));
2768
2769 /* Mark the memory for the aggregate as write-only. */
2770 if (current_function_check_memory_usage)
2771 emit_library_call (chkr_set_right_libfunc, 1,
2772 VOIDmode, 3,
2773 structure_value_addr, ptr_mode,
2774 GEN_INT (struct_value_size),
2775 TYPE_MODE (sizetype),
2776 GEN_INT (MEMORY_USE_WO),
2777 TYPE_MODE (integer_type_node));
2778
2779 if (GET_CODE (struct_value_rtx) == REG)
2780 use_reg (&call_fusage, struct_value_rtx);
2781 }
c2939b57 2782
0a1c58a2
JL
2783 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
2784 reg_parm_seen);
51bbfa0c 2785
0a1c58a2
JL
2786 load_register_parameters (args, num_actuals, &call_fusage);
2787
2788 /* Perform postincrements before actually calling the function. */
2789 emit_queue ();
51bbfa0c 2790
0a1c58a2
JL
2791 /* Save a pointer to the last insn before the call, so that we can
2792 later safely search backwards to find the CALL_INSN. */
2793 before_call = get_last_insn ();
51bbfa0c 2794
7d167afd
JJ
2795 /* Set up next argument register. For sibling calls on machines
2796 with register windows this should be the incoming register. */
2797#ifdef FUNCTION_INCOMING_ARG
2798 if (pass == 0)
2799 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2800 void_type_node, 1);
2801 else
2802#endif
2803 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2804 void_type_node, 1);
2805
0a1c58a2
JL
2806 /* All arguments and registers used for the call must be set up by
2807 now! */
2808
ebcd0b57
JH
2809#ifdef PREFERRED_STACK_BOUNDARY
 2810 /* Stack must be properly aligned now. */
2811 if (stack_pointer_delta & (preferred_stack_boundary / BITS_PER_UNIT - 1))
 2812 abort ();
2813#endif
2814
0a1c58a2
JL
2815 /* Generate the actual call instruction. */
2816 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2817 args_size.constant, struct_value_size,
7d167afd 2818 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
f2d33f13 2819 flags);
0a1c58a2 2820
1503a7ec
JH
2821 /* Verify that we've deallocated all the stack we used. */
2822 if (pass
2823 && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
 2824 abort ();
2825
0a1c58a2
JL
2826 /* If call is cse'able, make appropriate pair of reg-notes around it.
2827 Test valreg so we don't crash; may safely ignore `const'
2828 if return type is void. Disable for PARALLEL return values, because
2829 we have no way to move such values into a pseudo register. */
2a8f6b90
JH
2830 if ((flags & (ECF_CONST | ECF_PURE))
2831 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
9ae8ffe7 2832 {
0a1c58a2
JL
2833 rtx note = 0;
2834 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2835 rtx insns;
9ae8ffe7 2836
0a1c58a2
JL
2837 /* Mark the return value as a pointer if needed. */
2838 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
bdb429a5 2839 mark_reg_pointer (temp, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
0a1c58a2
JL
2840
2841 /* Construct an "equal form" for the value which mentions all the
2842 arguments in order as well as the function name. */
5591ee6f
JH
2843 for (i = 0; i < num_actuals; i++)
2844 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
0a1c58a2 2845 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
9ae8ffe7 2846
0a1c58a2
JL
2847 insns = get_insns ();
2848 end_sequence ();
9ae8ffe7 2849
2a8f6b90
JH
2850 if (flags & ECF_PURE)
2851 note = gen_rtx_EXPR_LIST (VOIDmode,
2852 gen_rtx_USE (VOIDmode,
2853 gen_rtx_MEM (BLKmode,
2854 gen_rtx_SCRATCH (VOIDmode))), note);
2855
0a1c58a2
JL
2856 emit_libcall_block (insns, temp, valreg, note);
2857
2858 valreg = temp;
2859 }
2a8f6b90 2860 else if (flags & (ECF_CONST | ECF_PURE))
0a1c58a2
JL
2861 {
2862 /* Otherwise, just write out the sequence without a note. */
2863 rtx insns = get_insns ();
9ae8ffe7 2864
0a1c58a2
JL
2865 end_sequence ();
2866 emit_insns (insns);
2867 }
f2d33f13 2868 else if (flags & ECF_MALLOC)
0a1c58a2
JL
2869 {
2870 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2871 rtx last, insns;
2872
2873 /* The return value from a malloc-like function is a pointer. */
2874 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
bdb429a5 2875 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
0a1c58a2
JL
2876
2877 emit_move_insn (temp, valreg);
2878
2879 /* The return value from a malloc-like function can not alias
2880 anything else. */
2881 last = get_last_insn ();
2882 REG_NOTES (last) =
2883 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2884
2885 /* Write out the sequence. */
2886 insns = get_insns ();
2887 end_sequence ();
2888 emit_insns (insns);
2889 valreg = temp;
2890 }
51bbfa0c 2891
0a1c58a2
JL
2892 /* For calls to `setjmp', etc., inform flow.c it should complain
2893 if nonvolatile values are live. For functions that cannot return,
2894 inform flow that control does not fall through. */
51bbfa0c 2895
f2d33f13 2896 if ((flags & (ECF_RETURNS_TWICE | ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
c2939b57 2897 {
0a1c58a2
JL
2898 /* The barrier or NOTE_INSN_SETJMP note must be emitted
2899 immediately after the CALL_INSN. Some ports emit more
2900 than just a CALL_INSN above, so we must search for it here. */
51bbfa0c 2901
0a1c58a2
JL
2902 rtx last = get_last_insn ();
2903 while (GET_CODE (last) != CALL_INSN)
2904 {
2905 last = PREV_INSN (last);
2906 /* There was no CALL_INSN? */
2907 if (last == before_call)
2908 abort ();
2909 }
51bbfa0c 2910
f2d33f13 2911 if (flags & ECF_RETURNS_TWICE)
0a1c58a2
JL
2912 {
2913 emit_note_after (NOTE_INSN_SETJMP, last);
2914 current_function_calls_setjmp = 1;
2915 sibcall_failure = 1;
2916 }
2917 else
2918 emit_barrier_after (last);
2919 }
51bbfa0c 2920
f2d33f13 2921 if (flags & ECF_LONGJMP)
0a1c58a2 2922 current_function_calls_longjmp = 1, sibcall_failure = 1;
51bbfa0c 2923
25a1fcb4
RK
2924 /* If this function is returning into a memory location marked as
2925 readonly, it means it is initializing that location. But we normally
2926 treat functions as not clobbering such locations, so we need to
2927 specify that this one does. */
2928 if (target != 0 && GET_CODE (target) == MEM
2929 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
2930 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
2931
0a1c58a2 2932 /* If value type not void, return an rtx for the value. */
51bbfa0c 2933
0a1c58a2
JL
2934 /* If there are cleanups to be called, don't use a hard reg as target.
2935 We need to double check this and see if it matters anymore. */
194c7c45
RH
2936 if (any_pending_cleanups (1))
2937 {
2938 if (target && REG_P (target)
2939 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2940 target = 0;
2941 sibcall_failure = 1;
2942 }
51bbfa0c 2943
0a1c58a2
JL
2944 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2945 || ignore)
29008b51 2946 {
0a1c58a2 2947 target = const0_rtx;
29008b51 2948 }
0a1c58a2
JL
2949 else if (structure_value_addr)
2950 {
2951 if (target == 0 || GET_CODE (target) != MEM)
2952 {
2953 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2954 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2955 structure_value_addr));
2956 MEM_SET_IN_STRUCT_P (target,
2957 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2958 }
2959 }
2960 else if (pcc_struct_value)
cacbd532 2961 {
0a1c58a2
JL
2962 /* This is the special C++ case where we need to
2963 know what the true target was. We take care to
2964 never use this value more than once in one expression. */
2965 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2966 copy_to_reg (valreg));
c6df88cb 2967 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
cacbd532 2968 }
0a1c58a2
JL
2969 /* Handle calls that return values in multiple non-contiguous locations.
2970 The Irix 6 ABI has examples of this. */
2971 else if (GET_CODE (valreg) == PARALLEL)
2972 {
2973 int bytes = int_size_in_bytes (TREE_TYPE (exp));
cacbd532 2974
0a1c58a2
JL
2975 if (target == 0)
2976 {
2977 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)),
2978 bytes, 0);
2979 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2980 preserve_temp_slots (target);
2981 }
2982
2983 if (! rtx_equal_p (target, valreg))
2984 emit_group_store (target, valreg, bytes,
19caa751
RK
2985 TYPE_ALIGN (TREE_TYPE (exp)));
2986
0a1c58a2
JL
2987 /* We can not support sibling calls for this case. */
2988 sibcall_failure = 1;
2989 }
2990 else if (target
2991 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2992 && GET_MODE (target) == GET_MODE (valreg))
2993 {
2994 /* TARGET and VALREG cannot be equal at this point because the
2995 latter would not have REG_FUNCTION_VALUE_P true, while the
2996 former would if it were referring to the same register.
2997
2998 If they refer to the same register, this move will be a no-op,
2999 except when function inlining is being done. */
3000 emit_move_insn (target, valreg);
3001 }
3002 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3003 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3004 else
3005 target = copy_to_reg (valreg);
51bbfa0c 3006
84b55618 3007#ifdef PROMOTE_FUNCTION_RETURN
0a1c58a2
JL
3008 /* If we promoted this return value, make the proper SUBREG. TARGET
3009 might be const0_rtx here, so be careful. */
3010 if (GET_CODE (target) == REG
3011 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3012 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3013 {
3014 tree type = TREE_TYPE (exp);
3015 int unsignedp = TREE_UNSIGNED (type);
84b55618 3016
0a1c58a2
JL
3017 /* If we don't promote as expected, something is wrong. */
3018 if (GET_MODE (target)
3019 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3020 abort ();
5d2ac65e 3021
0a1c58a2
JL
3022 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
3023 SUBREG_PROMOTED_VAR_P (target) = 1;
3024 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
3025 }
84b55618
RK
3026#endif
3027
0a1c58a2
JL
3028 /* If size of args is variable or this was a constructor call for a stack
3029 argument, restore saved stack-pointer value. */
51bbfa0c 3030
0a1c58a2
JL
3031 if (old_stack_level)
3032 {
3033 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3034 pending_stack_adjust = old_pending_adj;
0a1c58a2
JL
3035 stack_arg_under_construction = old_stack_arg_under_construction;
3036 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3037 stack_usage_map = initial_stack_usage_map;
0a1c58a2
JL
3038 sibcall_failure = 1;
3039 }
f73ad30e 3040 else if (ACCUMULATE_OUTGOING_ARGS)
0a1c58a2 3041 {
51bbfa0c 3042#ifdef REG_PARM_STACK_SPACE
0a1c58a2
JL
3043 if (save_area)
3044 {
3045 restore_fixed_argument_area (save_area, argblock,
3046 high_to_save, low_to_save);
3047 sibcall_failure = 1;
3048 }
b94301c2 3049#endif
51bbfa0c 3050
0a1c58a2
JL
3051 /* If we saved any argument areas, restore them. */
3052 for (i = 0; i < num_actuals; i++)
3053 if (args[i].save_area)
3054 {
3055 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3056 rtx stack_area
3057 = gen_rtx_MEM (save_mode,
3058 memory_address (save_mode,
3059 XEXP (args[i].stack_slot, 0)));
3060
3061 if (save_mode != BLKmode)
3062 emit_move_insn (stack_area, args[i].save_area);
3063 else
3064 emit_block_move (stack_area,
3065 validize_mem (args[i].save_area),
3066 GEN_INT (args[i].size.constant),
19caa751 3067 PARM_BOUNDARY);
0a1c58a2
JL
3068 sibcall_failure = 1;
3069 }
51bbfa0c 3070
0a1c58a2
JL
3071 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3072 stack_usage_map = initial_stack_usage_map;
3073 }
51bbfa0c 3074
0a1c58a2
JL
3075 /* If this was alloca, record the new stack level for nonlocal gotos.
3076 Check for the handler slots since we might not have a save area
3077 for non-local gotos. */
59257ff7 3078
f2d33f13 3079 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
0a1c58a2 3080 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c 3081
0a1c58a2
JL
3082 pop_temp_slots ();
3083
3084 /* Free up storage we no longer need. */
3085 for (i = 0; i < num_actuals; ++i)
3086 if (args[i].aligned_regs)
3087 free (args[i].aligned_regs);
3088
e245d3af
RH
3089 if (pass == 0)
3090 {
3091 /* Undo the fake expand_start_target_temps we did earlier. If
3092 there had been any cleanups created, we've already set
3093 sibcall_failure. */
3094 expand_end_target_temps ();
3095 }
3096
0a1c58a2
JL
3097 insns = get_insns ();
3098 end_sequence ();
3099
3100 if (pass == 0)
3101 {
3102 tail_call_insns = insns;
3103
7d167afd
JJ
3104 /* If something prevents making this a sibling call,
3105 zero out the sequence. */
3106 if (sibcall_failure)
0a1c58a2 3107 tail_call_insns = NULL_RTX;
0a1c58a2
JL
3108 /* Restore the pending stack adjustment now that we have
3109 finished generating the sibling call sequence. */
1503a7ec 3110
0a1c58a2 3111 pending_stack_adjust = save_pending_stack_adjust;
1503a7ec 3112 stack_pointer_delta = save_stack_pointer_delta;
0a1c58a2
JL
3113 }
3114 else
3115 normal_call_insns = insns;
3116 }
3117
3118 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3119 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3120 can happen if the arguments to this function call an inline
 3121	     function whose expansion contains another CALL_PLACEHOLDER.
3122
3123 If there are any C_Ps in any of these sequences, replace them
3124 with their normal call. */
3125
3126 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3127 if (GET_CODE (insn) == CALL_INSN
3128 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3129 replace_call_placeholder (insn, sibcall_use_normal);
3130
3131 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3132 if (GET_CODE (insn) == CALL_INSN
3133 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3134 replace_call_placeholder (insn, sibcall_use_normal);
3135
3136 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3137 if (GET_CODE (insn) == CALL_INSN
3138 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3139 replace_call_placeholder (insn, sibcall_use_normal);
3140
3141 /* If this was a potential tail recursion site, then emit a
3142 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3143 One of them will be selected later. */
3144 if (tail_recursion_insns || tail_call_insns)
3145 {
3146 /* The tail recursion label must be kept around. We could expose
3147 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3148 and makes determining true tail recursion sites difficult.
3149
3150 So we set LABEL_PRESERVE_P here, then clear it when we select
3151 one of the call sequences after rtl generation is complete. */
3152 if (tail_recursion_insns)
3153 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3154 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3155 tail_call_insns,
3156 tail_recursion_insns,
3157 tail_recursion_label));
3158 }
3159 else
3160 emit_insns (normal_call_insns);
51bbfa0c 3161
0a1c58a2 3162 currently_expanding_call--;
8e6a59fe 3163
51bbfa0c
RS
3164 return target;
3165}
3166\f
12a22e76
JM
 3167/* Returns nonzero if FUN is the symbol for a library function which cannot
 3168   throw.  */
3169
3170static int
3171libfunc_nothrow (fun)
3172 rtx fun;
3173{
3174 if (fun == throw_libfunc
3175 || fun == rethrow_libfunc
3176 || fun == sjthrow_libfunc
3177 || fun == sjpopnthrow_libfunc)
3178 return 0;
3179
3180 return 1;
3181}
43bc5f13 3182\f
de76b467
JH
3183/* Output a library call to function FUN (a SYMBOL_REF rtx).
 3184   The RETVAL parameter specifies whether the return value needs to be saved;
 3185   the other parameters are documented in the emit_library_call function below.  */
3186static rtx
2a8f6b90 3187emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
de76b467
JH
3188 int retval;
3189 rtx orgfun;
3190 rtx value;
2a8f6b90 3191 int fn_type;
de76b467
JH
3192 enum machine_mode outmode;
3193 int nargs;
3194 va_list p;
43bc5f13 3195{
3c0fca12
RH
3196 /* Total size in bytes of all the stack-parms scanned so far. */
3197 struct args_size args_size;
3198 /* Size of arguments before any adjustments (such as rounding). */
3199 struct args_size original_args_size;
3200 register int argnum;
3201 rtx fun;
3202 int inc;
3203 int count;
3204 struct args_size alignment_pad;
3205 rtx argblock = 0;
3206 CUMULATIVE_ARGS args_so_far;
3207 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3208 struct args_size offset; struct args_size size; rtx save_area; };
3209 struct arg *argvec;
3210 int old_inhibit_defer_pop = inhibit_defer_pop;
3211 rtx call_fusage = 0;
3212 rtx mem_value = 0;
5591ee6f 3213 rtx valreg;
3c0fca12
RH
3214 int pcc_struct_value = 0;
3215 int struct_value_size = 0;
f2d33f13 3216 int flags = 0;
3c0fca12 3217 int reg_parm_stack_space = 0;
3c0fca12 3218 int needed;
3c0fca12 3219
f73ad30e 3220#ifdef REG_PARM_STACK_SPACE
3c0fca12
RH
3221 /* Define the boundary of the register parm stack space that needs to be
 3222      saved, if any.  */
3223 int low_to_save = -1, high_to_save = 0;
3224 rtx save_area = 0; /* Place that it is saved */
3225#endif
3226
3c0fca12
RH
3227 /* Size of the stack reserved for parameter registers. */
3228 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3229 char *initial_stack_usage_map = stack_usage_map;
3c0fca12
RH
3230
3231#ifdef REG_PARM_STACK_SPACE
3232#ifdef MAYBE_REG_PARM_STACK_SPACE
3233 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3234#else
3235 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3236#endif
3237#endif
3238
2a8f6b90 3239 if (fn_type == 1)
f2d33f13 3240 flags |= ECF_CONST;
2a8f6b90
JH
3241 else if (fn_type == 2)
3242 flags |= ECF_PURE;
3c0fca12
RH
3243 fun = orgfun;
3244
f2d33f13
JH
3245 if (libfunc_nothrow (fun))
3246 flags |= ECF_NOTHROW;
3c0fca12
RH
3247
3248#ifdef PREFERRED_STACK_BOUNDARY
3249 /* Ensure current function's preferred stack boundary is at least
3250 what we need. */
3251 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3252 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3253#endif
3254
3255 /* If this kind of value comes back in memory,
3256 decide where in memory it should come back. */
de76b467 3257 if (outmode != VOIDmode && aggregate_value_p (type_for_mode (outmode, 0)))
3c0fca12
RH
3258 {
3259#ifdef PCC_STATIC_STRUCT_RETURN
3260 rtx pointer_reg
3261 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3262 0, 0);
3263 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3264 pcc_struct_value = 1;
3265 if (value == 0)
3266 value = gen_reg_rtx (outmode);
3267#else /* not PCC_STATIC_STRUCT_RETURN */
3268 struct_value_size = GET_MODE_SIZE (outmode);
3269 if (value != 0 && GET_CODE (value) == MEM)
3270 mem_value = value;
3271 else
3272 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3273#endif
3274
3275 /* This call returns a big structure. */
2a8f6b90 3276 flags &= ~(ECF_CONST | ECF_PURE);
3c0fca12
RH
3277 }
3278
3279 /* ??? Unfinished: must pass the memory address as an argument. */
3280
3281 /* Copy all the libcall-arguments out of the varargs data
3282 and into a vector ARGVEC.
3283
3284 Compute how to pass each argument. We only support a very small subset
3285 of the full argument passing conventions to limit complexity here since
3286 library functions shouldn't have many args. */
3287
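  /* One extra slot is allocated because a hidden structure-value address
     argument may be appended below, bumping NARGS by one.  */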
3288 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3289 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3290
3291 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3292
3293 args_size.constant = 0;
3294 args_size.var = 0;
3295
3296 count = 0;
3297
5591ee6f
JH
3298 /* Now we are about to start emitting insns that can be deleted
3299 if a libcall is deleted. */
2a8f6b90 3300 if (flags & (ECF_CONST | ECF_PURE))
5591ee6f
JH
3301 start_sequence ();
3302
3c0fca12
RH
3303 push_temp_slots ();
3304
3305 /* If there's a structure value address to be passed,
3306 either pass it in the special place, or pass it as an extra argument. */
3307 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3308 {
3309 rtx addr = XEXP (mem_value, 0);
3310 nargs++;
3311
3312 /* Make sure it is a reasonable operand for a move or push insn. */
3313 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3314 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3315 addr = force_operand (addr, NULL_RTX);
3316
3317 argvec[count].value = addr;
3318 argvec[count].mode = Pmode;
3319 argvec[count].partial = 0;
3320
3321 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3322#ifdef FUNCTION_ARG_PARTIAL_NREGS
3323 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3324 abort ();
3325#endif
3326
3327 locate_and_pad_parm (Pmode, NULL_TREE,
a4d5044f
CM
3328#ifdef STACK_PARMS_IN_REG_PARM_AREA
3329 1,
3330#else
3331 argvec[count].reg != 0,
3332#endif
3c0fca12
RH
3333 NULL_TREE, &args_size, &argvec[count].offset,
3334 &argvec[count].size, &alignment_pad);
3335
3336
3337 if (argvec[count].reg == 0 || argvec[count].partial != 0
3338 || reg_parm_stack_space > 0)
3339 args_size.constant += argvec[count].size.constant;
3340
3341 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3342
3343 count++;
3344 }
3345
3346 for (; count < nargs; count++)
3347 {
3348 rtx val = va_arg (p, rtx);
3349 enum machine_mode mode = va_arg (p, enum machine_mode);
3350
3351 /* We cannot convert the arg value to the mode the library wants here;
3352 must do it earlier where we know the signedness of the arg. */
3353 if (mode == BLKmode
3354 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3355 abort ();
3356
3357 /* On some machines, there's no way to pass a float to a library fcn.
3358 Pass it as a double instead. */
3359#ifdef LIBGCC_NEEDS_DOUBLE
3360 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3361 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3362#endif
3363
3364 /* There's no need to call protect_from_queue, because
3365 either emit_move_insn or emit_push_insn will do that. */
3366
3367 /* Make sure it is a reasonable operand for a move or push insn. */
3368 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3369 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3370 val = force_operand (val, NULL_RTX);
3371
3372#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3373 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3374 {
3375 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3376 be viewed as just an efficiency improvement. */
3377 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3378 emit_move_insn (slot, val);
de76b467 3379 val = force_operand (XEXP (slot, 0), NULL_RTX);
3c0fca12
RH
3380 mode = Pmode;
3381 }
3382#endif
3383
3384 argvec[count].value = val;
3385 argvec[count].mode = mode;
3386
3387 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3388
3389#ifdef FUNCTION_ARG_PARTIAL_NREGS
3390 argvec[count].partial
3391 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3392#else
3393 argvec[count].partial = 0;
3394#endif
3395
3396 locate_and_pad_parm (mode, NULL_TREE,
a4d5044f
CM
3397#ifdef STACK_PARMS_IN_REG_PARM_AREA
3398 1,
3399#else
3400 argvec[count].reg != 0,
3401#endif
3c0fca12
RH
3402 NULL_TREE, &args_size, &argvec[count].offset,
3403 &argvec[count].size, &alignment_pad);
3404
3405 if (argvec[count].size.var)
3406 abort ();
3407
3408 if (reg_parm_stack_space == 0 && argvec[count].partial)
3409 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3410
3411 if (argvec[count].reg == 0 || argvec[count].partial != 0
3412 || reg_parm_stack_space > 0)
3413 args_size.constant += argvec[count].size.constant;
3414
3415 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3416 }
3c0fca12
RH
3417
3418#ifdef FINAL_REG_PARM_STACK_SPACE
3419 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3420 args_size.var);
3421#endif
3422 /* If this machine requires an external definition for library
3423 functions, write one out. */
3424 assemble_external_libcall (fun);
3425
3426 original_args_size = args_size;
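  /* Round the outgoing argument block up to the preferred stack boundary,
     allowing for any adjustment already recorded in stack_pointer_delta, so
     the stack pointer is properly aligned at the call itself.  */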
3427#ifdef PREFERRED_STACK_BOUNDARY
1503a7ec
JH
3428 args_size.constant = (((args_size.constant
3429 + stack_pointer_delta
3430 + STACK_BYTES - 1)
3431 / STACK_BYTES
3432 * STACK_BYTES)
3433 - stack_pointer_delta);
3c0fca12
RH
3434#endif
3435
3436 args_size.constant = MAX (args_size.constant,
3437 reg_parm_stack_space);
3438
3439#ifndef OUTGOING_REG_PARM_STACK_SPACE
3440 args_size.constant -= reg_parm_stack_space;
3441#endif
3442
3443 if (args_size.constant > current_function_outgoing_args_size)
3444 current_function_outgoing_args_size = args_size.constant;
3445
f73ad30e
JH
3446 if (ACCUMULATE_OUTGOING_ARGS)
3447 {
3448 /* Since the stack pointer will never be pushed, it is possible for
3449 the evaluation of a parm to clobber something we have already
3450 written to the stack. Since most function calls on RISC machines
3451 do not use the stack, this is uncommon, but must work correctly.
3c0fca12 3452
f73ad30e
JH
3453 Therefore, we save any area of the stack that was already written
3454 and that we are using. Here we set up to do this by making a new
3455 stack usage map from the old one.
3c0fca12 3456
f73ad30e
JH
3457 Another approach might be to try to reorder the argument
3458 evaluations to avoid this conflicting stack usage. */
3c0fca12 3459
f73ad30e 3460 needed = args_size.constant;
3c0fca12
RH
3461
3462#ifndef OUTGOING_REG_PARM_STACK_SPACE
f73ad30e
JH
3463 /* Since we will be writing into the entire argument area, the
3464 map must be allocated for its entire size, not just the part that
3465 is the responsibility of the caller. */
3466 needed += reg_parm_stack_space;
3c0fca12
RH
3467#endif
3468
3469#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
3470 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3471 needed + 1);
3c0fca12 3472#else
f73ad30e
JH
3473 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3474 needed);
3c0fca12 3475#endif
f73ad30e 3476 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3c0fca12 3477
f73ad30e
JH
3478 if (initial_highest_arg_in_use)
3479 bcopy (initial_stack_usage_map, stack_usage_map,
3480 initial_highest_arg_in_use);
3c0fca12 3481
f73ad30e
JH
3482 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3483 bzero (&stack_usage_map[initial_highest_arg_in_use],
3484 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3485 needed = 0;
3c0fca12 3486
f73ad30e
JH
3487 /* The address of the outgoing argument list must not be copied to a
3488 register here, because argblock would be left pointing to the
3489 wrong place after the call to allocate_dynamic_stack_space below.
3490 */
3c0fca12 3491
f73ad30e
JH
3492 argblock = virtual_outgoing_args_rtx;
3493 }
3494 else
3495 {
3496 if (!PUSH_ARGS)
3497 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3498 }
3c0fca12 3499
3c0fca12
RH
3500#ifdef PREFERRED_STACK_BOUNDARY
3501 /* If we push args individually in reverse order, perform stack alignment
3502 before the first push (the last arg). */
f73ad30e 3503 if (argblock == 0 && PUSH_ARGS_REVERSED)
3c0fca12
RH
3504 anti_adjust_stack (GEN_INT (args_size.constant
3505 - original_args_size.constant));
3506#endif
3c0fca12 3507
f73ad30e
JH
3508 if (PUSH_ARGS_REVERSED)
3509 {
3510 inc = -1;
3511 argnum = nargs - 1;
3512 }
3513 else
3514 {
3515 inc = 1;
3516 argnum = 0;
3517 }
3c0fca12 3518
f73ad30e
JH
3519#ifdef REG_PARM_STACK_SPACE
3520 if (ACCUMULATE_OUTGOING_ARGS)
3521 {
3522 /* The argument list is the property of the called routine and it
3523 may clobber it. If the fixed area has been used for previous
3524 parameters, we must save and restore it.
3c0fca12 3525
f73ad30e 3526	 Here we compute the boundary of the area that needs to be saved, if any.  */
3c0fca12
RH
3527
3528#ifdef ARGS_GROW_DOWNWARD
f73ad30e 3529 for (count = 0; count < reg_parm_stack_space + 1; count++)
3c0fca12 3530#else
f73ad30e 3531 for (count = 0; count < reg_parm_stack_space; count++)
3c0fca12 3532#endif
f73ad30e
JH
3533 {
3534 if (count >= highest_outgoing_arg_in_use
3535 || stack_usage_map[count] == 0)
3536 continue;
3c0fca12 3537
f73ad30e
JH
3538 if (low_to_save == -1)
3539 low_to_save = count;
3c0fca12 3540
f73ad30e
JH
3541 high_to_save = count;
3542 }
3c0fca12 3543
f73ad30e
JH
3544 if (low_to_save >= 0)
3545 {
3546 int num_to_save = high_to_save - low_to_save + 1;
3547 enum machine_mode save_mode
3548 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3549 rtx stack_area;
3c0fca12 3550
f73ad30e
JH
3551 /* If we don't have the required alignment, must do this in BLKmode. */
3552 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3553 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3554 save_mode = BLKmode;
3c0fca12
RH
3555
3556#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
3557 stack_area = gen_rtx_MEM (save_mode,
3558 memory_address (save_mode,
3559 plus_constant (argblock,
3560 - high_to_save)));
3c0fca12 3561#else
f73ad30e
JH
3562 stack_area = gen_rtx_MEM (save_mode,
3563 memory_address (save_mode,
3564 plus_constant (argblock,
3565 low_to_save)));
3c0fca12 3566#endif
f73ad30e
JH
3567 if (save_mode == BLKmode)
3568 {
3569 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3570 emit_block_move (validize_mem (save_area), stack_area,
19caa751 3571 GEN_INT (num_to_save), PARM_BOUNDARY);
f73ad30e
JH
3572 }
3573 else
3574 {
3575 save_area = gen_reg_rtx (save_mode);
3576 emit_move_insn (save_area, stack_area);
3577 }
3c0fca12
RH
3578 }
3579 }
3580#endif
3581
3582 /* Push the args that need to be pushed. */
3583
3584 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3585 are to be pushed. */
3586 for (count = 0; count < nargs; count++, argnum += inc)
3587 {
3588 register enum machine_mode mode = argvec[argnum].mode;
3589 register rtx val = argvec[argnum].value;
3590 rtx reg = argvec[argnum].reg;
3591 int partial = argvec[argnum].partial;
f73ad30e 3592 int lower_bound = 0, upper_bound = 0, i;
3c0fca12
RH
3593
3594 if (! (reg != 0 && partial == 0))
3595 {
f73ad30e
JH
3596 if (ACCUMULATE_OUTGOING_ARGS)
3597 {
3598 /* If this is being stored into a pre-allocated, fixed-size, stack
3599 area, save any previous data at that location. */
3c0fca12
RH
3600
3601#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
3602 /* stack_slot is negative, but we want to index stack_usage_map
3603 with positive values. */
3604 upper_bound = -argvec[argnum].offset.constant + 1;
3605 lower_bound = upper_bound - argvec[argnum].size.constant;
3c0fca12 3606#else
f73ad30e
JH
3607 lower_bound = argvec[argnum].offset.constant;
3608 upper_bound = lower_bound + argvec[argnum].size.constant;
3c0fca12
RH
3609#endif
3610
f73ad30e
JH
3611 for (i = lower_bound; i < upper_bound; i++)
3612 if (stack_usage_map[i]
3613 /* Don't store things in the fixed argument area at this point;
3614 it has already been saved. */
3615 && i > reg_parm_stack_space)
3616 break;
3c0fca12 3617
f73ad30e
JH
3618 if (i != upper_bound)
3619 {
3620 /* We need to make a save area. See what mode we can make it. */
3621 enum machine_mode save_mode
3622 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3623 MODE_INT, 1);
3624 rtx stack_area
3625 = gen_rtx_MEM
3626 (save_mode,
3627 memory_address
3628 (save_mode,
3629 plus_constant (argblock,
3630 argvec[argnum].offset.constant)));
3631 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3632
3633 emit_move_insn (argvec[argnum].save_area, stack_area);
3634 }
3c0fca12 3635 }
19caa751 3636
3c0fca12
RH
3637 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3638 argblock, GEN_INT (argvec[argnum].offset.constant),
3639 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3640
3c0fca12 3641 /* Now mark the segment we just used. */
f73ad30e
JH
3642 if (ACCUMULATE_OUTGOING_ARGS)
3643 for (i = lower_bound; i < upper_bound; i++)
3644 stack_usage_map[i] = 1;
3c0fca12
RH
3645
3646 NO_DEFER_POP;
3647 }
3648 }
3649
3c0fca12
RH
3650#ifdef PREFERRED_STACK_BOUNDARY
3651 /* If we pushed args in forward order, perform stack alignment
3652 after pushing the last arg. */
f73ad30e 3653 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3c0fca12
RH
3654 anti_adjust_stack (GEN_INT (args_size.constant
3655 - original_args_size.constant));
3656#endif
3c0fca12 3657
f73ad30e
JH
3658 if (PUSH_ARGS_REVERSED)
3659 argnum = nargs - 1;
3660 else
3661 argnum = 0;
3c0fca12
RH
3662
3663 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3664
3665 /* Now load any reg parms into their regs. */
3666
3667 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3668 are to be pushed. */
3669 for (count = 0; count < nargs; count++, argnum += inc)
3670 {
3671 register rtx val = argvec[argnum].value;
3672 rtx reg = argvec[argnum].reg;
3673 int partial = argvec[argnum].partial;
3674
3675 /* Handle calls that pass values in multiple non-contiguous
3676 locations. The PA64 has examples of this for library calls. */
3677 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3678 emit_group_load (reg, val,
3679 GET_MODE_SIZE (GET_MODE (val)),
3680 GET_MODE_ALIGNMENT (GET_MODE (val)));
3681 else if (reg != 0 && partial == 0)
3682 emit_move_insn (reg, val);
3683
3684 NO_DEFER_POP;
3685 }
3686
3c0fca12
RH
3687 /* Any regs containing parms remain in use through the call. */
3688 for (count = 0; count < nargs; count++)
3689 {
3690 rtx reg = argvec[count].reg;
3691 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3692 use_group_regs (&call_fusage, reg);
3693 else if (reg != 0)
3694 use_reg (&call_fusage, reg);
3695 }
3696
3697 /* Pass the function the address in which to return a structure value. */
3698 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3699 {
3700 emit_move_insn (struct_value_rtx,
3701 force_reg (Pmode,
3702 force_operand (XEXP (mem_value, 0),
3703 NULL_RTX)));
3704 if (GET_CODE (struct_value_rtx) == REG)
3705 use_reg (&call_fusage, struct_value_rtx);
3706 }
3707
3708 /* Don't allow popping to be deferred, since then
3709 cse'ing of library calls could delete a call and leave the pop. */
3710 NO_DEFER_POP;
5591ee6f
JH
3711 valreg = (mem_value == 0 && outmode != VOIDmode
3712 ? hard_libcall_value (outmode) : NULL_RTX);
3c0fca12 3713
ebcd0b57
JH
3714#ifdef PREFERRED_STACK_BOUNDARY
 3715  /* Stack must be properly aligned now.  */
3716 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
3717 abort();
3718#endif
3719
3c0fca12
RH
3720 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3721 will set inhibit_defer_pop to that value. */
de76b467
JH
3722 /* The return type is needed to decide how many bytes the function pops.
3723 Signedness plays no role in that, so for simplicity, we pretend it's
3724 always signed. We also assume that the list of arguments passed has
3725 no impact, so we pretend it is unknown. */
3c0fca12
RH
3726
3727 emit_call_1 (fun,
3728 get_identifier (XSTR (orgfun, 0)),
de76b467
JH
3729 build_function_type (outmode == VOIDmode ? void_type_node
3730 : type_for_mode (outmode, 0), NULL_TREE),
3c0fca12
RH
3731 original_args_size.constant, args_size.constant,
3732 struct_value_size,
3733 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
5591ee6f 3734 valreg,
f2d33f13 3735 old_inhibit_defer_pop + 1, call_fusage, flags);
3c0fca12
RH
3736
3737 /* Now restore inhibit_defer_pop to its actual original value. */
3738 OK_DEFER_POP;
3739
5591ee6f
JH
3740 /* If call is cse'able, make appropriate pair of reg-notes around it.
3741 Test valreg so we don't crash; may safely ignore `const'
3742 if return type is void. Disable for PARALLEL return values, because
3743 we have no way to move such values into a pseudo register. */
2a8f6b90 3744 if ((flags & (ECF_CONST | ECF_PURE))
5591ee6f
JH
3745 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
3746 {
3747 rtx note = 0;
3748 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3749 rtx insns;
3750 int i;
3751
3752 /* Construct an "equal form" for the value which mentions all the
3753 arguments in order as well as the function name. */
3754 for (i = 0; i < nargs; i++)
3755 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3756 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
3757
3758 insns = get_insns ();
3759 end_sequence ();
3760
2a8f6b90
JH
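      /* For a `pure' call, wrap the note in a USE of a scratch MEM so the
	 recorded equivalence also depends on memory; this is what
	 distinguishes `pure' from `const' calls here.  */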
3761 if (flags & ECF_PURE)
3762 note = gen_rtx_EXPR_LIST (VOIDmode,
3763 gen_rtx_USE (VOIDmode,
3764 gen_rtx_MEM (BLKmode,
3765 gen_rtx_SCRATCH (VOIDmode))), note);
3766
5591ee6f
JH
3767 emit_libcall_block (insns, temp, valreg, note);
3768
3769 valreg = temp;
3770 }
2a8f6b90 3771 else if (flags & (ECF_CONST | ECF_PURE))
5591ee6f
JH
3772 {
3773 /* Otherwise, just write out the sequence without a note. */
3774 rtx insns = get_insns ();
3775
3776 end_sequence ();
3777 emit_insns (insns);
3778 }
3c0fca12
RH
3779 pop_temp_slots ();
3780
3781 /* Copy the value to the right place. */
de76b467 3782 if (outmode != VOIDmode && retval)
3c0fca12
RH
3783 {
3784 if (mem_value)
3785 {
3786 if (value == 0)
3787 value = mem_value;
3788 if (value != mem_value)
3789 emit_move_insn (value, mem_value);
3790 }
3791 else if (value != 0)
3792 emit_move_insn (value, hard_libcall_value (outmode));
3793 else
3794 value = hard_libcall_value (outmode);
3795 }
3796
f73ad30e 3797 if (ACCUMULATE_OUTGOING_ARGS)
3c0fca12 3798 {
f73ad30e
JH
3799#ifdef REG_PARM_STACK_SPACE
3800 if (save_area)
3801 {
3802 enum machine_mode save_mode = GET_MODE (save_area);
3c0fca12 3803#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
3804 rtx stack_area
3805 = gen_rtx_MEM (save_mode,
3806 memory_address (save_mode,
3807 plus_constant (argblock,
3808 - high_to_save)));
3c0fca12 3809#else
f73ad30e
JH
3810 rtx stack_area
3811 = gen_rtx_MEM (save_mode,
3812 memory_address (save_mode,
3813 plus_constant (argblock, low_to_save)));
3c0fca12 3814#endif
f73ad30e
JH
3815 if (save_mode != BLKmode)
3816 emit_move_insn (stack_area, save_area);
3817 else
3818 emit_block_move (stack_area, validize_mem (save_area),
3819 GEN_INT (high_to_save - low_to_save + 1),
19caa751 3820 PARM_BOUNDARY);
f73ad30e 3821 }
3c0fca12 3822#endif
f73ad30e
JH
3823
3824 /* If we saved any argument areas, restore them. */
3825 for (count = 0; count < nargs; count++)
3826 if (argvec[count].save_area)
3827 {
3828 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3829 rtx stack_area
3830 = gen_rtx_MEM (save_mode,
3831 memory_address
3832 (save_mode,
3833 plus_constant (argblock,
3834 argvec[count].offset.constant)));
3835
3836 emit_move_insn (stack_area, argvec[count].save_area);
3837 }
3c0fca12 3838
f73ad30e
JH
3839 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3840 stack_usage_map = initial_stack_usage_map;
3841 }
43bc5f13 3842
de76b467
JH
3843 return value;
3844
3845}
3846\f
3847/* Output a library call to function FUN (a SYMBOL_REF rtx)
3848 (emitting the queue unless NO_QUEUE is nonzero),
3849 for a value of mode OUTMODE,
3850 with NARGS different arguments, passed as alternating rtx values
3851 and machine_modes to convert them to.
3852 The rtx values should have been passed through protect_from_queue already.
3853
2a8f6b90
JH
 3854   FN_TYPE is zero for `normal' calls, one for `const' calls, which
 3855   will be enclosed in REG_LIBCALL/REG_RETVAL notes, and two for `pure'
 3856   calls, which are handled like `const' calls with an extra
 3857   (use (memory (scratch))) note.  */
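/* For example, store_one_arg later in this file emits the memory-checking
   hook with

	emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
			   XEXP (arg->stack, 0), Pmode,
			   ARGS_SIZE_RTX (arg->size), TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_RW),
			   TYPE_MODE (integer_type_node));

   i.e. FN_TYPE 1 (`const'), no return value, and three rtx/mode pairs.  */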
de76b467
JH
3858
3859void
2a8f6b90 3860emit_library_call VPARAMS((rtx orgfun, int fn_type, enum machine_mode outmode,
de76b467
JH
3861 int nargs, ...))
3862{
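  /* When real prototypes are unavailable, VPARAMS does not declare the named
     parameters, so they are recovered from the va_list below before the
     variable arguments are read.  */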
3863#ifndef ANSI_PROTOTYPES
3864 rtx orgfun;
2a8f6b90 3865 int fn_type;
de76b467
JH
3866 enum machine_mode outmode;
3867 int nargs;
3868#endif
3869 va_list p;
3870
3871 VA_START (p, nargs);
3872
3873#ifndef ANSI_PROTOTYPES
3874 orgfun = va_arg (p, rtx);
2a8f6b90 3875 fn_type = va_arg (p, int);
de76b467
JH
3876 outmode = va_arg (p, enum machine_mode);
3877 nargs = va_arg (p, int);
3878#endif
3879
2a8f6b90 3880 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
de76b467
JH
3881
3882 va_end (p);
3883}
3884\f
3885/* Like emit_library_call except that an extra argument, VALUE,
3886 comes second and says where to store the result.
3887 (If VALUE is zero, this function chooses a convenient way
 3888   to return the value.)
3889
3890 This function returns an rtx for where the value is to be found.
3891 If VALUE is nonzero, VALUE is returned. */
3892
3893rtx
2a8f6b90 3894emit_library_call_value VPARAMS((rtx orgfun, rtx value, int fn_type,
de76b467
JH
3895 enum machine_mode outmode, int nargs, ...))
3896{
3897#ifndef ANSI_PROTOTYPES
3898 rtx orgfun;
3899 rtx value;
2a8f6b90 3900 int fn_type;
de76b467
JH
3901 enum machine_mode outmode;
3902 int nargs;
3903#endif
3904 va_list p;
3905
3906 VA_START (p, nargs);
3907
3908#ifndef ANSI_PROTOTYPES
3909 orgfun = va_arg (p, rtx);
3910 value = va_arg (p, rtx);
2a8f6b90 3911 fn_type = va_arg (p, int);
de76b467
JH
3912 outmode = va_arg (p, enum machine_mode);
3913 nargs = va_arg (p, int);
3914#endif
3915
2a8f6b90 3916 value = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode, nargs, p);
de76b467
JH
3917
3918 va_end (p);
3919
fac0ad80 3920 return value;
322e3e34
RK
3921}
3922\f
51bbfa0c
RS
3923#if 0
3924/* Return an rtx which represents a suitable home on the stack
3925 given TYPE, the type of the argument looking for a home.
3926 This is called only for BLKmode arguments.
3927
3928 SIZE is the size needed for this target.
3929 ARGS_ADDR is the address of the bottom of the argument block for this call.
3930 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3931 if this machine uses push insns. */
3932
3933static rtx
3934target_for_arg (type, size, args_addr, offset)
3935 tree type;
3936 rtx size;
3937 rtx args_addr;
3938 struct args_size offset;
3939{
3940 rtx target;
3941 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3942
3943 /* We do not call memory_address if possible,
3944 because we want to address as close to the stack
3945 as possible. For non-variable sized arguments,
3946 this will be stack-pointer relative addressing. */
3947 if (GET_CODE (offset_rtx) == CONST_INT)
3948 target = plus_constant (args_addr, INTVAL (offset_rtx));
3949 else
3950 {
3951 /* I have no idea how to guarantee that this
3952 will work in the presence of register parameters. */
38a448ca 3953 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
51bbfa0c
RS
3954 target = memory_address (QImode, target);
3955 }
3956
38a448ca 3957 return gen_rtx_MEM (BLKmode, target);
51bbfa0c
RS
3958}
3959#endif
3960\f
3961/* Store a single argument for a function call
3962 into the register or memory area where it must be passed.
3963 *ARG describes the argument value and where to pass it.
3964
3965 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 3966 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
3967
3968 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3969 so must be careful about how the stack is used.
3970
3971 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
 3972   argument stack.  This is used when ACCUMULATE_OUTGOING_ARGS is nonzero to indicate
3973 that we need not worry about saving and restoring the stack.
3974
3975 FNDECL is the declaration of the function we are calling. */
3976
3977static void
c84e2712 3978store_one_arg (arg, argblock, may_be_alloca, variable_size,
6f90e075 3979 reg_parm_stack_space)
51bbfa0c
RS
3980 struct arg_data *arg;
3981 rtx argblock;
3982 int may_be_alloca;
0f9b3ea6 3983 int variable_size ATTRIBUTE_UNUSED;
6f90e075 3984 int reg_parm_stack_space;
51bbfa0c
RS
3985{
3986 register tree pval = arg->tree_value;
3987 rtx reg = 0;
3988 int partial = 0;
3989 int used = 0;
6a651371 3990 int i, lower_bound = 0, upper_bound = 0;
51bbfa0c
RS
3991
3992 if (TREE_CODE (pval) == ERROR_MARK)
3993 return;
3994
cc79451b
RK
3995 /* Push a new temporary level for any temporaries we make for
3996 this argument. */
3997 push_temp_slots ();
3998
f73ad30e 3999 if (ACCUMULATE_OUTGOING_ARGS)
51bbfa0c 4000 {
f73ad30e
JH
4001 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4002 save any previous data at that location. */
4003 if (argblock && ! variable_size && arg->stack)
4004 {
51bbfa0c 4005#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
4006 /* stack_slot is negative, but we want to index stack_usage_map
4007 with positive values. */
4008 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4009 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4010 else
4011 upper_bound = 0;
51bbfa0c 4012
f73ad30e 4013 lower_bound = upper_bound - arg->size.constant;
51bbfa0c 4014#else
f73ad30e
JH
4015 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4016 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4017 else
4018 lower_bound = 0;
51bbfa0c 4019
f73ad30e 4020 upper_bound = lower_bound + arg->size.constant;
51bbfa0c
RS
4021#endif
4022
f73ad30e
JH
4023 for (i = lower_bound; i < upper_bound; i++)
4024 if (stack_usage_map[i]
4025 /* Don't store things in the fixed argument area at this point;
4026 it has already been saved. */
4027 && i > reg_parm_stack_space)
4028 break;
51bbfa0c 4029
f73ad30e 4030 if (i != upper_bound)
51bbfa0c 4031 {
f73ad30e
JH
4032 /* We need to make a save area. See what mode we can make it. */
4033 enum machine_mode save_mode
4034 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4035 rtx stack_area
4036 = gen_rtx_MEM (save_mode,
4037 memory_address (save_mode,
4038 XEXP (arg->stack_slot, 0)));
4039
4040 if (save_mode == BLKmode)
4041 {
4042 arg->save_area = assign_stack_temp (BLKmode,
4043 arg->size.constant, 0);
4044 MEM_SET_IN_STRUCT_P (arg->save_area,
4045 AGGREGATE_TYPE_P (TREE_TYPE
4046 (arg->tree_value)));
4047 preserve_temp_slots (arg->save_area);
4048 emit_block_move (validize_mem (arg->save_area), stack_area,
4049 GEN_INT (arg->size.constant),
19caa751 4050 PARM_BOUNDARY);
f73ad30e
JH
4051 }
4052 else
4053 {
4054 arg->save_area = gen_reg_rtx (save_mode);
4055 emit_move_insn (arg->save_area, stack_area);
4056 }
51bbfa0c
RS
4057 }
4058 }
f73ad30e
JH
4059 /* Now that we have saved any slots that will be overwritten by this
4060 store, mark all slots this store will use. We must do this before
4061 we actually expand the argument since the expansion itself may
4062 trigger library calls which might need to use the same stack slot. */
4063 if (argblock && ! variable_size && arg->stack)
4064 for (i = lower_bound; i < upper_bound; i++)
4065 stack_usage_map[i] = 1;
51bbfa0c 4066 }
b564df06 4067
51bbfa0c
RS
4068 /* If this isn't going to be placed on both the stack and in registers,
4069 set up the register and number of words. */
4070 if (! arg->pass_on_stack)
4071 reg = arg->reg, partial = arg->partial;
4072
4073 if (reg != 0 && partial == 0)
4074 /* Being passed entirely in a register. We shouldn't be called in
4075 this case. */
4076 abort ();
4077
4ab56118
RK
4078 /* If this arg needs special alignment, don't load the registers
4079 here. */
4080 if (arg->n_aligned_regs != 0)
4081 reg = 0;
4ab56118 4082
4ab56118 4083 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
4084 it directly into its stack slot. Otherwise, we can. */
4085 if (arg->value == 0)
d64f5a78 4086 {
d64f5a78
RS
4087 /* stack_arg_under_construction is nonzero if a function argument is
4088 being evaluated directly into the outgoing argument list and
4089 expand_call must take special action to preserve the argument list
4090 if it is called recursively.
4091
4092 For scalar function arguments stack_usage_map is sufficient to
4093 determine which stack slots must be saved and restored. Scalar
4094 arguments in general have pass_on_stack == 0.
4095
4096 If this argument is initialized by a function which takes the
4097 address of the argument (a C++ constructor or a C function
4098 returning a BLKmode structure), then stack_usage_map is
4099 insufficient and expand_call must push the stack around the
4100 function call. Such arguments have pass_on_stack == 1.
4101
4102 Note that it is always safe to set stack_arg_under_construction,
4103 but this generates suboptimal code if set when not needed. */
4104
4105 if (arg->pass_on_stack)
4106 stack_arg_under_construction++;
f73ad30e 4107
3a08477a
RK
4108 arg->value = expand_expr (pval,
4109 (partial
4110 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4111 ? NULL_RTX : arg->stack,
e5d70561 4112 VOIDmode, 0);
1efe6448
RK
4113
4114 /* If we are promoting object (or for any other reason) the mode
4115 doesn't agree, convert the mode. */
4116
7373d92d
RK
4117 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4118 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4119 arg->value, arg->unsignedp);
1efe6448 4120
d64f5a78
RS
4121 if (arg->pass_on_stack)
4122 stack_arg_under_construction--;
d64f5a78 4123 }
51bbfa0c
RS
4124
4125 /* Don't allow anything left on stack from computation
4126 of argument to alloca. */
4127 if (may_be_alloca)
4128 do_pending_stack_adjust ();
4129
4130 if (arg->value == arg->stack)
7815214e 4131 {
c5c76735 4132 /* If the value is already in the stack slot, we are done. */
7d384cc0 4133 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
7815214e 4134 {
7815214e 4135 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 4136 XEXP (arg->stack, 0), Pmode,
7d384cc0 4137 ARGS_SIZE_RTX (arg->size),
7815214e 4138 TYPE_MODE (sizetype),
956d6950
JL
4139 GEN_INT (MEMORY_USE_RW),
4140 TYPE_MODE (integer_type_node));
7815214e
RK
4141 }
4142 }
1efe6448 4143 else if (arg->mode != BLKmode)
51bbfa0c
RS
4144 {
4145 register int size;
4146
4147 /* Argument is a scalar, not entirely passed in registers.
4148 (If part is passed in registers, arg->partial says how much
4149 and emit_push_insn will take care of putting it there.)
4150
4151 Push it, and if its size is less than the
4152 amount of space allocated to it,
4153 also bump stack pointer by the additional space.
4154 Note that in C the default argument promotions
4155 will prevent such mismatches. */
4156
1efe6448 4157 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
4158 /* Compute how much space the push instruction will push.
4159 On many machines, pushing a byte will advance the stack
4160 pointer by a halfword. */
4161#ifdef PUSH_ROUNDING
4162 size = PUSH_ROUNDING (size);
4163#endif
4164 used = size;
4165
4166 /* Compute how much space the argument should get:
4167 round up to a multiple of the alignment for arguments. */
1efe6448 4168 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
4169 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4170 / (PARM_BOUNDARY / BITS_PER_UNIT))
4171 * (PARM_BOUNDARY / BITS_PER_UNIT));
4172
4173 /* This isn't already where we want it on the stack, so put it there.
4174 This can either be done with push or copy insns. */
e5e809f4
JL
4175 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
4176 partial, reg, used - size, argblock,
4fc026cd
CM
4177 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4178 ARGS_SIZE_RTX (arg->alignment_pad));
51bbfa0c
RS
4179 }
4180 else
4181 {
4182 /* BLKmode, at least partly to be pushed. */
4183
4184 register int excess;
4185 rtx size_rtx;
4186
4187 /* Pushing a nonscalar.
4188 If part is passed in registers, PARTIAL says how much
4189 and emit_push_insn will take care of putting it there. */
4190
4191 /* Round its size up to a multiple
4192 of the allocation unit for arguments. */
4193
4194 if (arg->size.var != 0)
4195 {
4196 excess = 0;
4197 size_rtx = ARGS_SIZE_RTX (arg->size);
4198 }
4199 else
4200 {
51bbfa0c
RS
4201 /* PUSH_ROUNDING has no effect on us, because
4202 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 4203 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 4204 + partial * UNITS_PER_WORD);
e4f93898 4205 size_rtx = expr_size (pval);
51bbfa0c
RS
4206 }
4207
1efe6448 4208 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
19caa751
RK
4209 TYPE_ALIGN (TREE_TYPE (pval)), partial, reg, excess,
4210 argblock, ARGS_SIZE_RTX (arg->offset),
4fc026cd
CM
4211 reg_parm_stack_space,
4212 ARGS_SIZE_RTX (arg->alignment_pad));
51bbfa0c
RS
4213 }
4214
4215
4216 /* Unless this is a partially-in-register argument, the argument is now
4217 in the stack.
4218
4219 ??? Note that this can change arg->value from arg->stack to
4220 arg->stack_slot and it matters when they are not the same.
4221 It isn't totally clear that this is correct in all cases. */
4222 if (partial == 0)
3b917a55 4223 arg->value = arg->stack_slot;
51bbfa0c
RS
4224
4225 /* Once we have pushed something, pops can't safely
4226 be deferred during the rest of the arguments. */
4227 NO_DEFER_POP;
4228
4229 /* ANSI doesn't require a sequence point here,
4230 but PCC has one, so this will avoid some problems. */
4231 emit_queue ();
4232
db907e7b
RK
4233 /* Free any temporary slots made in processing this argument. Show
4234 that we might have taken the address of something and pushed that
4235 as an operand. */
4236 preserve_temp_slots (NULL_RTX);
51bbfa0c 4237 free_temp_slots ();
cc79451b 4238 pop_temp_slots ();
51bbfa0c 4239}