51bbfa0c 1/* Convert function calls to rtl insns, for GNU C compiler.
3c71940f
JL
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
51bbfa0c
RS
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA. */
51bbfa0c
RS
21
22#include "config.h"
670ee920
KG
23#include "system.h"
24#include "rtl.h"
25#include "tree.h"
26#include "flags.h"
27#include "expr.h"
49ad7cfa 28#include "function.h"
670ee920 29#include "regs.h"
51bbfa0c 30#include "insn-flags.h"
5f6da302 31#include "toplev.h"
d6f4ec51 32#include "output.h"
b1474bb7 33#include "tm_p.h"
51bbfa0c 34
c795bca9
BS
35#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
36#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
37#endif
38
51bbfa0c 39/* Decide whether a function's arguments should be processed
bbc8a071
RK
40 from first to last or from last to first.
41
42 They should if the stack and args grow in opposite directions, but
43 only if we have push insns. */
51bbfa0c 44
51bbfa0c 45#ifdef PUSH_ROUNDING
bbc8a071 46
40083ddf 47#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
51bbfa0c
RS
48#define PUSH_ARGS_REVERSED /* If it's last to first */
49#endif
bbc8a071 50
51bbfa0c
RS
51#endif
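/* Illustrative example (assumed target, not prescribed by this file): on a
   machine where the stack grows downward but the argument block is
   addressed upward (STACK_GROWS_DOWNWARD defined, ARGS_GROW_DOWNWARD not
   defined), exactly one of the two macros is defined, so the test above
   defines PUSH_ARGS_REVERSED and arguments are computed and pushed last
   to first.  */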
52
c795bca9
BS
53/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
54#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
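/* Illustrative example with assumed values: if PREFERRED_STACK_BOUNDARY
   is 64 and BITS_PER_UNIT is 8, STACK_BYTES is 8, so constant-sized
   argument blocks are rounded up to a multiple of 8 bytes.  */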
51bbfa0c
RS
55
56/* Data structure and subroutines used within expand_call. */
57
58struct arg_data
59{
60 /* Tree node for this argument. */
61 tree tree_value;
1efe6448
RK
62 /* Mode for value; TYPE_MODE unless promoted. */
63 enum machine_mode mode;
51bbfa0c
RS
64 /* Current RTL value for argument, or 0 if it isn't precomputed. */
65 rtx value;
 66 /* Initially-computed RTL value for argument; only for const functions. */
67 rtx initial_value;
 68 /* Register to pass this argument in, 0 if passed on stack, or a
cacbd532 69 PARALLEL if the arg is to be copied into multiple non-contiguous
51bbfa0c
RS
70 registers. */
71 rtx reg;
84b55618
RK
72 /* If REG was promoted from the actual mode of the argument expression,
73 indicates whether the promotion is sign- or zero-extended. */
74 int unsignedp;
51bbfa0c
RS
75 /* Number of registers to use. 0 means put the whole arg in registers.
76 Also 0 if not passed in registers. */
77 int partial;
d64f5a78
RS
78 /* Non-zero if argument must be passed on stack.
79 Note that some arguments may be passed on the stack
80 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
81 pass_on_stack identifies arguments that *cannot* go in registers. */
51bbfa0c
RS
82 int pass_on_stack;
83 /* Offset of this argument from beginning of stack-args. */
84 struct args_size offset;
85 /* Similar, but offset to the start of the stack slot. Different from
86 OFFSET if this arg pads downward. */
87 struct args_size slot_offset;
 88 /* Size of this argument on the stack, rounded up for any padding it gets;
89 parts of the argument passed in registers do not count.
90 If REG_PARM_STACK_SPACE is defined, then register parms
91 are counted here as well. */
92 struct args_size size;
93 /* Location on the stack at which parameter should be stored. The store
94 has already been done if STACK == VALUE. */
95 rtx stack;
96 /* Location on the stack of the start of this argument slot. This can
97 differ from STACK if this arg pads downward. This location is known
98 to be aligned to FUNCTION_ARG_BOUNDARY. */
99 rtx stack_slot;
100#ifdef ACCUMULATE_OUTGOING_ARGS
101 /* Place that this stack area has been saved, if needed. */
102 rtx save_area;
103#endif
4ab56118
RK
104 /* If an argument's alignment does not permit direct copying into registers,
105 copy in smaller-sized pieces into pseudos. These are stored in a
106 block pointed to by this field. The next field says how many
107 word-sized pseudos we made. */
108 rtx *aligned_regs;
109 int n_aligned_regs;
4fc026cd
CM
110 /* The amount that the stack pointer needs to be adjusted to
111 force alignment for the next argument. */
112 struct args_size alignment_pad;
51bbfa0c
RS
113};
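/* Illustrative example of how these fields combine (hypothetical target):
   an argument whose first two words go in registers and whose remainder
   is pushed would have REG set to the first hard register, PARTIAL == 2
   and PASS_ON_STACK == 0, while OFFSET and SIZE describe only the portion
   that actually lives on the stack (unless REG_PARM_STACK_SPACE is
   defined, as noted above).  */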
114
115#ifdef ACCUMULATE_OUTGOING_ARGS
b94301c2 116/* A vector of one char per byte of stack space. A byte is non-zero if
51bbfa0c
RS
117 the corresponding stack location has been used.
118 This vector is used to prevent a function call within an argument from
119 clobbering any stack already set up. */
120static char *stack_usage_map;
121
122/* Size of STACK_USAGE_MAP. */
123static int highest_outgoing_arg_in_use;
2f4aa534
RS
124
125/* stack_arg_under_construction is nonzero when an argument may be
126 initialized with a constructor call (including a C function that
127 returns a BLKmode struct) and expand_call must take special action
128 to make sure the object being constructed does not overlap the
129 argument list for the constructor call. */
130int stack_arg_under_construction;
51bbfa0c
RS
131#endif
132
3d994c6b
KG
133static int calls_function PARAMS ((tree, int));
134static int calls_function_1 PARAMS ((tree, int));
135static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
136 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
12a22e76 137 rtx, int, rtx, int, int));
3d994c6b
KG
138static void precompute_register_parameters PARAMS ((int,
139 struct arg_data *,
140 int *));
141static void store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
142 int));
143static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
144 int));
145static int finalize_must_preallocate PARAMS ((int, int,
146 struct arg_data *,
147 struct args_size *));
148static void precompute_arguments PARAMS ((int, int, int,
149 struct arg_data *,
150 struct args_size *));
151static int compute_argument_block_size PARAMS ((int,
c2f8b491
JH
152 struct args_size *,
153 int));
3d994c6b
KG
154static void initialize_argument_information PARAMS ((int,
155 struct arg_data *,
156 struct args_size *,
157 int, tree, tree,
158 CUMULATIVE_ARGS *,
159 int, rtx *, int *,
160 int *, int *));
161static void compute_argument_addresses PARAMS ((struct arg_data *,
162 rtx, int));
163static rtx rtx_for_function_call PARAMS ((tree, tree));
164static void load_register_parameters PARAMS ((struct arg_data *,
165 int, rtx *));
43bc5f13
JH
166static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx, int,
167 enum machine_mode,
168 int, va_list));
12a22e76 169static int libfunc_nothrow PARAMS ((rtx));
21a3b983 170
20efdf74 171#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3d994c6b
KG
172static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
173static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
20efdf74 174#endif
51bbfa0c 175\f
1ce0cb53
JW
176/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
177 `alloca'.
178
179 If WHICH is 0, return 1 if EXP contains a call to any function.
 180 Actually, we only need to return 1 if evaluating EXP would require pushing
181 arguments on the stack, but that is too difficult to compute, so we just
182 assume any function call might require the stack. */
51bbfa0c 183
1c8d7aef
RS
184static tree calls_function_save_exprs;
185
51bbfa0c 186static int
1ce0cb53 187calls_function (exp, which)
51bbfa0c 188 tree exp;
1ce0cb53 189 int which;
1c8d7aef
RS
190{
191 int val;
192 calls_function_save_exprs = 0;
193 val = calls_function_1 (exp, which);
194 calls_function_save_exprs = 0;
195 return val;
196}
197
198static int
199calls_function_1 (exp, which)
200 tree exp;
201 int which;
51bbfa0c
RS
202{
203 register int i;
0207efa2
RK
204 enum tree_code code = TREE_CODE (exp);
205 int type = TREE_CODE_CLASS (code);
206 int length = tree_code_length[(int) code];
51bbfa0c 207
ddd5a7c1 208 /* If this code is language-specific, we don't know what it will do. */
0207efa2
RK
209 if ((int) code >= NUM_TREE_CODES)
210 return 1;
51bbfa0c 211
0207efa2 212 /* Only expressions and references can contain calls. */
3b59a331
RS
213 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
214 && type != 'b')
51bbfa0c
RS
215 return 0;
216
0207efa2 217 switch (code)
51bbfa0c
RS
218 {
219 case CALL_EXPR:
1ce0cb53
JW
220 if (which == 0)
221 return 1;
222 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
223 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
0207efa2
RK
224 == FUNCTION_DECL))
225 {
226 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
227
228 if ((DECL_BUILT_IN (fndecl)
95815af9 229 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
0207efa2
RK
230 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
231 || (DECL_SAVED_INSNS (fndecl)
49ad7cfa 232 && DECL_SAVED_INSNS (fndecl)->calls_alloca))
0207efa2
RK
233 return 1;
234 }
51bbfa0c
RS
235
236 /* Third operand is RTL. */
237 length = 2;
238 break;
239
240 case SAVE_EXPR:
241 if (SAVE_EXPR_RTL (exp) != 0)
242 return 0;
1c8d7aef
RS
243 if (value_member (exp, calls_function_save_exprs))
244 return 0;
245 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
246 calls_function_save_exprs);
247 return (TREE_OPERAND (exp, 0) != 0
248 && calls_function_1 (TREE_OPERAND (exp, 0), which));
51bbfa0c
RS
249
250 case BLOCK:
ef03bc85
CH
251 {
252 register tree local;
253
254 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
1ce0cb53 255 if (DECL_INITIAL (local) != 0
1c8d7aef 256 && calls_function_1 (DECL_INITIAL (local), which))
ef03bc85
CH
257 return 1;
258 }
259 {
260 register tree subblock;
261
262 for (subblock = BLOCK_SUBBLOCKS (exp);
263 subblock;
264 subblock = TREE_CHAIN (subblock))
1c8d7aef 265 if (calls_function_1 (subblock, which))
ef03bc85
CH
266 return 1;
267 }
268 return 0;
51bbfa0c
RS
269
270 case METHOD_CALL_EXPR:
271 length = 3;
272 break;
273
274 case WITH_CLEANUP_EXPR:
275 length = 1;
276 break;
277
278 case RTL_EXPR:
279 return 0;
e9a25f70
JL
280
281 default:
282 break;
51bbfa0c
RS
283 }
284
285 for (i = 0; i < length; i++)
286 if (TREE_OPERAND (exp, i) != 0
1c8d7aef 287 && calls_function_1 (TREE_OPERAND (exp, i), which))
51bbfa0c
RS
288 return 1;
289
290 return 0;
291}
292\f
293/* Force FUNEXP into a form suitable for the address of a CALL,
294 and return that as an rtx. Also load the static chain register
295 if FNDECL is a nested function.
296
77cac2f2
RK
297 CALL_FUSAGE points to a variable holding the prospective
298 CALL_INSN_FUNCTION_USAGE information. */
51bbfa0c 299
03dacb02 300rtx
77cac2f2 301prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
51bbfa0c
RS
302 rtx funexp;
303 tree fndecl;
77cac2f2 304 rtx *call_fusage;
01368078 305 int reg_parm_seen;
51bbfa0c
RS
306{
307 rtx static_chain_value = 0;
308
309 funexp = protect_from_queue (funexp, 0);
310
311 if (fndecl != 0)
0f41302f 312 /* Get possible static chain value for nested function in C. */
51bbfa0c
RS
313 static_chain_value = lookup_static_chain (fndecl);
314
315 /* Make a valid memory address and copy constants thru pseudo-regs,
316 but not for a constant address if -fno-function-cse. */
317 if (GET_CODE (funexp) != SYMBOL_REF)
01368078 318 /* If we are using registers for parameters, force the
e9a25f70
JL
319 function address into a register now. */
320 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
321 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
322 : memory_address (FUNCTION_MODE, funexp));
51bbfa0c
RS
323 else
324 {
325#ifndef NO_FUNCTION_CSE
326 if (optimize && ! flag_no_function_cse)
327#ifdef NO_RECURSIVE_FUNCTION_CSE
328 if (fndecl != current_function_decl)
329#endif
330 funexp = force_reg (Pmode, funexp);
331#endif
332 }
333
334 if (static_chain_value != 0)
335 {
336 emit_move_insn (static_chain_rtx, static_chain_value);
337
f991a240
RK
338 if (GET_CODE (static_chain_rtx) == REG)
339 use_reg (call_fusage, static_chain_rtx);
51bbfa0c
RS
340 }
341
342 return funexp;
343}
344
345/* Generate instructions to call function FUNEXP,
346 and optionally pop the results.
347 The CALL_INSN is the first insn generated.
348
607ea900 349 FNDECL is the declaration node of the function. This is given to the
2c8da025
RK
350 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
351
334c4f0f
RK
352 FUNTYPE is the data type of the function. This is given to the macro
353 RETURN_POPS_ARGS to determine whether this function pops its own args.
354 We used to allow an identifier for library functions, but that doesn't
355 work when the return type is an aggregate type and the calling convention
356 says that the pointer to this aggregate is to be popped by the callee.
51bbfa0c
RS
357
 358 STACK_SIZE is the number of bytes of arguments on the stack.
c2732da3
JM
 359 ROUNDED_STACK_SIZE is that number rounded up to
 360 PREFERRED_STACK_BOUNDARY; zero if the size is variable. It is used
 361 both to put into the call insn and to generate explicit popping
 362 code if necessary.
51bbfa0c
RS
363
364 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
365 It is zero if this call doesn't want a structure value.
366
367 NEXT_ARG_REG is the rtx that results from executing
368 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
369 just after all the args have had their registers assigned.
370 This could be whatever you like, but normally it is the first
371 arg-register beyond those used for args in this call,
372 or 0 if all the arg-registers are used in this call.
373 It is passed on to `gen_call' so you can put this info in the call insn.
374
375 VALREG is a hard register in which a value is returned,
376 or 0 if the call does not return a value.
377
378 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
379 the args to this call were processed.
380 We restore `inhibit_defer_pop' to that value.
381
94b25f81
RK
382 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
383 denote registers used by the called function.
51bbfa0c
RS
384
385 IS_CONST is true if this is a `const' call. */
386
322e3e34 387static void
fb5eebb9
RH
388emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
389 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
12a22e76 390 call_fusage, is_const, nothrow)
51bbfa0c 391 rtx funexp;
c84e2712
KG
392 tree fndecl ATTRIBUTE_UNUSED;
393 tree funtype ATTRIBUTE_UNUSED;
6a651371 394 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
fb5eebb9 395 HOST_WIDE_INT rounded_stack_size;
962f1324 396 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
51bbfa0c
RS
397 rtx next_arg_reg;
398 rtx valreg;
399 int old_inhibit_defer_pop;
77cac2f2 400 rtx call_fusage;
12a22e76 401 int is_const, nothrow;
51bbfa0c 402{
062e7fd8 403 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
57bed152 404#if defined (HAVE_call) && defined (HAVE_call_value)
e5d70561 405 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
57bed152 406#endif
51bbfa0c 407 rtx call_insn;
081f5e7e 408#ifndef ACCUMULATE_OUTGOING_ARGS
51bbfa0c 409 int already_popped = 0;
fb5eebb9 410 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
081f5e7e 411#endif
51bbfa0c
RS
412
413 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
414 and we don't want to load it into a register as an optimization,
415 because prepare_call_address already did it if it should be done. */
416 if (GET_CODE (funexp) != SYMBOL_REF)
417 funexp = memory_address (FUNCTION_MODE, funexp);
418
419#ifndef ACCUMULATE_OUTGOING_ARGS
420#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
8bcafee3
JDA
421/* If the target has "call" or "call_value" insns, then prefer them
422 if no arguments are actually popped. If the target does not have
423 "call" or "call_value" insns, then we must use the popping versions
424 even if the call has no arguments to pop. */
425#if defined (HAVE_call) && defined (HAVE_call_value)
426 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
427 && n_popped > 0)
428#else
429 if (HAVE_call_pop && HAVE_call_value_pop)
430#endif
51bbfa0c 431 {
fb5eebb9 432 rtx n_pop = GEN_INT (n_popped);
51bbfa0c
RS
433 rtx pat;
434
435 /* If this subroutine pops its own args, record that in the call insn
436 if possible, for the sake of frame pointer elimination. */
2c8da025 437
51bbfa0c
RS
438 if (valreg)
439 pat = gen_call_value_pop (valreg,
38a448ca 440 gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 441 rounded_stack_size_rtx, next_arg_reg, n_pop);
51bbfa0c 442 else
38a448ca 443 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 444 rounded_stack_size_rtx, next_arg_reg, n_pop);
51bbfa0c
RS
445
446 emit_call_insn (pat);
447 already_popped = 1;
448 }
449 else
450#endif
451#endif
452
453#if defined (HAVE_call) && defined (HAVE_call_value)
454 if (HAVE_call && HAVE_call_value)
455 {
456 if (valreg)
457 emit_call_insn (gen_call_value (valreg,
38a448ca 458 gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 459 rounded_stack_size_rtx, next_arg_reg,
e992302c 460 NULL_RTX));
51bbfa0c 461 else
38a448ca 462 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
062e7fd8 463 rounded_stack_size_rtx, next_arg_reg,
51bbfa0c
RS
464 struct_value_size_rtx));
465 }
466 else
467#endif
468 abort ();
469
77cac2f2 470 /* Find the CALL insn we just emitted. */
51bbfa0c
RS
471 for (call_insn = get_last_insn ();
472 call_insn && GET_CODE (call_insn) != CALL_INSN;
473 call_insn = PREV_INSN (call_insn))
474 ;
475
476 if (! call_insn)
477 abort ();
478
e59e60a7
RK
479 /* Put the register usage information on the CALL. If there is already
480 some usage information, put ours at the end. */
481 if (CALL_INSN_FUNCTION_USAGE (call_insn))
482 {
483 rtx link;
484
485 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
486 link = XEXP (link, 1))
487 ;
488
489 XEXP (link, 1) = call_fusage;
490 }
491 else
492 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
51bbfa0c
RS
493
494 /* If this is a const call, then set the insn's unchanging bit. */
495 if (is_const)
496 CONST_CALL_P (call_insn) = 1;
497
12a22e76
JM
498 /* If this call can't throw, attach a REG_EH_REGION reg note to that
499 effect. */
500 if (nothrow)
54cea123 501 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
12a22e76
JM
502 REG_NOTES (call_insn));
503
b1e64e0d
RS
504 /* Restore this now, so that we do defer pops for this call's args
505 if the context of the call as a whole permits. */
506 inhibit_defer_pop = old_inhibit_defer_pop;
507
51bbfa0c
RS
508#ifndef ACCUMULATE_OUTGOING_ARGS
509 /* If returning from the subroutine does not automatically pop the args,
510 we need an instruction to pop them sooner or later.
511 Perhaps do it now; perhaps just record how much space to pop later.
512
513 If returning from the subroutine does pop the args, indicate that the
514 stack pointer will be changed. */
515
c2732da3
JM
516 /* The space for the args is no longer waiting for the call; either it
517 was popped by the call, or it'll be popped below. */
518 arg_space_so_far -= rounded_stack_size;
519
fb5eebb9 520 if (n_popped > 0)
51bbfa0c
RS
521 {
522 if (!already_popped)
e3da301d 523 CALL_INSN_FUNCTION_USAGE (call_insn)
38a448ca
RH
524 = gen_rtx_EXPR_LIST (VOIDmode,
525 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
526 CALL_INSN_FUNCTION_USAGE (call_insn));
fb5eebb9 527 rounded_stack_size -= n_popped;
062e7fd8 528 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
51bbfa0c
RS
529 }
530
fb5eebb9 531 if (rounded_stack_size != 0)
51bbfa0c 532 {
70a73141 533 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
fb5eebb9 534 pending_stack_adjust += rounded_stack_size;
51bbfa0c 535 else
062e7fd8 536 adjust_stack (rounded_stack_size_rtx);
51bbfa0c
RS
537 }
538#endif
539}
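/* Illustrative sketch of the popping logic above (assuming a target
   without ACCUMULATE_OUTGOING_ARGS): if RETURN_POPS_ARGS reports that the
   callee pops N_POPPED bytes, those bytes are subtracted from
   ROUNDED_STACK_SIZE and only the remainder, if any, is popped here,
   either deferred via pending_stack_adjust or popped immediately with
   adjust_stack.  */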
540
20efdf74
JL
541/* Determine if the function identified by NAME and FNDECL is one with
542 special properties we wish to know about.
543
544 For example, if the function might return more than one time (setjmp), then
545 set RETURNS_TWICE to a nonzero value.
546
 547 Similarly, set IS_LONGJMP if the function is in the longjmp family.
548
549 Set IS_MALLOC for any of the standard memory allocation functions which
550 allocate from the heap.
551
552 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
553 space from the stack such as alloca. */
554
3a8c995b 555void
fa76d9e0 556special_function_p (fndecl, returns_twice, is_longjmp, fork_or_exec,
20efdf74 557 is_malloc, may_be_alloca)
20efdf74
JL
558 tree fndecl;
559 int *returns_twice;
560 int *is_longjmp;
fa76d9e0 561 int *fork_or_exec;
20efdf74
JL
562 int *is_malloc;
563 int *may_be_alloca;
564{
565 *returns_twice = 0;
566 *is_longjmp = 0;
fa76d9e0 567 *fork_or_exec = 0;
20efdf74
JL
568 *may_be_alloca = 0;
569
140592a0
AG
570 /* The function decl may have the `malloc' attribute. */
571 *is_malloc = fndecl && DECL_IS_MALLOC (fndecl);
572
3a8c995b
MM
573 if (! *is_malloc
574 && fndecl && DECL_NAME (fndecl)
140592a0 575 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
20efdf74
JL
576 /* Exclude functions not at the file scope, or not `extern',
577 since they are not the magic functions we would otherwise
578 think they are. */
579 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
580 {
3a8c995b 581 char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
20efdf74
JL
582 char *tname = name;
583
ca54603f
JL
584 /* We assume that alloca will always be called by name. It
585 makes no sense to pass it as a pointer-to-function to
586 anything that does not understand its behavior. */
587 *may_be_alloca
588 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
589 && name[0] == 'a'
590 && ! strcmp (name, "alloca"))
591 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
592 && name[0] == '_'
593 && ! strcmp (name, "__builtin_alloca"))));
594
20efdf74
JL
595 /* Disregard prefix _, __ or __x. */
596 if (name[0] == '_')
597 {
598 if (name[1] == '_' && name[2] == 'x')
599 tname += 3;
600 else if (name[1] == '_')
601 tname += 2;
602 else
603 tname += 1;
604 }
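/* Illustrative example: a NAME of "__xsetjmp" leaves TNAME pointing at
   "setjmp", and "_setjmp" also leaves TNAME at "setjmp", so both
   spellings are caught by the checks below.  */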
605
606 if (tname[0] == 's')
607 {
608 *returns_twice
609 = ((tname[1] == 'e'
610 && (! strcmp (tname, "setjmp")
611 || ! strcmp (tname, "setjmp_syscall")))
612 || (tname[1] == 'i'
613 && ! strcmp (tname, "sigsetjmp"))
614 || (tname[1] == 'a'
615 && ! strcmp (tname, "savectx")));
616 if (tname[1] == 'i'
617 && ! strcmp (tname, "siglongjmp"))
618 *is_longjmp = 1;
619 }
620 else if ((tname[0] == 'q' && tname[1] == 's'
621 && ! strcmp (tname, "qsetjmp"))
622 || (tname[0] == 'v' && tname[1] == 'f'
623 && ! strcmp (tname, "vfork")))
624 *returns_twice = 1;
625
626 else if (tname[0] == 'l' && tname[1] == 'o'
627 && ! strcmp (tname, "longjmp"))
628 *is_longjmp = 1;
fa76d9e0
JR
629
630 else if ((tname[0] == 'f' && tname[1] == 'o'
631 && ! strcmp (tname, "fork"))
632 /* Linux specific: __clone. check NAME to insist on the
633 leading underscores, to avoid polluting the ISO / POSIX
634 namespace. */
635 || (name[0] == '_' && name[1] == '_'
636 && ! strcmp (tname, "clone"))
637 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
638 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
639 && (tname[5] == '\0'
640 || ((tname[5] == 'p' || tname[5] == 'e')
641 && tname[6] == '\0'))))
642 *fork_or_exec = 1;
643
140592a0 644 /* Do not add any more malloc-like functions to this list;
82514696
KG
645 instead mark them as malloc functions using the malloc attribute.
646 Note, realloc is not suitable for attribute malloc since
1e5a1107
JM
647 it may return the same address across multiple calls.
648 C++ operator new is not suitable because it is not required
649 to return a unique pointer; indeed, the standard placement new
650 just returns its argument. */
20efdf74
JL
651 else if (! strcmp (tname, "malloc")
652 || ! strcmp (tname, "calloc")
1e5a1107 653 || ! strcmp (tname, "strdup"))
20efdf74
JL
654 *is_malloc = 1;
655 }
656}
657
658/* Precompute all register parameters as described by ARGS, storing values
659 into fields within the ARGS array.
660
 661 NUM_ACTUALS indicates the total number of elements in the ARGS array.
662
663 Set REG_PARM_SEEN if we encounter a register parameter. */
664
665static void
666precompute_register_parameters (num_actuals, args, reg_parm_seen)
667 int num_actuals;
668 struct arg_data *args;
669 int *reg_parm_seen;
670{
671 int i;
672
673 *reg_parm_seen = 0;
674
675 for (i = 0; i < num_actuals; i++)
676 if (args[i].reg != 0 && ! args[i].pass_on_stack)
677 {
678 *reg_parm_seen = 1;
679
680 if (args[i].value == 0)
681 {
682 push_temp_slots ();
683 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
684 VOIDmode, 0);
685 preserve_temp_slots (args[i].value);
686 pop_temp_slots ();
687
688 /* ANSI doesn't require a sequence point here,
689 but PCC has one, so this will avoid some problems. */
690 emit_queue ();
691 }
692
693 /* If we are to promote the function arg to a wider mode,
694 do it now. */
695
696 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
697 args[i].value
698 = convert_modes (args[i].mode,
699 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
700 args[i].value, args[i].unsignedp);
701
702 /* If the value is expensive, and we are inside an appropriately
703 short loop, put the value into a pseudo and then put the pseudo
704 into the hard reg.
705
706 For small register classes, also do this if this call uses
707 register parameters. This is to avoid reload conflicts while
 708 loading the parameter registers. */
709
710 if ((! (GET_CODE (args[i].value) == REG
711 || (GET_CODE (args[i].value) == SUBREG
712 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
713 && args[i].mode != BLKmode
714 && rtx_cost (args[i].value, SET) > 2
715 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
716 || preserve_subexpressions_p ()))
717 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
718 }
719}
720
721#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
722
723 /* The argument list is the property of the called routine and it
724 may clobber it. If the fixed area has been used for previous
725 parameters, we must save and restore it. */
726static rtx
727save_fixed_argument_area (reg_parm_stack_space, argblock,
728 low_to_save, high_to_save)
729 int reg_parm_stack_space;
730 rtx argblock;
731 int *low_to_save;
732 int *high_to_save;
733{
734 int i;
735 rtx save_area = NULL_RTX;
736
 737 /* Compute the boundary of the area that needs to be saved, if any. */
738#ifdef ARGS_GROW_DOWNWARD
739 for (i = 0; i < reg_parm_stack_space + 1; i++)
740#else
741 for (i = 0; i < reg_parm_stack_space; i++)
742#endif
743 {
744 if (i >= highest_outgoing_arg_in_use
745 || stack_usage_map[i] == 0)
746 continue;
747
748 if (*low_to_save == -1)
749 *low_to_save = i;
750
751 *high_to_save = i;
752 }
753
754 if (*low_to_save >= 0)
755 {
756 int num_to_save = *high_to_save - *low_to_save + 1;
757 enum machine_mode save_mode
758 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
759 rtx stack_area;
760
761 /* If we don't have the required alignment, must do this in BLKmode. */
762 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
763 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
764 save_mode = BLKmode;
765
766#ifdef ARGS_GROW_DOWNWARD
767 stack_area = gen_rtx_MEM (save_mode,
768 memory_address (save_mode,
769 plus_constant (argblock,
770 - *high_to_save)));
771#else
772 stack_area = gen_rtx_MEM (save_mode,
773 memory_address (save_mode,
774 plus_constant (argblock,
775 *low_to_save)));
776#endif
777 if (save_mode == BLKmode)
778 {
779 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
04572513
JJ
 780 /* Cannot use emit_block_move here, because it may be implemented as a
 781 library call, which would end up back in this code and cause
 782 infinite recursion. */
783 move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
784 PARM_BOUNDARY / BITS_PER_UNIT);
20efdf74
JL
785 }
786 else
787 {
788 save_area = gen_reg_rtx (save_mode);
789 emit_move_insn (save_area, stack_area);
790 }
791 }
792 return save_area;
793}
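/* Illustrative example (assumed layout): if only bytes 8 through 15 of
   the register-parameter area are marked used in stack_usage_map,
   LOW_TO_SAVE and HIGH_TO_SAVE end up as 8 and 15, the eight bytes are
   copied into an integer-mode pseudo (or a BLKmode stack temporary when
   the alignment test above fails), and restore_fixed_argument_area puts
   them back after the call.  */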
794
795static void
796restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
797 rtx save_area;
798 rtx argblock;
799 int high_to_save;
800 int low_to_save;
801{
802 enum machine_mode save_mode = GET_MODE (save_area);
803#ifdef ARGS_GROW_DOWNWARD
804 rtx stack_area
805 = gen_rtx_MEM (save_mode,
806 memory_address (save_mode,
807 plus_constant (argblock,
808 - high_to_save)));
809#else
810 rtx stack_area
811 = gen_rtx_MEM (save_mode,
812 memory_address (save_mode,
813 plus_constant (argblock,
814 low_to_save)));
815#endif
816
817 if (save_mode != BLKmode)
818 emit_move_insn (stack_area, save_area);
819 else
04572513
JJ
 820 /* Cannot use emit_block_move here, because it may be implemented as a
 821 library call, which would end up back in this code and cause
 822 infinite recursion. */
823 move_by_pieces (stack_area, validize_mem (save_area),
824 high_to_save - low_to_save + 1,
825 PARM_BOUNDARY / BITS_PER_UNIT);
20efdf74
JL
826}
827#endif
828
829/* If any elements in ARGS refer to parameters that are to be passed in
830 registers, but not in memory, and whose alignment does not permit a
 831 direct copy into registers, copy the values into a group of pseudos
8e6a59fe
MM
832 which we will later copy into the appropriate hard registers.
833
834 Pseudos for each unaligned argument will be stored into the array
835 args[argnum].aligned_regs. The caller is responsible for deallocating
836 the aligned_regs array if it is nonzero. */
837
20efdf74
JL
838static void
839store_unaligned_arguments_into_pseudos (args, num_actuals)
840 struct arg_data *args;
841 int num_actuals;
842{
843 int i, j;
844
845 for (i = 0; i < num_actuals; i++)
846 if (args[i].reg != 0 && ! args[i].pass_on_stack
847 && args[i].mode == BLKmode
848 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
849 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
850 {
851 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
852 int big_endian_correction = 0;
853
854 args[i].n_aligned_regs
855 = args[i].partial ? args[i].partial
856 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
857
8e6a59fe
MM
858 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
859 * args[i].n_aligned_regs);
20efdf74
JL
860
861 /* Structures smaller than a word are aligned to the least
862 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
863 this means we must skip the empty high order bytes when
864 calculating the bit offset. */
865 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
866 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
867
868 for (j = 0; j < args[i].n_aligned_regs; j++)
869 {
870 rtx reg = gen_reg_rtx (word_mode);
871 rtx word = operand_subword_force (args[i].value, j, BLKmode);
872 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
873 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
874
875 args[i].aligned_regs[j] = reg;
876
877 /* There is no need to restrict this code to loading items
878 in TYPE_ALIGN sized hunks. The bitfield instructions can
879 load up entire word sized registers efficiently.
880
881 ??? This may not be needed anymore.
 882 We used to emit a clobber here, but that doesn't let later
 883 passes optimize the instructions we emit. By storing 0 into
 884 the register, later passes know that the first AND to zero out
 885 the bitfield being set in the register is unnecessary. The store
 886 of 0 will be deleted, as will at least the first AND. */
887
888 emit_move_insn (reg, const0_rtx);
889
890 bytes -= bitsize / BITS_PER_UNIT;
891 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
892 extract_bit_field (word, bitsize, 0, 1,
893 NULL_RTX, word_mode,
894 word_mode,
895 bitalign / BITS_PER_UNIT,
896 BITS_PER_WORD),
897 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
898 }
899 }
900}
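/* Worked example (assuming a 32-bit big-endian target): for a 3-byte
   BLKmode argument, bytes * BITS_PER_UNIT is 24, so big_endian_correction
   above is 32 - 24 = 8; the 24 extracted bits are stored starting at bit
   8 of the word-mode pseudo, leaving the zero-initialized high-order
   byte untouched.  */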
901
d7cdf113
JL
902/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
903 ACTPARMS.
904
905 NUM_ACTUALS is the total number of parameters.
906
907 N_NAMED_ARGS is the total number of named arguments.
908
 909 FNDECL is the tree node for the target of this call (if known).
910
911 ARGS_SO_FAR holds state needed by the target to know where to place
912 the next argument.
913
914 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
915 for arguments which are passed in registers.
916
 917 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
918 and may be modified by this routine.
919
920 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
 921 flags which may be modified by this routine. */
922
923static void
924initialize_argument_information (num_actuals, args, args_size, n_named_args,
925 actparms, fndecl, args_so_far,
926 reg_parm_stack_space, old_stack_level,
927 old_pending_adj, must_preallocate, is_const)
91813b28 928 int num_actuals ATTRIBUTE_UNUSED;
d7cdf113
JL
929 struct arg_data *args;
930 struct args_size *args_size;
91813b28 931 int n_named_args ATTRIBUTE_UNUSED;
d7cdf113
JL
932 tree actparms;
933 tree fndecl;
959f3a06 934 CUMULATIVE_ARGS *args_so_far;
d7cdf113
JL
935 int reg_parm_stack_space;
936 rtx *old_stack_level;
937 int *old_pending_adj;
938 int *must_preallocate;
939 int *is_const;
940{
941 /* 1 if scanning parms front to back, -1 if scanning back to front. */
942 int inc;
943
944 /* Count arg position in order args appear. */
945 int argpos;
946
4fc026cd 947 struct args_size alignment_pad;
d7cdf113
JL
948 int i;
949 tree p;
950
951 args_size->constant = 0;
952 args_size->var = 0;
953
954 /* In this loop, we consider args in the order they are written.
955 We fill up ARGS from the front or from the back if necessary
956 so that in any case the first arg to be pushed ends up at the front. */
957
958#ifdef PUSH_ARGS_REVERSED
959 i = num_actuals - 1, inc = -1;
960 /* In this case, must reverse order of args
961 so that we compute and push the last arg first. */
962#else
963 i = 0, inc = 1;
964#endif
965
966 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
967 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
968 {
969 tree type = TREE_TYPE (TREE_VALUE (p));
970 int unsignedp;
971 enum machine_mode mode;
972
973 args[i].tree_value = TREE_VALUE (p);
974
975 /* Replace erroneous argument with constant zero. */
976 if (type == error_mark_node || TYPE_SIZE (type) == 0)
977 args[i].tree_value = integer_zero_node, type = integer_type_node;
978
979 /* If TYPE is a transparent union, pass things the way we would
980 pass the first field of the union. We have already verified that
981 the modes are the same. */
982 if (TYPE_TRANSPARENT_UNION (type))
983 type = TREE_TYPE (TYPE_FIELDS (type));
984
985 /* Decide where to pass this arg.
986
987 args[i].reg is nonzero if all or part is passed in registers.
988
989 args[i].partial is nonzero if part but not all is passed in registers,
990 and the exact value says how many words are passed in registers.
991
992 args[i].pass_on_stack is nonzero if the argument must at least be
993 computed on the stack. It may then be loaded back into registers
994 if args[i].reg is nonzero.
995
996 These decisions are driven by the FUNCTION_... macros and must agree
997 with those made by function.c. */
998
999 /* See if this argument should be passed by invisible reference. */
1000 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1001 && contains_placeholder_p (TYPE_SIZE (type)))
1002 || TREE_ADDRESSABLE (type)
1003#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
959f3a06 1004 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
d7cdf113
JL
1005 type, argpos < n_named_args)
1006#endif
1007 )
1008 {
1009 /* If we're compiling a thunk, pass through invisible
1010 references instead of making a copy. */
1011 if (current_function_is_thunk
1012#ifdef FUNCTION_ARG_CALLEE_COPIES
959f3a06 1013 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
d7cdf113
JL
1014 type, argpos < n_named_args)
1015 /* If it's in a register, we must make a copy of it too. */
1016 /* ??? Is this a sufficient test? Is there a better one? */
1017 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1018 && REG_P (DECL_RTL (args[i].tree_value)))
1019 && ! TREE_ADDRESSABLE (type))
1020#endif
1021 )
1022 {
1023 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1024 new object from the argument. If we are passing by
1025 invisible reference, the callee will do that for us, so we
1026 can strip off the TARGET_EXPR. This is not always safe,
1027 but it is safe in the only case where this is a useful
1028 optimization; namely, when the argument is a plain object.
1029 In that case, the frontend is just asking the backend to
1030 make a bitwise copy of the argument. */
1031
1032 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1033 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
1034 (args[i].tree_value, 1)))
1035 == 'd')
1036 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1037 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1038
1039 args[i].tree_value = build1 (ADDR_EXPR,
1040 build_pointer_type (type),
1041 args[i].tree_value);
1042 type = build_pointer_type (type);
1043 }
1044 else
1045 {
1046 /* We make a copy of the object and pass the address to the
1047 function being called. */
1048 rtx copy;
1049
1050 if (TYPE_SIZE (type) == 0
1051 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1052 || (flag_stack_check && ! STACK_CHECK_BUILTIN
05bccae2
RK
1053 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1054 STACK_CHECK_MAX_VAR_SIZE))))
d7cdf113
JL
1055 {
1056 /* This is a variable-sized object. Make space on the stack
1057 for it. */
1058 rtx size_rtx = expr_size (TREE_VALUE (p));
1059
1060 if (*old_stack_level == 0)
1061 {
1062 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1063 *old_pending_adj = pending_stack_adjust;
1064 pending_stack_adjust = 0;
1065 }
1066
1067 copy = gen_rtx_MEM (BLKmode,
1068 allocate_dynamic_stack_space (size_rtx,
1069 NULL_RTX,
1070 TYPE_ALIGN (type)));
1071 }
1072 else
1073 {
1074 int size = int_size_in_bytes (type);
1075 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1076 }
1077
1078 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1079
1080 store_expr (args[i].tree_value, copy, 0);
1081 *is_const = 0;
1082
1083 args[i].tree_value = build1 (ADDR_EXPR,
1084 build_pointer_type (type),
1085 make_tree (type, copy));
1086 type = build_pointer_type (type);
1087 }
1088 }
1089
1090 mode = TYPE_MODE (type);
1091 unsignedp = TREE_UNSIGNED (type);
1092
1093#ifdef PROMOTE_FUNCTION_ARGS
1094 mode = promote_mode (type, mode, &unsignedp, 1);
1095#endif
1096
1097 args[i].unsignedp = unsignedp;
1098 args[i].mode = mode;
959f3a06 1099 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
d7cdf113
JL
1100 argpos < n_named_args);
1101#ifdef FUNCTION_ARG_PARTIAL_NREGS
1102 if (args[i].reg)
1103 args[i].partial
959f3a06 1104 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
d7cdf113
JL
1105 argpos < n_named_args);
1106#endif
1107
1108 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1109
1110 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1111 it means that we are to pass this arg in the register(s) designated
1112 by the PARALLEL, but also to pass it in the stack. */
1113 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1114 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1115 args[i].pass_on_stack = 1;
1116
1117 /* If this is an addressable type, we must preallocate the stack
1118 since we must evaluate the object into its final location.
1119
1120 If this is to be passed in both registers and the stack, it is simpler
1121 to preallocate. */
1122 if (TREE_ADDRESSABLE (type)
1123 || (args[i].pass_on_stack && args[i].reg != 0))
1124 *must_preallocate = 1;
1125
1126 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1127 we cannot consider this function call constant. */
1128 if (TREE_ADDRESSABLE (type))
1129 *is_const = 0;
1130
1131 /* Compute the stack-size of this argument. */
1132 if (args[i].reg == 0 || args[i].partial != 0
1133 || reg_parm_stack_space > 0
1134 || args[i].pass_on_stack)
1135 locate_and_pad_parm (mode, type,
1136#ifdef STACK_PARMS_IN_REG_PARM_AREA
1137 1,
1138#else
1139 args[i].reg != 0,
1140#endif
1141 fndecl, args_size, &args[i].offset,
4fc026cd 1142 &args[i].size, &alignment_pad);
d7cdf113
JL
1143
1144#ifndef ARGS_GROW_DOWNWARD
1145 args[i].slot_offset = *args_size;
1146#endif
1147
4fc026cd
CM
1148 args[i].alignment_pad = alignment_pad;
1149
d7cdf113
JL
1150 /* If a part of the arg was put into registers,
1151 don't include that part in the amount pushed. */
1152 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1153 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1154 / (PARM_BOUNDARY / BITS_PER_UNIT)
1155 * (PARM_BOUNDARY / BITS_PER_UNIT));
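/* Worked example (assumed values): with partial == 3, UNITS_PER_WORD == 4
   and PARM_BOUNDARY == 32, the register part is 3 * 4 = 12 bytes, which
   is already a multiple of the 4-byte parameter boundary, so exactly 12
   bytes are removed from size.constant; the divide-then-multiply only
   changes the result when the register chunk is not a multiple of the
   boundary.  */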
1156
1157 /* Update ARGS_SIZE, the total stack space for args so far. */
1158
1159 args_size->constant += args[i].size.constant;
1160 if (args[i].size.var)
1161 {
1162 ADD_PARM_SIZE (*args_size, args[i].size.var);
1163 }
1164
1165 /* Since the slot offset points to the bottom of the slot,
1166 we must record it after incrementing if the args grow down. */
1167#ifdef ARGS_GROW_DOWNWARD
1168 args[i].slot_offset = *args_size;
1169
1170 args[i].slot_offset.constant = -args_size->constant;
1171 if (args_size->var)
fed3cef0 1172 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
d7cdf113
JL
1173#endif
1174
1175 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1176 have been used, etc. */
1177
959f3a06 1178 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
d7cdf113
JL
1179 argpos < n_named_args);
1180 }
1181}
1182
599f37b6
JL
1183/* Update ARGS_SIZE to contain the total size for the argument block.
1184 Return the original constant component of the argument block's size.
1185
1186 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1187 for arguments passed in registers. */
1188
1189static int
c2f8b491
JH
1190compute_argument_block_size (reg_parm_stack_space, args_size,
1191 preferred_stack_boundary)
599f37b6
JL
1192 int reg_parm_stack_space;
1193 struct args_size *args_size;
c2f8b491 1194 int preferred_stack_boundary ATTRIBUTE_UNUSED;
599f37b6
JL
1195{
1196 int unadjusted_args_size = args_size->constant;
1197
1198 /* Compute the actual size of the argument block required. The variable
1199 and constant sizes must be combined, the size may have to be rounded,
1200 and there may be a minimum required size. */
1201
1202 if (args_size->var)
1203 {
1204 args_size->var = ARGS_SIZE_TREE (*args_size);
1205 args_size->constant = 0;
1206
1207#ifdef PREFERRED_STACK_BOUNDARY
c2f8b491
JH
1208 preferred_stack_boundary /= BITS_PER_UNIT;
1209 if (preferred_stack_boundary > 1)
1210 args_size->var = round_up (args_size->var, preferred_stack_boundary);
599f37b6
JL
1211#endif
1212
1213 if (reg_parm_stack_space > 0)
1214 {
1215 args_size->var
1216 = size_binop (MAX_EXPR, args_size->var,
fed3cef0 1217 ssize_int (reg_parm_stack_space));
599f37b6
JL
1218
1219#ifndef OUTGOING_REG_PARM_STACK_SPACE
1220 /* The area corresponding to register parameters is not to count in
1221 the size of the block we need. So make the adjustment. */
1222 args_size->var
1223 = size_binop (MINUS_EXPR, args_size->var,
fed3cef0 1224 ssize_int (reg_parm_stack_space));
599f37b6
JL
1225#endif
1226 }
1227 }
1228 else
1229 {
1230#ifdef PREFERRED_STACK_BOUNDARY
c2f8b491 1231 preferred_stack_boundary /= BITS_PER_UNIT;
fb5eebb9 1232 args_size->constant = (((args_size->constant
c2732da3 1233 + arg_space_so_far
fb5eebb9 1234 + pending_stack_adjust
c2f8b491
JH
1235 + preferred_stack_boundary - 1)
1236 / preferred_stack_boundary
1237 * preferred_stack_boundary)
c2732da3 1238 - arg_space_so_far
fb5eebb9 1239 - pending_stack_adjust);
599f37b6
JL
1240#endif
1241
1242 args_size->constant = MAX (args_size->constant,
1243 reg_parm_stack_space);
1244
1245#ifdef MAYBE_REG_PARM_STACK_SPACE
1246 if (reg_parm_stack_space == 0)
1247 args_size->constant = 0;
1248#endif
1249
1250#ifndef OUTGOING_REG_PARM_STACK_SPACE
1251 args_size->constant -= reg_parm_stack_space;
1252#endif
1253 }
1254 return unadjusted_args_size;
1255}
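/* Worked example for the constant case above (assumed values): with
   args_size->constant == 20, arg_space_so_far == 0,
   pending_stack_adjust == 4 and a 16-byte preferred boundary, the sum
   20 + 0 + 4 is rounded up to 32 and the 4 pending bytes are subtracted
   again, giving a 28-byte block, so the stack ends up 16-byte aligned
   once the pending adjustment and the arguments are pushed.  */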
1256
19832c77 1257/* Precompute parameters as needed for a function call.
cc0b1adc
JL
1258
1259 IS_CONST indicates the target function is a pure function.
1260
1261 MUST_PREALLOCATE indicates that we must preallocate stack space for
1262 any stack arguments.
1263
1264 NUM_ACTUALS is the number of arguments.
1265
1266 ARGS is an array containing information for each argument; this routine
1267 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1268
1269 ARGS_SIZE contains information about the size of the arg list. */
1270
1271static void
1272precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1273 int is_const;
1274 int must_preallocate;
1275 int num_actuals;
1276 struct arg_data *args;
1277 struct args_size *args_size;
1278{
1279 int i;
1280
1281 /* If this function call is cse'able, precompute all the parameters.
1282 Note that if the parameter is constructed into a temporary, this will
1283 cause an additional copy because the parameter will be constructed
1284 into a temporary location and then copied into the outgoing arguments.
1285 If a parameter contains a call to alloca and this function uses the
1286 stack, precompute the parameter. */
1287
1288 /* If we preallocated the stack space, and some arguments must be passed
1289 on the stack, then we must precompute any parameter which contains a
1290 function call which will store arguments on the stack.
1291 Otherwise, evaluating the parameter may clobber previous parameters
1292 which have already been stored into the stack. */
1293
1294 for (i = 0; i < num_actuals; i++)
1295 if (is_const
1296 || ((args_size->var != 0 || args_size->constant != 0)
1297 && calls_function (args[i].tree_value, 1))
1298 || (must_preallocate
1299 && (args_size->var != 0 || args_size->constant != 0)
1300 && calls_function (args[i].tree_value, 0)))
1301 {
1302 /* If this is an addressable type, we cannot pre-evaluate it. */
1303 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1304 abort ();
1305
1306 push_temp_slots ();
1307
47841d1b 1308 args[i].value
cc0b1adc
JL
1309 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1310
1311 preserve_temp_slots (args[i].value);
1312 pop_temp_slots ();
1313
1314 /* ANSI doesn't require a sequence point here,
1315 but PCC has one, so this will avoid some problems. */
1316 emit_queue ();
1317
1318 args[i].initial_value = args[i].value
47841d1b 1319 = protect_from_queue (args[i].value, 0);
cc0b1adc
JL
1320
1321 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
47841d1b
JJ
1322 {
1323 args[i].value
1324 = convert_modes (args[i].mode,
1325 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1326 args[i].value, args[i].unsignedp);
1327#ifdef PROMOTE_FOR_CALL_ONLY
1328 /* CSE will replace this only if it contains args[i].value
1329 pseudo, so convert it down to the declared mode using
1330 a SUBREG. */
1331 if (GET_CODE (args[i].value) == REG
1332 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1333 {
1334 args[i].initial_value
1335 = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1336 args[i].value, 0);
1337 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1338 SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
1339 = args[i].unsignedp;
1340 }
1341#endif
1342 }
cc0b1adc
JL
1343 }
1344}
1345
0f9b3ea6
JL
1346/* Given the current state of MUST_PREALLOCATE and information about
1347 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1348 compute and return the final value for MUST_PREALLOCATE. */
1349
1350static int
1351finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1352 int must_preallocate;
1353 int num_actuals;
1354 struct arg_data *args;
1355 struct args_size *args_size;
1356{
1357 /* See if we have or want to preallocate stack space.
1358
1359 If we would have to push a partially-in-regs parm
1360 before other stack parms, preallocate stack space instead.
1361
1362 If the size of some parm is not a multiple of the required stack
1363 alignment, we must preallocate.
1364
1365 If the total size of arguments that would otherwise create a copy in
1366 a temporary (such as a CALL) is more than half the total argument list
1367 size, preallocation is faster.
1368
1369 Another reason to preallocate is if we have a machine (like the m88k)
1370 where stack alignment is required to be maintained between every
1371 pair of insns, not just when the call is made. However, we assume here
1372 that such machines either do not have push insns (and hence preallocation
1373 would occur anyway) or the problem is taken care of with
1374 PUSH_ROUNDING. */
1375
1376 if (! must_preallocate)
1377 {
1378 int partial_seen = 0;
1379 int copy_to_evaluate_size = 0;
1380 int i;
1381
1382 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1383 {
1384 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1385 partial_seen = 1;
1386 else if (partial_seen && args[i].reg == 0)
1387 must_preallocate = 1;
1388
1389 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1390 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1391 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1392 || TREE_CODE (args[i].tree_value) == COND_EXPR
1393 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1394 copy_to_evaluate_size
1395 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1396 }
1397
1398 if (copy_to_evaluate_size * 2 >= args_size->constant
1399 && args_size->constant > 0)
1400 must_preallocate = 1;
1401 }
1402 return must_preallocate;
1403}
599f37b6 1404
a45bdd02
JL
1405/* If we preallocated stack space, compute the address of each argument
1406 and store it into the ARGS array.
1407
1408 We need not ensure it is a valid memory address here; it will be
1409 validized when it is used.
1410
1411 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1412
1413static void
1414compute_argument_addresses (args, argblock, num_actuals)
1415 struct arg_data *args;
1416 rtx argblock;
1417 int num_actuals;
1418{
1419 if (argblock)
1420 {
1421 rtx arg_reg = argblock;
1422 int i, arg_offset = 0;
1423
1424 if (GET_CODE (argblock) == PLUS)
1425 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1426
1427 for (i = 0; i < num_actuals; i++)
1428 {
1429 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1430 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1431 rtx addr;
1432
1433 /* Skip this parm if it will not be passed on the stack. */
1434 if (! args[i].pass_on_stack && args[i].reg != 0)
1435 continue;
1436
1437 if (GET_CODE (offset) == CONST_INT)
1438 addr = plus_constant (arg_reg, INTVAL (offset));
1439 else
1440 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1441
1442 addr = plus_constant (addr, arg_offset);
1443 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1444 MEM_SET_IN_STRUCT_P
1445 (args[i].stack,
1446 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1447
1448 if (GET_CODE (slot_offset) == CONST_INT)
1449 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1450 else
1451 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1452
1453 addr = plus_constant (addr, arg_offset);
1454 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1455 }
1456 }
1457}
1458
1459/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1460 in a call instruction.
1461
1462 FNDECL is the tree node for the target function. For an indirect call
1463 FNDECL will be NULL_TREE.
1464
1465 EXP is the CALL_EXPR for this call. */
1466
1467static rtx
1468rtx_for_function_call (fndecl, exp)
1469 tree fndecl;
1470 tree exp;
1471{
1472 rtx funexp;
1473
1474 /* Get the function to call, in the form of RTL. */
1475 if (fndecl)
1476 {
1477 /* If this is the first use of the function, see if we need to
1478 make an external definition for it. */
1479 if (! TREE_USED (fndecl))
1480 {
1481 assemble_external (fndecl);
1482 TREE_USED (fndecl) = 1;
1483 }
1484
1485 /* Get a SYMBOL_REF rtx for the function address. */
1486 funexp = XEXP (DECL_RTL (fndecl), 0);
1487 }
1488 else
1489 /* Generate an rtx (probably a pseudo-register) for the address. */
1490 {
91ab1046 1491 rtx funaddr;
a45bdd02 1492 push_temp_slots ();
91ab1046
DT
1493 funaddr = funexp =
1494 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
a45bdd02
JL
1495 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1496
1497 /* Check the function is executable. */
1498 if (current_function_check_memory_usage)
91ab1046
DT
1499 {
1500#ifdef POINTERS_EXTEND_UNSIGNED
1501 /* It might be OK to convert funexp in place, but there's
1502 a lot going on between here and when it happens naturally
1503 that this seems safer. */
1504 funaddr = convert_memory_address (Pmode, funexp);
1505#endif
1506 emit_library_call (chkr_check_exec_libfunc, 1,
1507 VOIDmode, 1,
1508 funaddr, Pmode);
1509 }
a45bdd02
JL
1510 emit_queue ();
1511 }
1512 return funexp;
1513}
1514
21a3b983
JL
1515/* Do the register loads required for any wholly-register parms or any
1516 parms which are passed both on the stack and in a register. Their
1517 expressions were already evaluated.
1518
1519 Mark all register-parms as living through the call, putting these USE
1520 insns in the CALL_INSN_FUNCTION_USAGE field. */
1521
1522static void
1523load_register_parameters (args, num_actuals, call_fusage)
1524 struct arg_data *args;
1525 int num_actuals;
1526 rtx *call_fusage;
1527{
1528 int i, j;
1529
1530#ifdef LOAD_ARGS_REVERSED
1531 for (i = num_actuals - 1; i >= 0; i--)
1532#else
1533 for (i = 0; i < num_actuals; i++)
1534#endif
1535 {
1536 rtx reg = args[i].reg;
1537 int partial = args[i].partial;
1538 int nregs;
1539
1540 if (reg)
1541 {
 1542 /* Set to non-negative if we must move a word at a time, even if just
 1543 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1544 we just use a normal move insn. This value can be zero if the
1545 argument is a zero size structure with no fields. */
1546 nregs = (partial ? partial
1547 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1548 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1549 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1550 : -1));
1551
1552 /* Handle calls that pass values in multiple non-contiguous
1553 locations. The Irix 6 ABI has examples of this. */
1554
1555 if (GET_CODE (reg) == PARALLEL)
1556 {
1557 emit_group_load (reg, args[i].value,
1558 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1559 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1560 / BITS_PER_UNIT));
1561 }
1562
1563 /* If simple case, just do move. If normal partial, store_one_arg
1564 has already loaded the register for us. In all other cases,
1565 load the register(s) from memory. */
1566
1567 else if (nregs == -1)
1568 emit_move_insn (reg, args[i].value);
1569
1570 /* If we have pre-computed the values to put in the registers in
1571 the case of non-aligned structures, copy them in now. */
1572
1573 else if (args[i].n_aligned_regs != 0)
1574 for (j = 0; j < args[i].n_aligned_regs; j++)
1575 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1576 args[i].aligned_regs[j]);
1577
1578 else if (partial == 0 || args[i].pass_on_stack)
1579 move_block_to_reg (REGNO (reg),
1580 validize_mem (args[i].value), nregs,
1581 args[i].mode);
1582
1583 /* Handle calls that pass values in multiple non-contiguous
1584 locations. The Irix 6 ABI has examples of this. */
1585 if (GET_CODE (reg) == PARALLEL)
1586 use_group_regs (call_fusage, reg);
1587 else if (nregs == -1)
1588 use_reg (call_fusage, reg);
1589 else
1590 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1591 }
1592 }
1593}
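/* Illustrative example of the NREGS computation above (assuming 4-byte
   words): a 10-byte BLKmode argument with PARTIAL == 0 gives NREGS == 3,
   so move_block_to_reg copies three words; a DFmode argument passed
   entirely in registers gives NREGS == -1 and is loaded with a single
   emit_move_insn.  */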
1594
51bbfa0c
RS
1595/* Generate all the code for a function call
1596 and return an rtx for its value.
1597 Store the value in TARGET (specified as an rtx) if convenient.
1598 If the value is stored in TARGET then TARGET is returned.
1599 If IGNORE is nonzero, then we ignore the value of the function call. */
1600
1601rtx
8129842c 1602expand_call (exp, target, ignore)
51bbfa0c
RS
1603 tree exp;
1604 rtx target;
1605 int ignore;
51bbfa0c
RS
1606{
1607 /* List of actual parameters. */
1608 tree actparms = TREE_OPERAND (exp, 1);
1609 /* RTX for the function to be called. */
1610 rtx funexp;
51bbfa0c
RS
1611 /* Data type of the function. */
1612 tree funtype;
1613 /* Declaration of the function being called,
1614 or 0 if the function is computed (not known by name). */
1615 tree fndecl = 0;
1616 char *name = 0;
c2939b57 1617 rtx before_call;
51bbfa0c
RS
1618
1619 /* Register in which non-BLKmode value will be returned,
1620 or 0 if no value or if value is BLKmode. */
1621 rtx valreg;
1622 /* Address where we should return a BLKmode value;
1623 0 if value not BLKmode. */
1624 rtx structure_value_addr = 0;
1625 /* Nonzero if that address is being passed by treating it as
1626 an extra, implicit first parameter. Otherwise,
1627 it is passed by being copied directly into struct_value_rtx. */
1628 int structure_value_addr_parm = 0;
1629 /* Size of aggregate value wanted, or zero if none wanted
1630 or if we are using the non-reentrant PCC calling convention
1631 or expecting the value in registers. */
e5e809f4 1632 HOST_WIDE_INT struct_value_size = 0;
51bbfa0c
RS
1633 /* Nonzero if called function returns an aggregate in memory PCC style,
1634 by returning the address of where to find it. */
1635 int pcc_struct_value = 0;
1636
1637 /* Number of actual parameters in this call, including struct value addr. */
1638 int num_actuals;
1639 /* Number of named args. Args after this are anonymous ones
1640 and they must all go on the stack. */
1641 int n_named_args;
51bbfa0c
RS
1642
1643 /* Vector of information about each argument.
1644 Arguments are numbered in the order they will be pushed,
1645 not the order they are written. */
1646 struct arg_data *args;
1647
1648 /* Total size in bytes of all the stack-parms scanned so far. */
1649 struct args_size args_size;
1650 /* Size of arguments before any adjustments (such as rounding). */
599f37b6 1651 int unadjusted_args_size;
51bbfa0c
RS
1652 /* Data on reg parms scanned so far. */
1653 CUMULATIVE_ARGS args_so_far;
1654 /* Nonzero if a reg parm has been scanned. */
1655 int reg_parm_seen;
efd65a8b 1656 /* Nonzero if this is an indirect function call. */
51bbfa0c
RS
1657
1658 /* Nonzero if we must avoid push-insns in the args for this call.
1659 If stack space is allocated for register parameters, but not by the
1660 caller, then it is preallocated in the fixed part of the stack frame.
1661 So the entire argument block must then be preallocated (i.e., we
1662 ignore PUSH_ROUNDING in that case). */
1663
51bbfa0c
RS
1664#ifdef PUSH_ROUNDING
1665 int must_preallocate = 0;
1666#else
1667 int must_preallocate = 1;
51bbfa0c
RS
1668#endif
1669
f72aed24 1670 /* Size of the stack reserved for parameter registers. */
6f90e075
JW
1671 int reg_parm_stack_space = 0;
1672
51bbfa0c
RS
1673 /* Address of space preallocated for stack parms
1674 (on machines that lack push insns), or 0 if space not preallocated. */
1675 rtx argblock = 0;
1676
1677 /* Nonzero if it is plausible that this is a call to alloca. */
1678 int may_be_alloca;
9ae8ffe7
JL
1679 /* Nonzero if this is a call to malloc or a related function. */
1680 int is_malloc;
51bbfa0c
RS
1681 /* Nonzero if this is a call to setjmp or a related function. */
1682 int returns_twice;
1683 /* Nonzero if this is a call to `longjmp'. */
1684 int is_longjmp;
fa76d9e0
JR
1686 /* Nonzero if this is a call to a fork- or exec-like syscall, which
1687    duplicates or replaces the current process image.  */
1687 int fork_or_exec;
51bbfa0c
RS
1688 /* Nonzero if this is a call to an inline function. */
1689 int is_integrable = 0;
51bbfa0c
RS
1690 /* Nonzero if this is a call to a `const' function.
1691 Note that only explicitly named functions are handled as `const' here. */
1692 int is_const = 0;
1693 /* Nonzero if this is a call to a `volatile' function. */
1694 int is_volatile = 0;
12a22e76
JM
1695 /* Nonzero if this is a call to a function that won't throw an exception. */
1696 int nothrow = TREE_NOTHROW (exp);
51bbfa0c
RS
1697#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1698 /* Define the boundary of the register parm stack space that needs to be
1699    saved, if any.  */
1700 int low_to_save = -1, high_to_save;
1701 rtx save_area = 0; /* Place that it is saved */
1702#endif
1703
1704#ifdef ACCUMULATE_OUTGOING_ARGS
1705 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1706 char *initial_stack_usage_map = stack_usage_map;
a544cfd2 1707 int old_stack_arg_under_construction = 0;
51bbfa0c
RS
1708#endif
1709
1710 rtx old_stack_level = 0;
79be3418 1711 int old_pending_adj = 0;
51bbfa0c 1712 int old_inhibit_defer_pop = inhibit_defer_pop;
774e6b37 1713 int old_arg_space_so_far = arg_space_so_far;
77cac2f2 1714 rtx call_fusage = 0;
51bbfa0c 1715 register tree p;
21a3b983 1716 register int i;
c2f8b491
JH
1717#ifdef PREFERRED_STACK_BOUNDARY
1718 int preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1719#else
1720 /* In this case the preferred_stack_boundary variable is meaningless.
1721 It is used only in order to keep ifdef noise down when calling
1722 compute_argument_block_size. */
1723 int preferred_stack_boundary = 0;
1724#endif
51bbfa0c 1725
7815214e
RK
1726 /* The value of the function call can be put in a hard register.  But
1727    if -fcheck-memory-usage is in effect, code which invokes functions (and
1728    thus clobbers some hard registers) can be inserted before the value is
1729    used.  So target is always a pseudo-register in that case.  */
7d384cc0 1730 if (current_function_check_memory_usage)
7815214e
RK
1731 target = 0;
1732
51bbfa0c
RS
1733 /* See if we can find a DECL-node for the actual function.
1734 As a result, decide whether this is a call to an integrable function. */
1735
1736 p = TREE_OPERAND (exp, 0);
1737 if (TREE_CODE (p) == ADDR_EXPR)
1738 {
1739 fndecl = TREE_OPERAND (p, 0);
1740 if (TREE_CODE (fndecl) != FUNCTION_DECL)
fdff8c6d 1741 fndecl = 0;
51bbfa0c
RS
1742 else
1743 {
1744 if (!flag_no_inline
1745 && fndecl != current_function_decl
aa10adff 1746 && DECL_INLINE (fndecl)
1cf4f698 1747 && DECL_SAVED_INSNS (fndecl)
49ad7cfa 1748 && DECL_SAVED_INSNS (fndecl)->inlinable)
51bbfa0c
RS
1749 is_integrable = 1;
1750 else if (! TREE_ADDRESSABLE (fndecl))
1751 {
13d39dbc 1752 /* In case this function later becomes inlinable,
51bbfa0c
RS
1753 record that there was already a non-inline call to it.
1754
1755 Use abstraction instead of setting TREE_ADDRESSABLE
1756 directly. */
da8c1713
RK
1757 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1758 && optimize > 0)
1907795e
JM
1759 {
1760 warning_with_decl (fndecl, "can't inline call to `%s'");
1761 warning ("called from here");
1762 }
51bbfa0c
RS
1763 mark_addressable (fndecl);
1764 }
1765
d45cf215
RS
1766 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1767 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
51bbfa0c 1768 is_const = 1;
5e24110e
RS
1769
1770 if (TREE_THIS_VOLATILE (fndecl))
1771 is_volatile = 1;
12a22e76
JM
1772
1773 if (TREE_NOTHROW (fndecl))
1774 nothrow = 1;
51bbfa0c
RS
1775 }
1776 }
1777
fdff8c6d
RK
1778 /* If we don't have a specific function to call, see if we have a
1779 constant or `noreturn' function from the type. */
1780 if (fndecl == 0)
1781 {
1782 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1783 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1784 }
1785
6f90e075
JW
1786#ifdef REG_PARM_STACK_SPACE
1787#ifdef MAYBE_REG_PARM_STACK_SPACE
1788 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1789#else
1790 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1791#endif
1792#endif
1793
e5e809f4
JL
1794#if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1795 if (reg_parm_stack_space > 0)
1796 must_preallocate = 1;
1797#endif
1798
51bbfa0c
RS
1799 /* Warn if this value is an aggregate type,
1800 regardless of which calling convention we are using for it. */
05e3bdb9 1801 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
51bbfa0c
RS
1802 warning ("function call has aggregate value");
1803
1804 /* Set up a place to return a structure. */
1805
1806 /* Cater to broken compilers. */
1807 if (aggregate_value_p (exp))
1808 {
1809 /* This call returns a big structure. */
1810 is_const = 0;
1811
1812#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a
RS
1813 {
1814 pcc_struct_value = 1;
0dd532dc
JW
1815 /* Easier than making that case work right. */
1816 if (is_integrable)
1817 {
1818 /* In case this is a static function, note that it has been
1819 used. */
1820 if (! TREE_ADDRESSABLE (fndecl))
1821 mark_addressable (fndecl);
1822 is_integrable = 0;
1823 }
9e7b1d0a
RS
1824 }
1825#else /* not PCC_STATIC_STRUCT_RETURN */
1826 {
1827 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
51bbfa0c 1828
9e7b1d0a
RS
1829 if (target && GET_CODE (target) == MEM)
1830 structure_value_addr = XEXP (target, 0);
1831 else
1832 {
e9a25f70
JL
1833 /* Assign a temporary to hold the value. */
1834 tree d;
51bbfa0c 1835
9e7b1d0a
RS
1836 /* For variable-sized objects, we must be called with a target
1837 specified. If we were to allocate space on the stack here,
1838 we would have no way of knowing when to free it. */
51bbfa0c 1839
002bdd6c
RK
1840 if (struct_value_size < 0)
1841 abort ();
1842
e9a25f70
JL
1843 /* This DECL is just something to feed to mark_addressable;
1844 it doesn't get pushed. */
1845 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1846 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1847 mark_addressable (d);
14a774a9 1848 mark_temp_addr_taken (DECL_RTL (d));
e9a25f70 1849 structure_value_addr = XEXP (DECL_RTL (d), 0);
e5e809f4 1850 TREE_USED (d) = 1;
9e7b1d0a
RS
1851 target = 0;
1852 }
1853 }
1854#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
1855 }
1856
1857 /* If called function is inline, try to integrate it. */
1858
1859 if (is_integrable)
1860 {
1861 rtx temp;
c2939b57 1862
69d4ca36 1863#ifdef ACCUMULATE_OUTGOING_ARGS
c2939b57 1864 before_call = get_last_insn ();
69d4ca36 1865#endif
51bbfa0c
RS
1866
1867 temp = expand_inline_function (fndecl, actparms, target,
1868 ignore, TREE_TYPE (exp),
1869 structure_value_addr);
1870
1871 /* If inlining succeeded, return. */
2e0dd623 1872 if (temp != (rtx) (HOST_WIDE_INT) -1)
51bbfa0c 1873 {
d64f5a78 1874#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
1875 /* If the outgoing argument list must be preserved, push
1876 the stack before executing the inlined function if it
1877 makes any calls. */
1878
1879 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1880 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1881 break;
1882
1883 if (stack_arg_under_construction || i >= 0)
1884 {
a1917650
RK
1885 rtx first_insn
1886 = before_call ? NEXT_INSN (before_call) : get_insns ();
6a651371 1887 rtx insn = NULL_RTX, seq;
2f4aa534 1888
d64f5a78 1889 /* Look for a call in the inline function code.
49ad7cfa 1890 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
d64f5a78
RS
1891 nonzero then there is a call and it is not necessary
1892 to scan the insns. */
1893
49ad7cfa 1894 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
a1917650 1895 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
d64f5a78
RS
1896 if (GET_CODE (insn) == CALL_INSN)
1897 break;
2f4aa534
RS
1898
1899 if (insn)
1900 {
d64f5a78
RS
1901 /* Reserve enough stack space so that the largest
1902 argument list of any function call in the inline
1903 function does not overlap the argument list being
1904 evaluated. This is usually an overestimate because
1905 allocate_dynamic_stack_space reserves space for an
1906 outgoing argument list in addition to the requested
1907 space, but there is no way to ask for stack space such
1908 that an argument list of a certain length can be
e5e809f4 1909 safely constructed.
d64f5a78 1910
e5e809f4
JL
1911 Add the stack space reserved for register arguments, if
1912 any, in the inline function. What is really needed is the
d64f5a78
RS
1913 largest value of reg_parm_stack_space in the inline
1914 function, but that is not available. Using the current
1915 value of reg_parm_stack_space is wrong, but gives
1916 correct results on all supported machines. */
e5e809f4 1917
49ad7cfa 1918 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
e5e809f4
JL
1919 + reg_parm_stack_space);
1920
2f4aa534 1921 start_sequence ();
ccf5d244 1922 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
e5d70561
RK
1923 allocate_dynamic_stack_space (GEN_INT (adjust),
1924 NULL_RTX, BITS_PER_UNIT);
2f4aa534
RS
1925 seq = get_insns ();
1926 end_sequence ();
a1917650 1927 emit_insns_before (seq, first_insn);
e5d70561 1928 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2f4aa534
RS
1929 }
1930 }
d64f5a78 1931#endif
51bbfa0c
RS
1932
1933 /* If the result is equivalent to TARGET, return TARGET to simplify
1934 checks in store_expr. They can be equivalent but not equal in the
1935 case of a function that returns BLKmode. */
1936 if (temp != target && rtx_equal_p (temp, target))
1937 return target;
1938 return temp;
1939 }
1940
1941 /* If inlining failed, mark FNDECL as needing to be compiled
0481a55e
RK
1942 separately after all. If function was declared inline,
1943 give a warning. */
1944 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
da8c1713 1945 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1907795e
JM
1946 {
1947 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1948 warning ("called from here");
1949 }
51bbfa0c
RS
1950 mark_addressable (fndecl);
1951 }
1952
51bbfa0c
RS
1953 function_call_count++;
1954
1955 if (fndecl && DECL_NAME (fndecl))
1956 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1957
c2f8b491
JH
1958 /* Ensure current function's preferred stack boundary is at least
1959 what we need. We don't have to increase alignment for recursive
1960 functions. */
1961 if (cfun->preferred_stack_boundary < preferred_stack_boundary
1962 && fndecl != current_function_decl)
1963 cfun->preferred_stack_boundary = preferred_stack_boundary;
1964
51bbfa0c 1965 /* See if this is a call to a function that can return more than once
20efdf74 1966 or a call to longjmp or malloc. */
fa76d9e0 1967 special_function_p (fndecl, &returns_twice, &is_longjmp, &fork_or_exec,
20efdf74 1968 &is_malloc, &may_be_alloca);
51bbfa0c 1969
51bbfa0c
RS
1970 if (may_be_alloca)
1971 current_function_calls_alloca = 1;
1972
39842893
JL
1973 /* Operand 0 is a pointer-to-function; get the type of the function. */
1974 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1975 if (! POINTER_TYPE_P (funtype))
1976 abort ();
1977 funtype = TREE_TYPE (funtype);
1978
1979 /* When calling a const function, we must pop the stack args right away,
1980 so that the pop is deleted or moved with the call. */
1981 if (is_const)
1982 NO_DEFER_POP;
1983
51bbfa0c
RS
1984 /* Don't let pending stack adjusts add up to too much.
1985 Also, do all pending adjustments now
1986 if there is any chance this might be a call to alloca. */
1987
1988 if (pending_stack_adjust >= 32
1989 || (pending_stack_adjust > 0 && may_be_alloca))
1990 do_pending_stack_adjust ();
1991
fa76d9e0
JR
1992 if (profile_arc_flag && fork_or_exec)
1993 {
1994 /* A fork duplicates the profile information, and an exec discards
1995 it. We can't rely on fork/exec to be paired. So write out the
1996 profile information we have gathered so far, and clear it. */
1997 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
1998 VOIDmode, 0);
1999
2000 /* ??? When __clone is called with CLONE_VM set, profiling is
2001 subject to race conditions, just as with multithreaded programs. */
2002 }
2003
cc79451b
RK
2004 /* Push the temporary stack slot level so that we can free any temporaries
2005 we make. */
51bbfa0c
RS
2006 push_temp_slots ();
2007
eecb6f50
JL
2008 /* Start updating where the next arg would go.
2009
2010 On some machines (such as the PA) indirect calls have a different
2011 calling convention than normal calls. The last argument in
2012 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2013 or not. */
2014 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
51bbfa0c
RS
2015
2016 /* If struct_value_rtx is 0, it means pass the address
2017 as if it were an extra parameter. */
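  /* In source terms, on a target where struct_value_rtx is 0 a call such
     as `struct big x = f ();' is lowered roughly as `f (&x)': the address
     of the return slot becomes a hidden first argument, which is what the
     tree_cons below prepends to ACTPARMS.  */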
2018 if (structure_value_addr && struct_value_rtx == 0)
2019 {
5582b006
RK
2020 /* If structure_value_addr is a REG other than
2021	 virtual_outgoing_args_rtx, we can always use it.  If it
2022 is not a REG, we must always copy it into a register.
2023 If it is virtual_outgoing_args_rtx, we must copy it to another
2024 register in some cases. */
2025 rtx temp = (GET_CODE (structure_value_addr) != REG
d64f5a78 2026#ifdef ACCUMULATE_OUTGOING_ARGS
5582b006
RK
2027 || (stack_arg_under_construction
2028 && structure_value_addr == virtual_outgoing_args_rtx)
d64f5a78 2029#endif
5582b006
RK
2030 ? copy_addr_to_reg (structure_value_addr)
2031 : structure_value_addr);
d64f5a78 2032
51bbfa0c
RS
2033 actparms
2034 = tree_cons (error_mark_node,
2035 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2f4aa534 2036 temp),
51bbfa0c
RS
2037 actparms);
2038 structure_value_addr_parm = 1;
2039 }
2040
2041 /* Count the arguments and set NUM_ACTUALS. */
2042 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
2043 num_actuals = i;
2044
2045 /* Compute number of named args.
2046 Normally, don't include the last named arg if anonymous args follow.
e5e809f4 2047 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
469225d8
JW
2048 (If no anonymous args follow, the result of list_length is actually
2049 one too large. This is harmless.)
51bbfa0c 2050
9ab70a9b
R
2051 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2052 zero, this machine will be able to place unnamed args that were passed in
469225d8
JW
2053 registers into the stack. So treat all args as named. This allows the
2054 insns emitting for a specific argument list to be independent of the
2055 function declaration.
51bbfa0c 2056
9ab70a9b 2057 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
51bbfa0c
RS
2058 way to pass unnamed args in registers, so we must force them into
2059 memory. */
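  /* For example, given `void f (int, int);' the TYPE_ARG_TYPES chain ends
     with the void terminator, so list_length returns 3 and, with
     STRICT_ARGUMENT_NAMING zero, n_named_args comes out as 2, still
     covering both int parameters.  For a stdarg prototype such as
     `int f (const char *, ...)' there is no void terminator, list_length
     returns 1, and the single named argument is treated as unnamed here.  */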
e5e809f4
JL
2060
2061 if ((STRICT_ARGUMENT_NAMING
9ab70a9b 2062 || ! PRETEND_OUTGOING_VARARGS_NAMED)
e5e809f4 2063 && TYPE_ARG_TYPES (funtype) != 0)
51bbfa0c 2064 n_named_args
0ee902cb 2065 = (list_length (TYPE_ARG_TYPES (funtype))
0ee902cb 2066 /* Don't include the last named arg. */
d0f9021a 2067 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
0ee902cb
RM
2068 /* Count the struct value address, if it is passed as a parm. */
2069 + structure_value_addr_parm);
51bbfa0c 2070 else
51bbfa0c
RS
2071 /* If we know nothing, treat all args as named. */
2072 n_named_args = num_actuals;
2073
2074 /* Make a vector to hold all the information about each arg. */
2075 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
4c9a05bc 2076 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
51bbfa0c 2077
d7cdf113
JL
2078 /* Build up entries in the ARGS array, compute the size of the arguments
2079 into ARGS_SIZE, etc. */
2080 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
959f3a06 2081 actparms, fndecl, &args_so_far,
d7cdf113
JL
2082 reg_parm_stack_space, &old_stack_level,
2083 &old_pending_adj, &must_preallocate,
2084 &is_const);
51bbfa0c 2085
6f90e075
JW
2086#ifdef FINAL_REG_PARM_STACK_SPACE
2087 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2088 args_size.var);
2089#endif
2090
51bbfa0c
RS
2091 if (args_size.var)
2092 {
2093 /* If this function requires a variable-sized argument list, don't try to
2094 make a cse'able block for this call. We may be able to do this
2095 eventually, but it is too complicated to keep track of what insns go
2096 in the cse'able block and which don't. */
2097
2098 is_const = 0;
2099 must_preallocate = 1;
51bbfa0c 2100 }
e5e809f4 2101
599f37b6
JL
2102 /* Compute the actual size of the argument block required. The variable
2103 and constant sizes must be combined, the size may have to be rounded,
2104 and there may be a minimum required size. */
2105 unadjusted_args_size
c2f8b491
JH
2106 = compute_argument_block_size (reg_parm_stack_space, &args_size,
2107 preferred_stack_boundary);
51bbfa0c 2108
0f9b3ea6
JL
2109 /* Now make final decision about preallocating stack space. */
2110 must_preallocate = finalize_must_preallocate (must_preallocate,
2111 num_actuals, args, &args_size);
51bbfa0c
RS
2112
2113 /* If the structure value address will reference the stack pointer, we must
2114 stabilize it. We don't need to do this if we know that we are not going
2115 to adjust the stack pointer in processing this call. */
2116
2117 if (structure_value_addr
2118 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2119 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2120 && (args_size.var
2121#ifndef ACCUMULATE_OUTGOING_ARGS
2122 || args_size.constant
2123#endif
2124 ))
2125 structure_value_addr = copy_to_reg (structure_value_addr);
2126
cc0b1adc
JL
2127 /* Precompute any arguments as needed. */
2128 precompute_arguments (is_const, must_preallocate, num_actuals,
2129 args, &args_size);
51bbfa0c
RS
2130
2131 /* Now we are about to start emitting insns that can be deleted
2132 if a libcall is deleted. */
9ae8ffe7 2133 if (is_const || is_malloc)
51bbfa0c
RS
2134 start_sequence ();
2135
2136 /* If we have no actual push instructions, or shouldn't use them,
2137 make space for all args right now. */
2138
2139 if (args_size.var != 0)
2140 {
2141 if (old_stack_level == 0)
2142 {
e5d70561 2143 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
51bbfa0c
RS
2144 old_pending_adj = pending_stack_adjust;
2145 pending_stack_adjust = 0;
d64f5a78 2146#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2147 /* stack_arg_under_construction says whether a stack arg is
2148 being constructed at the old stack level. Pushing the stack
2149 gets a clean outgoing argument block. */
2150 old_stack_arg_under_construction = stack_arg_under_construction;
2151 stack_arg_under_construction = 0;
d64f5a78 2152#endif
51bbfa0c
RS
2153 }
2154 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2155 }
26a258fe 2156 else
51bbfa0c
RS
2157 {
2158 /* Note that we must go through the motions of allocating an argument
2159 block even if the size is zero because we may be storing args
2160 in the area reserved for register arguments, which may be part of
2161 the stack frame. */
26a258fe 2162
51bbfa0c
RS
2163 int needed = args_size.constant;
2164
0f41302f
MS
2165 /* Store the maximum argument space used. It will be pushed by
2166 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2167 checking). */
51bbfa0c
RS
2168
2169 if (needed > current_function_outgoing_args_size)
2170 current_function_outgoing_args_size = needed;
2171
26a258fe
PB
2172 if (must_preallocate)
2173 {
2174#ifdef ACCUMULATE_OUTGOING_ARGS
2175 /* Since the stack pointer will never be pushed, it is possible for
2176 the evaluation of a parm to clobber something we have already
2177 written to the stack. Since most function calls on RISC machines
2178 do not use the stack, this is uncommon, but must work correctly.
2179
2180 Therefore, we save any area of the stack that was already written
2181 and that we are using. Here we set up to do this by making a new
2182 stack usage map from the old one. The actual save will be done
2183 by store_one_arg.
2184
2185 Another approach might be to try to reorder the argument
2186 evaluations to avoid this conflicting stack usage. */
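	  /* Concretely, stack_usage_map is a byte map indexed by offset
	     into the outgoing argument block: a nonzero byte at offset N
	     records that offset N has already been written, so
	     store_one_arg will save that slot before reusing it and
	     restore it after the call.  */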
2187
e5e809f4 2188#ifndef OUTGOING_REG_PARM_STACK_SPACE
26a258fe
PB
2189 /* Since we will be writing into the entire argument area, the
2190 map must be allocated for its entire size, not just the part that
2191 is the responsibility of the caller. */
2192 needed += reg_parm_stack_space;
51bbfa0c
RS
2193#endif
2194
2195#ifdef ARGS_GROW_DOWNWARD
26a258fe
PB
2196 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2197 needed + 1);
51bbfa0c 2198#else
26a258fe
PB
2199 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2200 needed);
51bbfa0c 2201#endif
26a258fe 2202 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
51bbfa0c 2203
26a258fe
PB
2204 if (initial_highest_arg_in_use)
2205 bcopy (initial_stack_usage_map, stack_usage_map,
2206 initial_highest_arg_in_use);
51bbfa0c 2207
26a258fe
PB
2208 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2209 bzero (&stack_usage_map[initial_highest_arg_in_use],
2210 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2211 needed = 0;
2f4aa534 2212
26a258fe
PB
2213 /* The address of the outgoing argument list must not be copied to a
2214 register here, because argblock would be left pointing to the
2215 wrong place after the call to allocate_dynamic_stack_space below.
2216 */
2f4aa534 2217
26a258fe 2218 argblock = virtual_outgoing_args_rtx;
2f4aa534 2219
51bbfa0c 2220#else /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 2221 if (inhibit_defer_pop == 0)
51bbfa0c 2222 {
26a258fe
PB
2223 /* Try to reuse some or all of the pending_stack_adjust
2224 to get this space. Maybe we can avoid any pushing. */
2225 if (needed > pending_stack_adjust)
2226 {
2227 needed -= pending_stack_adjust;
2228 pending_stack_adjust = 0;
2229 }
2230 else
2231 {
2232 pending_stack_adjust -= needed;
2233 needed = 0;
2234 }
51bbfa0c 2235 }
26a258fe
PB
2236	  /* Special case this because the overhead of `push_block' in this
2237	     case is non-trivial.  */
2238 if (needed == 0)
2239 argblock = virtual_outgoing_args_rtx;
51bbfa0c 2240 else
26a258fe
PB
2241 argblock = push_block (GEN_INT (needed), 0, 0);
2242
2243 /* We only really need to call `copy_to_reg' in the case where push
2244 insns are going to be used to pass ARGBLOCK to a function
2245 call in ARGS. In that case, the stack pointer changes value
2246 from the allocation point to the call point, and hence
2247 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2248 But might as well always do it. */
2249 argblock = copy_to_reg (argblock);
51bbfa0c 2250#endif /* not ACCUMULATE_OUTGOING_ARGS */
26a258fe 2251 }
51bbfa0c
RS
2252 }
2253
bfbf933a
RS
2254#ifdef ACCUMULATE_OUTGOING_ARGS
2255 /* The save/restore code in store_one_arg handles all cases except one:
2256 a constructor call (including a C function returning a BLKmode struct)
2257 to initialize an argument. */
2258 if (stack_arg_under_construction)
2259 {
e5e809f4 2260#ifndef OUTGOING_REG_PARM_STACK_SPACE
e5d70561 2261 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
bfbf933a 2262#else
e5d70561 2263 rtx push_size = GEN_INT (args_size.constant);
bfbf933a
RS
2264#endif
2265 if (old_stack_level == 0)
2266 {
e5d70561 2267 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bfbf933a
RS
2268 old_pending_adj = pending_stack_adjust;
2269 pending_stack_adjust = 0;
2270 /* stack_arg_under_construction says whether a stack arg is
2271 being constructed at the old stack level. Pushing the stack
2272 gets a clean outgoing argument block. */
2273 old_stack_arg_under_construction = stack_arg_under_construction;
2274 stack_arg_under_construction = 0;
2275 /* Make a new map for the new argument list. */
2276 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2277 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2278 highest_outgoing_arg_in_use = 0;
2279 }
e5d70561 2280 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
bfbf933a
RS
2281 }
2282 /* If argument evaluation might modify the stack pointer, copy the
2283 address of the argument list to a register. */
2284 for (i = 0; i < num_actuals; i++)
2285 if (args[i].pass_on_stack)
2286 {
2287 argblock = copy_addr_to_reg (argblock);
2288 break;
2289 }
2290#endif
2291
a45bdd02 2292 compute_argument_addresses (args, argblock, num_actuals);
bfbf933a 2293
51bbfa0c 2294#ifdef PUSH_ARGS_REVERSED
c795bca9 2295#ifdef PREFERRED_STACK_BOUNDARY
51bbfa0c
RS
2296 /* If we push args individually in reverse order, perform stack alignment
2297 before the first push (the last arg). */
4e217aed
JH
2298 if (args_size.constant != unadjusted_args_size)
2299 {
2300 /* When the stack adjustment is pending,
2301 we get better code by combining the adjustments. */
42f602d8
JH
2302 if (pending_stack_adjust && !is_const
2303 && !inhibit_defer_pop)
4e217aed
JH
2304 {
2305 args_size.constant = (unadjusted_args_size
2306 + ((pending_stack_adjust + args_size.constant
c2732da3 2307 + arg_space_so_far
4e217aed
JH
2308 - unadjusted_args_size)
2309 % (preferred_stack_boundary / BITS_PER_UNIT)));
2310 pending_stack_adjust -= args_size.constant - unadjusted_args_size;
2311 do_pending_stack_adjust ();
2312 }
2313 else if (argblock == 0)
2314 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
c2732da3
JM
2315 arg_space_so_far += args_size.constant - unadjusted_args_size;
2316
2317 /* Now that the stack is properly aligned, pops can't safely
2318 be deferred during the evaluation of the arguments. */
2319 NO_DEFER_POP;
4e217aed 2320 }
51bbfa0c
RS
2321#endif
2322#endif
2323
2324 /* Don't try to defer pops if preallocating, not even from the first arg,
2325 since ARGBLOCK probably refers to the SP. */
2326 if (argblock)
2327 NO_DEFER_POP;
2328
a45bdd02 2329 funexp = rtx_for_function_call (fndecl, exp);
51bbfa0c
RS
2330
2331 /* Figure out the register where the value, if any, will come back. */
2332 valreg = 0;
2333 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2334 && ! structure_value_addr)
2335 {
2336 if (pcc_struct_value)
2337 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
4dc07bd7 2338 fndecl, 0);
51bbfa0c 2339 else
4dc07bd7 2340 valreg = hard_function_value (TREE_TYPE (exp), fndecl, 0);
51bbfa0c
RS
2341 }
2342
2343 /* Precompute all register parameters. It isn't safe to compute anything
0f41302f 2344 once we have started filling any specific hard regs. */
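  /* For example, if argument 1 had already been loaded into its hard
     register and the expression for argument 2 contained a function call,
     expanding argument 2 would clobber that hard register; so expensive
     arguments are evaluated into pseudos here and only copied into hard
     registers by load_register_parameters, just before the call.  */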
20efdf74 2345 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
51bbfa0c
RS
2346
2347#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
e5e809f4 2348
20efdf74
JL
2349 /* Save the fixed argument area if it's part of the caller's frame and
2350 is clobbered by argument setup for this call. */
2351 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2352 &low_to_save, &high_to_save);
b94301c2 2353#endif
20efdf74 2354
51bbfa0c
RS
2355
2356 /* Now store (and compute if necessary) all non-register parms.
2357 These come before register parms, since they can require block-moves,
2358 which could clobber the registers used for register parms.
2359 Parms which have partial registers are not stored here,
2360 but we do preallocate space here if they want that. */
2361
2362 for (i = 0; i < num_actuals; i++)
2363 if (args[i].reg == 0 || args[i].pass_on_stack)
2364 store_one_arg (&args[i], argblock, may_be_alloca,
c84e2712 2365 args_size.var != 0, reg_parm_stack_space);
51bbfa0c 2366
4ab56118
RK
2367 /* If we have a parm that is passed in registers but not in memory
2368 and whose alignment does not permit a direct copy into registers,
2369 make a group of pseudos that correspond to each register that we
2370 will later fill. */
45d44c98 2371 if (STRICT_ALIGNMENT)
20efdf74 2372 store_unaligned_arguments_into_pseudos (args, num_actuals);
4ab56118 2373
51bbfa0c
RS
2374 /* Now store any partially-in-registers parm.
2375 This is the last place a block-move can happen. */
2376 if (reg_parm_seen)
2377 for (i = 0; i < num_actuals; i++)
2378 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2379 store_one_arg (&args[i], argblock, may_be_alloca,
c84e2712 2380 args_size.var != 0, reg_parm_stack_space);
51bbfa0c
RS
2381
2382#ifndef PUSH_ARGS_REVERSED
c795bca9 2383#ifdef PREFERRED_STACK_BOUNDARY
51bbfa0c
RS
2384 /* If we pushed args in forward order, perform stack alignment
2385 after pushing the last arg. */
2386 if (argblock == 0)
774e6b37
JH
2387 {
2388 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2389 arg_space_so_far += args_size.constant - unadjusted_args_size;
2390 }
51bbfa0c
RS
2391#endif
2392#endif
2393
756e0e12
RS
2394 /* If register arguments require space on the stack and stack space
2395 was not preallocated, allocate stack space here for arguments
2396 passed in registers. */
6e716e89 2397#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
756e0e12 2398 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 2399 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
2400#endif
2401
51bbfa0c
RS
2402 /* Pass the function the address in which to return a structure value. */
2403 if (structure_value_addr && ! structure_value_addr_parm)
2404 {
2405 emit_move_insn (struct_value_rtx,
2406 force_reg (Pmode,
e5d70561
RK
2407 force_operand (structure_value_addr,
2408 NULL_RTX)));
7815214e
RK
2409
2410 /* Mark the memory for the aggregate as write-only. */
7d384cc0 2411 if (current_function_check_memory_usage)
7815214e
RK
2412 emit_library_call (chkr_set_right_libfunc, 1,
2413 VOIDmode, 3,
6a9c4aed 2414 structure_value_addr, Pmode,
7815214e 2415 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
956d6950
JL
2416 GEN_INT (MEMORY_USE_WO),
2417 TYPE_MODE (integer_type_node));
7815214e 2418
51bbfa0c 2419 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 2420 use_reg (&call_fusage, struct_value_rtx);
51bbfa0c
RS
2421 }
2422
77cac2f2 2423 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
8b0f9101 2424
21a3b983 2425 load_register_parameters (args, num_actuals, &call_fusage);
51bbfa0c
RS
2426
2427 /* Perform postincrements before actually calling the function. */
2428 emit_queue ();
2429
c2939b57
JW
2430 /* Save a pointer to the last insn before the call, so that we can
2431 later safely search backwards to find the CALL_INSN. */
2432 before_call = get_last_insn ();
2433
51bbfa0c
RS
2434 /* All arguments and registers used for the call must be set up by now! */
2435
51bbfa0c 2436 /* Generate the actual call instruction. */
fb5eebb9
RH
2437 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2438 args_size.constant, struct_value_size,
51bbfa0c 2439 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
12a22e76 2440 valreg, old_inhibit_defer_pop, call_fusage, is_const, nothrow);
51bbfa0c 2441
774e6b37
JH
2442 /* The stack pointer ought to be restored to the value it had before the call.  */
2443 if (old_arg_space_so_far != arg_space_so_far)
2444    abort ();
2445
51bbfa0c
RS
2446 /* If call is cse'able, make appropriate pair of reg-notes around it.
2447 Test valreg so we don't crash; may safely ignore `const'
80a3ad45
JW
2448 if return type is void. Disable for PARALLEL return values, because
2449 we have no way to move such values into a pseudo register. */
2450 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
51bbfa0c
RS
2451 {
2452 rtx note = 0;
2453 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2454 rtx insns;
2455
9ae8ffe7
JL
2456 /* Mark the return value as a pointer if needed. */
2457 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2458 {
2459 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2460 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2461 }
2462
51bbfa0c
RS
2463 /* Construct an "equal form" for the value which mentions all the
2464 arguments in order as well as the function name. */
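      /* The note is roughly (expr_list FUNEXP (expr_list ARG ... nil));
	 emit_libcall_block attaches it as a REG_EQUAL note on the insn
	 copying VALREG into TEMP, which lets CSE recognize a second const
	 call with identical arguments and reuse the first result.  */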
2465#ifdef PUSH_ARGS_REVERSED
2466 for (i = 0; i < num_actuals; i++)
38a448ca 2467 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c
RS
2468#else
2469 for (i = num_actuals - 1; i >= 0; i--)
38a448ca 2470 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c 2471#endif
38a448ca 2472 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
51bbfa0c
RS
2473
2474 insns = get_insns ();
2475 end_sequence ();
2476
2477 emit_libcall_block (insns, temp, valreg, note);
2478
2479 valreg = temp;
2480 }
4f48d56a
RK
2481 else if (is_const)
2482 {
2483 /* Otherwise, just write out the sequence without a note. */
2484 rtx insns = get_insns ();
2485
2486 end_sequence ();
2487 emit_insns (insns);
2488 }
9ae8ffe7
JL
2489 else if (is_malloc)
2490 {
2491 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2492 rtx last, insns;
2493
2494 /* The return value from a malloc-like function is a pointer. */
2495 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2496 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2497
2498 emit_move_insn (temp, valreg);
2499
2500      /* The return value from a malloc-like function cannot alias
2501 anything else. */
2502 last = get_last_insn ();
2503 REG_NOTES (last) =
38a448ca 2504 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
9ae8ffe7
JL
2505
2506 /* Write out the sequence. */
2507 insns = get_insns ();
2508 end_sequence ();
2509 emit_insns (insns);
2510 valreg = temp;
2511 }
51bbfa0c
RS
2512
2513 /* For calls to `setjmp', etc., inform flow.c it should complain
2514 if nonvolatile values are live. */
2515
2516 if (returns_twice)
2517 {
c2939b57
JW
2518 /* The NOTE_INSN_SETJMP note must be emitted immediately after the
2519 CALL_INSN. Some ports emit more than just a CALL_INSN above, so
2520 we must search for it here. */
2521 rtx last = get_last_insn ();
2522 while (GET_CODE (last) != CALL_INSN)
2523 {
2524 last = PREV_INSN (last);
2525 /* There was no CALL_INSN? */
2526 if (last == before_call)
2527 abort ();
2528 }
2529 emit_note_after (NOTE_INSN_SETJMP, last);
51bbfa0c
RS
2530 current_function_calls_setjmp = 1;
2531 }
2532
2533 if (is_longjmp)
2534 current_function_calls_longjmp = 1;
2535
2536 /* Notice functions that cannot return.
2537 If optimizing, insns emitted below will be dead.
2538 If not optimizing, they will exist, which is useful
2539 if the user uses the `return' command in the debugger. */
2540
2541 if (is_volatile || is_longjmp)
2542 emit_barrier ();
2543
51bbfa0c
RS
2544 /* If value type not void, return an rtx for the value. */
2545
e976b8b2
MS
2546 /* If there are cleanups to be called, don't use a hard reg as target.
2547 We need to double check this and see if it matters anymore. */
e9a25f70 2548 if (any_pending_cleanups (1)
51bbfa0c
RS
2549 && target && REG_P (target)
2550 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2551 target = 0;
2552
2553 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2554 || ignore)
2555 {
2556 target = const0_rtx;
2557 }
2558 else if (structure_value_addr)
2559 {
2560 if (target == 0 || GET_CODE (target) != MEM)
29008b51 2561 {
38a448ca
RH
2562 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2563 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2564 structure_value_addr));
c6df88cb
MM
2565 MEM_SET_IN_STRUCT_P (target,
2566 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
29008b51 2567 }
51bbfa0c
RS
2568 }
2569 else if (pcc_struct_value)
2570 {
f78b5ca1
JL
2571 /* This is the special C++ case where we need to
2572 know what the true target was. We take care to
2573 never use this value more than once in one expression. */
38a448ca
RH
2574 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2575 copy_to_reg (valreg));
c6df88cb 2576 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
51bbfa0c 2577 }
cacbd532
JW
2578 /* Handle calls that return values in multiple non-contiguous locations.
2579 The Irix 6 ABI has examples of this. */
2580 else if (GET_CODE (valreg) == PARALLEL)
2581 {
aac5cc16
RH
2582 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2583
cacbd532
JW
2584 if (target == 0)
2585 {
2b4092f2 2586 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
c6df88cb 2587 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
cacbd532
JW
2588 preserve_temp_slots (target);
2589 }
2590
c5c76735
JL
2591 if (! rtx_equal_p (target, valreg))
2592 emit_group_store (target, valreg, bytes,
2593 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
cacbd532 2594 }
059c3d84
JW
2595 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2596 && GET_MODE (target) == GET_MODE (valreg))
2597 /* TARGET and VALREG cannot be equal at this point because the latter
2598 would not have REG_FUNCTION_VALUE_P true, while the former would if
2599 it were referring to the same register.
2600
2601 If they refer to the same register, this move will be a no-op, except
2602 when function inlining is being done. */
2603 emit_move_insn (target, valreg);
766b19fb 2604 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
c36fce9a 2605 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
51bbfa0c
RS
2606 else
2607 target = copy_to_reg (valreg);
2608
84b55618 2609#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
2610 /* If we promoted this return value, make the proper SUBREG. TARGET
2611 might be const0_rtx here, so be careful. */
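  /* For instance, a function returning `short' on a machine that promotes
     function return values to SImode hands back an SImode VALREG; the code
     below wraps the copy in (subreg:HI ...) and sets SUBREG_PROMOTED_VAR_P
     so later passes know the value has already been sign- or zero-extended
     as appropriate.  */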
2612 if (GET_CODE (target) == REG
766b19fb 2613 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
5d2ac65e 2614 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 2615 {
321e0bba
RK
2616 tree type = TREE_TYPE (exp);
2617 int unsignedp = TREE_UNSIGNED (type);
84b55618 2618
321e0bba
RK
2619 /* If we don't promote as expected, something is wrong. */
2620 if (GET_MODE (target)
2621 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
5d2ac65e
RK
2622 abort ();
2623
38a448ca 2624 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
84b55618
RK
2625 SUBREG_PROMOTED_VAR_P (target) = 1;
2626 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2627 }
2628#endif
2629
2f4aa534
RS
2630 /* If size of args is variable or this was a constructor call for a stack
2631 argument, restore saved stack-pointer value. */
51bbfa0c
RS
2632
2633 if (old_stack_level)
2634 {
e5d70561 2635 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 2636 pending_stack_adjust = old_pending_adj;
d64f5a78 2637#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2638 stack_arg_under_construction = old_stack_arg_under_construction;
2639 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2640 stack_usage_map = initial_stack_usage_map;
d64f5a78 2641#endif
51bbfa0c 2642 }
51bbfa0c
RS
2643#ifdef ACCUMULATE_OUTGOING_ARGS
2644 else
2645 {
2646#ifdef REG_PARM_STACK_SPACE
2647 if (save_area)
20efdf74
JL
2648 restore_fixed_argument_area (save_area, argblock,
2649 high_to_save, low_to_save);
b94301c2 2650#endif
51bbfa0c 2651
51bbfa0c
RS
2652 /* If we saved any argument areas, restore them. */
2653 for (i = 0; i < num_actuals; i++)
2654 if (args[i].save_area)
2655 {
2656 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2657 rtx stack_area
38a448ca
RH
2658 = gen_rtx_MEM (save_mode,
2659 memory_address (save_mode,
2660 XEXP (args[i].stack_slot, 0)));
51bbfa0c
RS
2661
2662 if (save_mode != BLKmode)
2663 emit_move_insn (stack_area, args[i].save_area);
2664 else
2665 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 2666 GEN_INT (args[i].size.constant),
51bbfa0c
RS
2667 PARM_BOUNDARY / BITS_PER_UNIT);
2668 }
2669
2670 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2671 stack_usage_map = initial_stack_usage_map;
2672 }
2673#endif
2674
59257ff7
RK
2675 /* If this was alloca, record the new stack level for nonlocal gotos.
2676 Check for the handler slots since we might not have a save area
0f41302f 2677 for non-local gotos. */
59257ff7 2678
ba716ac9 2679 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
e5d70561 2680 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
2681
2682 pop_temp_slots ();
2683
8e6a59fe
MM
2684 /* Free up storage we no longer need. */
2685 for (i = 0; i < num_actuals; ++i)
2686 if (args[i].aligned_regs)
2687 free (args[i].aligned_regs);
2688
51bbfa0c
RS
2689 return target;
2690}
2691\f
12a22e76
JM
2692/* Returns nonzero if FUN is the symbol for a library function which can
2693 not throw. */
2694
2695static int
2696libfunc_nothrow (fun)
2697 rtx fun;
2698{
2699 if (fun == throw_libfunc
2700 || fun == rethrow_libfunc
2701 || fun == sjthrow_libfunc
2702 || fun == sjpopnthrow_libfunc)
2703 return 0;
2704
2705 return 1;
2706}
322e3e34 2707\f
43bc5f13
JH
2708/* Emit a library call to function FUN (a SYMBOL_REF rtx).
2709    The RETVAL parameter specifies whether the return value needs to be saved;
2710    the other parameters are documented in the emit_library_call function below.  */
322e3e34 2711
43bc5f13
JH
2712static rtx
2713emit_library_call_value_1 (retval, orgfun, value, no_queue, outmode, nargs, p)
2714 int retval;
2715 rtx orgfun;
2716 rtx value;
2717 int no_queue;
2718 enum machine_mode outmode;
2719 int nargs;
2720 va_list p;
322e3e34 2721{
322e3e34
RK
2722 /* Total size in bytes of all the stack-parms scanned so far. */
2723 struct args_size args_size;
2724 /* Size of arguments before any adjustments (such as rounding). */
2725 struct args_size original_args_size;
2726 register int argnum;
322e3e34 2727 rtx fun;
322e3e34
RK
2728 int inc;
2729 int count;
4fc026cd 2730 struct args_size alignment_pad;
322e3e34
RK
2731 rtx argblock = 0;
2732 CUMULATIVE_ARGS args_so_far;
2733 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 2734 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
2735 struct arg *argvec;
2736 int old_inhibit_defer_pop = inhibit_defer_pop;
774e6b37 2737 int old_arg_space_so_far = arg_space_so_far;
77cac2f2 2738 rtx call_fusage = 0;
322e3e34 2739 rtx mem_value = 0;
fac0ad80 2740 int pcc_struct_value = 0;
4f389214 2741 int struct_value_size = 0;
d61bee95 2742 int is_const;
e5e809f4 2743 int reg_parm_stack_space = 0;
12a22e76 2744 int nothrow;
69d4ca36 2745#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 2746 int needed;
69d4ca36 2747#endif
f046b3cc
JL
2748
2749#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2750 /* Define the boundary of the register parm stack space that needs to be
2751    saved, if any.  */
6a651371 2752 int low_to_save = -1, high_to_save = 0;
f046b3cc
JL
2753 rtx save_area = 0; /* Place that it is saved */
2754#endif
2755
2756#ifdef ACCUMULATE_OUTGOING_ARGS
69d4ca36 2757 /* Size of the stack reserved for parameter registers. */
f046b3cc
JL
2758 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2759 char *initial_stack_usage_map = stack_usage_map;
2760#endif
2761
2762#ifdef REG_PARM_STACK_SPACE
2763#ifdef MAYBE_REG_PARM_STACK_SPACE
2764 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2765#else
ab87f8c8 2766 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
f046b3cc
JL
2767#endif
2768#endif
322e3e34 2769
d61bee95 2770 is_const = no_queue;
4f90e4a0 2771 fun = orgfun;
322e3e34 2772
12a22e76
JM
2773 nothrow = libfunc_nothrow (fun);
2774
c2f8b491
JH
2775#ifdef PREFERRED_STACK_BOUNDARY
2776 /* Ensure current function's preferred stack boundary is at least
2777 what we need. */
2778 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
2779 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2780#endif
2781
322e3e34
RK
2782 /* If this kind of value comes back in memory,
2783 decide where in memory it should come back. */
43bc5f13 2784 if (outmode != VOIDmode && aggregate_value_p (type_for_mode (outmode, 0)))
322e3e34 2785 {
fac0ad80
RS
2786#ifdef PCC_STATIC_STRUCT_RETURN
2787 rtx pointer_reg
2788 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
4dc07bd7 2789 0, 0);
38a448ca 2790 mem_value = gen_rtx_MEM (outmode, pointer_reg);
fac0ad80
RS
2791 pcc_struct_value = 1;
2792 if (value == 0)
2793 value = gen_reg_rtx (outmode);
2794#else /* not PCC_STATIC_STRUCT_RETURN */
4f389214 2795 struct_value_size = GET_MODE_SIZE (outmode);
fac0ad80 2796 if (value != 0 && GET_CODE (value) == MEM)
322e3e34
RK
2797 mem_value = value;
2798 else
2799 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
fac0ad80 2800#endif
779c643a
JW
2801
2802 /* This call returns a big structure. */
2803 is_const = 0;
322e3e34
RK
2804 }
2805
2806 /* ??? Unfinished: must pass the memory address as an argument. */
2807
2808 /* Copy all the libcall-arguments out of the varargs data
2809 and into a vector ARGVEC.
2810
2811 Compute how to pass each argument. We only support a very small subset
2812 of the full argument passing conventions to limit complexity here since
2813 library functions shouldn't have many args. */
2814
2815 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
d3c4e2ab 2816 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
322e3e34 2817
eecb6f50 2818 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2819
2820 args_size.constant = 0;
2821 args_size.var = 0;
2822
2823 count = 0;
2824
888aa7a9
RS
2825 push_temp_slots ();
2826
322e3e34
RK
2827 /* If there's a structure value address to be passed,
2828 either pass it in the special place, or pass it as an extra argument. */
fac0ad80 2829 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
322e3e34
RK
2830 {
2831 rtx addr = XEXP (mem_value, 0);
fac0ad80 2832 nargs++;
322e3e34 2833
fac0ad80
RS
2834 /* Make sure it is a reasonable operand for a move or push insn. */
2835 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2836 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2837 addr = force_operand (addr, NULL_RTX);
322e3e34 2838
fac0ad80 2839 argvec[count].value = addr;
4fc3dcd5 2840 argvec[count].mode = Pmode;
fac0ad80 2841 argvec[count].partial = 0;
322e3e34 2842
4fc3dcd5 2843 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
322e3e34 2844#ifdef FUNCTION_ARG_PARTIAL_NREGS
4fc3dcd5 2845 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
fac0ad80 2846 abort ();
322e3e34
RK
2847#endif
2848
4fc3dcd5 2849 locate_and_pad_parm (Pmode, NULL_TREE,
fac0ad80
RS
2850 argvec[count].reg && argvec[count].partial == 0,
2851 NULL_TREE, &args_size, &argvec[count].offset,
4fc026cd 2852 &argvec[count].size, &alignment_pad);
322e3e34
RK
2853
2854
fac0ad80 2855 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 2856 || reg_parm_stack_space > 0)
fac0ad80 2857 args_size.constant += argvec[count].size.constant;
322e3e34 2858
0f41302f 2859 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
fac0ad80
RS
2860
2861 count++;
322e3e34
RK
2862 }
2863
2864 for (; count < nargs; count++)
2865 {
2866 rtx val = va_arg (p, rtx);
2867 enum machine_mode mode = va_arg (p, enum machine_mode);
2868
2869 /* We cannot convert the arg value to the mode the library wants here;
2870 must do it earlier where we know the signedness of the arg. */
2871 if (mode == BLKmode
2872 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2873 abort ();
2874
2875 /* On some machines, there's no way to pass a float to a library fcn.
2876 Pass it as a double instead. */
2877#ifdef LIBGCC_NEEDS_DOUBLE
2878 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2879 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2880#endif
2881
2882 /* There's no need to call protect_from_queue, because
2883 either emit_move_insn or emit_push_insn will do that. */
2884
2885 /* Make sure it is a reasonable operand for a move or push insn. */
2886 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2887 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2888 val = force_operand (val, NULL_RTX);
2889
322e3e34
RK
2890#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2891 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2892 {
a44492f0
RK
2893 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2894 be viewed as just an efficiency improvement. */
888aa7a9
RS
2895 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2896 emit_move_insn (slot, val);
43bc5f13 2897 val = force_operand (XEXP (slot, 0), NULL_RTX);
888aa7a9
RS
2898 mode = Pmode;
2899 }
322e3e34
RK
2900#endif
2901
888aa7a9
RS
2902 argvec[count].value = val;
2903 argvec[count].mode = mode;
2904
322e3e34 2905 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
bf44874e 2906
322e3e34
RK
2907#ifdef FUNCTION_ARG_PARTIAL_NREGS
2908 argvec[count].partial
2909 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2910#else
2911 argvec[count].partial = 0;
2912#endif
2913
2914 locate_and_pad_parm (mode, NULL_TREE,
2915 argvec[count].reg && argvec[count].partial == 0,
2916 NULL_TREE, &args_size, &argvec[count].offset,
4fc026cd 2917 &argvec[count].size, &alignment_pad);
322e3e34
RK
2918
2919 if (argvec[count].size.var)
2920 abort ();
2921
e5e809f4 2922 if (reg_parm_stack_space == 0 && argvec[count].partial)
322e3e34 2923 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
322e3e34
RK
2924
2925 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 2926 || reg_parm_stack_space > 0)
322e3e34
RK
2927 args_size.constant += argvec[count].size.constant;
2928
0f41302f 2929 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34 2930 }
322e3e34 2931
f046b3cc
JL
2932#ifdef FINAL_REG_PARM_STACK_SPACE
2933 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2934 args_size.var);
2935#endif
322e3e34
RK
2936 /* If this machine requires an external definition for library
2937 functions, write one out. */
2938 assemble_external_libcall (fun);
2939
2940 original_args_size = args_size;
c795bca9 2941#ifdef PREFERRED_STACK_BOUNDARY
774e6b37
JH
2942 args_size.constant = (((args_size.constant
2943 + arg_space_so_far
2944 + pending_stack_adjust
2945 + STACK_BYTES - 1)
2946 / STACK_BYTES
2947 * STACK_BYTES)
2948 - arg_space_so_far
2949 - pending_stack_adjust);
322e3e34
RK
2950#endif
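  /* E.g. with STACK_BYTES == 16, 20 bytes of arguments, no argument space
     used so far and a pending adjustment of 4: (20 + 0 + 4 + 15) / 16 * 16
     is 32, and subtracting the pending 4 leaves 28 bytes to push, so the
     argument block plus the pending adjustment keep the stack pointer
     16-byte aligned.  */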
2951
322e3e34 2952 args_size.constant = MAX (args_size.constant,
f046b3cc 2953 reg_parm_stack_space);
e5e809f4 2954
322e3e34 2955#ifndef OUTGOING_REG_PARM_STACK_SPACE
fc990856 2956 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
2957#endif
2958
322e3e34
RK
2959 if (args_size.constant > current_function_outgoing_args_size)
2960 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2961
2962#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
2963 /* Since the stack pointer will never be pushed, it is possible for
2964 the evaluation of a parm to clobber something we have already
2965 written to the stack. Since most function calls on RISC machines
2966 do not use the stack, this is uncommon, but must work correctly.
2967
2968 Therefore, we save any area of the stack that was already written
2969 and that we are using. Here we set up to do this by making a new
2970 stack usage map from the old one.
2971
2972 Another approach might be to try to reorder the argument
2973 evaluations to avoid this conflicting stack usage. */
2974
2975 needed = args_size.constant;
e5e809f4
JL
2976
2977#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc
JL
2978 /* Since we will be writing into the entire argument area, the
2979 map must be allocated for its entire size, not just the part that
2980 is the responsibility of the caller. */
2981 needed += reg_parm_stack_space;
2982#endif
2983
2984#ifdef ARGS_GROW_DOWNWARD
2985 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2986 needed + 1);
2987#else
2988 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2989 needed);
322e3e34 2990#endif
f046b3cc
JL
2991 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2992
2993 if (initial_highest_arg_in_use)
2994 bcopy (initial_stack_usage_map, stack_usage_map,
2995 initial_highest_arg_in_use);
2996
2997 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2998 bzero (&stack_usage_map[initial_highest_arg_in_use],
2999 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3000 needed = 0;
322e3e34 3001
f046b3cc
JL
3002 /* The address of the outgoing argument list must not be copied to a
3003 register here, because argblock would be left pointing to the
3004 wrong place after the call to allocate_dynamic_stack_space below.
3005 */
3006
3007 argblock = virtual_outgoing_args_rtx;
3008#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
3009#ifndef PUSH_ROUNDING
3010 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3011#endif
f046b3cc 3012#endif
322e3e34
RK
3013
3014#ifdef PUSH_ARGS_REVERSED
c795bca9 3015#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
3016 /* If we push args individually in reverse order, perform stack alignment
3017 before the first push (the last arg). */
3018 if (argblock == 0)
774e6b37
JH
3019 {
3020 anti_adjust_stack (GEN_INT (args_size.constant
3021 - original_args_size.constant));
3022 arg_space_so_far += args_size.constant - original_args_size.constant;
3023 }
322e3e34
RK
3024#endif
3025#endif
3026
3027#ifdef PUSH_ARGS_REVERSED
3028 inc = -1;
3029 argnum = nargs - 1;
3030#else
3031 inc = 1;
3032 argnum = 0;
3033#endif
3034
f046b3cc
JL
3035#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3036 /* The argument list is the property of the called routine and it
3037 may clobber it. If the fixed area has been used for previous
3038 parameters, we must save and restore it.
3039
3040     Here we compute the boundary of the area that needs to be saved, if any.  */
3041
3042#ifdef ARGS_GROW_DOWNWARD
3043 for (count = 0; count < reg_parm_stack_space + 1; count++)
3044#else
3045 for (count = 0; count < reg_parm_stack_space; count++)
3046#endif
3047 {
3048 if (count >= highest_outgoing_arg_in_use
3049 || stack_usage_map[count] == 0)
3050 continue;
3051
3052 if (low_to_save == -1)
3053 low_to_save = count;
3054
3055 high_to_save = count;
3056 }
3057
3058 if (low_to_save >= 0)
3059 {
3060 int num_to_save = high_to_save - low_to_save + 1;
3061 enum machine_mode save_mode
3062 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3063 rtx stack_area;
3064
 3065 /* If we don't have the required alignment, we must do this in BLKmode. */
3066 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3067 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3068 save_mode = BLKmode;
3069
ceb83206 3070#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
3071 stack_area = gen_rtx_MEM (save_mode,
3072 memory_address (save_mode,
38a448ca 3073 plus_constant (argblock,
ceb83206 3074 - high_to_save)));
f046b3cc 3075#else
ceb83206
JL
3076 stack_area = gen_rtx_MEM (save_mode,
3077 memory_address (save_mode,
38a448ca 3078 plus_constant (argblock,
ceb83206 3079 low_to_save)));
f046b3cc 3080#endif
f046b3cc
JL
3081 if (save_mode == BLKmode)
3082 {
3083 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
f046b3cc
JL
3084 emit_block_move (validize_mem (save_area), stack_area,
3085 GEN_INT (num_to_save),
3086 PARM_BOUNDARY / BITS_PER_UNIT);
3087 }
3088 else
3089 {
3090 save_area = gen_reg_rtx (save_mode);
3091 emit_move_insn (save_area, stack_area);
3092 }
3093 }
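  /* SAVE_AREA now holds a copy (in a pseudo register, or a stack
     temporary for BLKmode) of the part of the register-parameter area
     already in use; it is copied back just after the call, below.  */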
3094#endif
3095
322e3e34
RK
3096 /* Push the args that need to be pushed. */
3097
5e26979c
JL
3098 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3099 are to be pushed. */
322e3e34
RK
3100 for (count = 0; count < nargs; count++, argnum += inc)
3101 {
3102 register enum machine_mode mode = argvec[argnum].mode;
3103 register rtx val = argvec[argnum].value;
3104 rtx reg = argvec[argnum].reg;
3105 int partial = argvec[argnum].partial;
69d4ca36 3106#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 3107 int lower_bound, upper_bound, i;
69d4ca36 3108#endif
322e3e34
RK
3109
3110 if (! (reg != 0 && partial == 0))
f046b3cc
JL
3111 {
3112#ifdef ACCUMULATE_OUTGOING_ARGS
3113 /* If this is being stored into a pre-allocated, fixed-size, stack
3114 area, save any previous data at that location. */
3115
3116#ifdef ARGS_GROW_DOWNWARD
3117 /* stack_slot is negative, but we want to index stack_usage_map
3118 with positive values. */
5e26979c
JL
3119 upper_bound = -argvec[argnum].offset.constant + 1;
3120 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 3121#else
5e26979c
JL
3122 lower_bound = argvec[argnum].offset.constant;
3123 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
3124#endif
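	  /* For illustration, with upward-growing arguments an entry at
	     offset 8 occupying a 4-byte slot gives LOWER_BOUND == 8 and
	     UPPER_BOUND == 12, i.e. map bytes 8 through 11.  */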
3125
3126 for (i = lower_bound; i < upper_bound; i++)
3127 if (stack_usage_map[i]
f046b3cc
JL
3128 /* Don't store things in the fixed argument area at this point;
3129 it has already been saved. */
e5e809f4 3130 && i > reg_parm_stack_space)
f046b3cc
JL
3131 break;
3132
3133 if (i != upper_bound)
3134 {
e5e809f4 3135 /* We need to make a save area. See what mode we can make it. */
f046b3cc 3136 enum machine_mode save_mode
5e26979c 3137 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
3138 MODE_INT, 1);
3139 rtx stack_area
c5c76735
JL
3140 = gen_rtx_MEM
3141 (save_mode,
3142 memory_address
3143 (save_mode,
3144 plus_constant (argblock,
3145 argvec[argnum].offset.constant)));
5e26979c 3146 argvec[argnum].save_area = gen_reg_rtx (save_mode);
c5c76735 3147
5e26979c 3148 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
3149 }
3150#endif
3151 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
e5e809f4 3152 argblock, GEN_INT (argvec[argnum].offset.constant),
4fc026cd 3153 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
774e6b37 3154 arg_space_so_far += argvec[argnum].size.constant;
f046b3cc
JL
3155
3156#ifdef ACCUMULATE_OUTGOING_ARGS
3157 /* Now mark the segment we just used. */
3158 for (i = lower_bound; i < upper_bound; i++)
3159 stack_usage_map[i] = 1;
3160#endif
3161
3162 NO_DEFER_POP;
3163 }
322e3e34
RK
3164 }
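  /* Register arguments are deliberately loaded in a second pass, below,
     after every stack argument has been pushed, so that the insns
     emitted while pushing one argument cannot clobber a register
     already loaded for another.  */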
3165
3166#ifndef PUSH_ARGS_REVERSED
c795bca9 3167#ifdef PREFERRED_STACK_BOUNDARY
322e3e34
RK
3168 /* If we pushed args in forward order, perform stack alignment
3169 after pushing the last arg. */
3170 if (argblock == 0)
774e6b37
JH
3171 {
3172 anti_adjust_stack (GEN_INT (args_size.constant
3173 - original_args_size.constant));
 3174 arg_space_so_far += args_size.constant - original_args_size.constant;
3175 }
322e3e34
RK
3176#endif
3177#endif
3178
3179#ifdef PUSH_ARGS_REVERSED
3180 argnum = nargs - 1;
3181#else
3182 argnum = 0;
3183#endif
3184
77cac2f2 3185 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 3186
322e3e34
RK
3187 /* Now load any reg parms into their regs. */
3188
5e26979c
JL
3189 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3190 are to be pushed. */
322e3e34
RK
3191 for (count = 0; count < nargs; count++, argnum += inc)
3192 {
322e3e34
RK
3193 register rtx val = argvec[argnum].value;
3194 rtx reg = argvec[argnum].reg;
3195 int partial = argvec[argnum].partial;
3196
bf44874e
JL
3197 /* Handle calls that pass values in multiple non-contiguous
3198 locations. The PA64 has examples of this for library calls. */
19e3f61a 3199 if (reg != 0 && GET_CODE (reg) == PARALLEL)
bf44874e
JL
3200 emit_group_load (reg, val,
3201 GET_MODE_SIZE (GET_MODE (val)),
3202 GET_MODE_ALIGNMENT (GET_MODE (val)));
3203 else if (reg != 0 && partial == 0)
322e3e34 3204 emit_move_insn (reg, val);
bf44874e 3205
322e3e34
RK
3206 NO_DEFER_POP;
3207 }
3208
3209#if 0
3210 /* For version 1.37, try deleting this entirely. */
3211 if (! no_queue)
3212 emit_queue ();
3213#endif
3214
3215 /* Any regs containing parms remain in use through the call. */
322e3e34 3216 for (count = 0; count < nargs; count++)
bf44874e 3217 {
19e3f61a
JM
3218 rtx reg = argvec[count].reg;
3219 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3220 use_group_regs (&call_fusage, reg);
3221 else if (reg != 0)
3222 use_reg (&call_fusage, reg);
bf44874e 3223 }
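  /* The USE expressions accumulated in CALL_FUSAGE are attached to the
     call insn (as its CALL_INSN_FUNCTION_USAGE) by emit_call_1 below,
     so later passes know the call reads these registers.  */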
322e3e34 3224
fac0ad80
RS
3225 /* Pass the function the address in which to return a structure value. */
3226 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3227 {
3228 emit_move_insn (struct_value_rtx,
3229 force_reg (Pmode,
3230 force_operand (XEXP (mem_value, 0),
3231 NULL_RTX)));
3232 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 3233 use_reg (&call_fusage, struct_value_rtx);
fac0ad80
RS
3234 }
3235
322e3e34
RK
3236 /* Don't allow popping to be deferred, since then
3237 cse'ing of library calls could delete a call and leave the pop. */
3238 NO_DEFER_POP;
3239
3240 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3241 will set inhibit_defer_pop to that value. */
43bc5f13
JH
3242 /* The return type is needed to decide how many bytes the function pops.
3243 Signedness plays no role in that, so for simplicity, we pretend it's
3244 always signed. We also assume that the list of arguments passed has
3245 no impact, so we pretend it is unknown. */
322e3e34 3246
2c8da025
RK
3247 emit_call_1 (fun,
3248 get_identifier (XSTR (orgfun, 0)),
43bc5f13
JH
3249 build_function_type (outmode == VOIDmode ? void_type_node
3250 : type_for_mode (outmode, 0), NULL_TREE),
fb5eebb9
RH
3251 original_args_size.constant, args_size.constant,
3252 struct_value_size,
322e3e34 3253 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
43bc5f13
JH
3254 (mem_value == 0 && outmode != VOIDmode
3255 ? hard_libcall_value (outmode) : NULL_RTX),
12a22e76 3256 old_inhibit_defer_pop + 1, call_fusage, is_const, nothrow);
322e3e34
RK
3257
3258 /* Now restore inhibit_defer_pop to its actual original value. */
3259 OK_DEFER_POP;
3260
888aa7a9
RS
3261 pop_temp_slots ();
3262
774e6b37
JH
 3263 /* The stack pointer ought to be restored to its value before the call. */
 3264 if (old_arg_space_so_far != arg_space_so_far)
 3265 abort ();
3266
322e3e34 3267 /* Copy the value to the right place. */
43bc5f13 3268 if (outmode != VOIDmode && retval)
322e3e34
RK
3269 {
3270 if (mem_value)
3271 {
3272 if (value == 0)
fac0ad80 3273 value = mem_value;
322e3e34
RK
3274 if (value != mem_value)
3275 emit_move_insn (value, mem_value);
3276 }
3277 else if (value != 0)
3278 emit_move_insn (value, hard_libcall_value (outmode));
fac0ad80
RS
3279 else
3280 value = hard_libcall_value (outmode);
322e3e34 3281 }
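  /* To recap the cases above: a result returned in memory is copied (or
     simply handed back) via MEM_VALUE, while a result returned in a
     hard register is read from hard_libcall_value (outmode) and copied
     into VALUE only when the caller supplied one.  */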
fac0ad80 3282
f046b3cc
JL
3283#ifdef ACCUMULATE_OUTGOING_ARGS
3284#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
3285 if (save_area)
3286 {
3287 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 3288#ifdef ARGS_GROW_DOWNWARD
e9a25f70 3289 rtx stack_area
38a448ca
RH
3290 = gen_rtx_MEM (save_mode,
3291 memory_address (save_mode,
ceb83206
JL
3292 plus_constant (argblock,
3293 - high_to_save)));
f046b3cc 3294#else
ceb83206
JL
3295 rtx stack_area
3296 = gen_rtx_MEM (save_mode,
3297 memory_address (save_mode,
3298 plus_constant (argblock, low_to_save)));
f046b3cc 3299#endif
e9a25f70
JL
3300 if (save_mode != BLKmode)
3301 emit_move_insn (stack_area, save_area);
3302 else
3303 emit_block_move (stack_area, validize_mem (save_area),
3304 GEN_INT (high_to_save - low_to_save + 1),
f046b3cc 3305 PARM_BOUNDARY / BITS_PER_UNIT);
e9a25f70 3306 }
f046b3cc
JL
3307#endif
3308
3309 /* If we saved any argument areas, restore them. */
3310 for (count = 0; count < nargs; count++)
3311 if (argvec[count].save_area)
3312 {
3313 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3314 rtx stack_area
38a448ca 3315 = gen_rtx_MEM (save_mode,
c5c76735
JL
3316 memory_address
3317 (save_mode,
3318 plus_constant (argblock,
3319 argvec[count].offset.constant)));
f046b3cc
JL
3320
3321 emit_move_insn (stack_area, argvec[count].save_area);
3322 }
3323
3324 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3325 stack_usage_map = initial_stack_usage_map;
3326#endif
3327
43bc5f13
JH
3328 return value;
3329
3330}
3331\f
3332/* Output a library call to function FUN (a SYMBOL_REF rtx)
3333 (emitting the queue unless NO_QUEUE is nonzero),
3334 for a value of mode OUTMODE,
3335 with NARGS different arguments, passed as alternating rtx values
3336 and machine_modes to convert them to.
3337 The rtx values should have been passed through protect_from_queue already.
3338
3339 NO_QUEUE will be true if and only if the library call is a `const' call
3340 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
3341 to the variable is_const in expand_call.
3342
3343 NO_QUEUE must be true for const calls, because if it isn't, then
3344 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
3345 and will be lost if the libcall sequence is optimized away.
3346
3347 NO_QUEUE must be false for non-const calls, because if it isn't, the
3348 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
3349 optimized. For instance, the instruction scheduler may incorrectly
3350 move memory references across the non-const call. */
3351
3352void
3353emit_library_call VPARAMS((rtx orgfun, int no_queue, enum machine_mode outmode,
3354 int nargs, ...))
3355{
3356#ifndef ANSI_PROTOTYPES
3357 rtx orgfun;
3358 int no_queue;
3359 enum machine_mode outmode;
3360 int nargs;
3361#endif
3362 va_list p;
3363
3364 VA_START (p, nargs);
3365
3366#ifndef ANSI_PROTOTYPES
3367 orgfun = va_arg (p, rtx);
3368 no_queue = va_arg (p, int);
3369 outmode = va_arg (p, enum machine_mode);
3370 nargs = va_arg (p, int);
3371#endif
3372
3373 emit_library_call_value_1 (0, orgfun, NULL_RTX, no_queue, outmode, nargs, p);
3374
3375 va_end (p);
3376}
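/* For illustration only: each argument rtx is followed by the mode it
   should be passed in.  Assuming OP0 and OP1 are SImode operands that
   have already been protected from the queue, a call to a hypothetical
   SYMBOL_REF FUN whose result is ignored might look like

       emit_library_call (fun, 1, VOIDmode, 2,
			  op0, SImode, op1, SImode);

   (compare the chkr_set_right_libfunc call in store_one_arg, below).  */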
3377\f
3378/* Like emit_library_call except that an extra argument, VALUE,
3379 comes second and says where to store the result.
3380 (If VALUE is zero, this function chooses a convenient way
 3381 to return the value.)
3382
3383 This function returns an rtx for where the value is to be found.
3384 If VALUE is nonzero, VALUE is returned. */
3385
3386rtx
3387emit_library_call_value VPARAMS((rtx orgfun, rtx value, int no_queue,
3388 enum machine_mode outmode, int nargs, ...))
3389{
3390#ifndef ANSI_PROTOTYPES
3391 rtx orgfun;
3392 rtx value;
3393 int no_queue;
3394 enum machine_mode outmode;
3395 int nargs;
3396#endif
3397 va_list p;
3398
3399 VA_START (p, nargs);
3400
3401#ifndef ANSI_PROTOTYPES
3402 orgfun = va_arg (p, rtx);
3403 value = va_arg (p, rtx);
3404 no_queue = va_arg (p, int);
3405 outmode = va_arg (p, enum machine_mode);
3406 nargs = va_arg (p, int);
3407#endif
3408
3409 value = emit_library_call_value_1 (1, orgfun, value, no_queue, outmode, nargs, p);
3410
3411 va_end (p);
3412
fac0ad80 3413 return value;
322e3e34
RK
3414}
3415\f
51bbfa0c
RS
3416#if 0
3417/* Return an rtx which represents a suitable home on the stack
3418 given TYPE, the type of the argument looking for a home.
3419 This is called only for BLKmode arguments.
3420
3421 SIZE is the size needed for this target.
3422 ARGS_ADDR is the address of the bottom of the argument block for this call.
3423 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3424 if this machine uses push insns. */
3425
3426static rtx
3427target_for_arg (type, size, args_addr, offset)
3428 tree type;
3429 rtx size;
3430 rtx args_addr;
3431 struct args_size offset;
3432{
3433 rtx target;
3434 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3435
3436 /* We do not call memory_address if possible,
3437 because we want to address as close to the stack
3438 as possible. For non-variable sized arguments,
3439 this will be stack-pointer relative addressing. */
3440 if (GET_CODE (offset_rtx) == CONST_INT)
3441 target = plus_constant (args_addr, INTVAL (offset_rtx));
3442 else
3443 {
3444 /* I have no idea how to guarantee that this
3445 will work in the presence of register parameters. */
38a448ca 3446 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
51bbfa0c
RS
3447 target = memory_address (QImode, target);
3448 }
3449
38a448ca 3450 return gen_rtx_MEM (BLKmode, target);
51bbfa0c
RS
3451}
3452#endif
3453\f
3454/* Store a single argument for a function call
3455 into the register or memory area where it must be passed.
3456 *ARG describes the argument value and where to pass it.
3457
3458 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 3459 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
3460
3461 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3462 so must be careful about how the stack is used.
3463
3464 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
 3465 argument stack. This is used, if ACCUMULATE_OUTGOING_ARGS is defined, to indicate
3466 that we need not worry about saving and restoring the stack.
3467
3468 FNDECL is the declaration of the function we are calling. */
3469
3470static void
c84e2712 3471store_one_arg (arg, argblock, may_be_alloca, variable_size,
6f90e075 3472 reg_parm_stack_space)
51bbfa0c
RS
3473 struct arg_data *arg;
3474 rtx argblock;
3475 int may_be_alloca;
0f9b3ea6 3476 int variable_size ATTRIBUTE_UNUSED;
6f90e075 3477 int reg_parm_stack_space;
51bbfa0c
RS
3478{
3479 register tree pval = arg->tree_value;
3480 rtx reg = 0;
3481 int partial = 0;
3482 int used = 0;
69d4ca36 3483#ifdef ACCUMULATE_OUTGOING_ARGS
6a651371 3484 int i, lower_bound = 0, upper_bound = 0;
69d4ca36 3485#endif
51bbfa0c
RS
3486
3487 if (TREE_CODE (pval) == ERROR_MARK)
3488 return;
3489
cc79451b
RK
3490 /* Push a new temporary level for any temporaries we make for
3491 this argument. */
3492 push_temp_slots ();
3493
51bbfa0c
RS
3494#ifdef ACCUMULATE_OUTGOING_ARGS
3495 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3496 save any previous data at that location. */
3497 if (argblock && ! variable_size && arg->stack)
3498 {
3499#ifdef ARGS_GROW_DOWNWARD
0f41302f
MS
3500 /* stack_slot is negative, but we want to index stack_usage_map
3501 with positive values. */
51bbfa0c
RS
3502 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3503 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3504 else
50eb43ca 3505 upper_bound = 0;
51bbfa0c
RS
3506
3507 lower_bound = upper_bound - arg->size.constant;
3508#else
3509 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3510 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3511 else
3512 lower_bound = 0;
3513
3514 upper_bound = lower_bound + arg->size.constant;
3515#endif
3516
3517 for (i = lower_bound; i < upper_bound; i++)
3518 if (stack_usage_map[i]
51bbfa0c
RS
3519 /* Don't store things in the fixed argument area at this point;
3520 it has already been saved. */
e5e809f4 3521 && i > reg_parm_stack_space)
51bbfa0c
RS
3522 break;
3523
3524 if (i != upper_bound)
3525 {
3526 /* We need to make a save area. See what mode we can make it. */
3527 enum machine_mode save_mode
3528 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3529 rtx stack_area
38a448ca
RH
3530 = gen_rtx_MEM (save_mode,
3531 memory_address (save_mode,
3532 XEXP (arg->stack_slot, 0)));
51bbfa0c
RS
3533
3534 if (save_mode == BLKmode)
3535 {
3536 arg->save_area = assign_stack_temp (BLKmode,
6fa51029 3537 arg->size.constant, 0);
c6df88cb
MM
3538 MEM_SET_IN_STRUCT_P (arg->save_area,
3539 AGGREGATE_TYPE_P (TREE_TYPE
3540 (arg->tree_value)));
cc79451b 3541 preserve_temp_slots (arg->save_area);
51bbfa0c 3542 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 3543 GEN_INT (arg->size.constant),
51bbfa0c
RS
3544 PARM_BOUNDARY / BITS_PER_UNIT);
3545 }
3546 else
3547 {
3548 arg->save_area = gen_reg_rtx (save_mode);
3549 emit_move_insn (arg->save_area, stack_area);
3550 }
3551 }
3552 }
b564df06
JL
3553
3554 /* Now that we have saved any slots that will be overwritten by this
3555 store, mark all slots this store will use. We must do this before
3556 we actually expand the argument since the expansion itself may
3557 trigger library calls which might need to use the same stack slot. */
3558 if (argblock && ! variable_size && arg->stack)
3559 for (i = lower_bound; i < upper_bound; i++)
3560 stack_usage_map[i] = 1;
51bbfa0c
RS
3561#endif
3562
3563 /* If this isn't going to be placed on both the stack and in registers,
3564 set up the register and number of words. */
3565 if (! arg->pass_on_stack)
3566 reg = arg->reg, partial = arg->partial;
3567
3568 if (reg != 0 && partial == 0)
3569 /* Being passed entirely in a register. We shouldn't be called in
3570 this case. */
3571 abort ();
3572
4ab56118
RK
3573 /* If this arg needs special alignment, don't load the registers
3574 here. */
3575 if (arg->n_aligned_regs != 0)
3576 reg = 0;
4ab56118 3577
4ab56118 3578 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
3579 it directly into its stack slot. Otherwise, we can. */
3580 if (arg->value == 0)
d64f5a78
RS
3581 {
3582#ifdef ACCUMULATE_OUTGOING_ARGS
3583 /* stack_arg_under_construction is nonzero if a function argument is
3584 being evaluated directly into the outgoing argument list and
3585 expand_call must take special action to preserve the argument list
3586 if it is called recursively.
3587
3588 For scalar function arguments stack_usage_map is sufficient to
3589 determine which stack slots must be saved and restored. Scalar
3590 arguments in general have pass_on_stack == 0.
3591
3592 If this argument is initialized by a function which takes the
3593 address of the argument (a C++ constructor or a C function
3594 returning a BLKmode structure), then stack_usage_map is
3595 insufficient and expand_call must push the stack around the
3596 function call. Such arguments have pass_on_stack == 1.
3597
3598 Note that it is always safe to set stack_arg_under_construction,
3599 but this generates suboptimal code if set when not needed. */
3600
3601 if (arg->pass_on_stack)
3602 stack_arg_under_construction++;
3603#endif
3a08477a
RK
3604 arg->value = expand_expr (pval,
3605 (partial
3606 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3607 ? NULL_RTX : arg->stack,
e5d70561 3608 VOIDmode, 0);
1efe6448
RK
3609
3610 /* If we are promoting object (or for any other reason) the mode
3611 doesn't agree, convert the mode. */
3612
7373d92d
RK
3613 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3614 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3615 arg->value, arg->unsignedp);
1efe6448 3616
d64f5a78
RS
3617#ifdef ACCUMULATE_OUTGOING_ARGS
3618 if (arg->pass_on_stack)
3619 stack_arg_under_construction--;
3620#endif
3621 }
51bbfa0c
RS
3622
3623 /* Don't allow anything left on stack from computation
3624 of argument to alloca. */
3625 if (may_be_alloca)
3626 do_pending_stack_adjust ();
3627
3628 if (arg->value == arg->stack)
7815214e 3629 {
c5c76735 3630 /* If the value is already in the stack slot, we are done. */
7d384cc0 3631 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
7815214e 3632 {
7815214e 3633 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3634 XEXP (arg->stack, 0), Pmode,
7d384cc0 3635 ARGS_SIZE_RTX (arg->size),
7815214e 3636 TYPE_MODE (sizetype),
956d6950
JL
3637 GEN_INT (MEMORY_USE_RW),
3638 TYPE_MODE (integer_type_node));
7815214e
RK
3639 }
3640 }
1efe6448 3641 else if (arg->mode != BLKmode)
51bbfa0c
RS
3642 {
3643 register int size;
3644
3645 /* Argument is a scalar, not entirely passed in registers.
3646 (If part is passed in registers, arg->partial says how much
3647 and emit_push_insn will take care of putting it there.)
3648
3649 Push it, and if its size is less than the
3650 amount of space allocated to it,
3651 also bump stack pointer by the additional space.
3652 Note that in C the default argument promotions
3653 will prevent such mismatches. */
3654
1efe6448 3655 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
3656 /* Compute how much space the push instruction will push.
3657 On many machines, pushing a byte will advance the stack
3658 pointer by a halfword. */
3659#ifdef PUSH_ROUNDING
3660 size = PUSH_ROUNDING (size);
3661#endif
3662 used = size;
3663
3664 /* Compute how much space the argument should get:
3665 round up to a multiple of the alignment for arguments. */
1efe6448 3666 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
3667 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3668 / (PARM_BOUNDARY / BITS_PER_UNIT))
3669 * (PARM_BOUNDARY / BITS_PER_UNIT));
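      /* For illustration, assuming FUNCTION_ARG_PADDING is not `none'
	 and PUSH_ROUNDING leaves SIZE at 2: with PARM_BOUNDARY == 32,
	 USED is rounded up from 2 to 4 bytes.  */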
3670
3671 /* This isn't already where we want it on the stack, so put it there.
3672 This can either be done with push or copy insns. */
e5e809f4
JL
3673 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3674 partial, reg, used - size, argblock,
4fc026cd
CM
3675 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
3676 ARGS_SIZE_RTX (arg->alignment_pad));
3677
c2732da3 3678 arg_space_so_far += used;
51bbfa0c
RS
3679 }
3680 else
3681 {
3682 /* BLKmode, at least partly to be pushed. */
3683
3684 register int excess;
3685 rtx size_rtx;
3686
3687 /* Pushing a nonscalar.
3688 If part is passed in registers, PARTIAL says how much
3689 and emit_push_insn will take care of putting it there. */
3690
3691 /* Round its size up to a multiple
3692 of the allocation unit for arguments. */
3693
3694 if (arg->size.var != 0)
3695 {
3696 excess = 0;
3697 size_rtx = ARGS_SIZE_RTX (arg->size);
3698 }
3699 else
3700 {
51bbfa0c
RS
3701 /* PUSH_ROUNDING has no effect on us, because
3702 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 3703 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 3704 + partial * UNITS_PER_WORD);
e4f93898 3705 size_rtx = expr_size (pval);
c2732da3 3706 arg_space_so_far += excess + INTVAL (size_rtx);
51bbfa0c
RS
3707 }
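      /* Roughly, EXCESS is how much larger the argument's stack slot is
	 than the data actually pushed here: rounding padding plus any
	 part of the value that travels in registers.  emit_push_insn
	 leaves that much extra space.  */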
3708
1efe6448 3709 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c 3710 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
e5e809f4 3711 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
4fc026cd
CM
3712 reg_parm_stack_space,
3713 ARGS_SIZE_RTX (arg->alignment_pad));
51bbfa0c
RS
3714 }
3715
3716
3717 /* Unless this is a partially-in-register argument, the argument is now
3718 in the stack.
3719
3720 ??? Note that this can change arg->value from arg->stack to
3721 arg->stack_slot and it matters when they are not the same.
3722 It isn't totally clear that this is correct in all cases. */
3723 if (partial == 0)
3b917a55 3724 arg->value = arg->stack_slot;
51bbfa0c
RS
3725
3726 /* Once we have pushed something, pops can't safely
3727 be deferred during the rest of the arguments. */
3728 NO_DEFER_POP;
3729
3730 /* ANSI doesn't require a sequence point here,
3731 but PCC has one, so this will avoid some problems. */
3732 emit_queue ();
3733
db907e7b
RK
3734 /* Free any temporary slots made in processing this argument. Show
3735 that we might have taken the address of something and pushed that
3736 as an operand. */
3737 preserve_temp_slots (NULL_RTX);
51bbfa0c 3738 free_temp_slots ();
cc79451b 3739 pop_temp_slots ();
51bbfa0c 3740}