/* Source: gcc/calls.c from the GCC repository (gcc.gnu.org git),
   at commit "Fix for ia64 setjmp miscompilation problem."  */
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-99, 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "rtl.h"
24 #include "tree.h"
25 #include "flags.h"
26 #include "expr.h"
27 #include "function.h"
28 #include "regs.h"
29 #include "insn-flags.h"
30 #include "toplev.h"
31 #include "output.h"
32 #include "tm_p.h"
33
34 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
35 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
36 #endif
37
38 /* Decide whether a function's arguments should be processed
39 from first to last or from last to first.
40
41 They should if the stack and args grow in opposite directions, but
42 only if we have push insns. */
43
44 #ifdef PUSH_ROUNDING
45
46 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
47 #define PUSH_ARGS_REVERSED /* If it's last to first */
48 #endif
49
50 #endif
51
52 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
53 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
54
55 /* Data structure and subroutines used within expand_call. */
56
struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Non-zero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets;
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
#endif
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
  /* The amount that the stack pointer needs to be adjusted to
     force alignment for the next argument.  */
  struct args_size alignment_pad;
};
113
114 #ifdef ACCUMULATE_OUTGOING_ARGS
115 /* A vector of one char per byte of stack space. A byte if non-zero if
116 the corresponding stack location has been used.
117 This vector is used to prevent a function call within an argument from
118 clobbering any stack already set up. */
119 static char *stack_usage_map;
120
121 /* Size of STACK_USAGE_MAP. */
122 static int highest_outgoing_arg_in_use;
123
124 /* stack_arg_under_construction is nonzero when an argument may be
125 initialized with a constructor call (including a C function that
126 returns a BLKmode struct) and expand_call must take special action
127 to make sure the object being constructed does not overlap the
128 argument list for the constructor call. */
129 int stack_arg_under_construction;
130 #endif
131
132 static int calls_function PARAMS ((tree, int));
133 static int calls_function_1 PARAMS ((tree, int));
134 static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
135 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
136 rtx, int, rtx, int));
137 static void precompute_register_parameters PARAMS ((int,
138 struct arg_data *,
139 int *));
140 static void store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
141 int));
142 static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
143 int));
144 static int finalize_must_preallocate PARAMS ((int, int,
145 struct arg_data *,
146 struct args_size *));
147 static void precompute_arguments PARAMS ((int, int, int,
148 struct arg_data *,
149 struct args_size *));
150 static int compute_argument_block_size PARAMS ((int,
151 struct args_size *,
152 int));
153 static void initialize_argument_information PARAMS ((int,
154 struct arg_data *,
155 struct args_size *,
156 int, tree, tree,
157 CUMULATIVE_ARGS *,
158 int, rtx *, int *,
159 int *, int *));
160 static void compute_argument_addresses PARAMS ((struct arg_data *,
161 rtx, int));
162 static rtx rtx_for_function_call PARAMS ((tree, tree));
163 static void load_register_parameters PARAMS ((struct arg_data *,
164 int, rtx *));
165
166 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
167 static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
168 static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
169 #endif
170 \f
171 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
172 `alloca'.
173
174 If WHICH is 0, return 1 if EXP contains a call to any function.
175 Actually, we only need return 1 if evaluating EXP would require pushing
176 arguments on the stack, but that is too difficult to compute, so we just
177 assume any function call might require the stack. */
178
179 static tree calls_function_save_exprs;
180
181 static int
182 calls_function (exp, which)
183 tree exp;
184 int which;
185 {
186 int val;
187 calls_function_save_exprs = 0;
188 val = calls_function_1 (exp, which);
189 calls_function_save_exprs = 0;
190 return val;
191 }
192
/* Recursive worker for calls_function.  Walk EXP depth-first and return 1
   as soon as a call of interest (see WHICH, described above) is found.
   SAVE_EXPRs already visited are remembered in calls_function_save_exprs
   so a shared subtree is not walked more than once.  */

static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  register int i;
  enum tree_code code = TREE_CODE (exp);
  int type = TREE_CODE_CLASS (code);
  int length = tree_code_length[(int) code];

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  /* Only expressions and references can contain calls.  */
  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
      && type != 'b')
    return 0;

  /* Some codes need special handling before the generic operand walk
     below; several of them shrink LENGTH so that non-tree operands are
     not scanned.  */
  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
	return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == FUNCTION_DECL))
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

	  /* A direct call counts (for WHICH == 1) if it is the alloca
	     built-in itself or an inlinable function known to call
	     alloca.  */
	  if ((DECL_BUILT_IN (fndecl)
	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
	      || (DECL_SAVED_INSNS (fndecl)
		  && DECL_SAVED_INSNS (fndecl)->calls_alloca))
	    return 1;
	}

      /* Third operand is RTL.  */
      length = 2;
      break;

    case SAVE_EXPR:
      /* A SAVE_EXPR whose RTL is already set will not be expanded again,
	 so it cannot introduce a new call.  */
      if (SAVE_EXPR_RTL (exp) != 0)
	return 0;
      /* Don't walk the same SAVE_EXPR twice.  */
      if (value_member (exp, calls_function_save_exprs))
	return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
					     calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
	      && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      /* Scan the initializers of the block's variables ...  */
      {
	register tree local;

	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
	  if (DECL_INITIAL (local) != 0
	      && calls_function_1 (DECL_INITIAL (local), which))
	    return 1;
      }
      /* ... and recurse into its sub-blocks.  */
      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (exp);
	     subblock;
	     subblock = TREE_CHAIN (subblock))
	  if (calls_function_1 (subblock, which))
	    return 1;
      }
      return 0;

    case METHOD_CALL_EXPR:
      length = 3;
      break;

    case WITH_CLEANUP_EXPR:
      length = 1;
      break;

    case RTL_EXPR:
      /* Already-expanded RTL cannot introduce new calls.  */
      return 0;

    default:
      break;
    }

  /* Generic case: recurse into all (remaining) tree operands.  */
  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
	&& calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
287 \f
288 /* Force FUNEXP into a form suitable for the address of a CALL,
289 and return that as an rtx. Also load the static chain register
290 if FNDECL is a nested function.
291
292 CALL_FUSAGE points to a variable holding the prospective
293 CALL_INSN_FUNCTION_USAGE information. */
294
rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else
    {
#ifndef NO_FUNCTION_CSE
      /* Copy a constant function address into a register so CSE can share
	 it between calls, unless disabled -- and optionally not for a
	 recursive call, if the target says so.  */
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      /* Load the static chain register and record the use so the call
	 insn is known to depend on it.  */
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
339
340 /* Generate instructions to call function FUNEXP,
341 and optionally pop the results.
342 The CALL_INSN is the first insn generated.
343
344 FNDECL is the declaration node of the function. This is given to the
345 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
346
347 FUNTYPE is the data type of the function. This is given to the macro
348 RETURN_POPS_ARGS to determine whether this function pops its own args.
349 We used to allow an identifier for library functions, but that doesn't
350 work when the return type is an aggregate type and the calling convention
351 says that the pointer to this aggregate is to be popped by the callee.
352
353 STACK_SIZE is the number of bytes of arguments on the stack,
354 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
355 This is both to put into the call insn and
356 to generate explicit popping code if necessary.
357
358 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
359 It is zero if this call doesn't want a structure value.
360
361 NEXT_ARG_REG is the rtx that results from executing
362 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
363 just after all the args have had their registers assigned.
364 This could be whatever you like, but normally it is the first
365 arg-register beyond those used for args in this call,
366 or 0 if all the arg-registers are used in this call.
367 It is passed on to `gen_call' so you can put this info in the call insn.
368
369 VALREG is a hard register in which a value is returned,
370 or 0 if the call does not return a value.
371
372 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
373 the args to this call were processed.
374 We restore `inhibit_defer_pop' to that value.
375
376 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
377 denote registers used by the called function.
378
379 IS_CONST is true if this is a `const' call. */
380
static void
emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
	     struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
	     call_fusage, is_const)
     rtx funexp;
     tree fndecl ATTRIBUTE_UNUSED;
     tree funtype ATTRIBUTE_UNUSED;
     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
     HOST_WIDE_INT rounded_stack_size;
     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
     rtx next_arg_reg;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int is_const;
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx = GEN_INT (struct_value_size);
#endif
  rtx call_insn;
#ifndef ACCUMULATE_OUTGOING_ARGS
  int already_popped = 0;
  /* Number of bytes the callee itself pops on return, per the target.  */
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#ifndef ACCUMULATE_OUTGOING_ARGS
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
/* If the target has "call" or "call_value" insns, then prefer them
   if no arguments are actually popped.  If the target does not have
   "call" or "call_value" insns, then we must use the popping versions
   even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = gen_call_value_pop (valreg,
				  gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
			    rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (gen_call_value (valreg,
					gen_rtx_MEM (FUNCTION_MODE, funexp),
					rounded_stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      /* Walk to the last entry of the existing EXPR_LIST and splice
	 CALL_FUSAGE onto it.  */
      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (is_const)
    CONST_CALL_P (call_insn) = 1;

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

#ifndef ACCUMULATE_OUTGOING_ARGS
  /* If returning from the subroutine does not automatically pop the args,
     we need an instruction to pop them sooner or later.
     Perhaps do it now; perhaps just record how much space to pop later.

     If returning from the subroutine does pop the args, indicate that the
     stack pointer will be changed.  */

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      /* The callee pops N_POPPED bytes itself; only the remainder (if any)
	 must be popped by us.  */
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
    }

  if (rounded_stack_size != 0)
    {
      /* Either defer the pop (so adjacent pops can be merged) or emit the
	 stack adjustment right away.  */
      if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
	pending_stack_adjust += rounded_stack_size;
      else
	adjust_stack (rounded_stack_size_rtx);
    }
#endif
}
524
525 /* Determine if the function identified by NAME and FNDECL is one with
526 special properties we wish to know about.
527
528 For example, if the function might return more than one time (setjmp), then
529 set RETURNS_TWICE to a nonzero value.
530
531 Similarly set IS_LONGJMP for if the function is in the longjmp family.
532
533 Set IS_MALLOC for any of the standard memory allocation functions which
534 allocate from the heap.
535
536 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
537 space from the stack such as alloca. */
538
void
special_function_p (fndecl, returns_twice, is_longjmp, fork_or_exec,
		    is_malloc, may_be_alloca)
     tree fndecl;
     int *returns_twice;
     int *is_longjmp;
     int *fork_or_exec;
     int *is_malloc;
     int *may_be_alloca;
{
  *returns_twice = 0;
  *is_longjmp = 0;
  *fork_or_exec = 0;
  *may_be_alloca = 0;

  /* The function decl may have the `malloc' attribute.  */
  *is_malloc = fndecl && DECL_IS_MALLOC (fndecl);

  if (! *is_malloc
      && fndecl && DECL_NAME (fndecl)
      /* 17 is long enough for every name matched below, even with a
	 "__x" prefix ("__xsetjmp_syscall" is exactly 17).  */
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.  */
      && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
    {
      char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      /* TNAME is NAME with any _, __ or __x prefix stripped.  */
      char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      *may_be_alloca
	= (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
	     && name[0] == 'a'
	     && ! strcmp (name, "alloca"))
	    || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
		&& name[0] == '_'
		&& ! strcmp (name, "__builtin_alloca"))));

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      /* The first-character tests below are cheap filters before the
	 full strcmp.  */
      if (tname[0] == 's')
	{
	  *returns_twice
	    = ((tname[1] == 'e'
		&& (! strcmp (tname, "setjmp")
		    || ! strcmp (tname, "setjmp_syscall")))
	       || (tname[1] == 'i'
		   && ! strcmp (tname, "sigsetjmp"))
	       || (tname[1] == 'a'
		   && ! strcmp (tname, "savectx")));
	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    *is_longjmp = 1;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork")))
	*returns_twice = 1;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	*is_longjmp = 1;

      else if ((tname[0] == 'f' && tname[1] == 'o'
		&& ! strcmp (tname, "fork"))
	       /* Linux specific: __clone.  check NAME to insist on the
		  leading underscores, to avoid polluting the ISO / POSIX
		  namespace.  */
	       || (name[0] == '_' && name[1] == '_'
		   && ! strcmp (tname, "clone"))
	       /* Matches execl, execv, execlp, execle, execvp, execve.  */
	       || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
		   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
		   && (tname[5] == '\0'
		       || ((tname[5] == 'p' || tname[5] == 'e')
			   && tname[6] == '\0'))))
	*fork_or_exec = 1;

      /* Do not add any more malloc-like functions to this list,
	 instead mark them as malloc functions using the malloc attribute.
	 Note, realloc is not suitable for attribute malloc since
	 it may return the same address across multiple calls.  */
      else if (! strcmp (tname, "malloc")
	       || ! strcmp (tname, "calloc")
	       || ! strcmp (tname, "strdup")
	       /* Note use of NAME rather than TNAME here.  These functions
		  are only reserved when preceded with __.  */
	       || ! strcmp (name, "__vn")	/* mangled __builtin_vec_new */
	       || ! strcmp (name, "__nw")	/* mangled __builtin_new */
	       || ! strcmp (name, "__builtin_new")
	       || ! strcmp (name, "__builtin_vec_new"))
	*is_malloc = 1;
    }
}
644
645 /* Precompute all register parameters as described by ARGS, storing values
646 into fields within the ARGS array.
647
648 NUM_ACTUALS indicates the total number elements in the ARGS array.
649
650 Set REG_PARM_SEEN if we encounter a register parameter. */
651
652 static void
653 precompute_register_parameters (num_actuals, args, reg_parm_seen)
654 int num_actuals;
655 struct arg_data *args;
656 int *reg_parm_seen;
657 {
658 int i;
659
660 *reg_parm_seen = 0;
661
662 for (i = 0; i < num_actuals; i++)
663 if (args[i].reg != 0 && ! args[i].pass_on_stack)
664 {
665 *reg_parm_seen = 1;
666
667 if (args[i].value == 0)
668 {
669 push_temp_slots ();
670 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
671 VOIDmode, 0);
672 preserve_temp_slots (args[i].value);
673 pop_temp_slots ();
674
675 /* ANSI doesn't require a sequence point here,
676 but PCC has one, so this will avoid some problems. */
677 emit_queue ();
678 }
679
680 /* If we are to promote the function arg to a wider mode,
681 do it now. */
682
683 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
684 args[i].value
685 = convert_modes (args[i].mode,
686 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
687 args[i].value, args[i].unsignedp);
688
689 /* If the value is expensive, and we are inside an appropriately
690 short loop, put the value into a pseudo and then put the pseudo
691 into the hard reg.
692
693 For small register classes, also do this if this call uses
694 register parameters. This is to avoid reload conflicts while
695 loading the parameters registers. */
696
697 if ((! (GET_CODE (args[i].value) == REG
698 || (GET_CODE (args[i].value) == SUBREG
699 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
700 && args[i].mode != BLKmode
701 && rtx_cost (args[i].value, SET) > 2
702 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
703 || preserve_subexpressions_p ()))
704 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
705 }
706 }
707
708 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
709
710 /* The argument list is the property of the called routine and it
711 may clobber it. If the fixed area has been used for previous
712 parameters, we must save and restore it. */
static rtx
save_fixed_argument_area (reg_parm_stack_space, argblock,
			  low_to_save, high_to_save)
     int reg_parm_stack_space;
     rtx argblock;
     int *low_to_save;
     int *high_to_save;
{
  int i;
  rtx save_area = NULL_RTX;

  /* Compute the boundary of the area that needs to be saved, if any.
     NOTE(review): *LOW_TO_SAVE is compared against -1 below, so the
     caller presumably initializes it to -1 -- confirm at call sites.  */
#ifdef ARGS_GROW_DOWNWARD
  for (i = 0; i < reg_parm_stack_space + 1; i++)
#else
  for (i = 0; i < reg_parm_stack_space; i++)
#endif
    {
      /* Skip bytes beyond the map or not marked as in use.  */
      if (i >= highest_outgoing_arg_in_use
	  || stack_usage_map[i] == 0)
	continue;

      if (*low_to_save == -1)
	*low_to_save = i;

      *high_to_save = i;
    }

  /* If any bytes were in use, copy them away: into a pseudo register
     when the span fits an integer mode with suitable alignment, else
     into a BLKmode stack temporary.  */
  if (*low_to_save >= 0)
    {
      int num_to_save = *high_to_save - *low_to_save + 1;
      enum machine_mode save_mode
	= mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
      rtx stack_area;

      /* If we don't have the required alignment, must do this in BLKmode.  */
      if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
				BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
      stack_area = gen_rtx_MEM (save_mode,
				memory_address (save_mode,
						plus_constant (argblock,
							       - *high_to_save)));
#else
      stack_area = gen_rtx_MEM (save_mode,
				memory_address (save_mode,
						plus_constant (argblock,
							       *low_to_save)));
#endif
      if (save_mode == BLKmode)
	{
	  save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	  /* Cannot use emit_block_move here because it can be done by a library
	     call which in turn gets into this place again and deadly infinite
	     recursion happens.  */
	  move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
			  PARM_BOUNDARY / BITS_PER_UNIT);
	}
      else
	{
	  save_area = gen_reg_rtx (save_mode);
	  emit_move_insn (save_area, stack_area);
	}
    }
  return save_area;
}
781
/* Undo the effect of save_fixed_argument_area: copy SAVE_AREA (the pseudo
   register or BLKmode stack temporary it returned) back into the fixed
   argument area at ARGBLOCK.  HIGH_TO_SAVE and LOW_TO_SAVE must be the
   bounds computed at save time.  */

static void
restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
     rtx save_area;
     rtx argblock;
     int high_to_save;
     int low_to_save;
{
  enum machine_mode save_mode = GET_MODE (save_area);
#ifdef ARGS_GROW_DOWNWARD
  rtx stack_area
    = gen_rtx_MEM (save_mode,
		   memory_address (save_mode,
				   plus_constant (argblock,
						  - high_to_save)));
#else
  rtx stack_area
    = gen_rtx_MEM (save_mode,
		   memory_address (save_mode,
				   plus_constant (argblock,
						  low_to_save)));
#endif

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    /* Cannot use emit_block_move here because it can be done by a library
       call which in turn gets into this place again and deadly infinite
       recursion happens.  */
    move_by_pieces (stack_area, validize_mem (save_area),
		    high_to_save - low_to_save + 1,
		    PARM_BOUNDARY / BITS_PER_UNIT);
}
814 #endif
815
816 /* If any elements in ARGS refer to parameters that are to be passed in
817 registers, but not in memory, and whose alignment does not permit a
818 direct copy into registers. Copy the values into a group of pseudos
819 which we will later copy into the appropriate hard registers.
820
821 Pseudos for each unaligned argument will be stored into the array
822 args[argnum].aligned_regs. The caller is responsible for deallocating
823 the aligned_regs array if it is nonzero. */
824
static void
store_unaligned_arguments_into_pseudos (args, num_actuals)
     struct arg_data *args;
     int num_actuals;
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    /* Only BLKmode register arguments whose type alignment is below
       what a direct register copy requires need this treatment.  */
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& args[i].mode == BLKmode
	&& (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int big_endian_correction = 0;

	/* One pseudo per word going to registers; if the arg is split
	   between registers and stack, PARTIAL gives the register count.  */
	args[i].n_aligned_regs
	  = args[i].partial ? args[i].partial
	    : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

	args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
						* args[i].n_aligned_regs);

	/* Structures smaller than a word are aligned to the least
	   significant byte (to the right).  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
	  big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    /* BYTES shrinks each iteration, so the final word may copy
	       fewer than BITS_PER_WORD bits.  */
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
	    int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));

	    args[i].aligned_regs[j] = reg;

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We use to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, big_endian_correction, word_mode,
			     extract_bit_field (word, bitsize, 0, 1,
						NULL_RTX, word_mode,
						word_mode,
						bitalign / BITS_PER_UNIT,
						BITS_PER_WORD),
			     bitalign / BITS_PER_UNIT, BITS_PER_WORD);
	  }
      }
}
888
889 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
890 ACTPARMS.
891
892 NUM_ACTUALS is the total number of parameters.
893
894 N_NAMED_ARGS is the total number of named arguments.
895
896 FNDECL is the tree code for the target of this call (if known)
897
898 ARGS_SO_FAR holds state needed by the target to know where to place
899 the next argument.
900
901 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
902 for arguments which are passed in registers.
903
904 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
905 and may be modified by this routine.
906
907 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
908 flags which may may be modified by this routine. */
909
static void
initialize_argument_information (num_actuals, args, args_size, n_named_args,
				 actparms, fndecl, args_so_far,
				 reg_parm_stack_space, old_stack_level,
				 old_pending_adj, must_preallocate, is_const)
     int num_actuals ATTRIBUTE_UNUSED;
     struct arg_data *args;
     struct args_size *args_size;
     int n_named_args ATTRIBUTE_UNUSED;
     tree actparms;
     tree fndecl;
     CUMULATIVE_ARGS *args_so_far;
     int reg_parm_stack_space;
     rtx *old_stack_level;
     int *old_pending_adj;
     int *must_preallocate;
     int *is_const;
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  /* Alignment padding computed by locate_and_pad_parm for each argument;
     recorded in args[i].alignment_pad below.  */
  struct args_size alignment_pad;
  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

#ifdef PUSH_ARGS_REVERSED
  i = num_actuals - 1, inc = -1;
  /* In this case, must reverse order of args
     so that we compute and push the last arg first.  */
#else
  i = 0, inc = 1;
#endif

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || TYPE_SIZE (type) == 0)
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (type)))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
	  /* If we're compiling a thunk, pass through invisible
	     references instead of making a copy.  */
	  if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
	      || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
					      type, argpos < n_named_args)
		  /* If it's in a register, we must make a copy of it too.  */
		  /* ??? Is this a sufficient test?  Is there a better one? */
		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		       && REG_P (DECL_RTL (args[i].tree_value)))
		  && ! TREE_ADDRESSABLE (type))
#endif
	      )
	    {
	      /* C++ uses a TARGET_EXPR to indicate that we want to make a
		 new object from the argument.  If we are passing by
		 invisible reference, the callee will do that for us, so we
		 can strip off the TARGET_EXPR.  This is not always safe,
		 but it is safe in the only case where this is a useful
		 optimization; namely, when the argument is a plain object.
		 In that case, the frontend is just asking the backend to
		 make a bitwise copy of the argument.  */

	      if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
						  (args[i].tree_value, 1)))
		      == 'd')
		  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
		args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

	      /* Pass the address of the object instead of the object
		 itself.  */
	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (TYPE_SIZE (type) == 0
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
			  || (TREE_INT_CST_LOW (TYPE_SIZE (type))
			      > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  /* Save the stack level (once per call) so that it can be
		     restored after the call; also park the pending stack
		     adjustment so the dynamic allocation is not confused
		     by it.  */
		  if (*old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx_MEM (BLKmode,
				      allocate_dynamic_stack_space (size_rtx,
								    NULL_RTX,
								    TYPE_ALIGN (type)));
		}
	      else
		{
		  int size = int_size_in_bytes (type);
		  copy = assign_stack_temp (TYPE_MODE (type), size, 0);
		}

	      MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));

	      store_expr (args[i].tree_value, copy, 0);
	      /* Copying the argument is a side effect, so the call can no
		 longer be treated as `const'.  */
	      *is_const = 0;

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;
      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
	args[i].partial
	  = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
					argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	*is_const = 0;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     fndecl, args_size, &args[i].offset,
			     &args[i].size, &alignment_pad);

#ifndef ARGS_GROW_DOWNWARD
      /* When args grow upward the slot offset is simply the running
	 total BEFORE this arg's size is added.  */
      args[i].slot_offset = *args_size;
#endif

      args[i].alignment_pad = alignment_pad;

      /* If a part of the arg was put into registers,
	 don't include that part in the amount pushed.  */
      if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
				  / (PARM_BOUNDARY / BITS_PER_UNIT)
				  * (PARM_BOUNDARY / BITS_PER_UNIT));

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].size.constant;
      if (args[i].size.var)
	{
	  ADD_PARM_SIZE (*args_size, args[i].size.var);
	}

      /* Since the slot offset points to the bottom of the slot,
	 we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;

      args[i].slot_offset.constant = -args_size->constant;
      if (args_size->var)
	{
	  SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
	}
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }
}
1172
1173 /* Update ARGS_SIZE to contain the total size for the argument block.
1174 Return the original constant component of the argument block's size.
1175
1176 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1177 for arguments passed in registers. */
1178
static int
compute_argument_block_size (reg_parm_stack_space, args_size,
			     preferred_stack_boundary)
     int reg_parm_stack_space;
     struct args_size *args_size;
     int preferred_stack_boundary ATTRIBUTE_UNUSED;
{
  /* Remember the size before rounding and adjustments, so the caller can
     see how much padding this routine introduced.  */
  int unadjusted_args_size = args_size->constant;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      /* Variable-sized block: fold the constant part into the size tree
	 and work with the tree from here on.  */
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

#ifdef PREFERRED_STACK_BOUNDARY
      /* PREFERRED_STACK_BOUNDARY is in bits; convert to bytes.  */
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	args_size->var = round_up (args_size->var, preferred_stack_boundary);
#endif

      if (reg_parm_stack_space > 0)
	{
	  /* The block must be at least as big as the register-parameter
	     save area.  */
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  size_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size->var
	    = size_binop (MINUS_EXPR, args_size->var,
			  size_int (reg_parm_stack_space));
#endif
	}
    }
  else
    {
#ifdef PREFERRED_STACK_BOUNDARY
      preferred_stack_boundary /= BITS_PER_UNIT;
      /* Round the total (including any pending stack adjustment) up to the
	 preferred boundary, then subtract the pending adjustment back out,
	 since it will be applied separately.  */
      args_size->constant = (((args_size->constant
			       + pending_stack_adjust
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - pending_stack_adjust);
#endif

      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
	args_size->constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      /* Register-parameter space is allocated by the callee, so it does
	 not count toward the block we must allocate here.  */
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
1244
1245 /* Precompute parameters as needed for a function call.
1246
   IS_CONST indicates the target function is a `const' function, so its
   calls are cse'able.
1248
1249 MUST_PREALLOCATE indicates that we must preallocate stack space for
1250 any stack arguments.
1251
1252 NUM_ACTUALS is the number of arguments.
1253
1254 ARGS is an array containing information for each argument; this routine
1255 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1256
1257 ARGS_SIZE contains information about the size of the arg list. */
1258
static void
precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
     int is_const;
     int must_preallocate;
     int num_actuals;
     struct arg_data *args;
     struct args_size *args_size;
{
  int i;

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  */

  for (i = 0; i < num_actuals; i++)
    if (is_const
	|| ((args_size->var != 0 || args_size->constant != 0)
	    && calls_function (args[i].tree_value, 1))
	|| (must_preallocate
	    && (args_size->var != 0 || args_size->constant != 0)
	    && calls_function (args[i].tree_value, 0)))
      {
	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	push_temp_slots ();

	args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	/* Keep the temporary alive until after the call is emitted.  */
	preserve_temp_slots (args[i].value);
	pop_temp_slots ();

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].value, 0);

	/* Convert the value to the mode it will actually be passed in,
	   when that differs from the mode of its type (e.g. because of
	   argument promotion).  */
	if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
	  {
	    args[i].value
	      = convert_modes (args[i].mode,
			       TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			       args[i].value, args[i].unsignedp);
#ifdef PROMOTE_FOR_CALL_ONLY
	    /* CSE will replace this only if it contains args[i].value
	       pseudo, so convert it down to the declared mode using
	       a SUBREG.  */
	    if (GET_CODE (args[i].value) == REG
		&& GET_MODE_CLASS (args[i].mode) == MODE_INT)
	      {
		args[i].initial_value
		  = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
				    args[i].value, 0);
		SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
		SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
		  = args[i].unsignedp;
	      }
#endif
	  }
      }
}
1333
1334 /* Given the current state of MUST_PREALLOCATE and information about
1335 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1336 compute and return the final value for MUST_PREALLOCATE. */
1337
1338 static int
1339 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1340 int must_preallocate;
1341 int num_actuals;
1342 struct arg_data *args;
1343 struct args_size *args_size;
1344 {
1345 /* See if we have or want to preallocate stack space.
1346
1347 If we would have to push a partially-in-regs parm
1348 before other stack parms, preallocate stack space instead.
1349
1350 If the size of some parm is not a multiple of the required stack
1351 alignment, we must preallocate.
1352
1353 If the total size of arguments that would otherwise create a copy in
1354 a temporary (such as a CALL) is more than half the total argument list
1355 size, preallocation is faster.
1356
1357 Another reason to preallocate is if we have a machine (like the m88k)
1358 where stack alignment is required to be maintained between every
1359 pair of insns, not just when the call is made. However, we assume here
1360 that such machines either do not have push insns (and hence preallocation
1361 would occur anyway) or the problem is taken care of with
1362 PUSH_ROUNDING. */
1363
1364 if (! must_preallocate)
1365 {
1366 int partial_seen = 0;
1367 int copy_to_evaluate_size = 0;
1368 int i;
1369
1370 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1371 {
1372 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1373 partial_seen = 1;
1374 else if (partial_seen && args[i].reg == 0)
1375 must_preallocate = 1;
1376
1377 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1378 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1379 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1380 || TREE_CODE (args[i].tree_value) == COND_EXPR
1381 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1382 copy_to_evaluate_size
1383 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1384 }
1385
1386 if (copy_to_evaluate_size * 2 >= args_size->constant
1387 && args_size->constant > 0)
1388 must_preallocate = 1;
1389 }
1390 return must_preallocate;
1391 }
1392
1393 /* If we preallocated stack space, compute the address of each argument
1394 and store it into the ARGS array.
1395
1396 We need not ensure it is a valid memory address here; it will be
1397 validized when it is used.
1398
1399 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1400
1401 static void
1402 compute_argument_addresses (args, argblock, num_actuals)
1403 struct arg_data *args;
1404 rtx argblock;
1405 int num_actuals;
1406 {
1407 if (argblock)
1408 {
1409 rtx arg_reg = argblock;
1410 int i, arg_offset = 0;
1411
1412 if (GET_CODE (argblock) == PLUS)
1413 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1414
1415 for (i = 0; i < num_actuals; i++)
1416 {
1417 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1418 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1419 rtx addr;
1420
1421 /* Skip this parm if it will not be passed on the stack. */
1422 if (! args[i].pass_on_stack && args[i].reg != 0)
1423 continue;
1424
1425 if (GET_CODE (offset) == CONST_INT)
1426 addr = plus_constant (arg_reg, INTVAL (offset));
1427 else
1428 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1429
1430 addr = plus_constant (addr, arg_offset);
1431 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1432 MEM_SET_IN_STRUCT_P
1433 (args[i].stack,
1434 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1435
1436 if (GET_CODE (slot_offset) == CONST_INT)
1437 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1438 else
1439 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1440
1441 addr = plus_constant (addr, arg_offset);
1442 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1443 }
1444 }
1445 }
1446
1447 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1448 in a call instruction.
1449
1450 FNDECL is the tree node for the target function. For an indirect call
1451 FNDECL will be NULL_TREE.
1452
1453 EXP is the CALL_EXPR for this call. */
1454
1455 static rtx
1456 rtx_for_function_call (fndecl, exp)
1457 tree fndecl;
1458 tree exp;
1459 {
1460 rtx funexp;
1461
1462 /* Get the function to call, in the form of RTL. */
1463 if (fndecl)
1464 {
1465 /* If this is the first use of the function, see if we need to
1466 make an external definition for it. */
1467 if (! TREE_USED (fndecl))
1468 {
1469 assemble_external (fndecl);
1470 TREE_USED (fndecl) = 1;
1471 }
1472
1473 /* Get a SYMBOL_REF rtx for the function address. */
1474 funexp = XEXP (DECL_RTL (fndecl), 0);
1475 }
1476 else
1477 /* Generate an rtx (probably a pseudo-register) for the address. */
1478 {
1479 rtx funaddr;
1480 push_temp_slots ();
1481 funaddr = funexp =
1482 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1483 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1484
1485 /* Check the function is executable. */
1486 if (current_function_check_memory_usage)
1487 {
1488 #ifdef POINTERS_EXTEND_UNSIGNED
1489 /* It might be OK to convert funexp in place, but there's
1490 a lot going on between here and when it happens naturally
1491 that this seems safer. */
1492 funaddr = convert_memory_address (Pmode, funexp);
1493 #endif
1494 emit_library_call (chkr_check_exec_libfunc, 1,
1495 VOIDmode, 1,
1496 funaddr, Pmode);
1497 }
1498 emit_queue ();
1499 }
1500 return funexp;
1501 }
1502
1503 /* Do the register loads required for any wholly-register parms or any
1504 parms which are passed both on the stack and in a register. Their
1505 expressions were already evaluated.
1506
1507 Mark all register-parms as living through the call, putting these USE
1508 insns in the CALL_INSN_FUNCTION_USAGE field. */
1509
static void
load_register_parameters (args, num_actuals, call_fusage)
     struct arg_data *args;
     int num_actuals;
     rtx *call_fusage;
{
  int i, j;

#ifdef LOAD_ARGS_REVERSED
  for (i = num_actuals - 1; i >= 0; i--)
#else
  for (i = 0; i < num_actuals; i++)
#endif
    {
      rtx reg = args[i].reg;
      int partial = args[i].partial;
      int nregs;

      /* Skip arguments passed entirely on the stack.  */
      if (reg)
	{
	  /* Set to non-negative if must move a word at a time, even if just
	     one word (e.g, partial == 1 && mode == DFmode).  Set to -1 if
	     we just use a normal move insn.  This value can be zero if the
	     argument is a zero size structure with no fields.  */
	  nregs = (partial ? partial
		   : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
		      ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
			  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		      : -1));

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */

	  if (GET_CODE (reg) == PARALLEL)
	    {
	      emit_group_load (reg, args[i].value,
			       int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
			       (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
				/ BITS_PER_UNIT));
	    }

	  /* If simple case, just do move.  If normal partial, store_one_arg
	     has already loaded the register for us.  In all other cases,
	     load the register(s) from memory.  */

	  else if (nregs == -1)
	    emit_move_insn (reg, args[i].value);

	  /* If we have pre-computed the values to put in the registers in
	     the case of non-aligned structures, copy them in now.  */

	  else if (args[i].n_aligned_regs != 0)
	    for (j = 0; j < args[i].n_aligned_regs; j++)
	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
			      args[i].aligned_regs[j]);

	  else if (partial == 0 || args[i].pass_on_stack)
	    move_block_to_reg (REGNO (reg),
			       validize_mem (args[i].value), nregs,
			       args[i].mode);

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (reg) == PARALLEL)
	    use_group_regs (call_fusage, reg);
	  else if (nregs == -1)
	    use_reg (call_fusage, reg);
	  else
	    /* Mark the registers as live through the call; a zero-size
	       argument still marks one register.  */
	    use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
	}
    }
}
1582
1583 /* Generate all the code for a function call
1584 and return an rtx for its value.
1585 Store the value in TARGET (specified as an rtx) if convenient.
1586 If the value is stored in TARGET then TARGET is returned.
1587 If IGNORE is nonzero, then we ignore the value of the function call. */
1588
1589 rtx
1590 expand_call (exp, target, ignore)
1591 tree exp;
1592 rtx target;
1593 int ignore;
1594 {
1595 /* List of actual parameters. */
1596 tree actparms = TREE_OPERAND (exp, 1);
1597 /* RTX for the function to be called. */
1598 rtx funexp;
1599 /* Data type of the function. */
1600 tree funtype;
1601 /* Declaration of the function being called,
1602 or 0 if the function is computed (not known by name). */
1603 tree fndecl = 0;
1604 char *name = 0;
1605 rtx before_call;
1606
1607 /* Register in which non-BLKmode value will be returned,
1608 or 0 if no value or if value is BLKmode. */
1609 rtx valreg;
1610 /* Address where we should return a BLKmode value;
1611 0 if value not BLKmode. */
1612 rtx structure_value_addr = 0;
1613 /* Nonzero if that address is being passed by treating it as
1614 an extra, implicit first parameter. Otherwise,
1615 it is passed by being copied directly into struct_value_rtx. */
1616 int structure_value_addr_parm = 0;
1617 /* Size of aggregate value wanted, or zero if none wanted
1618 or if we are using the non-reentrant PCC calling convention
1619 or expecting the value in registers. */
1620 HOST_WIDE_INT struct_value_size = 0;
1621 /* Nonzero if called function returns an aggregate in memory PCC style,
1622 by returning the address of where to find it. */
1623 int pcc_struct_value = 0;
1624
1625 /* Number of actual parameters in this call, including struct value addr. */
1626 int num_actuals;
1627 /* Number of named args. Args after this are anonymous ones
1628 and they must all go on the stack. */
1629 int n_named_args;
1630
1631 /* Vector of information about each argument.
1632 Arguments are numbered in the order they will be pushed,
1633 not the order they are written. */
1634 struct arg_data *args;
1635
1636 /* Total size in bytes of all the stack-parms scanned so far. */
1637 struct args_size args_size;
1638 /* Size of arguments before any adjustments (such as rounding). */
1639 int unadjusted_args_size;
1640 /* Data on reg parms scanned so far. */
1641 CUMULATIVE_ARGS args_so_far;
1642 /* Nonzero if a reg parm has been scanned. */
1643 int reg_parm_seen;
1644 /* Nonzero if this is an indirect function call. */
1645
1646 /* Nonzero if we must avoid push-insns in the args for this call.
1647 If stack space is allocated for register parameters, but not by the
1648 caller, then it is preallocated in the fixed part of the stack frame.
1649 So the entire argument block must then be preallocated (i.e., we
1650 ignore PUSH_ROUNDING in that case). */
1651
1652 #ifdef PUSH_ROUNDING
1653 int must_preallocate = 0;
1654 #else
1655 int must_preallocate = 1;
1656 #endif
1657
1658 /* Size of the stack reserved for parameter registers. */
1659 int reg_parm_stack_space = 0;
1660
1661 /* Address of space preallocated for stack parms
1662 (on machines that lack push insns), or 0 if space not preallocated. */
1663 rtx argblock = 0;
1664
1665 /* Nonzero if it is plausible that this is a call to alloca. */
1666 int may_be_alloca;
1667 /* Nonzero if this is a call to malloc or a related function. */
1668 int is_malloc;
1669 /* Nonzero if this is a call to setjmp or a related function. */
1670 int returns_twice;
1671 /* Nonzero if this is a call to `longjmp'. */
1672 int is_longjmp;
1673 /* Nonzero if this is a syscall that makes a new process in the image of
1674 the current one. */
1675 int fork_or_exec;
1676 /* Nonzero if this is a call to an inline function. */
1677 int is_integrable = 0;
1678 /* Nonzero if this is a call to a `const' function.
1679 Note that only explicitly named functions are handled as `const' here. */
1680 int is_const = 0;
1681 /* Nonzero if this is a call to a `volatile' function. */
1682 int is_volatile = 0;
1683 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1684 /* Define the boundary of the register parm stack space that needs to be
1685 save, if any. */
1686 int low_to_save = -1, high_to_save;
1687 rtx save_area = 0; /* Place that it is saved */
1688 #endif
1689
1690 #ifdef ACCUMULATE_OUTGOING_ARGS
1691 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1692 char *initial_stack_usage_map = stack_usage_map;
1693 int old_stack_arg_under_construction = 0;
1694 #endif
1695
1696 rtx old_stack_level = 0;
1697 int old_pending_adj = 0;
1698 int old_inhibit_defer_pop = inhibit_defer_pop;
1699 rtx call_fusage = 0;
1700 register tree p;
1701 register int i;
1702 #ifdef PREFERRED_STACK_BOUNDARY
1703 int preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1704 #else
1705 /* In this case preferred_stack_boundary variable is meaningless.
1706 It is used only in order to keep ifdef noise down when calling
1707 compute_argument_block_size. */
1708 int preferred_stack_boundary = 0;
1709 #endif
1710
1711 /* The value of the function call can be put in a hard register. But
1712 if -fcheck-memory-usage, code which invokes functions (and thus
1713 damages some hard registers) can be inserted before using the value.
1714 So, target is always a pseudo-register in that case. */
1715 if (current_function_check_memory_usage)
1716 target = 0;
1717
1718 /* See if we can find a DECL-node for the actual function.
1719 As a result, decide whether this is a call to an integrable function. */
1720
1721 p = TREE_OPERAND (exp, 0);
1722 if (TREE_CODE (p) == ADDR_EXPR)
1723 {
1724 fndecl = TREE_OPERAND (p, 0);
1725 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1726 fndecl = 0;
1727 else
1728 {
1729 if (!flag_no_inline
1730 && fndecl != current_function_decl
1731 && DECL_INLINE (fndecl)
1732 && DECL_SAVED_INSNS (fndecl)
1733 && DECL_SAVED_INSNS (fndecl)->inlinable)
1734 is_integrable = 1;
1735 else if (! TREE_ADDRESSABLE (fndecl))
1736 {
1737 /* In case this function later becomes inlinable,
1738 record that there was already a non-inline call to it.
1739
1740 Use abstraction instead of setting TREE_ADDRESSABLE
1741 directly. */
1742 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1743 && optimize > 0)
1744 {
1745 warning_with_decl (fndecl, "can't inline call to `%s'");
1746 warning ("called from here");
1747 }
1748 mark_addressable (fndecl);
1749 }
1750
1751 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1752 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1753 is_const = 1;
1754
1755 if (TREE_THIS_VOLATILE (fndecl))
1756 is_volatile = 1;
1757 }
1758 }
1759
1760 /* If we don't have specific function to call, see if we have a
1761 constant or `noreturn' function from the type. */
1762 if (fndecl == 0)
1763 {
1764 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1765 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1766 }
1767
1768 #ifdef REG_PARM_STACK_SPACE
1769 #ifdef MAYBE_REG_PARM_STACK_SPACE
1770 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1771 #else
1772 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1773 #endif
1774 #endif
1775
1776 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1777 if (reg_parm_stack_space > 0)
1778 must_preallocate = 1;
1779 #endif
1780
1781 /* Warn if this value is an aggregate type,
1782 regardless of which calling convention we are using for it. */
1783 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1784 warning ("function call has aggregate value");
1785
1786 /* Set up a place to return a structure. */
1787
1788 /* Cater to broken compilers. */
1789 if (aggregate_value_p (exp))
1790 {
1791 /* This call returns a big structure. */
1792 is_const = 0;
1793
1794 #ifdef PCC_STATIC_STRUCT_RETURN
1795 {
1796 pcc_struct_value = 1;
1797 /* Easier than making that case work right. */
1798 if (is_integrable)
1799 {
1800 /* In case this is a static function, note that it has been
1801 used. */
1802 if (! TREE_ADDRESSABLE (fndecl))
1803 mark_addressable (fndecl);
1804 is_integrable = 0;
1805 }
1806 }
1807 #else /* not PCC_STATIC_STRUCT_RETURN */
1808 {
1809 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1810
1811 if (target && GET_CODE (target) == MEM)
1812 structure_value_addr = XEXP (target, 0);
1813 else
1814 {
1815 /* Assign a temporary to hold the value. */
1816 tree d;
1817
1818 /* For variable-sized objects, we must be called with a target
1819 specified. If we were to allocate space on the stack here,
1820 we would have no way of knowing when to free it. */
1821
1822 if (struct_value_size < 0)
1823 abort ();
1824
1825 /* This DECL is just something to feed to mark_addressable;
1826 it doesn't get pushed. */
1827 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1828 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1829 mark_addressable (d);
1830 mark_temp_addr_taken (DECL_RTL (d));
1831 structure_value_addr = XEXP (DECL_RTL (d), 0);
1832 TREE_USED (d) = 1;
1833 target = 0;
1834 }
1835 }
1836 #endif /* not PCC_STATIC_STRUCT_RETURN */
1837 }
1838
1839 /* If called function is inline, try to integrate it. */
1840
1841 if (is_integrable)
1842 {
1843 rtx temp;
1844
1845 #ifdef ACCUMULATE_OUTGOING_ARGS
1846 before_call = get_last_insn ();
1847 #endif
1848
1849 temp = expand_inline_function (fndecl, actparms, target,
1850 ignore, TREE_TYPE (exp),
1851 structure_value_addr);
1852
1853 /* If inlining succeeded, return. */
1854 if (temp != (rtx) (HOST_WIDE_INT) -1)
1855 {
1856 #ifdef ACCUMULATE_OUTGOING_ARGS
1857 /* If the outgoing argument list must be preserved, push
1858 the stack before executing the inlined function if it
1859 makes any calls. */
1860
1861 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1862 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1863 break;
1864
1865 if (stack_arg_under_construction || i >= 0)
1866 {
1867 rtx first_insn
1868 = before_call ? NEXT_INSN (before_call) : get_insns ();
1869 rtx insn = NULL_RTX, seq;
1870
1871 /* Look for a call in the inline function code.
1872 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1873 nonzero then there is a call and it is not necessary
1874 to scan the insns. */
1875
1876 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1877 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1878 if (GET_CODE (insn) == CALL_INSN)
1879 break;
1880
1881 if (insn)
1882 {
1883 /* Reserve enough stack space so that the largest
1884 argument list of any function call in the inline
1885 function does not overlap the argument list being
1886 evaluated. This is usually an overestimate because
1887 allocate_dynamic_stack_space reserves space for an
1888 outgoing argument list in addition to the requested
1889 space, but there is no way to ask for stack space such
1890 that an argument list of a certain length can be
1891 safely constructed.
1892
1893 Add the stack space reserved for register arguments, if
1894 any, in the inline function. What is really needed is the
1895 largest value of reg_parm_stack_space in the inline
1896 function, but that is not available. Using the current
1897 value of reg_parm_stack_space is wrong, but gives
1898 correct results on all supported machines. */
1899
1900 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1901 + reg_parm_stack_space);
1902
1903 start_sequence ();
1904 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1905 allocate_dynamic_stack_space (GEN_INT (adjust),
1906 NULL_RTX, BITS_PER_UNIT);
1907 seq = get_insns ();
1908 end_sequence ();
1909 emit_insns_before (seq, first_insn);
1910 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1911 }
1912 }
1913 #endif
1914
1915 /* If the result is equivalent to TARGET, return TARGET to simplify
1916 checks in store_expr. They can be equivalent but not equal in the
1917 case of a function that returns BLKmode. */
1918 if (temp != target && rtx_equal_p (temp, target))
1919 return target;
1920 return temp;
1921 }
1922
1923 /* If inlining failed, mark FNDECL as needing to be compiled
1924 separately after all. If function was declared inline,
1925 give a warning. */
1926 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1927 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1928 {
1929 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1930 warning ("called from here");
1931 }
1932 mark_addressable (fndecl);
1933 }
1934
1935 function_call_count++;
1936
1937 if (fndecl && DECL_NAME (fndecl))
1938 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1939
1940 /* Ensure current function's preferred stack boundary is at least
1941 what we need. We don't have to increase alignment for recursive
1942 functions. */
1943 if (cfun->preferred_stack_boundary < preferred_stack_boundary
1944 && fndecl != current_function_decl)
1945 cfun->preferred_stack_boundary = preferred_stack_boundary;
1946
1947 /* See if this is a call to a function that can return more than once
1948 or a call to longjmp or malloc. */
1949 special_function_p (fndecl, &returns_twice, &is_longjmp, &fork_or_exec,
1950 &is_malloc, &may_be_alloca);
1951
1952 if (may_be_alloca)
1953 current_function_calls_alloca = 1;
1954
1955 /* Operand 0 is a pointer-to-function; get the type of the function. */
1956 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1957 if (! POINTER_TYPE_P (funtype))
1958 abort ();
1959 funtype = TREE_TYPE (funtype);
1960
1961 /* When calling a const function, we must pop the stack args right away,
1962 so that the pop is deleted or moved with the call. */
1963 if (is_const)
1964 NO_DEFER_POP;
1965
1966 /* Don't let pending stack adjusts add up to too much.
1967 Also, do all pending adjustments now
1968 if there is any chance this might be a call to alloca. */
1969
1970 if (pending_stack_adjust >= 32
1971 || (pending_stack_adjust > 0 && may_be_alloca))
1972 do_pending_stack_adjust ();
1973
1974 if (profile_arc_flag && fork_or_exec)
1975 {
1976 /* A fork duplicates the profile information, and an exec discards
1977 it. We can't rely on fork/exec to be paired. So write out the
1978 profile information we have gathered so far, and clear it. */
1979 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
1980 VOIDmode, 0);
1981
1982 /* ??? When __clone is called with CLONE_VM set, profiling is
1983 subject to race conditions, just as with multithreaded programs. */
1984 }
1985
1986 /* Push the temporary stack slot level so that we can free any temporaries
1987 we make. */
1988 push_temp_slots ();
1989
1990 /* Start updating where the next arg would go.
1991
1992 On some machines (such as the PA) indirect calls have a different
1993 calling convention than normal calls. The last argument in
1994 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1995 or not. */
1996 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1997
1998 /* If struct_value_rtx is 0, it means pass the address
1999 as if it were an extra parameter. */
2000 if (structure_value_addr && struct_value_rtx == 0)
2001 {
2002 /* If structure_value_addr is a REG other than
 2003 virtual_outgoing_args_rtx, we can always use it. If it
2004 is not a REG, we must always copy it into a register.
2005 If it is virtual_outgoing_args_rtx, we must copy it to another
2006 register in some cases. */
2007 rtx temp = (GET_CODE (structure_value_addr) != REG
2008 #ifdef ACCUMULATE_OUTGOING_ARGS
2009 || (stack_arg_under_construction
2010 && structure_value_addr == virtual_outgoing_args_rtx)
2011 #endif
2012 ? copy_addr_to_reg (structure_value_addr)
2013 : structure_value_addr);
2014
2015 actparms
2016 = tree_cons (error_mark_node,
2017 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2018 temp),
2019 actparms);
2020 structure_value_addr_parm = 1;
2021 }
2022
2023 /* Count the arguments and set NUM_ACTUALS. */
2024 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
2025 num_actuals = i;
2026
2027 /* Compute number of named args.
2028 Normally, don't include the last named arg if anonymous args follow.
2029 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2030 (If no anonymous args follow, the result of list_length is actually
2031 one too large. This is harmless.)
2032
2033 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2034 zero, this machine will be able to place unnamed args that were passed in
2035 registers into the stack. So treat all args as named. This allows the
2036 insns emitting for a specific argument list to be independent of the
2037 function declaration.
2038
2039 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
2040 way to pass unnamed args in registers, so we must force them into
2041 memory. */
2042
2043 if ((STRICT_ARGUMENT_NAMING
2044 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2045 && TYPE_ARG_TYPES (funtype) != 0)
2046 n_named_args
2047 = (list_length (TYPE_ARG_TYPES (funtype))
2048 /* Don't include the last named arg. */
2049 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2050 /* Count the struct value address, if it is passed as a parm. */
2051 + structure_value_addr_parm);
2052 else
2053 /* If we know nothing, treat all args as named. */
2054 n_named_args = num_actuals;
2055
2056 /* Make a vector to hold all the information about each arg. */
2057 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
2058 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
2059
 2060 /* Build up entries in the ARGS array, compute the size of the arguments
2061 into ARGS_SIZE, etc. */
2062 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
2063 actparms, fndecl, &args_so_far,
2064 reg_parm_stack_space, &old_stack_level,
2065 &old_pending_adj, &must_preallocate,
2066 &is_const);
2067
2068 #ifdef FINAL_REG_PARM_STACK_SPACE
2069 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2070 args_size.var);
2071 #endif
2072
2073 if (args_size.var)
2074 {
2075 /* If this function requires a variable-sized argument list, don't try to
2076 make a cse'able block for this call. We may be able to do this
2077 eventually, but it is too complicated to keep track of what insns go
2078 in the cse'able block and which don't. */
2079
2080 is_const = 0;
2081 must_preallocate = 1;
2082 }
2083
2084 /* Compute the actual size of the argument block required. The variable
2085 and constant sizes must be combined, the size may have to be rounded,
2086 and there may be a minimum required size. */
2087 unadjusted_args_size
2088 = compute_argument_block_size (reg_parm_stack_space, &args_size,
2089 preferred_stack_boundary);
2090
2091 /* Now make final decision about preallocating stack space. */
2092 must_preallocate = finalize_must_preallocate (must_preallocate,
2093 num_actuals, args, &args_size);
2094
2095 /* If the structure value address will reference the stack pointer, we must
2096 stabilize it. We don't need to do this if we know that we are not going
2097 to adjust the stack pointer in processing this call. */
2098
2099 if (structure_value_addr
2100 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2101 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2102 && (args_size.var
2103 #ifndef ACCUMULATE_OUTGOING_ARGS
2104 || args_size.constant
2105 #endif
2106 ))
2107 structure_value_addr = copy_to_reg (structure_value_addr);
2108
2109 /* Precompute any arguments as needed. */
2110 precompute_arguments (is_const, must_preallocate, num_actuals,
2111 args, &args_size);
2112
2113 /* Now we are about to start emitting insns that can be deleted
2114 if a libcall is deleted. */
2115 if (is_const || is_malloc)
2116 start_sequence ();
2117
2118 /* If we have no actual push instructions, or shouldn't use them,
2119 make space for all args right now. */
2120
2121 if (args_size.var != 0)
2122 {
2123 if (old_stack_level == 0)
2124 {
2125 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2126 old_pending_adj = pending_stack_adjust;
2127 pending_stack_adjust = 0;
2128 #ifdef ACCUMULATE_OUTGOING_ARGS
2129 /* stack_arg_under_construction says whether a stack arg is
2130 being constructed at the old stack level. Pushing the stack
2131 gets a clean outgoing argument block. */
2132 old_stack_arg_under_construction = stack_arg_under_construction;
2133 stack_arg_under_construction = 0;
2134 #endif
2135 }
2136 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2137 }
2138 else
2139 {
2140 /* Note that we must go through the motions of allocating an argument
2141 block even if the size is zero because we may be storing args
2142 in the area reserved for register arguments, which may be part of
2143 the stack frame. */
2144
2145 int needed = args_size.constant;
2146
2147 /* Store the maximum argument space used. It will be pushed by
2148 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2149 checking). */
2150
2151 if (needed > current_function_outgoing_args_size)
2152 current_function_outgoing_args_size = needed;
2153
2154 if (must_preallocate)
2155 {
2156 #ifdef ACCUMULATE_OUTGOING_ARGS
2157 /* Since the stack pointer will never be pushed, it is possible for
2158 the evaluation of a parm to clobber something we have already
2159 written to the stack. Since most function calls on RISC machines
2160 do not use the stack, this is uncommon, but must work correctly.
2161
2162 Therefore, we save any area of the stack that was already written
2163 and that we are using. Here we set up to do this by making a new
2164 stack usage map from the old one. The actual save will be done
2165 by store_one_arg.
2166
2167 Another approach might be to try to reorder the argument
2168 evaluations to avoid this conflicting stack usage. */
2169
2170 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2171 /* Since we will be writing into the entire argument area, the
2172 map must be allocated for its entire size, not just the part that
2173 is the responsibility of the caller. */
2174 needed += reg_parm_stack_space;
2175 #endif
2176
2177 #ifdef ARGS_GROW_DOWNWARD
2178 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2179 needed + 1);
2180 #else
2181 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2182 needed);
2183 #endif
2184 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2185
2186 if (initial_highest_arg_in_use)
2187 bcopy (initial_stack_usage_map, stack_usage_map,
2188 initial_highest_arg_in_use);
2189
2190 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2191 bzero (&stack_usage_map[initial_highest_arg_in_use],
2192 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2193 needed = 0;
2194
2195 /* The address of the outgoing argument list must not be copied to a
2196 register here, because argblock would be left pointing to the
2197 wrong place after the call to allocate_dynamic_stack_space below.
2198 */
2199
2200 argblock = virtual_outgoing_args_rtx;
2201
2202 #else /* not ACCUMULATE_OUTGOING_ARGS */
2203 if (inhibit_defer_pop == 0)
2204 {
2205 /* Try to reuse some or all of the pending_stack_adjust
2206 to get this space. Maybe we can avoid any pushing. */
2207 if (needed > pending_stack_adjust)
2208 {
2209 needed -= pending_stack_adjust;
2210 pending_stack_adjust = 0;
2211 }
2212 else
2213 {
2214 pending_stack_adjust -= needed;
2215 needed = 0;
2216 }
2217 }
2218 /* Special case this because overhead of `push_block' in this
2219 case is non-trivial. */
2220 if (needed == 0)
2221 argblock = virtual_outgoing_args_rtx;
2222 else
2223 argblock = push_block (GEN_INT (needed), 0, 0);
2224
2225 /* We only really need to call `copy_to_reg' in the case where push
2226 insns are going to be used to pass ARGBLOCK to a function
2227 call in ARGS. In that case, the stack pointer changes value
2228 from the allocation point to the call point, and hence
2229 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2230 But might as well always do it. */
2231 argblock = copy_to_reg (argblock);
2232 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2233 }
2234 }
2235
2236 #ifdef ACCUMULATE_OUTGOING_ARGS
2237 /* The save/restore code in store_one_arg handles all cases except one:
2238 a constructor call (including a C function returning a BLKmode struct)
2239 to initialize an argument. */
2240 if (stack_arg_under_construction)
2241 {
2242 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2243 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2244 #else
2245 rtx push_size = GEN_INT (args_size.constant);
2246 #endif
2247 if (old_stack_level == 0)
2248 {
2249 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2250 old_pending_adj = pending_stack_adjust;
2251 pending_stack_adjust = 0;
2252 /* stack_arg_under_construction says whether a stack arg is
2253 being constructed at the old stack level. Pushing the stack
2254 gets a clean outgoing argument block. */
2255 old_stack_arg_under_construction = stack_arg_under_construction;
2256 stack_arg_under_construction = 0;
2257 /* Make a new map for the new argument list. */
2258 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2259 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2260 highest_outgoing_arg_in_use = 0;
2261 }
2262 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2263 }
2264 /* If argument evaluation might modify the stack pointer, copy the
2265 address of the argument list to a register. */
2266 for (i = 0; i < num_actuals; i++)
2267 if (args[i].pass_on_stack)
2268 {
2269 argblock = copy_addr_to_reg (argblock);
2270 break;
2271 }
2272 #endif
2273
2274 compute_argument_addresses (args, argblock, num_actuals);
2275
2276 #ifdef PUSH_ARGS_REVERSED
2277 #ifdef PREFERRED_STACK_BOUNDARY
2278 /* If we push args individually in reverse order, perform stack alignment
2279 before the first push (the last arg). */
2280 if (argblock == 0)
2281 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2282 #endif
2283 #endif
2284
2285 /* Don't try to defer pops if preallocating, not even from the first arg,
2286 since ARGBLOCK probably refers to the SP. */
2287 if (argblock)
2288 NO_DEFER_POP;
2289
2290 funexp = rtx_for_function_call (fndecl, exp);
2291
2292 /* Figure out the register where the value, if any, will come back. */
2293 valreg = 0;
2294 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2295 && ! structure_value_addr)
2296 {
2297 if (pcc_struct_value)
2298 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2299 fndecl, 0);
2300 else
2301 valreg = hard_function_value (TREE_TYPE (exp), fndecl, 0);
2302 }
2303
2304 /* Precompute all register parameters. It isn't safe to compute anything
2305 once we have started filling any specific hard regs. */
2306 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2307
2308 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2309
2310 /* Save the fixed argument area if it's part of the caller's frame and
2311 is clobbered by argument setup for this call. */
2312 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2313 &low_to_save, &high_to_save);
2314 #endif
2315
2316
2317 /* Now store (and compute if necessary) all non-register parms.
2318 These come before register parms, since they can require block-moves,
2319 which could clobber the registers used for register parms.
2320 Parms which have partial registers are not stored here,
2321 but we do preallocate space here if they want that. */
2322
2323 for (i = 0; i < num_actuals; i++)
2324 if (args[i].reg == 0 || args[i].pass_on_stack)
2325 store_one_arg (&args[i], argblock, may_be_alloca,
2326 args_size.var != 0, reg_parm_stack_space);
2327
2328 /* If we have a parm that is passed in registers but not in memory
2329 and whose alignment does not permit a direct copy into registers,
2330 make a group of pseudos that correspond to each register that we
2331 will later fill. */
2332 if (STRICT_ALIGNMENT)
2333 store_unaligned_arguments_into_pseudos (args, num_actuals);
2334
2335 /* Now store any partially-in-registers parm.
2336 This is the last place a block-move can happen. */
2337 if (reg_parm_seen)
2338 for (i = 0; i < num_actuals; i++)
2339 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2340 store_one_arg (&args[i], argblock, may_be_alloca,
2341 args_size.var != 0, reg_parm_stack_space);
2342
2343 #ifndef PUSH_ARGS_REVERSED
2344 #ifdef PREFERRED_STACK_BOUNDARY
2345 /* If we pushed args in forward order, perform stack alignment
2346 after pushing the last arg. */
2347 if (argblock == 0)
2348 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2349 #endif
2350 #endif
2351
2352 /* If register arguments require space on the stack and stack space
2353 was not preallocated, allocate stack space here for arguments
2354 passed in registers. */
2355 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2356 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2357 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2358 #endif
2359
2360 /* Pass the function the address in which to return a structure value. */
2361 if (structure_value_addr && ! structure_value_addr_parm)
2362 {
2363 emit_move_insn (struct_value_rtx,
2364 force_reg (Pmode,
2365 force_operand (structure_value_addr,
2366 NULL_RTX)));
2367
2368 /* Mark the memory for the aggregate as write-only. */
2369 if (current_function_check_memory_usage)
2370 emit_library_call (chkr_set_right_libfunc, 1,
2371 VOIDmode, 3,
2372 structure_value_addr, Pmode,
2373 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2374 GEN_INT (MEMORY_USE_WO),
2375 TYPE_MODE (integer_type_node));
2376
2377 if (GET_CODE (struct_value_rtx) == REG)
2378 use_reg (&call_fusage, struct_value_rtx);
2379 }
2380
2381 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2382
2383 load_register_parameters (args, num_actuals, &call_fusage);
2384
2385 /* Perform postincrements before actually calling the function. */
2386 emit_queue ();
2387
2388 /* Save a pointer to the last insn before the call, so that we can
2389 later safely search backwards to find the CALL_INSN. */
2390 before_call = get_last_insn ();
2391
2392 /* All arguments and registers used for the call must be set up by now! */
2393
2394 /* Generate the actual call instruction. */
2395 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2396 args_size.constant, struct_value_size,
2397 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2398 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2399
2400 /* If call is cse'able, make appropriate pair of reg-notes around it.
2401 Test valreg so we don't crash; may safely ignore `const'
2402 if return type is void. Disable for PARALLEL return values, because
2403 we have no way to move such values into a pseudo register. */
2404 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2405 {
2406 rtx note = 0;
2407 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2408 rtx insns;
2409
2410 /* Mark the return value as a pointer if needed. */
2411 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2412 {
2413 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2414 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2415 }
2416
2417 /* Construct an "equal form" for the value which mentions all the
2418 arguments in order as well as the function name. */
2419 #ifdef PUSH_ARGS_REVERSED
2420 for (i = 0; i < num_actuals; i++)
2421 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2422 #else
2423 for (i = num_actuals - 1; i >= 0; i--)
2424 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2425 #endif
2426 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2427
2428 insns = get_insns ();
2429 end_sequence ();
2430
2431 emit_libcall_block (insns, temp, valreg, note);
2432
2433 valreg = temp;
2434 }
2435 else if (is_const)
2436 {
2437 /* Otherwise, just write out the sequence without a note. */
2438 rtx insns = get_insns ();
2439
2440 end_sequence ();
2441 emit_insns (insns);
2442 }
2443 else if (is_malloc)
2444 {
2445 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2446 rtx last, insns;
2447
2448 /* The return value from a malloc-like function is a pointer. */
2449 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2450 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2451
2452 emit_move_insn (temp, valreg);
2453
2454 /* The return value from a malloc-like function can not alias
2455 anything else. */
2456 last = get_last_insn ();
2457 REG_NOTES (last) =
2458 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2459
2460 /* Write out the sequence. */
2461 insns = get_insns ();
2462 end_sequence ();
2463 emit_insns (insns);
2464 valreg = temp;
2465 }
2466
2467 /* For calls to `setjmp', etc., inform flow.c it should complain
2468 if nonvolatile values are live. */
2469
2470 if (returns_twice)
2471 {
2472 /* The NOTE_INSN_SETJMP note must be emitted immediately after the
2473 CALL_INSN. Some ports emit more than just a CALL_INSN above, so
2474 we must search for it here. */
2475 rtx last = get_last_insn ();
2476 while (GET_CODE (last) != CALL_INSN)
2477 {
2478 last = PREV_INSN (last);
2479 /* There was no CALL_INSN? */
2480 if (last == before_call)
2481 abort ();
2482 }
2483 emit_note_after (NOTE_INSN_SETJMP, last);
2484 current_function_calls_setjmp = 1;
2485 }
2486
2487 if (is_longjmp)
2488 current_function_calls_longjmp = 1;
2489
2490 /* Notice functions that cannot return.
2491 If optimizing, insns emitted below will be dead.
2492 If not optimizing, they will exist, which is useful
2493 if the user uses the `return' command in the debugger. */
2494
2495 if (is_volatile || is_longjmp)
2496 emit_barrier ();
2497
2498 /* If value type not void, return an rtx for the value. */
2499
2500 /* If there are cleanups to be called, don't use a hard reg as target.
2501 We need to double check this and see if it matters anymore. */
2502 if (any_pending_cleanups (1)
2503 && target && REG_P (target)
2504 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2505 target = 0;
2506
2507 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2508 || ignore)
2509 {
2510 target = const0_rtx;
2511 }
2512 else if (structure_value_addr)
2513 {
2514 if (target == 0 || GET_CODE (target) != MEM)
2515 {
2516 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2517 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2518 structure_value_addr));
2519 MEM_SET_IN_STRUCT_P (target,
2520 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2521 }
2522 }
2523 else if (pcc_struct_value)
2524 {
2525 /* This is the special C++ case where we need to
2526 know what the true target was. We take care to
2527 never use this value more than once in one expression. */
2528 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2529 copy_to_reg (valreg));
2530 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2531 }
2532 /* Handle calls that return values in multiple non-contiguous locations.
2533 The Irix 6 ABI has examples of this. */
2534 else if (GET_CODE (valreg) == PARALLEL)
2535 {
2536 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2537
2538 if (target == 0)
2539 {
2540 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2541 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2542 preserve_temp_slots (target);
2543 }
2544
2545 if (! rtx_equal_p (target, valreg))
2546 emit_group_store (target, valreg, bytes,
2547 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2548 }
2549 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2550 && GET_MODE (target) == GET_MODE (valreg))
2551 /* TARGET and VALREG cannot be equal at this point because the latter
2552 would not have REG_FUNCTION_VALUE_P true, while the former would if
2553 it were referring to the same register.
2554
2555 If they refer to the same register, this move will be a no-op, except
2556 when function inlining is being done. */
2557 emit_move_insn (target, valreg);
2558 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2559 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2560 else
2561 target = copy_to_reg (valreg);
2562
2563 #ifdef PROMOTE_FUNCTION_RETURN
2564 /* If we promoted this return value, make the proper SUBREG. TARGET
2565 might be const0_rtx here, so be careful. */
2566 if (GET_CODE (target) == REG
2567 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2568 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2569 {
2570 tree type = TREE_TYPE (exp);
2571 int unsignedp = TREE_UNSIGNED (type);
2572
2573 /* If we don't promote as expected, something is wrong. */
2574 if (GET_MODE (target)
2575 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2576 abort ();
2577
2578 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2579 SUBREG_PROMOTED_VAR_P (target) = 1;
2580 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2581 }
2582 #endif
2583
2584 /* If size of args is variable or this was a constructor call for a stack
2585 argument, restore saved stack-pointer value. */
2586
2587 if (old_stack_level)
2588 {
2589 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2590 pending_stack_adjust = old_pending_adj;
2591 #ifdef ACCUMULATE_OUTGOING_ARGS
2592 stack_arg_under_construction = old_stack_arg_under_construction;
2593 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2594 stack_usage_map = initial_stack_usage_map;
2595 #endif
2596 }
2597 #ifdef ACCUMULATE_OUTGOING_ARGS
2598 else
2599 {
2600 #ifdef REG_PARM_STACK_SPACE
2601 if (save_area)
2602 restore_fixed_argument_area (save_area, argblock,
2603 high_to_save, low_to_save);
2604 #endif
2605
2606 /* If we saved any argument areas, restore them. */
2607 for (i = 0; i < num_actuals; i++)
2608 if (args[i].save_area)
2609 {
2610 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2611 rtx stack_area
2612 = gen_rtx_MEM (save_mode,
2613 memory_address (save_mode,
2614 XEXP (args[i].stack_slot, 0)));
2615
2616 if (save_mode != BLKmode)
2617 emit_move_insn (stack_area, args[i].save_area);
2618 else
2619 emit_block_move (stack_area, validize_mem (args[i].save_area),
2620 GEN_INT (args[i].size.constant),
2621 PARM_BOUNDARY / BITS_PER_UNIT);
2622 }
2623
2624 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2625 stack_usage_map = initial_stack_usage_map;
2626 }
2627 #endif
2628
2629 /* If this was alloca, record the new stack level for nonlocal gotos.
2630 Check for the handler slots since we might not have a save area
2631 for non-local gotos. */
2632
2633 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2634 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2635
2636 pop_temp_slots ();
2637
2638 /* Free up storage we no longer need. */
2639 for (i = 0; i < num_actuals; ++i)
2640 if (args[i].aligned_regs)
2641 free (args[i].aligned_regs);
2642
2643 return target;
2644 }
2645 \f
2646 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2647 (emitting the queue unless NO_QUEUE is nonzero),
2648 for a value of mode OUTMODE,
2649 with NARGS different arguments, passed as alternating rtx values
2650 and machine_modes to convert them to.
2651 The rtx values should have been passed through protect_from_queue already.
2652
2653 NO_QUEUE will be true if and only if the library call is a `const' call
2654 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2655 to the variable is_const in expand_call.
2656
2657 NO_QUEUE must be true for const calls, because if it isn't, then
2658 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2659 and will be lost if the libcall sequence is optimized away.
2660
2661 NO_QUEUE must be false for non-const calls, because if it isn't, the
2662 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2663 optimized. For instance, the instruction scheduler may incorrectly
2664 move memory references across the non-const call. */
2665
2666 void
2667 emit_library_call VPARAMS((rtx orgfun, int no_queue, enum machine_mode outmode,
2668 int nargs, ...))
2669 {
2670 #ifndef ANSI_PROTOTYPES
2671 rtx orgfun;
2672 int no_queue;
2673 enum machine_mode outmode;
2674 int nargs;
2675 #endif
2676 va_list p;
2677 /* Total size in bytes of all the stack-parms scanned so far. */
2678 struct args_size args_size;
2679 /* Size of arguments before any adjustments (such as rounding). */
2680 struct args_size original_args_size;
2681 register int argnum;
2682 rtx fun;
2683 int inc;
2684 int count;
2685 struct args_size alignment_pad;
2686 rtx argblock = 0;
2687 CUMULATIVE_ARGS args_so_far;
2688 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2689 struct args_size offset; struct args_size size; rtx save_area; };
2690 struct arg *argvec;
2691 int old_inhibit_defer_pop = inhibit_defer_pop;
2692 rtx call_fusage = 0;
2693 int reg_parm_stack_space = 0;
2694 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
 2695 /* Define the boundary of the register parm stack space that needs to be
 2696 saved, if any. */
2697 int low_to_save = -1, high_to_save = 0;
2698 rtx save_area = 0; /* Place that it is saved */
2699 #endif
2700
2701 #ifdef ACCUMULATE_OUTGOING_ARGS
2702 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2703 char *initial_stack_usage_map = stack_usage_map;
2704 int needed;
2705 #endif
2706
2707 #ifdef REG_PARM_STACK_SPACE
2708 /* Size of the stack reserved for parameter registers. */
2709 #ifdef MAYBE_REG_PARM_STACK_SPACE
2710 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2711 #else
2712 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
2713 #endif
2714 #endif
2715
2716 VA_START (p, nargs);
2717
2718 #ifndef ANSI_PROTOTYPES
2719 orgfun = va_arg (p, rtx);
2720 no_queue = va_arg (p, int);
2721 outmode = va_arg (p, enum machine_mode);
2722 nargs = va_arg (p, int);
2723 #endif
2724
2725 fun = orgfun;
2726
2727 /* Copy all the libcall-arguments out of the varargs data
2728 and into a vector ARGVEC.
2729
2730 Compute how to pass each argument. We only support a very small subset
2731 of the full argument passing conventions to limit complexity here since
2732 library functions shouldn't have many args. */
2733
2734 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2735 bzero ((char *) argvec, nargs * sizeof (struct arg));
2736
2737
2738 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2739
2740 args_size.constant = 0;
2741 args_size.var = 0;
2742
2743 push_temp_slots ();
2744
2745 #ifdef PREFERRED_STACK_BOUNDARY
2746 /* Ensure current function's preferred stack boundary is at least
2747 what we need. */
2748 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
2749 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2750 #endif
2751
2752 for (count = 0; count < nargs; count++)
2753 {
2754 rtx val = va_arg (p, rtx);
2755 enum machine_mode mode = va_arg (p, enum machine_mode);
2756
2757 /* We cannot convert the arg value to the mode the library wants here;
2758 must do it earlier where we know the signedness of the arg. */
2759 if (mode == BLKmode
2760 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2761 abort ();
2762
2763 /* On some machines, there's no way to pass a float to a library fcn.
2764 Pass it as a double instead. */
2765 #ifdef LIBGCC_NEEDS_DOUBLE
2766 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2767 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2768 #endif
2769
2770 /* There's no need to call protect_from_queue, because
2771 either emit_move_insn or emit_push_insn will do that. */
2772
2773 /* Make sure it is a reasonable operand for a move or push insn. */
2774 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2775 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2776 val = force_operand (val, NULL_RTX);
2777
2778 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2779 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2780 {
2781 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2782 be viewed as just an efficiency improvement. */
2783 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2784 emit_move_insn (slot, val);
2785 val = force_operand (XEXP (slot, 0), NULL_RTX);
2786 mode = Pmode;
2787 }
2788 #endif
2789
2790 argvec[count].value = val;
2791 argvec[count].mode = mode;
2792
2793 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2794 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2795 abort ();
2796 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2797 argvec[count].partial
2798 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2799 #else
2800 argvec[count].partial = 0;
2801 #endif
2802
2803 locate_and_pad_parm (mode, NULL_TREE,
2804 argvec[count].reg && argvec[count].partial == 0,
2805 NULL_TREE, &args_size, &argvec[count].offset,
2806 &argvec[count].size, &alignment_pad);
2807
2808 if (argvec[count].size.var)
2809 abort ();
2810
2811 if (reg_parm_stack_space == 0 && argvec[count].partial)
2812 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2813
2814 if (argvec[count].reg == 0 || argvec[count].partial != 0
2815 || reg_parm_stack_space > 0)
2816 args_size.constant += argvec[count].size.constant;
2817
2818 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2819 }
2820 va_end (p);
2821
2822 #ifdef FINAL_REG_PARM_STACK_SPACE
2823 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2824 args_size.var);
2825 #endif
2826
2827 /* If this machine requires an external definition for library
2828 functions, write one out. */
2829 assemble_external_libcall (fun);
2830
2831 original_args_size = args_size;
2832 #ifdef PREFERRED_STACK_BOUNDARY
2833 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2834 / STACK_BYTES) * STACK_BYTES);
2835 #endif
2836
2837 args_size.constant = MAX (args_size.constant,
2838 reg_parm_stack_space);
2839
2840 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2841 args_size.constant -= reg_parm_stack_space;
2842 #endif
2843
2844 if (args_size.constant > current_function_outgoing_args_size)
2845 current_function_outgoing_args_size = args_size.constant;
2846
2847 #ifdef ACCUMULATE_OUTGOING_ARGS
2848 /* Since the stack pointer will never be pushed, it is possible for
2849 the evaluation of a parm to clobber something we have already
2850 written to the stack. Since most function calls on RISC machines
2851 do not use the stack, this is uncommon, but must work correctly.
2852
2853 Therefore, we save any area of the stack that was already written
2854 and that we are using. Here we set up to do this by making a new
2855 stack usage map from the old one.
2856
2857 Another approach might be to try to reorder the argument
2858 evaluations to avoid this conflicting stack usage. */
2859
2860 needed = args_size.constant;
2861
2862 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2863 /* Since we will be writing into the entire argument area, the
2864 map must be allocated for its entire size, not just the part that
2865 is the responsibility of the caller. */
2866 needed += reg_parm_stack_space;
2867 #endif
2868
2869 #ifdef ARGS_GROW_DOWNWARD
2870 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2871 needed + 1);
2872 #else
2873 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2874 needed);
2875 #endif
2876 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2877
2878 if (initial_highest_arg_in_use)
2879 bcopy (initial_stack_usage_map, stack_usage_map,
2880 initial_highest_arg_in_use);
2881
2882 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2883 bzero (&stack_usage_map[initial_highest_arg_in_use],
2884 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2885 needed = 0;
2886
2887 /* The address of the outgoing argument list must not be copied to a
2888 register here, because argblock would be left pointing to the
2889 wrong place after the call to allocate_dynamic_stack_space below.
2890 */
2891
2892 argblock = virtual_outgoing_args_rtx;
2893 #else /* not ACCUMULATE_OUTGOING_ARGS */
2894 #ifndef PUSH_ROUNDING
2895 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2896 #endif
2897 #endif
2898
2899 #ifdef PUSH_ARGS_REVERSED
2900 #ifdef PREFERRED_STACK_BOUNDARY
2901 /* If we push args individually in reverse order, perform stack alignment
2902 before the first push (the last arg). */
2903 if (argblock == 0)
2904 anti_adjust_stack (GEN_INT (args_size.constant
2905 - original_args_size.constant));
2906 #endif
2907 #endif
2908
2909 #ifdef PUSH_ARGS_REVERSED
2910 inc = -1;
2911 argnum = nargs - 1;
2912 #else
2913 inc = 1;
2914 argnum = 0;
2915 #endif
2916
2917 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2918 /* The argument list is the property of the called routine and it
2919 may clobber it. If the fixed area has been used for previous
2920 parameters, we must save and restore it.
2921
2922 Here we compute the boundary of the that needs to be saved, if any. */
2923
2924 #ifdef ARGS_GROW_DOWNWARD
2925 for (count = 0; count < reg_parm_stack_space + 1; count++)
2926 #else
2927 for (count = 0; count < reg_parm_stack_space; count++)
2928 #endif
2929 {
2930 if (count >= highest_outgoing_arg_in_use
2931 || stack_usage_map[count] == 0)
2932 continue;
2933
2934 if (low_to_save == -1)
2935 low_to_save = count;
2936
2937 high_to_save = count;
2938 }
2939
2940 if (low_to_save >= 0)
2941 {
2942 int num_to_save = high_to_save - low_to_save + 1;
2943 enum machine_mode save_mode
2944 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2945 rtx stack_area;
2946
2947 /* If we don't have the required alignment, must do this in BLKmode. */
2948 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2949 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2950 save_mode = BLKmode;
2951
2952 #ifdef ARGS_GROW_DOWNWARD
2953 stack_area = gen_rtx_MEM (save_mode,
2954 memory_address (save_mode,
2955 plus_constant (argblock,
2956 - high_to_save)));
2957 #else
2958 stack_area = gen_rtx_MEM (save_mode,
2959 memory_address (save_mode,
2960 plus_constant (argblock,
2961 low_to_save)));
2962 #endif
2963 if (save_mode == BLKmode)
2964 {
2965 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2966 emit_block_move (validize_mem (save_area), stack_area,
2967 GEN_INT (num_to_save),
2968 PARM_BOUNDARY / BITS_PER_UNIT);
2969 }
2970 else
2971 {
2972 save_area = gen_reg_rtx (save_mode);
2973 emit_move_insn (save_area, stack_area);
2974 }
2975 }
2976 #endif
2977
2978 /* Push the args that need to be pushed. */
2979
2980 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2981 are to be pushed. */
2982 for (count = 0; count < nargs; count++, argnum += inc)
2983 {
2984 register enum machine_mode mode = argvec[argnum].mode;
2985 register rtx val = argvec[argnum].value;
2986 rtx reg = argvec[argnum].reg;
2987 int partial = argvec[argnum].partial;
2988 #ifdef ACCUMULATE_OUTGOING_ARGS
2989 int lower_bound, upper_bound, i;
2990 #endif
2991
2992 if (! (reg != 0 && partial == 0))
2993 {
2994 #ifdef ACCUMULATE_OUTGOING_ARGS
2995 /* If this is being stored into a pre-allocated, fixed-size, stack
2996 area, save any previous data at that location. */
2997
2998 #ifdef ARGS_GROW_DOWNWARD
2999 /* stack_slot is negative, but we want to index stack_usage_map
3000 with positive values. */
3001 upper_bound = -argvec[argnum].offset.constant + 1;
3002 lower_bound = upper_bound - argvec[argnum].size.constant;
3003 #else
3004 lower_bound = argvec[argnum].offset.constant;
3005 upper_bound = lower_bound + argvec[argnum].size.constant;
3006 #endif
3007
3008 for (i = lower_bound; i < upper_bound; i++)
3009 if (stack_usage_map[i]
3010 /* Don't store things in the fixed argument area at this point;
3011 it has already been saved. */
3012 && i > reg_parm_stack_space)
3013 break;
3014
3015 if (i != upper_bound)
3016 {
3017 /* We need to make a save area. See what mode we can make it. */
3018 enum machine_mode save_mode
3019 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3020 MODE_INT, 1);
3021 rtx stack_area
3022 = gen_rtx_MEM
3023 (save_mode,
3024 memory_address
3025 (save_mode,
3026 plus_constant (argblock,
3027 argvec[argnum].offset.constant)));
3028
3029 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3030 emit_move_insn (argvec[argnum].save_area, stack_area);
3031 }
3032 #endif
3033 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3034 argblock, GEN_INT (argvec[argnum].offset.constant),
3035 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3036
3037 #ifdef ACCUMULATE_OUTGOING_ARGS
3038 /* Now mark the segment we just used. */
3039 for (i = lower_bound; i < upper_bound; i++)
3040 stack_usage_map[i] = 1;
3041 #endif
3042
3043 NO_DEFER_POP;
3044 }
3045 }
3046
3047 #ifndef PUSH_ARGS_REVERSED
3048 #ifdef PREFERRED_STACK_BOUNDARY
3049 /* If we pushed args in forward order, perform stack alignment
3050 after pushing the last arg. */
3051 if (argblock == 0)
3052 anti_adjust_stack (GEN_INT (args_size.constant
3053 - original_args_size.constant));
3054 #endif
3055 #endif
3056
3057 #ifdef PUSH_ARGS_REVERSED
3058 argnum = nargs - 1;
3059 #else
3060 argnum = 0;
3061 #endif
3062
3063 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3064
3065 /* Now load any reg parms into their regs. */
3066
3067 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3068 are to be pushed. */
3069 for (count = 0; count < nargs; count++, argnum += inc)
3070 {
3071 register rtx val = argvec[argnum].value;
3072 rtx reg = argvec[argnum].reg;
3073 int partial = argvec[argnum].partial;
3074
3075 if (reg != 0 && partial == 0)
3076 emit_move_insn (reg, val);
3077 NO_DEFER_POP;
3078 }
3079
3080 /* For version 1.37, try deleting this entirely. */
3081 if (! no_queue)
3082 emit_queue ();
3083
3084 /* Any regs containing parms remain in use through the call. */
3085 for (count = 0; count < nargs; count++)
3086 if (argvec[count].reg != 0)
3087 use_reg (&call_fusage, argvec[count].reg);
3088
3089 /* Don't allow popping to be deferred, since then
3090 cse'ing of library calls could delete a call and leave the pop. */
3091 NO_DEFER_POP;
3092
3093 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3094 will set inhibit_defer_pop to that value. */
3095
3096 /* The return type is needed to decide how many bytes the function pops.
3097 Signedness plays no role in that, so for simplicity, we pretend it's
3098 always signed. We also assume that the list of arguments passed has
3099 no impact, so we pretend it is unknown. */
3100
3101 emit_call_1 (fun,
3102 get_identifier (XSTR (orgfun, 0)),
3103 build_function_type (outmode == VOIDmode ? void_type_node
3104 : type_for_mode (outmode, 0), NULL_TREE),
3105 original_args_size.constant, args_size.constant, 0,
3106 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3107 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
3108 old_inhibit_defer_pop + 1, call_fusage, no_queue);
3109
3110 pop_temp_slots ();
3111
3112 /* Now restore inhibit_defer_pop to its actual original value. */
3113 OK_DEFER_POP;
3114
3115 #ifdef ACCUMULATE_OUTGOING_ARGS
3116 #ifdef REG_PARM_STACK_SPACE
3117 if (save_area)
3118 {
3119 enum machine_mode save_mode = GET_MODE (save_area);
3120 #ifdef ARGS_GROW_DOWNWARD
3121 rtx stack_area
3122 = gen_rtx_MEM (save_mode,
3123 memory_address (save_mode,
3124 plus_constant (argblock,
3125 - high_to_save)));
3126 #else
3127 rtx stack_area
3128 = gen_rtx_MEM (save_mode,
3129 memory_address (save_mode,
3130 plus_constant (argblock, low_to_save)));
3131 #endif
3132
3133 if (save_mode != BLKmode)
3134 emit_move_insn (stack_area, save_area);
3135 else
3136 emit_block_move (stack_area, validize_mem (save_area),
3137 GEN_INT (high_to_save - low_to_save + 1),
3138 PARM_BOUNDARY / BITS_PER_UNIT);
3139 }
3140 #endif
3141
3142 /* If we saved any argument areas, restore them. */
3143 for (count = 0; count < nargs; count++)
3144 if (argvec[count].save_area)
3145 {
3146 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3147 rtx stack_area
3148 = gen_rtx_MEM (save_mode,
3149 memory_address
3150 (save_mode,
3151 plus_constant (argblock,
3152 argvec[count].offset.constant)));
3153
3154 emit_move_insn (stack_area, argvec[count].save_area);
3155 }
3156
3157 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3158 stack_usage_map = initial_stack_usage_map;
3159 #endif
3160 }
3161 \f
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */
3169
rtx
emit_library_call_value VPARAMS((rtx orgfun, rtx value, int no_queue,
                                 enum machine_mode outmode, int nargs, ...))
{
#ifndef ANSI_PROTOTYPES
  /* With K&R varargs, the named parameters are re-declared here and
     fetched from the va_list below.  */
  rtx orgfun;
  rtx value;
  int no_queue;
  enum machine_mode outmode;
  int nargs;
#endif
  va_list p;
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  register int argnum;
  rtx fun;
  int inc;
  int count;
  struct args_size alignment_pad;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  /* Per-argument record: the value, how it is passed (register and/or
     stack slot), and any save area used to preserve previously-written
     stack contents.  */
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
               struct args_size offset; struct args_size size; rtx save_area; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  /* Nonzero means the value is returned in memory at MEM_VALUE.  */
  rtx mem_value = 0;
  int pcc_struct_value = 0;
  int struct_value_size = 0;
  /* Treat a no_queue libcall as const; cleared below if the call
     returns its value in memory.  */
  int is_const;
  int reg_parm_stack_space = 0;
#ifdef ACCUMULATE_OUTGOING_ARGS
  int needed;
#endif

#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = -1, high_to_save = 0;
  rtx save_area = 0;            /* Place that it is saved */
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Snapshot the outgoing-argument bookkeeping so it can be restored
     after the call (see the end of this function).  */
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
#endif

#ifdef REG_PARM_STACK_SPACE
  /* Size of the stack reserved for parameter registers.  */
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif
#endif

  VA_START (p, nargs);

#ifndef ANSI_PROTOTYPES
  orgfun = va_arg (p, rtx);
  value = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);
#endif

  is_const = no_queue;
  fun = orgfun;

#ifdef PREFERRED_STACK_BOUNDARY
  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
#endif

  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (aggregate_value_p (type_for_mode (outmode, 0)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      /* PCC convention: the callee returns the address of a static
         area holding the value; read the result through it.  */
      rtx pointer_reg
        = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
                               0, 0);
      mem_value = gen_rtx_MEM (outmode, pointer_reg);
      pcc_struct_value = 1;
      if (value == 0)
        value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
      struct_value_size = GET_MODE_SIZE (outmode);
      /* If the caller supplied a MEM for the result, return straight
         into it; otherwise make a stack temporary.  */
      if (value != 0 && GET_CODE (value) == MEM)
        mem_value = value;
      else
        mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
#endif

      /* This call returns a big structure.  */
      is_const = 0;
    }

  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  /* One extra slot in case the structure-return address must be passed
     as a hidden extra argument (see NARGS++ below).  */
  argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
  bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  push_temp_slots ();

  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);
      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
          && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
        addr = force_operand (addr, NULL_RTX);

      /* The address is passed exactly like an ordinary Pmode argument,
         occupying ARGVEC slot 0.  */
      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      /* A partially-in-registers pointer argument is not supported here.  */
      if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
        abort ();
#endif

      locate_and_pad_parm (Pmode, NULL_TREE,
                           argvec[count].reg && argvec[count].partial == 0,
                           NULL_TREE, &args_size, &argvec[count].offset,
                           &argvec[count].size, &alignment_pad);

      /* Stack space is consumed unless the argument goes wholly in a
         register and no register-parm stack space is reserved.  */
      if (argvec[count].reg == 0 || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        args_size.constant += argvec[count].size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);

      count++;
    }

  for (; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
        abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
         Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
        val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
#endif

      /* There's no need to call protect_from_queue, because
         either emit_move_insn or emit_push_insn will do that.  */

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
        val = force_operand (val, NULL_RTX);

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
        {
          /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
             be viewed as just an efficiency improvement.  */
          rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
          emit_move_insn (slot, val);
          val = XEXP (slot, 0);
          mode = Pmode;
        }
#endif

      argvec[count].value = val;
      argvec[count].mode = mode;

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
      /* Arguments split across multiple register pieces (PARALLEL)
         are not supported for libcalls.  */
      if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
        abort ();
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
        = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, NULL_TREE,
                           argvec[count].reg && argvec[count].partial == 0,
                           NULL_TREE, &args_size, &argvec[count].offset,
                           &argvec[count].size, &alignment_pad);

      /* Libcall arguments are always fixed-size.  */
      if (argvec[count].size.var)
        abort ();

      /* Exclude the part passed in registers from the stack size when
         no stack space is reserved for register parms.  */
      if (reg_parm_stack_space == 0 && argvec[count].partial)
        argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;

      if (argvec[count].reg == 0 || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        args_size.constant += argvec[count].size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
    }
  va_end (p);

#ifdef FINAL_REG_PARM_STACK_SPACE
  reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
                                                     args_size.var);
#endif
  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
#ifdef PREFERRED_STACK_BOUNDARY
  /* Round the total argument size up to the stack boundary.  */
  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
                         / STACK_BYTES) * STACK_BYTES);
#endif

  args_size.constant = MAX (args_size.constant,
                            reg_parm_stack_space);

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  args_size.constant -= reg_parm_stack_space;
#endif

  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;

#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Since the stack pointer will never be pushed, it is possible for
     the evaluation of a parm to clobber something we have already
     written to the stack.  Since most function calls on RISC machines
     do not use the stack, this is uncommon, but must work correctly.

     Therefore, we save any area of the stack that was already written
     and that we are using.  Here we set up to do this by making a new
     stack usage map from the old one.

     Another approach might be to try to reorder the argument
     evaluations to avoid this conflicting stack usage.  */

  needed = args_size.constant;

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  /* Since we will be writing into the entire argument area, the
     map must be allocated for its entire size, not just the part that
     is the responsibility of the caller.  */
  needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                     needed + 1);
#else
  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                     needed);
#endif
  stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);

  /* Start from a copy of the caller's map, zeroing any newly covered
     portion.  */
  if (initial_highest_arg_in_use)
    bcopy (initial_stack_usage_map, stack_usage_map,
           initial_highest_arg_in_use);

  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
    bzero (&stack_usage_map[initial_highest_arg_in_use],
           highest_outgoing_arg_in_use - initial_highest_arg_in_use);
  needed = 0;

  /* The address of the outgoing argument list must not be copied to a
     register here, because argblock would be left pointing to the
     wrong place after the call to allocate_dynamic_stack_space below.
     */

  argblock = virtual_outgoing_args_rtx;
#else /* not ACCUMULATE_OUTGOING_ARGS */
#ifndef PUSH_ROUNDING
  argblock = push_block (GEN_INT (args_size.constant), 0, 0);
#endif
#endif

#ifdef PUSH_ARGS_REVERSED
#ifdef PREFERRED_STACK_BOUNDARY
  /* If we push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));
#endif
#endif

#ifdef PUSH_ARGS_REVERSED
  inc = -1;
  argnum = nargs - 1;
#else
  inc = 1;
  argnum = 0;
#endif

#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.

     Here we compute the boundary of the area that needs to be saved, if
     any.  */

#ifdef ARGS_GROW_DOWNWARD
  for (count = 0; count < reg_parm_stack_space + 1; count++)
#else
  for (count = 0; count < reg_parm_stack_space; count++)
#endif
    {
      if (count >= highest_outgoing_arg_in_use
          || stack_usage_map[count] == 0)
        continue;

      if (low_to_save == -1)
        low_to_save = count;

      high_to_save = count;
    }

  if (low_to_save >= 0)
    {
      int num_to_save = high_to_save - low_to_save + 1;
      enum machine_mode save_mode
        = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
      rtx stack_area;

      /* If we don't have the required alignment, must do this in BLKmode.  */
      if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
                               BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
        save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
      stack_area = gen_rtx_MEM (save_mode,
                                memory_address (save_mode,
                                                plus_constant (argblock,
                                                               - high_to_save)));
#else
      stack_area = gen_rtx_MEM (save_mode,
                                memory_address (save_mode,
                                                plus_constant (argblock,
                                                               low_to_save)));
#endif
      if (save_mode == BLKmode)
        {
          /* Too big or misaligned for a register: save into a stack
             temporary with a block move.  */
          save_area = assign_stack_temp (BLKmode, num_to_save, 0);
          emit_block_move (validize_mem (save_area), stack_area,
                           GEN_INT (num_to_save),
                           PARM_BOUNDARY / BITS_PER_UNIT);
        }
      else
        {
          save_area = gen_reg_rtx (save_mode);
          emit_move_insn (save_area, stack_area);
        }
    }
#endif

  /* Push the args that need to be pushed.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
#ifdef ACCUMULATE_OUTGOING_ARGS
      int lower_bound, upper_bound, i;
#endif

      /* Arguments passed entirely in a register are loaded later;
         everything else goes (at least partly) onto the stack here.  */
      if (! (reg != 0 && partial == 0))
        {
#ifdef ACCUMULATE_OUTGOING_ARGS
          /* If this is being stored into a pre-allocated, fixed-size, stack
             area, save any previous data at that location.  */

#ifdef ARGS_GROW_DOWNWARD
          /* stack_slot is negative, but we want to index stack_usage_map
             with positive values.  */
          upper_bound = -argvec[argnum].offset.constant + 1;
          lower_bound = upper_bound - argvec[argnum].size.constant;
#else
          lower_bound = argvec[argnum].offset.constant;
          upper_bound = lower_bound + argvec[argnum].size.constant;
#endif

          for (i = lower_bound; i < upper_bound; i++)
            if (stack_usage_map[i]
                /* Don't store things in the fixed argument area at this point;
                   it has already been saved.  */
                && i > reg_parm_stack_space)
              break;

          if (i != upper_bound)
            {
              /* We need to make a save area.  See what mode we can make it. */
              enum machine_mode save_mode
                = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
                                 MODE_INT, 1);
              rtx stack_area
                = gen_rtx_MEM
                  (save_mode,
                   memory_address
                   (save_mode,
                    plus_constant (argblock,
                                   argvec[argnum].offset.constant)));
              argvec[argnum].save_area = gen_reg_rtx (save_mode);

              emit_move_insn (argvec[argnum].save_area, stack_area);
            }
#endif
          emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
                          argblock, GEN_INT (argvec[argnum].offset.constant),
                          reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));

#ifdef ACCUMULATE_OUTGOING_ARGS
          /* Now mark the segment we just used.  */
          for (i = lower_bound; i < upper_bound; i++)
            stack_usage_map[i] = 1;
#endif

          NO_DEFER_POP;
        }
    }

#ifndef PUSH_ARGS_REVERSED
#ifdef PREFERRED_STACK_BOUNDARY
  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));
#endif
#endif

#ifdef PUSH_ARGS_REVERSED
  argnum = nargs - 1;
#else
  argnum = 0;
#endif

  fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);

  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);
      NO_DEFER_POP;
    }

#if 0
  /* For version 1.37, try deleting this entirely.  */
  if (! no_queue)
    emit_queue ();
#endif

  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    if (argvec[count].reg != 0)
      use_reg (&call_fusage, argvec[count].reg);

  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value_rtx,
                      force_reg (Pmode,
                                 force_operand (XEXP (mem_value, 0),
                                                NULL_RTX)));
      if (GET_CODE (struct_value_rtx) == REG)
        use_reg (&call_fusage, struct_value_rtx);
    }

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* See the comment in emit_library_call about the function type we build
     and pass here.  */

  emit_call_1 (fun,
               get_identifier (XSTR (orgfun, 0)),
               build_function_type (type_for_mode (outmode, 0), NULL_TREE),
               original_args_size.constant, args_size.constant,
               struct_value_size,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
               old_inhibit_defer_pop + 1, call_fusage, is_const);

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  pop_temp_slots ();

  /* Copy the value to the right place.  */
  if (outmode != VOIDmode)
    {
      if (mem_value)
        {
          if (value == 0)
            value = mem_value;
          if (value != mem_value)
            emit_move_insn (value, mem_value);
        }
      else if (value != 0)
        emit_move_insn (value, hard_libcall_value (outmode));
      else
        value = hard_libcall_value (outmode);
    }

#ifdef ACCUMULATE_OUTGOING_ARGS
#ifdef REG_PARM_STACK_SPACE
  /* Restore the fixed register-parameter area saved above, if any.  */
  if (save_area)
    {
      enum machine_mode save_mode = GET_MODE (save_area);
#ifdef ARGS_GROW_DOWNWARD
      rtx stack_area
        = gen_rtx_MEM (save_mode,
                       memory_address (save_mode,
                                       plus_constant (argblock,
                                                      - high_to_save)));
#else
      rtx stack_area
        = gen_rtx_MEM (save_mode,
                       memory_address (save_mode,
                                       plus_constant (argblock, low_to_save)));
#endif
      if (save_mode != BLKmode)
        emit_move_insn (stack_area, save_area);
      else
        emit_block_move (stack_area, validize_mem (save_area),
                         GEN_INT (high_to_save - low_to_save + 1),
                         PARM_BOUNDARY / BITS_PER_UNIT);
    }
#endif

  /* If we saved any argument areas, restore them.  */
  for (count = 0; count < nargs; count++)
    if (argvec[count].save_area)
      {
        enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
        rtx stack_area
          = gen_rtx_MEM (save_mode,
                         memory_address
                         (save_mode,
                          plus_constant (argblock,
                                         argvec[count].offset.constant)));

        emit_move_insn (stack_area, argvec[count].save_area);
      }

  /* Put the caller's stack-usage bookkeeping back the way we found it.  */
  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
  stack_usage_map = initial_stack_usage_map;
#endif

  return value;
}
3765 \f
#if 0
/* Return an rtx which represents a suitable home on the stack
   given TYPE, the type of the argument looking for a home.
   This is called only for BLKmode arguments.

   SIZE is the size needed for this target.
   ARGS_ADDR is the address of the bottom of the argument block for this call.
   OFFSET describes this parameter's offset into ARGS_ADDR.  It is meaningless
   if this machine uses push insns.  */

static rtx
target_for_arg (type, size, args_addr, offset)
     tree type;
     rtx size;
     rtx args_addr;
     struct args_size offset;
{
  rtx offset_rtx = ARGS_SIZE_RTX (offset);
  rtx addr;

  /* When the offset is a compile-time constant, build a raw PLUS
     instead of going through memory_address, so that we address as
     close to the stack as possible -- for non-variable sized
     arguments this yields stack-pointer relative addressing.  */
  if (GET_CODE (offset_rtx) == CONST_INT)
    addr = plus_constant (args_addr, INTVAL (offset_rtx));
  else
    {
      /* I have no idea how to guarantee that this
	 will work in the presence of register parameters.  */
      addr = memory_address (QImode,
			     gen_rtx_PLUS (Pmode, args_addr, offset_rtx));
    }

  return gen_rtx_MEM (BLKmode, addr);
}
#endif
3803 \f
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
   so must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   REG_PARM_STACK_SPACE is the size of the fixed argument area reserved
   for arguments passed in registers; stack slots inside that area have
   already been saved by our caller and must not be saved again here
   (see the stack_usage_map scan below).  */

static void
store_one_arg (arg, argblock, may_be_alloca, variable_size,
	       reg_parm_stack_space)
     struct arg_data *arg;
     rtx argblock;
     int may_be_alloca;
     int variable_size ATTRIBUTE_UNUSED;
     int reg_parm_stack_space;
{
  register tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Byte offsets into stack_usage_map bounding the stack slots this
     argument will occupy.  */
  int i, lower_bound = 0, upper_bound = 0;
#endif

  /* An erroneous argument has already been diagnosed; emit nothing.  */
  if (TREE_CODE (pval) == ERROR_MARK)
    return;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

#ifdef ACCUMULATE_OUTGOING_ARGS
  /* If this is being stored into a pre-allocated, fixed-size, stack area,
     save any previous data at that location.  */
  if (argblock && ! variable_size && arg->stack)
    {
#ifdef ARGS_GROW_DOWNWARD
      /* stack_slot is negative, but we want to index stack_usage_map
         with positive values.  */
      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
      else
	upper_bound = 0;

      lower_bound = upper_bound - arg->size.constant;
#else
      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
      else
	lower_bound = 0;

      upper_bound = lower_bound + arg->size.constant;
#endif

      /* Look for any slot in our range that is already in use; only
	 then do we need to make a save area.  */
      for (i = lower_bound; i < upper_bound; i++)
	if (stack_usage_map[i]
	    /* Don't store things in the fixed argument area at this point;
	       it has already been saved.  */
	    && i > reg_parm_stack_space)
	  break;

      if (i != upper_bound)
	{
	  /* We need to make a save area.  See what mode we can make it.  */
	  enum machine_mode save_mode
	    = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
	  rtx stack_area
	    = gen_rtx_MEM (save_mode,
			   memory_address (save_mode,
					   XEXP (arg->stack_slot, 0)));

	  if (save_mode == BLKmode)
	    {
	      /* No integer mode is wide enough; save into a stack
		 temporary with a block move.  */
	      arg->save_area = assign_stack_temp (BLKmode,
						  arg->size.constant, 0);
	      MEM_SET_IN_STRUCT_P (arg->save_area,
				   AGGREGATE_TYPE_P (TREE_TYPE
						     (arg->tree_value)));
	      preserve_temp_slots (arg->save_area);
	      emit_block_move (validize_mem (arg->save_area), stack_area,
			       GEN_INT (arg->size.constant),
			       PARM_BOUNDARY / BITS_PER_UNIT);
	    }
	  else
	    {
	      /* The whole area fits in one integer mode; a pseudo
		 register suffices as the save area.  */
	      arg->save_area = gen_reg_rtx (save_mode);
	      emit_move_insn (arg->save_area, stack_area);
	    }
	}
    }

  /* Now that we have saved any slots that will be overwritten by this
     store, mark all slots this store will use.  We must do this before
     we actually expand the argument since the expansion itself may
     trigger library calls which might need to use the same stack slot.  */
  if (argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;
#endif

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    reg = arg->reg, partial = arg->partial;

  if (reg != 0 && partial == 0)
    /* Being passed entirely in a register.  We shouldn't be called in
       this case.   */
    abort ();

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
#ifdef ACCUMULATE_OUTGOING_ARGS
      /* stack_arg_under_construction is nonzero if a function argument is
	 being evaluated directly into the outgoing argument list and
	 expand_call must take special action to preserve the argument list
	 if it is called recursively.

	 For scalar function arguments stack_usage_map is sufficient to
	 determine which stack slots must be saved and restored.  Scalar
	 arguments in general have pass_on_stack == 0.

	 If this argument is initialized by a function which takes the
	 address of the argument (a C++ constructor or a C function
	 returning a BLKmode structure), then stack_usage_map is
	 insufficient and expand_call must push the stack around the
	 function call.  Such arguments have pass_on_stack == 1.

	 Note that it is always safe to set stack_arg_under_construction,
	 but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
	stack_arg_under_construction++;
#endif
      /* Expand directly into the stack slot only if no mode conversion
	 will be needed afterward; otherwise let expand_expr pick any
	 convenient target (NULL_RTX).  */
      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, 0);

      /* If we are promoting object (or for any other reason) the mode
	 doesn't agree, convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				    arg->value, arg->unsignedp);

#ifdef ACCUMULATE_OUTGOING_ARGS
      if (arg->pass_on_stack)
	stack_arg_under_construction--;
#endif
    }

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (may_be_alloca)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    {
      /* If the value is already in the stack slot, we are done.  */
      if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
	{
	  /* -fcheck-memory-usage instrumentation: record that the
	     slot's bytes have been written.  */
	  emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
			     XEXP (arg->stack, 0), Pmode,
			     ARGS_SIZE_RTX (arg->size),
			     TYPE_MODE (sizetype),
			     GEN_INT (MEMORY_USE_RW),
			     TYPE_MODE (integer_type_node));
	}
    }
  else if (arg->mode != BLKmode)
    {
      register int size;

      /* Argument is a scalar, not entirely passed in registers.
	 (If part is passed in registers, arg->partial says how much
	 and emit_push_insn will take care of putting it there.)

	 Push it, and if its size is less than the
	 amount of space allocated to it,
	 also bump stack pointer by the additional space.
	 Note that in C the default argument promotions
	 will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
	 On many machines, pushing a byte will advance the stack
	 pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
	used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
		 / (PARM_BOUNDARY / BITS_PER_UNIT))
		* (PARM_BOUNDARY / BITS_PER_UNIT));

      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
		      partial, reg, used - size, argblock,
		      ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->alignment_pad));

    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      register int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->size.var != 0)
	{
	  /* Variable-sized argument: the size is only known as an rtx.  */
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because
	     emit_push_insn for BLKmode is careful to avoid it.  */
	  excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
		    + partial * UNITS_PER_WORD);
	  size_rtx = expr_size (pval);
	}

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
		      TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
		      reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
		      reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->alignment_pad));
    }


  /* Unless this is a partially-in-register argument, the argument is now
     in the stack.

     ??? Note that this can change arg->value from arg->stack to
     arg->stack_slot and it matters when they are not the same.
     It isn't totally clear that this is correct in all cases.  */
  if (partial == 0)
    arg->value = arg->stack_slot;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* ANSI doesn't require a sequence point here,
     but PCC has one, so this will avoid some problems.  */
  emit_queue ();

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();
}
This page took 0.225122 seconds and 5 git commands to generate.