/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "stringpool.h"
#include "attribs.h"
#include "internal-fn.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "diagnostic-core.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
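
/* Worked example (illustrative; both macros are target-defined): with
   PREFERRED_STACK_BOUNDARY == 128 and BITS_PER_UNIT == 8, STACK_BYTES
   is 128 / 8 == 16, i.e. outgoing argument block sizes are rounded up
   to multiples of 16 bytes.  */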

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If value is passed in neither reg nor stack, this field holds the number
     of a special slot to be used.  */
  rtx special_slot;
  /* For pointer bounds, holds the index of the parm the bounds are bound to.
     -1 if there is no such pointer.  */
  int pointer_arg;
  /* If pointer_arg refers to a structure, then pointer_offset holds the
     offset of the pointer within that structure.  */
  int pointer_offset;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
			 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
			 cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static void store_bounds (struct arg_data *, struct arg_data *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
					     struct args_size *, int,
					     tree, tree,
					     tree, tree, cumulative_args_t, int,
					     rtx *, int *, int *, int *,
					     bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
				      machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
						      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((reg_parm_seen
	       && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else if (flag_pic
	   && fndecl_or_type
	   && TREE_CODE (fndecl_or_type) == FUNCTION_DECL
	   && (!flag_plt
	       || lookup_attribute ("noplt", DECL_ATTRIBUTES (fndecl_or_type)))
	   && !targetm.binds_local_p (fndecl_or_type))
    {
      /* This is done only for PIC code.  There is no easy interface to
	 force the function address into the GOT for the non-PIC case;
	 that case must be handled specially by the backend.  */
      funexp = force_reg (Pmode, funexp);
    }
  else if (! sibcallp)
    {
      if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
	funexp = force_reg (Pmode, funexp);
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
	  || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	use_reg (call_fusage, chain);
    }

  return funexp;
}

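/* Illustrative sketch of a typical caller (assumed shape; the real call
   sites live in expand_call and emit_library_call_value_1):

     rtx fusage = NULL_RTX;
     funexp = prepare_call_address (fndecl, XEXP (DECL_RTL (fndecl), 0),
				    static_chain_value, &fusage,
				    reg_parm_seen, flags & ECF_SIBCALL);

   Registers loaded here (e.g. the static chain) are recorded in FUSAGE
   so the call insn is known to use them.  */
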
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT rounded_stack_size,
	     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call, funmem;
  int already_popped = 0;
  HOST_WIDE_INT n_popped
    = targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations,
	 if an entry gets garbage collected in one compilation, then
	 adds a different (but equivalent) entry, while the other
	 doesn't run the garbage collector at the same spot and then
	 shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

	set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_SIBCALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
				     next_arg_reg, n_pop);
      else
	pat = GEN_SIBCALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
			       n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_CALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
				  next_arg_reg, n_pop);
      else
	pat = GEN_CALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
			    n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
	emit_call_insn (GEN_SIBCALL_VALUE (valreg, funmem,
					   rounded_stack_size_rtx,
					   next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_SIBCALL (funmem, rounded_stack_size_rtx,
				     next_arg_reg,
				     GEN_INT (struct_value_size)));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (GEN_CALL_VALUE (valreg, funmem, rounded_stack_size_rtx,
					next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_CALL (funmem, rounded_stack_size_rtx, next_arg_reg,
				  GEN_INT (struct_value_size)));
    }
  else
#endif
    gcc_unreachable ();

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Mark instrumented calls.  */
  if (call && fntree)
    CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     corresponding bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On  i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}

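/* Illustrative example (i386-specific assumption, not from this file):
   a 32-bit x86 stdcall callee pops its own arguments, so for

     int __attribute__ ((stdcall)) f (int a, int b);

   targetm.calls.return_pops_args returns 8, and the code above shrinks
   ROUNDED_STACK_SIZE and stack_pointer_delta by those 8 bytes instead
   of emitting a caller-side stack adjustment.  */
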
/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  /* For instrumentation clones we want to derive flags
     from the original name.  */
  if (cgraph_node::get (fndecl)
      && cgraph_node::get (fndecl)->instrumentation_clone)
    name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (name_decl) == 6
	    && name[0] == 'a'
	    && ! strcmp (name, "alloca"))
	   || (IDENTIFIER_LENGTH (name_decl) == 16
	       && name[0] == '_'
	       && ! strcmp (name, "__builtin_alloca"))))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __, __x or __builtin_.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_'
	      && name[2] == 'b'
	      && !strncmp (name + 3, "uiltin_", 7))
	    tname += 10;
	  else if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  if ((tname[1] == 'e'
	       && (! strcmp (tname, "setjmp")
		   || ! strcmp (tname, "setjmp_syscall")))
	      || (tname[1] == 'i'
		  && ! strcmp (tname, "sigsetjmp"))
	      || (tname[1] == 'a'
		  && ! strcmp (tname, "savectx")))
	    flags |= ECF_RETURNS_TWICE | ECF_LEAF;

	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    flags |= ECF_NORETURN;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork"))
	       || (tname[0] == 'g' && tname[1] == 'e'
		   && !strcmp (tname, "getcontext")))
	flags |= ECF_RETURNS_TWICE | ECF_LEAF;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	flags |= ECF_NORETURN;
    }

  return flags;
}

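/* Illustrative examples (these follow directly from the matching above):

     extern int setjmp (jmp_buf);	    -> ECF_RETURNS_TWICE | ECF_LEAF
     extern pid_t vfork (void);		    -> ECF_RETURNS_TWICE | ECF_LEAF
     extern void longjmp (jmp_buf, int);    -> ECF_NORETURN
     extern void *alloca (size_t);	    -> ECF_MAY_BE_ALLOCA

   A block-scope or static function of the same name is deliberately
   skipped by the file-scope and TREE_PUBLIC checks above.  */
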
/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}

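/* Illustrative mappings (the "fn spec" strings are internal annotations
   attached to certain builtins; these particular cases are implied by
   the switch above):

     first char '1'..'4' -> ERF_RETURNS_ARG | (c - '1')
			    (the function returns its Nth argument,
			     e.g. a memcpy-like function returning arg 1)
     first char 'm'	 -> ERF_NOALIAS (malloc-like result)
     first char '.'	 -> 0 (nothing is known about the return value)  */
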
/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT is an alloca call.  */

bool
gimple_alloca_call_p (const_gimple stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true when EXP is an alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}

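/* Illustrative examples (assumed declarations, not from this file):

     extern int f (int) __attribute__ ((const, nothrow));
	-> ECF_CONST | ECF_NOTHROW

     extern void g (void) __attribute__ ((const, noreturn));
	-> ECF_CONST | ECF_NORETURN | ECF_LOOPING_CONST_OR_PURE
	   (`noreturn' is encoded as TREE_THIS_VOLATILE, and a noreturn
	   const function must be assumed to loop forever).  */
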
/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
	flags = 0;
    }

  return flags;
}

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
					  type, named_arg);
}

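/* Illustrative cases (the final say belongs to the target hook):

     - A C++ class with a nontrivial copy constructor or destructor is
       TREE_ADDRESSABLE, so it is always passed by reference.
     - Any variable-sized type (TYPE_SIZE not an INTEGER_CST) is passed
       by reference.
     - Everything else is decided by targetm.calls.pass_by_reference,
       e.g. large aggregates on many RISC targets.  */
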
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
				      named_arg);
}


/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && (set_src_cost (args[i].value, args[i].mode,
				   optimize_insn_for_speed_p ())
		     > COSTS_N_INSNS (1))
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

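/* Illustrative example (assumed source, not from this file): for

     extern __thread int t;
     extern void g (int *);
     ... g (&t); ...

   the address of T is a TLS SYMBOL_REF, which is CONSTANT_P but not a
   legitimate constant on most targets, so the code above forces it into
   a pseudo before it is moved into the argument register.  */
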
#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
	int num_to_save;
	machine_mode save_mode;
	int delta;
	rtx addr;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;
	save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	if ((low & (MIN (GET_MODE_SIZE (save_mode),
			 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	  save_mode = BLKmode;

	if (ARGS_GROW_DOWNWARD)
	  delta = -high;
	else
	  delta = low;

	addr = plus_constant (Pmode, argblock, delta);
	stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

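/* Illustrative note (target-specific assumption, not derived from this
   file): REG_PARM_STACK_SPACE is nonzero on ABIs that reserve stack
   slots even for register-passed arguments -- e.g. the 16-byte argument
   save area of the MIPS o32 ABI or the 32-byte "home area" of the
   x86-64 Microsoft ABI.  A nested call made while those slots hold
   earlier parameters must save and restore them with the two helpers
   above.  */
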
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& GET_CODE (args[i].reg) != PARALLEL
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== downward)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
				      word_mode, word_mode);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word);
	  }
      }
}

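/* Illustrative example (assumed packed struct on a big-endian target):

     struct __attribute__ ((packed)) s { char c[3]; };

   A 3-byte BLKmode argument headed for a register is only byte-aligned,
   so it is loaded here through a word-sized pseudo; on a
   BYTES_BIG_ENDIAN machine ENDIAN_CORRECTION skips the
   BITS_PER_WORD - 24 empty high-order bits so the bytes land at the
   proper end of the word.  */
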
d7cdf113 | 1151 | /* Fill in ARGS_SIZE and ARGS array based on the parameters found in |
b8698a0f | 1152 | CALL_EXPR EXP. |
d7cdf113 JL |
1153 | |
1154 | NUM_ACTUALS is the total number of parameters. | |
1155 | ||
1156 | N_NAMED_ARGS is the total number of named arguments. | |
1157 | ||
078a18a4 SL |
1158 | STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return |
1159 | value, or null. | |
1160 | ||
d7cdf113 JL |
1161 | FNDECL is the tree code for the target of this call (if known) |
1162 | ||
1163 | ARGS_SO_FAR holds state needed by the target to know where to place | |
1164 | the next argument. | |
1165 | ||
1166 | REG_PARM_STACK_SPACE is the number of bytes of stack space reserved | |
1167 | for arguments which are passed in registers. | |
1168 | ||
1169 | OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level | |
1170 | and may be modified by this routine. | |
1171 | ||
f2d33f13 | 1172 | OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer |
026c3cfd | 1173 | flags which may be modified by this routine. |
dd292d0a | 1174 | |
6de9cd9a DN |
1175 | MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference |
1176 | that requires allocation of stack space. | |
1177 | ||
dd292d0a MM |
1178 | CALL_FROM_THUNK_P is true if this call is the jump from a thunk to |
1179 | the thunked-to function. */ | |
d7cdf113 JL |
1180 | |
1181 | static void | |
d329e058 AJ |
1182 | initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED, |
1183 | struct arg_data *args, | |
1184 | struct args_size *args_size, | |
1185 | int n_named_args ATTRIBUTE_UNUSED, | |
078a18a4 | 1186 | tree exp, tree struct_value_addr_value, |
45769134 | 1187 | tree fndecl, tree fntype, |
d5cc9181 | 1188 | cumulative_args_t args_so_far, |
d329e058 AJ |
1189 | int reg_parm_stack_space, |
1190 | rtx *old_stack_level, int *old_pending_adj, | |
dd292d0a | 1191 | int *must_preallocate, int *ecf_flags, |
6de9cd9a | 1192 | bool *may_tailcall, bool call_from_thunk_p) |
d7cdf113 | 1193 | { |
d5cc9181 | 1194 | CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far); |
db3927fb | 1195 | location_t loc = EXPR_LOCATION (exp); |
d7cdf113 JL |
1196 | |
1197 | /* Count arg position in order args appear. */ | |
1198 | int argpos; | |
1199 | ||
1200 | int i; | |
f725a3ec | 1201 | |
d7cdf113 JL |
1202 | args_size->constant = 0; |
1203 | args_size->var = 0; | |
1204 | ||
d5e254e1 IE |
1205 | bitmap_obstack_initialize (NULL); |
1206 | ||
d7cdf113 | 1207 | /* In this loop, we consider args in the order they are written. |
3d9684ae | 1208 | We fill up ARGS from the back. */ |
d7cdf113 | 1209 | |
3d9684ae | 1210 | i = num_actuals - 1; |
078a18a4 | 1211 | { |
d5e254e1 | 1212 | int j = i, ptr_arg = -1; |
078a18a4 SL |
1213 | call_expr_arg_iterator iter; |
1214 | tree arg; | |
d5e254e1 | 1215 | bitmap slots = NULL; |
078a18a4 SL |
1216 | |
1217 | if (struct_value_addr_value) | |
1218 | { | |
1219 | args[j].tree_value = struct_value_addr_value; | |
3d9684ae | 1220 | j--; |
d5e254e1 IE |
1221 | |
1222 | /* If we pass structure address then we need to | |
1223 | create bounds for it. Since created bounds is | |
1224 | a call statement, we expand it right here to avoid | |
1225 | fixing all other places where it may be expanded. */ | |
1226 | if (CALL_WITH_BOUNDS_P (exp)) | |
1227 | { | |
1228 | args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ()); | |
1229 | args[j].tree_value | |
1230 | = chkp_make_bounds_for_struct_addr (struct_value_addr_value); | |
1231 | expand_expr_real (args[j].tree_value, args[j].value, VOIDmode, | |
1232 | EXPAND_NORMAL, 0, false); | |
1233 | args[j].pointer_arg = j + 1; | |
1234 | j--; | |
1235 | } | |
078a18a4 SL |
1236 | } |
1237 | FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) | |
1238 | { | |
1239 | tree argtype = TREE_TYPE (arg); | |
d5e254e1 IE |
1240 | |
1241 | /* Remember last param with pointer and associate it | |
1242 | with following pointer bounds. */ | |
1243 | if (CALL_WITH_BOUNDS_P (exp) | |
1244 | && chkp_type_has_pointer (argtype)) | |
1245 | { | |
1246 | if (slots) | |
1247 | BITMAP_FREE (slots); | |
1248 | ptr_arg = j; | |
1249 | if (!BOUNDED_TYPE_P (argtype)) | |
1250 | { | |
1251 | slots = BITMAP_ALLOC (NULL); | |
1252 | chkp_find_bound_slots (argtype, slots); | |
1253 | } | |
1254 | } | |
1255 | else if (POINTER_BOUNDS_TYPE_P (argtype)) | |
1256 | { | |
1257 | /* We expect bounds in instrumented calls only. | |
1258 | Otherwise it is a sign we lost flag due to some optimization | |
1259 | and may emit call args incorrectly. */ | |
1260 | gcc_assert (CALL_WITH_BOUNDS_P (exp)); | |
1261 | ||
1262 | /* For structures look for the next available pointer. */ | |
1263 | if (ptr_arg != -1 && slots) | |
1264 | { | |
1265 | unsigned bnd_no = bitmap_first_set_bit (slots); | |
1266 | args[j].pointer_offset = | |
1267 | bnd_no * POINTER_SIZE / BITS_PER_UNIT; | |
1268 | ||
1269 | bitmap_clear_bit (slots, bnd_no); | |
1270 | ||
1271 | /* Check we have no more pointers in the structure. */ | |
1272 | if (bitmap_empty_p (slots)) | |
1273 | BITMAP_FREE (slots); | |
1274 | } | |
1275 | args[j].pointer_arg = ptr_arg; | |
1276 | ||
1277 | /* Check we covered all pointers in the previous | |
1278 | non bounds arg. */ | |
1279 | if (!slots) | |
1280 | ptr_arg = -1; | |
1281 | } | |
1282 | else | |
1283 | ptr_arg = -1; | |
1284 | ||
078a18a4 SL |
1285 | if (targetm.calls.split_complex_arg |
1286 | && argtype | |
1287 | && TREE_CODE (argtype) == COMPLEX_TYPE | |
1288 | && targetm.calls.split_complex_arg (argtype)) | |
1289 | { | |
1290 | tree subtype = TREE_TYPE (argtype); | |
078a18a4 | 1291 | args[j].tree_value = build1 (REALPART_EXPR, subtype, arg); |
3d9684ae | 1292 | j--; |
078a18a4 SL |
1293 | args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg); |
1294 | } | |
1295 | else | |
1296 | args[j].tree_value = arg; | |
3d9684ae | 1297 | j--; |
078a18a4 | 1298 | } |
d5e254e1 IE |
1299 | |
1300 | if (slots) | |
1301 | BITMAP_FREE (slots); | |
078a18a4 SL |
1302 | } |
1303 | ||
d5e254e1 IE |
1304 | bitmap_obstack_release (NULL); |
1305 | ||
d7cdf113 | 1306 | /* I counts args in order (to be) pushed; ARGPOS counts in order written. */ |
3d9684ae | 1307 | for (argpos = 0; argpos < num_actuals; i--, argpos++) |
d7cdf113 | 1308 | { |
078a18a4 | 1309 | tree type = TREE_TYPE (args[i].tree_value); |
d7cdf113 | 1310 | int unsignedp; |
ef4bddc2 | 1311 | machine_mode mode; |
d7cdf113 | 1312 | |
d7cdf113 | 1313 | /* Replace erroneous argument with constant zero. */ |
d0f062fb | 1314 | if (type == error_mark_node || !COMPLETE_TYPE_P (type)) |
d7cdf113 JL |
1315 | args[i].tree_value = integer_zero_node, type = integer_type_node; |
1316 | ||
ebf0bf7f JJ |
1317 | /* If TYPE is a transparent union or record, pass things the way |
1318 | we would pass the first field of the union or record. We have | |
1319 | already verified that the modes are the same. */ | |
1320 | if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE) | |
1321 | && TYPE_TRANSPARENT_AGGR (type)) | |
1322 | type = TREE_TYPE (first_field (type)); | |
d7cdf113 JL |
1323 | |
1324 | /* Decide where to pass this arg. | |
1325 | ||
1326 | args[i].reg is nonzero if all or part is passed in registers. | |
1327 | ||
1328 | args[i].partial is nonzero if part but not all is passed in registers, | |
78a52f11 | 1329 | and the exact value says how many bytes are passed in registers. |
d7cdf113 JL |
1330 | |
1331 | args[i].pass_on_stack is nonzero if the argument must at least be | |
1332 | computed on the stack. It may then be loaded back into registers | |
1333 | if args[i].reg is nonzero. | |
1334 | ||
1335 | These decisions are driven by the FUNCTION_... macros and must agree | |
1336 | with those made by function.c. */ | |
1337 | ||
1338 | /* See if this argument should be passed by invisible reference. */ | |
d5cc9181 | 1339 | if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type), |
0976078c | 1340 | type, argpos < n_named_args)) |
d7cdf113 | 1341 | { |
9969aaf6 | 1342 | bool callee_copies; |
d6e1acf6 | 1343 | tree base = NULL_TREE; |
9969aaf6 RH |
1344 | |
1345 | callee_copies | |
d5cc9181 | 1346 | = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type), |
6cdd5672 | 1347 | type, argpos < n_named_args); |
9969aaf6 RH |
1348 | |
1349 | /* If we're compiling a thunk, pass through invisible references | |
1350 | instead of making a copy. */ | |
dd292d0a | 1351 | if (call_from_thunk_p |
9969aaf6 RH |
1352 | || (callee_copies |
1353 | && !TREE_ADDRESSABLE (type) | |
1354 | && (base = get_base_address (args[i].tree_value)) | |
9c3d55b4 | 1355 | && TREE_CODE (base) != SSA_NAME |
9969aaf6 | 1356 | && (!DECL_P (base) || MEM_P (DECL_RTL (base))))) |
d7cdf113 | 1357 | { |
006e317a JH |
1358 | /* We may have turned the parameter value into an SSA name. |
1359 | Go back to the original parameter so we can take the | |
1360 | address. */ | |
1361 | if (TREE_CODE (args[i].tree_value) == SSA_NAME) | |
1362 | { | |
1363 | gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value)); | |
1364 | args[i].tree_value = SSA_NAME_VAR (args[i].tree_value); | |
1365 | gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL); | |
1366 | } | |
fe8dd12e JH |
1367 | /* Argument setup code may have copied the value to a register. We | |
1368 | revert that optimization now because the tail call code must | |
1369 | use the original location. */ | |
1370 | if (TREE_CODE (args[i].tree_value) == PARM_DECL | |
1371 | && !MEM_P (DECL_RTL (args[i].tree_value)) | |
1372 | && DECL_INCOMING_RTL (args[i].tree_value) | |
1373 | && MEM_P (DECL_INCOMING_RTL (args[i].tree_value))) | |
1374 | set_decl_rtl (args[i].tree_value, | |
1375 | DECL_INCOMING_RTL (args[i].tree_value)); | |
1376 | ||
c4b9a87e ER |
1377 | mark_addressable (args[i].tree_value); |
1378 | ||
9969aaf6 RH |
1379 | /* We can't use sibcalls if a callee-copied argument is |
1380 | stored in the current function's frame. */ | |
1381 | if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base)) | |
9fd47435 RS |
1382 | *may_tailcall = false; |
1383 | ||
db3927fb AH |
1384 | args[i].tree_value = build_fold_addr_expr_loc (loc, |
1385 | args[i].tree_value); | |
9969aaf6 RH |
1386 | type = TREE_TYPE (args[i].tree_value); |
1387 | ||
becfd6e5 KZ |
1388 | if (*ecf_flags & ECF_CONST) |
1389 | *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE); | |
f21add07 | 1390 | } |
d7cdf113 JL |
1391 | else |
1392 | { | |
1393 | /* We make a copy of the object and pass the address to the | |
1394 | function being called. */ | |
1395 | rtx copy; | |
1396 | ||
d0f062fb | 1397 | if (!COMPLETE_TYPE_P (type) |
b38f3813 EB |
1398 | || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST |
1399 | || (flag_stack_check == GENERIC_STACK_CHECK | |
1400 | && compare_tree_int (TYPE_SIZE_UNIT (type), | |
1401 | STACK_CHECK_MAX_VAR_SIZE) > 0)) | |
d7cdf113 JL |
1402 | { |
1403 | /* This is a variable-sized object. Make space on the stack | |
1404 | for it. */ | |
078a18a4 | 1405 | rtx size_rtx = expr_size (args[i].tree_value); |
d7cdf113 JL |
1406 | |
1407 | if (*old_stack_level == 0) | |
1408 | { | |
9eac0f2a | 1409 | emit_stack_save (SAVE_BLOCK, old_stack_level); |
d7cdf113 JL |
1410 | *old_pending_adj = pending_stack_adjust; |
1411 | pending_stack_adjust = 0; | |
1412 | } | |
1413 | ||
d3c12306 EB |
1414 | /* We can pass TRUE as the 4th argument because we just |
1415 | saved the stack pointer and will restore it right after | |
1416 | the call. */ | |
3a42502d RH |
1417 | copy = allocate_dynamic_stack_space (size_rtx, |
1418 | TYPE_ALIGN (type), | |
1419 | TYPE_ALIGN (type), | |
1420 | true); | |
1421 | copy = gen_rtx_MEM (BLKmode, copy); | |
3bdf5ad1 | 1422 | set_mem_attributes (copy, type, 1); |
d7cdf113 JL |
1423 | } |
1424 | else | |
9474e8ab | 1425 | copy = assign_temp (type, 1, 0); |
d7cdf113 | 1426 | |
79f5e442 | 1427 | store_expr (args[i].tree_value, copy, 0, false); |
d7cdf113 | 1428 | |
becfd6e5 KZ |
1429 | /* Just change the const function to pure and then let |
1430 | the next test clear the pure based on | |
1431 | callee_copies. */ | |
1432 | if (*ecf_flags & ECF_CONST) | |
1433 | { | |
1434 | *ecf_flags &= ~ECF_CONST; | |
1435 | *ecf_flags |= ECF_PURE; | |
1436 | } | |
1437 | ||
1438 | if (!callee_copies && *ecf_flags & ECF_PURE) | |
1439 | *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE); | |
9969aaf6 RH |
1440 | |
1441 | args[i].tree_value | |
db3927fb | 1442 | = build_fold_addr_expr_loc (loc, make_tree (type, copy)); |
9969aaf6 | 1443 | type = TREE_TYPE (args[i].tree_value); |
6de9cd9a | 1444 | *may_tailcall = false; |
d7cdf113 JL |
1445 | } |
1446 | } | |
1447 | ||
8df83eae | 1448 | unsignedp = TYPE_UNSIGNED (type); |
cde0f3fd PB |
1449 | mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, |
1450 | fndecl ? TREE_TYPE (fndecl) : fntype, 0); | |
d7cdf113 JL |
1451 | |
1452 | args[i].unsignedp = unsignedp; | |
1453 | args[i].mode = mode; | |
7d167afd | 1454 | |
3c07301f NF |
1455 | args[i].reg = targetm.calls.function_arg (args_so_far, mode, type, |
1456 | argpos < n_named_args); | |
1457 | ||
d5e254e1 IE |
1458 | if (args[i].reg && CONST_INT_P (args[i].reg)) |
1459 | { | |
1460 | args[i].special_slot = args[i].reg; | |
1461 | args[i].reg = NULL; | |
1462 | } | |
1463 | ||
7d167afd JJ |
1464 | /* If this is a sibling call and the machine has register windows, the |
1465 | register window has to be unwound before calling the routine, so | |
1466 | arguments have to go into the incoming registers. */ | |
3c07301f NF |
1467 | if (targetm.calls.function_incoming_arg != targetm.calls.function_arg) |
1468 | args[i].tail_call_reg | |
1469 | = targetm.calls.function_incoming_arg (args_so_far, mode, type, | |
1470 | argpos < n_named_args); | |
1471 | else | |
1472 | args[i].tail_call_reg = args[i].reg; | |
7d167afd | 1473 | |
d7cdf113 JL |
1474 | if (args[i].reg) |
1475 | args[i].partial | |
78a52f11 RH |
1476 | = targetm.calls.arg_partial_bytes (args_so_far, mode, type, |
1477 | argpos < n_named_args); | |
d7cdf113 | 1478 | |
fe984136 | 1479 | args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type); |
d7cdf113 JL |
1480 | |
1481 | /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]), | |
1482 | it means that we are to pass this arg in the register(s) designated | |
1483 | by the PARALLEL, but also to pass it in the stack. */ | |
1484 | if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL | |
1485 | && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0) | |
1486 | args[i].pass_on_stack = 1; | |
1487 | ||
1488 | /* If this is an addressable type, we must preallocate the stack | |
1489 | since we must evaluate the object into its final location. | |
1490 | ||
1491 | If this is to be passed in both registers and the stack, it is simpler | |
1492 | to preallocate. */ | |
1493 | if (TREE_ADDRESSABLE (type) | |
1494 | || (args[i].pass_on_stack && args[i].reg != 0)) | |
1495 | *must_preallocate = 1; | |
1496 | ||
d5e254e1 IE |
1497 | /* No stack allocation and padding for bounds. */ |
1498 | if (POINTER_BOUNDS_P (args[i].tree_value)) | |
1499 | ; | |
d7cdf113 | 1500 | /* Compute the stack-size of this argument. */ |
d5e254e1 IE |
1501 | else if (args[i].reg == 0 || args[i].partial != 0 |
1502 | || reg_parm_stack_space > 0 | |
1503 | || args[i].pass_on_stack) | |
d7cdf113 JL |
1504 | locate_and_pad_parm (mode, type, |
1505 | #ifdef STACK_PARMS_IN_REG_PARM_AREA | |
1506 | 1, | |
1507 | #else | |
1508 | args[i].reg != 0, | |
1509 | #endif | |
2e4ceca5 | 1510 | reg_parm_stack_space, |
e7949876 AM |
1511 | args[i].pass_on_stack ? 0 : args[i].partial, |
1512 | fndecl, args_size, &args[i].locate); | |
648bb159 RS |
1513 | #ifdef BLOCK_REG_PADDING |
1514 | else | |
1515 | /* The argument is passed entirely in registers. See at which | |
1516 | end it should be padded. */ | |
1517 | args[i].locate.where_pad = | |
1518 | BLOCK_REG_PADDING (mode, type, | |
1519 | int_size_in_bytes (type) <= UNITS_PER_WORD); | |
1520 | #endif | |
f725a3ec | 1521 | |
d7cdf113 JL |
1522 | /* Update ARGS_SIZE, the total stack space for args so far. */ |
1523 | ||
e7949876 AM |
1524 | args_size->constant += args[i].locate.size.constant; |
1525 | if (args[i].locate.size.var) | |
1526 | ADD_PARM_SIZE (*args_size, args[i].locate.size.var); | |
d7cdf113 JL |
1527 | |
1528 | /* Increment ARGS_SO_FAR, which has info about which arg-registers | |
1529 | have been used, etc. */ | |
1530 | ||
3c07301f NF |
1531 | targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type), |
1532 | type, argpos < n_named_args); | |
d7cdf113 JL |
1533 | } |
1534 | } | |
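
/* Ordering sketch (hypothetical call f (a, z); the names are made up,
   but the values follow the j-- and i-- bookkeeping above): when z has
   a complex type the target splits, num_actuals == 3 and the first
   loop fills args[] back to front: a -> args[2], REALPART (z) ->
   args[1], IMAGPART (z) -> args[0].  The second loop then walks I
   downward while ARGPOS counts upward, so args[] ends up ordered as
   the arguments will be pushed.  */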
1535 | ||
599f37b6 JL |
1536 | /* Update ARGS_SIZE to contain the total size for the argument block. |
1537 | Return the original constant component of the argument block's size. | |
1538 | ||
1539 | REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved | |
1540 | for arguments passed in registers. */ | |
1541 | ||
1542 | static int | |
d329e058 AJ |
1543 | compute_argument_block_size (int reg_parm_stack_space, |
1544 | struct args_size *args_size, | |
033df0b9 | 1545 | tree fndecl ATTRIBUTE_UNUSED, |
5d059ed9 | 1546 | tree fntype ATTRIBUTE_UNUSED, |
d329e058 | 1547 | int preferred_stack_boundary ATTRIBUTE_UNUSED) |
599f37b6 JL |
1548 | { |
1549 | int unadjusted_args_size = args_size->constant; | |
1550 | ||
f73ad30e JH |
1551 | /* For accumulate outgoing args mode we don't need to align, since the frame |
1552 | will already be aligned. Align to STACK_BOUNDARY in order to prevent | |
f5143c46 | 1553 | backends from generating misaligned frame sizes. */ |
f73ad30e JH |
1554 | if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY) |
1555 | preferred_stack_boundary = STACK_BOUNDARY; | |
f73ad30e | 1556 | |
599f37b6 JL |
1557 | /* Compute the actual size of the argument block required. The variable |
1558 | and constant sizes must be combined, the size may have to be rounded, | |
1559 | and there may be a minimum required size. */ | |
1560 | ||
1561 | if (args_size->var) | |
1562 | { | |
1563 | args_size->var = ARGS_SIZE_TREE (*args_size); | |
1564 | args_size->constant = 0; | |
1565 | ||
c2f8b491 JH |
1566 | preferred_stack_boundary /= BITS_PER_UNIT; |
1567 | if (preferred_stack_boundary > 1) | |
1503a7ec JH |
1568 | { |
1569 | /* We don't handle this case yet. To handle it correctly we have | |
f5143c46 | 1570 | to add the delta, round and subtract the delta. |
1503a7ec | 1571 | Currently no machine description requires this support. */ |
366de0ce | 1572 | gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1))); |
1503a7ec JH |
1573 | args_size->var = round_up (args_size->var, preferred_stack_boundary); |
1574 | } | |
599f37b6 JL |
1575 | |
1576 | if (reg_parm_stack_space > 0) | |
1577 | { | |
1578 | args_size->var | |
1579 | = size_binop (MAX_EXPR, args_size->var, | |
fed3cef0 | 1580 | ssize_int (reg_parm_stack_space)); |
599f37b6 | 1581 | |
599f37b6 JL |
1582 | /* The area corresponding to register parameters is not to count in |
1583 | the size of the block we need. So make the adjustment. */ | |
5d059ed9 | 1584 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
ac294f0b KT |
1585 | args_size->var |
1586 | = size_binop (MINUS_EXPR, args_size->var, | |
1587 | ssize_int (reg_parm_stack_space)); | |
599f37b6 JL |
1588 | } |
1589 | } | |
1590 | else | |
1591 | { | |
c2f8b491 | 1592 | preferred_stack_boundary /= BITS_PER_UNIT; |
0a1c58a2 JL |
1593 | if (preferred_stack_boundary < 1) |
1594 | preferred_stack_boundary = 1; | |
fb5eebb9 | 1595 | args_size->constant = (((args_size->constant |
1503a7ec | 1596 | + stack_pointer_delta |
c2f8b491 JH |
1597 | + preferred_stack_boundary - 1) |
1598 | / preferred_stack_boundary | |
1599 | * preferred_stack_boundary) | |
1503a7ec | 1600 | - stack_pointer_delta); |
599f37b6 JL |
1601 | |
1602 | args_size->constant = MAX (args_size->constant, | |
1603 | reg_parm_stack_space); | |
1604 | ||
5d059ed9 | 1605 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
ac294f0b | 1606 | args_size->constant -= reg_parm_stack_space; |
599f37b6 JL |
1607 | } |
1608 | return unadjusted_args_size; | |
1609 | } | |
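
/* Illustrative sketch, not GCC code: the constant-size branch above
   rounds the block up to the preferred boundary while compensating
   for bytes already adjusted on the stack.  The helper name is
   hypothetical; the arithmetic mirrors the code.

     static int
     round_arg_block (int size, int delta, int boundary)
     {
       return (size + delta + boundary - 1) / boundary * boundary - delta;
     }

   E.g. round_arg_block (20, 4, 16) == 28: pushing 28 bytes onto a
   stack that is already off by 4 lands on a 16-byte boundary
   (28 + 4 == 32).  */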
1610 | ||
19832c77 | 1611 | /* Precompute parameters as needed for a function call. |
cc0b1adc | 1612 | |
f2d33f13 | 1613 | FLAGS is mask of ECF_* constants. |
cc0b1adc | 1614 | |
cc0b1adc JL |
1615 | NUM_ACTUALS is the number of arguments. |
1616 | ||
f725a3ec KH |
1617 | ARGS is an array containing information for each argument; this |
1618 | routine fills in the INITIAL_VALUE and VALUE fields for each | |
1619 | precomputed argument. */ | |
cc0b1adc JL |
1620 | |
1621 | static void | |
84b8030f | 1622 | precompute_arguments (int num_actuals, struct arg_data *args) |
cc0b1adc JL |
1623 | { |
1624 | int i; | |
1625 | ||
3638733b | 1626 | /* If this is a libcall, then precompute all arguments so that we do not |
82c82743 | 1627 | get extraneous instructions emitted as part of the libcall sequence. */ |
6a4e56a9 JJ |
1628 | |
1629 | /* If we preallocated the stack space, and some arguments must be passed | |
1630 | on the stack, then we must precompute any parameter which contains a | |
1631 | function call which will store arguments on the stack. | |
1632 | Otherwise, evaluating the parameter may clobber previous parameters | |
1633 | which have already been stored into the stack. (We have code to avoid | |
1634 | such a case by saving the outgoing stack arguments, but it results in | |
1635 | worse code) */ | |
84b8030f | 1636 | if (!ACCUMULATE_OUTGOING_ARGS) |
82c82743 | 1637 | return; |
7ae4ad28 | 1638 | |
cc0b1adc | 1639 | for (i = 0; i < num_actuals; i++) |
82c82743 | 1640 | { |
cde0f3fd | 1641 | tree type; |
ef4bddc2 | 1642 | machine_mode mode; |
ddef6bc7 | 1643 | |
84b8030f | 1644 | if (TREE_CODE (args[i].tree_value) != CALL_EXPR) |
6a4e56a9 JJ |
1645 | continue; |
1646 | ||
82c82743 | 1647 | /* If this is an addressable type, we cannot pre-evaluate it. */ |
cde0f3fd PB |
1648 | type = TREE_TYPE (args[i].tree_value); |
1649 | gcc_assert (!TREE_ADDRESSABLE (type)); | |
cc0b1adc | 1650 | |
82c82743 | 1651 | args[i].initial_value = args[i].value |
84217346 | 1652 | = expand_normal (args[i].tree_value); |
cc0b1adc | 1653 | |
cde0f3fd | 1654 | mode = TYPE_MODE (type); |
82c82743 RH |
1655 | if (mode != args[i].mode) |
1656 | { | |
cde0f3fd | 1657 | int unsignedp = args[i].unsignedp; |
82c82743 RH |
1658 | args[i].value |
1659 | = convert_modes (args[i].mode, mode, | |
1660 | args[i].value, args[i].unsignedp); | |
cde0f3fd | 1661 | |
82c82743 RH |
1662 | /* CSE will replace this only if it contains the args[i].value | |
1663 | pseudo, so convert it down to the declared mode using | |
1664 | a SUBREG. */ | |
1665 | if (REG_P (args[i].value) | |
cde0f3fd PB |
1666 | && GET_MODE_CLASS (args[i].mode) == MODE_INT |
1667 | && promote_mode (type, mode, &unsignedp) != args[i].mode) | |
82c82743 RH |
1668 | { |
1669 | args[i].initial_value | |
1670 | = gen_lowpart_SUBREG (mode, args[i].value); | |
1671 | SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1; | |
27be0c32 | 1672 | SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp); |
82c82743 | 1673 | } |
82c82743 RH |
1674 | } |
1675 | } | |
cc0b1adc JL |
1676 | } |
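
/* A hedged sketch of the promotion case above, assuming a
   hypothetical target that promotes sub-word integers to SImode: a
   QImode argument is widened with convert_modes (SImode, QImode, x,
   unsignedp), and its initial value is then rewrapped as something
   like (subreg:QI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set (the
   byte offset depends on endianness), so CSE can still recognize the
   value in its declared mode.  */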
1677 | ||
0f9b3ea6 JL |
1678 | /* Given the current state of MUST_PREALLOCATE and information about |
1679 | arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE, | |
1680 | compute and return the final value for MUST_PREALLOCATE. */ | |
1681 | ||
1682 | static int | |
b8698a0f | 1683 | finalize_must_preallocate (int must_preallocate, int num_actuals, |
5039610b | 1684 | struct arg_data *args, struct args_size *args_size) |
0f9b3ea6 JL |
1685 | { |
1686 | /* See if we have or want to preallocate stack space. | |
1687 | ||
1688 | If we would have to push a partially-in-regs parm | |
1689 | before other stack parms, preallocate stack space instead. | |
1690 | ||
1691 | If the size of some parm is not a multiple of the required stack | |
1692 | alignment, we must preallocate. | |
1693 | ||
1694 | If the total size of arguments that would otherwise create a copy in | |
1695 | a temporary (such as a CALL) is more than half the total argument list | |
1696 | size, preallocation is faster. | |
1697 | ||
1698 | Another reason to preallocate is if we have a machine (like the m88k) | |
1699 | where stack alignment is required to be maintained between every | |
1700 | pair of insns, not just when the call is made. However, we assume here | |
1701 | that such machines either do not have push insns (and hence preallocation | |
1702 | would occur anyway) or the problem is taken care of with | |
1703 | PUSH_ROUNDING. */ | |
1704 | ||
1705 | if (! must_preallocate) | |
1706 | { | |
1707 | int partial_seen = 0; | |
1708 | int copy_to_evaluate_size = 0; | |
1709 | int i; | |
1710 | ||
1711 | for (i = 0; i < num_actuals && ! must_preallocate; i++) | |
1712 | { | |
1713 | if (args[i].partial > 0 && ! args[i].pass_on_stack) | |
1714 | partial_seen = 1; | |
1715 | else if (partial_seen && args[i].reg == 0) | |
1716 | must_preallocate = 1; | |
d5e254e1 IE |
1717 | /* We preallocate in case there are bounds passed |
1718 | in the bounds table to have precomputed address | |
1719 | for bounds association. */ | |
1720 | else if (POINTER_BOUNDS_P (args[i].tree_value) | |
1721 | && !args[i].reg) | |
1722 | must_preallocate = 1; | |
0f9b3ea6 JL |
1723 | |
1724 | if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode | |
1725 | && (TREE_CODE (args[i].tree_value) == CALL_EXPR | |
1726 | || TREE_CODE (args[i].tree_value) == TARGET_EXPR | |
1727 | || TREE_CODE (args[i].tree_value) == COND_EXPR | |
1728 | || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))) | |
1729 | copy_to_evaluate_size | |
1730 | += int_size_in_bytes (TREE_TYPE (args[i].tree_value)); | |
1731 | } | |
1732 | ||
1733 | if (copy_to_evaluate_size * 2 >= args_size->constant | |
1734 | && args_size->constant > 0) | |
1735 | must_preallocate = 1; | |
1736 | } | |
1737 | return must_preallocate; | |
1738 | } | |
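
/* Worked example of the size heuristic above (hypothetical numbers):
   with stack arguments totalling args_size->constant == 48 bytes, of
   which one 32-byte BLKmode aggregate is itself a CALL_EXPR result,
   copy_to_evaluate_size == 32 and 32 * 2 >= 48, so the whole argument
   block is preallocated instead of evaluating into a temporary and
   pushing afterwards.  */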
599f37b6 | 1739 | |
a45bdd02 JL |
1740 | /* If we preallocated stack space, compute the address of each argument |
1741 | and store it into the ARGS array. | |
1742 | ||
f725a3ec | 1743 | We need not ensure it is a valid memory address here; it will be |
a45bdd02 JL |
1744 | validated when it is used. | |
1745 | ||
1746 | ARGBLOCK is an rtx for the address of the outgoing arguments. */ | |
1747 | ||
1748 | static void | |
d329e058 | 1749 | compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals) |
a45bdd02 JL |
1750 | { |
1751 | if (argblock) | |
1752 | { | |
1753 | rtx arg_reg = argblock; | |
1754 | int i, arg_offset = 0; | |
1755 | ||
1756 | if (GET_CODE (argblock) == PLUS) | |
1757 | arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1)); | |
1758 | ||
1759 | for (i = 0; i < num_actuals; i++) | |
1760 | { | |
e7949876 AM |
1761 | rtx offset = ARGS_SIZE_RTX (args[i].locate.offset); |
1762 | rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset); | |
a45bdd02 | 1763 | rtx addr; |
bfc45551 | 1764 | unsigned int align, boundary; |
7816b87e | 1765 | unsigned int units_on_stack = 0; |
ef4bddc2 | 1766 | machine_mode partial_mode = VOIDmode; |
a45bdd02 JL |
1767 | |
1768 | /* Skip this parm if it will not be passed on the stack. */ | |
7816b87e JC |
1769 | if (! args[i].pass_on_stack |
1770 | && args[i].reg != 0 | |
1771 | && args[i].partial == 0) | |
a45bdd02 JL |
1772 | continue; |
1773 | ||
d5e254e1 IE |
1774 | /* Pointer Bounds are never passed on the stack. */ |
1775 | if (POINTER_BOUNDS_P (args[i].tree_value)) | |
1776 | continue; | |
1777 | ||
481683e1 | 1778 | if (CONST_INT_P (offset)) |
0a81f074 | 1779 | addr = plus_constant (Pmode, arg_reg, INTVAL (offset)); |
a45bdd02 JL |
1780 | else |
1781 | addr = gen_rtx_PLUS (Pmode, arg_reg, offset); | |
1782 | ||
0a81f074 | 1783 | addr = plus_constant (Pmode, addr, arg_offset); |
7816b87e JC |
1784 | |
1785 | if (args[i].partial != 0) | |
1786 | { | |
1787 | /* Only part of the parameter is being passed on the stack. | |
1788 | Generate a simple memory reference of the correct size. */ | |
1789 | units_on_stack = args[i].locate.size.constant; | |
1790 | partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT, | |
1791 | MODE_INT, 1); | |
1792 | args[i].stack = gen_rtx_MEM (partial_mode, addr); | |
f5541398 | 1793 | set_mem_size (args[i].stack, units_on_stack); |
7816b87e JC |
1794 | } |
1795 | else | |
1796 | { | |
1797 | args[i].stack = gen_rtx_MEM (args[i].mode, addr); | |
1798 | set_mem_attributes (args[i].stack, | |
1799 | TREE_TYPE (args[i].tree_value), 1); | |
1800 | } | |
bfc45551 AM |
1801 | align = BITS_PER_UNIT; |
1802 | boundary = args[i].locate.boundary; | |
1803 | if (args[i].locate.where_pad != downward) | |
1804 | align = boundary; | |
481683e1 | 1805 | else if (CONST_INT_P (offset)) |
bfc45551 AM |
1806 | { |
1807 | align = INTVAL (offset) * BITS_PER_UNIT | boundary; | |
1808 | align = align & -align; | |
1809 | } | |
1810 | set_mem_align (args[i].stack, align); | |
a45bdd02 | 1811 | |
481683e1 | 1812 | if (CONST_INT_P (slot_offset)) |
0a81f074 | 1813 | addr = plus_constant (Pmode, arg_reg, INTVAL (slot_offset)); |
a45bdd02 JL |
1814 | else |
1815 | addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset); | |
1816 | ||
0a81f074 | 1817 | addr = plus_constant (Pmode, addr, arg_offset); |
7816b87e JC |
1818 | |
1819 | if (args[i].partial != 0) | |
1820 | { | |
1821 | /* Only part of the parameter is being passed on the stack. | |
1822 | Generate a simple memory reference of the correct size. | |
1823 | */ | |
1824 | args[i].stack_slot = gen_rtx_MEM (partial_mode, addr); | |
f5541398 | 1825 | set_mem_size (args[i].stack_slot, units_on_stack); |
7816b87e JC |
1826 | } |
1827 | else | |
1828 | { | |
1829 | args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr); | |
1830 | set_mem_attributes (args[i].stack_slot, | |
1831 | TREE_TYPE (args[i].tree_value), 1); | |
1832 | } | |
bfc45551 | 1833 | set_mem_align (args[i].stack_slot, args[i].locate.boundary); |
7ab923cc JJ |
1834 | |
1835 | /* Function incoming arguments may overlap with sibling call | |
1836 | outgoing arguments and we cannot allow reordering of reads | |
1837 | from function arguments with stores to outgoing arguments | |
1838 | of sibling calls. */ | |
ba4828e0 RK |
1839 | set_mem_alias_set (args[i].stack, 0); |
1840 | set_mem_alias_set (args[i].stack_slot, 0); | |
a45bdd02 JL |
1841 | } |
1842 | } | |
1843 | } | |
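
/* The align computation above is the usual least-set-bit trick.
   Worked example (hypothetical values): a downward-padded parm at
   byte offset 4 with a 64-bit slot boundary gives

     align = 4 * BITS_PER_UNIT | 64;   32 | 64 == 96 (binary 1100000)
     align = align & -align;           96 & -96 == 32

   so the slot is known to be 32-bit but not 64-bit aligned.  */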
f725a3ec | 1844 | |
a45bdd02 JL |
1845 | /* Given a FNDECL and EXP, return an rtx suitable for use as a target address |
1846 | in a call instruction. | |
1847 | ||
1848 | FNDECL is the tree node for the target function. For an indirect call | |
1849 | FNDECL will be NULL_TREE. | |
1850 | ||
09e2bf48 | 1851 | ADDR is the operand 0 of CALL_EXPR for this call. */ |
a45bdd02 JL |
1852 | |
1853 | static rtx | |
d329e058 | 1854 | rtx_for_function_call (tree fndecl, tree addr) |
a45bdd02 JL |
1855 | { |
1856 | rtx funexp; | |
1857 | ||
1858 | /* Get the function to call, in the form of RTL. */ | |
1859 | if (fndecl) | |
1860 | { | |
ad960f56 | 1861 | if (!TREE_USED (fndecl) && fndecl != current_function_decl) |
bbee5843 | 1862 | TREE_USED (fndecl) = 1; |
a45bdd02 JL |
1863 | |
1864 | /* Get a SYMBOL_REF rtx for the function address. */ | |
1865 | funexp = XEXP (DECL_RTL (fndecl), 0); | |
1866 | } | |
1867 | else | |
1868 | /* Generate an rtx (probably a pseudo-register) for the address. */ | |
1869 | { | |
1870 | push_temp_slots (); | |
84217346 | 1871 | funexp = expand_normal (addr); |
f725a3ec | 1872 | pop_temp_slots (); /* FUNEXP can't be BLKmode. */ |
a45bdd02 JL |
1873 | } |
1874 | return funexp; | |
1875 | } | |
1876 | ||
5275901c JJ |
1877 | /* Internal state for internal_arg_pointer_based_exp and its helpers. */ |
1878 | static struct | |
1879 | { | |
1880 | /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan, | |
1881 | or NULL_RTX if none has been scanned yet. */ | |
48810515 | 1882 | rtx_insn *scan_start; |
5275901c JJ |
1883 | /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is |
1884 | based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the | |
1885 | pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it | |
1886 | with fixed offset, or PC if this is with variable or unknown offset. */ | |
9771b263 | 1887 | vec<rtx> cache; |
5275901c JJ |
1888 | } internal_arg_pointer_exp_state; |
1889 | ||
e9f56944 | 1890 | static rtx internal_arg_pointer_based_exp (const_rtx, bool); |
5275901c JJ |
1891 | |
1892 | /* Helper function for internal_arg_pointer_based_exp. Scan insns in | |
1893 | the tail call sequence, starting with first insn that hasn't been | |
1894 | scanned yet, and note for each pseudo on the LHS whether it is based | |
1895 | on crtl->args.internal_arg_pointer or not, and what offset from | |
1896 | that pointer it has. */ | |
1897 | ||
1898 | static void | |
1899 | internal_arg_pointer_based_exp_scan (void) | |
1900 | { | |
48810515 | 1901 | rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start; |
5275901c JJ |
1902 | |
1903 | if (scan_start == NULL_RTX) | |
1904 | insn = get_insns (); | |
1905 | else | |
1906 | insn = NEXT_INSN (scan_start); | |
1907 | ||
1908 | while (insn) | |
1909 | { | |
1910 | rtx set = single_set (insn); | |
1911 | if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set))) | |
1912 | { | |
1913 | rtx val = NULL_RTX; | |
1914 | unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER; | |
1915 | /* Punt on pseudos set multiple times. */ | |
9771b263 DN |
1916 | if (idx < internal_arg_pointer_exp_state.cache.length () |
1917 | && (internal_arg_pointer_exp_state.cache[idx] | |
5275901c JJ |
1918 | != NULL_RTX)) |
1919 | val = pc_rtx; | |
1920 | else | |
1921 | val = internal_arg_pointer_based_exp (SET_SRC (set), false); | |
1922 | if (val != NULL_RTX) | |
1923 | { | |
9771b263 | 1924 | if (idx >= internal_arg_pointer_exp_state.cache.length ()) |
c3284718 RS |
1925 | internal_arg_pointer_exp_state.cache |
1926 | .safe_grow_cleared (idx + 1); | |
9771b263 | 1927 | internal_arg_pointer_exp_state.cache[idx] = val; |
5275901c JJ |
1928 | } |
1929 | } | |
1930 | if (NEXT_INSN (insn) == NULL_RTX) | |
1931 | scan_start = insn; | |
1932 | insn = NEXT_INSN (insn); | |
1933 | } | |
1934 | ||
1935 | internal_arg_pointer_exp_state.scan_start = scan_start; | |
1936 | } | |
1937 | ||
5275901c JJ |
1938 | /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return |
1939 | NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on | |
1940 | it with fixed offset, or PC if this is with variable or unknown offset. | |
1941 | TOPLEVEL is true if the function is invoked at the topmost level. */ | |
1942 | ||
1943 | static rtx | |
e9f56944 | 1944 | internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel) |
5275901c JJ |
1945 | { |
1946 | if (CONSTANT_P (rtl)) | |
1947 | return NULL_RTX; | |
1948 | ||
1949 | if (rtl == crtl->args.internal_arg_pointer) | |
1950 | return const0_rtx; | |
1951 | ||
1952 | if (REG_P (rtl) && HARD_REGISTER_P (rtl)) | |
1953 | return NULL_RTX; | |
1954 | ||
1955 | if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1))) | |
1956 | { | |
1957 | rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel); | |
1958 | if (val == NULL_RTX || val == pc_rtx) | |
1959 | return val; | |
0a81f074 | 1960 | return plus_constant (Pmode, val, INTVAL (XEXP (rtl, 1))); |
5275901c JJ |
1961 | } |
1962 | ||
1963 | /* When called at the topmost level, scan pseudo assignments in between the | |
1964 | last scanned instruction in the tail call sequence and the latest insn | |
1965 | in that sequence. */ | |
1966 | if (toplevel) | |
1967 | internal_arg_pointer_based_exp_scan (); | |
1968 | ||
1969 | if (REG_P (rtl)) | |
1970 | { | |
1971 | unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER; | |
9771b263 DN |
1972 | if (idx < internal_arg_pointer_exp_state.cache.length ()) |
1973 | return internal_arg_pointer_exp_state.cache[idx]; | |
5275901c JJ |
1974 | |
1975 | return NULL_RTX; | |
1976 | } | |
1977 | ||
e9f56944 RS |
1978 | subrtx_iterator::array_type array; |
1979 | FOR_EACH_SUBRTX (iter, array, rtl, NONCONST) | |
1980 | { | |
1981 | const_rtx x = *iter; | |
1982 | if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX) | |
1983 | return pc_rtx; | |
1984 | if (MEM_P (x)) | |
1985 | iter.skip_subrtxes (); | |
1986 | } | |
5275901c JJ |
1987 | |
1988 | return NULL_RTX; | |
1989 | } | |
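
/* The return value above is effectively a three-point lattice.  A
   hedged sketch of how a caller decodes it:

     rtx val = internal_arg_pointer_based_exp (addr, true);
     if (val == NULL_RTX)
       ; // not based on the incoming argument pointer
     else if (val == pc_rtx)
       ; // based on it, but at a variable or unknown offset
     else
       ; // based on it at the fixed offset INTVAL (val)

   which is exactly the case analysis done by
   mem_overlaps_already_clobbered_arg_p below.  */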
1990 | ||
07eef816 KH |
1991 | /* Return true if and only if SIZE storage units (usually bytes) |
1992 | starting from address ADDR overlap with already clobbered argument | |
1993 | area. This function is used to determine if we should give up a | |
1994 | sibcall. */ | |
1995 | ||
1996 | static bool | |
1997 | mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size) | |
1998 | { | |
1999 | HOST_WIDE_INT i; | |
5275901c | 2000 | rtx val; |
07eef816 | 2001 | |
f61e445a | 2002 | if (bitmap_empty_p (stored_args_map)) |
4189fb53 | 2003 | return false; |
5275901c JJ |
2004 | val = internal_arg_pointer_based_exp (addr, true); |
2005 | if (val == NULL_RTX) | |
2006 | return false; | |
2007 | else if (val == pc_rtx) | |
6c3cb698 | 2008 | return true; |
07eef816 | 2009 | else |
5275901c | 2010 | i = INTVAL (val); |
76e048a8 KT |
2011 | |
2012 | if (STACK_GROWS_DOWNWARD) | |
2013 | i -= crtl->args.pretend_args_size; | |
2014 | else | |
2015 | i += crtl->args.pretend_args_size; | |
2016 | ||
07eef816 | 2017 | |
6dad9361 TS |
2018 | if (ARGS_GROW_DOWNWARD) |
2019 | i = -i - size; | |
2020 | ||
07eef816 KH |
2021 | if (size > 0) |
2022 | { | |
2023 | unsigned HOST_WIDE_INT k; | |
2024 | ||
2025 | for (k = 0; k < size; k++) | |
5829cc0f | 2026 | if (i + k < SBITMAP_SIZE (stored_args_map) |
d7c028c0 | 2027 | && bitmap_bit_p (stored_args_map, i + k)) |
07eef816 KH |
2028 | return true; |
2029 | } | |
2030 | ||
2031 | return false; | |
2032 | } | |
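
/* Worked example (hypothetical numbers): on a target where
   ARGS_GROW_DOWNWARD holds and with no pretend args, a 4-byte access
   at offset -8 from the incoming argument pointer is remapped by
   i = -i - size to i == 4, and the loop then tests stored_args_map
   bits [4, 8).  Any set bit there means a tail-call argument store
   has already clobbered those bytes and the sibcall must be given
   up.  */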
2033 | ||
21a3b983 JL |
2034 | /* Do the register loads required for any wholly-register parms or any |
2035 | parms which are passed both on the stack and in a register. Their | |
f725a3ec | 2036 | expressions were already evaluated. |
21a3b983 JL |
2037 | |
2038 | Mark all register-parms as living through the call, putting these USE | |
d329e058 AJ |
2039 | insns in the CALL_INSN_FUNCTION_USAGE field. |
2040 | ||
40b0345d | 2041 | When IS_SIBCALL, perform the check_sibcall_argument_overlap |
0cdca92b | 2042 | checking, setting *SIBCALL_FAILURE if appropriate. */ |
21a3b983 JL |
2043 | |
2044 | static void | |
d329e058 AJ |
2045 | load_register_parameters (struct arg_data *args, int num_actuals, |
2046 | rtx *call_fusage, int flags, int is_sibcall, | |
2047 | int *sibcall_failure) | |
21a3b983 JL |
2048 | { |
2049 | int i, j; | |
2050 | ||
21a3b983 | 2051 | for (i = 0; i < num_actuals; i++) |
21a3b983 | 2052 | { |
099e9712 JH |
2053 | rtx reg = ((flags & ECF_SIBCALL) |
2054 | ? args[i].tail_call_reg : args[i].reg); | |
21a3b983 JL |
2055 | if (reg) |
2056 | { | |
6e985040 AM |
2057 | int partial = args[i].partial; |
2058 | int nregs; | |
2059 | int size = 0; | |
48810515 | 2060 | rtx_insn *before_arg = get_last_insn (); |
f0078f86 AM |
2061 | /* Set non-negative if we must move a word at a time, even if |
2062 | just one word (e.g., partial == 4 && mode == DFmode). Set | |
2063 | to -1 if we just use a normal move insn. This value can be | |
2064 | zero if the argument is a zero size structure. */ | |
6e985040 | 2065 | nregs = -1; |
78a52f11 RH |
2066 | if (GET_CODE (reg) == PARALLEL) |
2067 | ; | |
2068 | else if (partial) | |
2069 | { | |
2070 | gcc_assert (partial % UNITS_PER_WORD == 0); | |
2071 | nregs = partial / UNITS_PER_WORD; | |
2072 | } | |
6e985040 AM |
2073 | else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode) |
2074 | { | |
2075 | size = int_size_in_bytes (TREE_TYPE (args[i].tree_value)); | |
2076 | nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; | |
2077 | } | |
2078 | else | |
2079 | size = GET_MODE_SIZE (args[i].mode); | |
21a3b983 JL |
2080 | |
2081 | /* Handle calls that pass values in multiple non-contiguous | |
2082 | locations. The Irix 6 ABI has examples of this. */ | |
2083 | ||
2084 | if (GET_CODE (reg) == PARALLEL) | |
8df3dbb7 | 2085 | emit_group_move (reg, args[i].parallel_value); |
21a3b983 JL |
2086 | |
2087 | /* If simple case, just do move. If normal partial, store_one_arg | |
2088 | has already loaded the register for us. In all other cases, | |
2089 | load the register(s) from memory. */ | |
2090 | ||
9206d736 AM |
2091 | else if (nregs == -1) |
2092 | { | |
2093 | emit_move_insn (reg, args[i].value); | |
6e985040 | 2094 | #ifdef BLOCK_REG_PADDING |
9206d736 AM |
2095 | /* Handle the case where we have a value that needs shifting | |
2096 | up to the msb, e.g. a QImode value and we're padding | |
2097 | upward on a BYTES_BIG_ENDIAN machine. */ | |
2098 | if (size < UNITS_PER_WORD | |
2099 | && (args[i].locate.where_pad | |
2100 | == (BYTES_BIG_ENDIAN ? upward : downward))) | |
2101 | { | |
9206d736 AM |
2102 | rtx x; |
2103 | int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; | |
980f6e8e AM |
2104 | |
2105 | /* Assigning REG here rather than a temp makes CALL_FUSAGE | |
2106 | report the whole reg as used. Strictly speaking, the | |
2107 | call only uses SIZE bytes at the msb end, but it doesn't | |
2108 | seem worth generating rtl to say that. */ | |
2109 | reg = gen_rtx_REG (word_mode, REGNO (reg)); | |
eb6c3df1 | 2110 | x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1); |
980f6e8e AM |
2111 | if (x != reg) |
2112 | emit_move_insn (reg, x); | |
9206d736 | 2113 | } |
6e985040 | 2114 | #endif |
9206d736 | 2115 | } |
21a3b983 JL |
2116 | |
2117 | /* If we have pre-computed the values to put in the registers in | |
2118 | the case of non-aligned structures, copy them in now. */ | |
2119 | ||
2120 | else if (args[i].n_aligned_regs != 0) | |
2121 | for (j = 0; j < args[i].n_aligned_regs; j++) | |
2122 | emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j), | |
2123 | args[i].aligned_regs[j]); | |
2124 | ||
3b2ee170 | 2125 | else if (partial == 0 || args[i].pass_on_stack) |
6e985040 | 2126 | { |
1a8cb155 | 2127 | rtx mem = validize_mem (copy_rtx (args[i].value)); |
6e985040 | 2128 | |
3b2ee170 IS |
2129 | /* Check for overlap with already clobbered argument area, |
2130 | provided that this has non-zero size. */ | |
07eef816 | 2131 | if (is_sibcall |
3b2ee170 IS |
2132 | && (size == 0 |
2133 | || mem_overlaps_already_clobbered_arg_p | |
2134 | (XEXP (args[i].value, 0), size))) | |
07eef816 KH |
2135 | *sibcall_failure = 1; |
2136 | ||
984b2054 AM |
2137 | if (size % UNITS_PER_WORD == 0 |
2138 | || MEM_ALIGN (mem) % BITS_PER_WORD == 0) | |
2139 | move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode); | |
2140 | else | |
2141 | { | |
2142 | if (nregs > 1) | |
2143 | move_block_to_reg (REGNO (reg), mem, nregs - 1, | |
2144 | args[i].mode); | |
2145 | rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1); | |
2146 | unsigned int bitoff = (nregs - 1) * BITS_PER_WORD; | |
2147 | unsigned int bitsize = size * BITS_PER_UNIT - bitoff; | |
2148 | rtx x = extract_bit_field (mem, bitsize, bitoff, 1, | |
2149 | dest, word_mode, word_mode); | |
2150 | if (BYTES_BIG_ENDIAN) | |
2151 | x = expand_shift (LSHIFT_EXPR, word_mode, x, | |
2152 | BITS_PER_WORD - bitsize, dest, 1); | |
2153 | if (x != dest) | |
2154 | emit_move_insn (dest, x); | |
2155 | } | |
2156 | ||
6e985040 | 2157 | /* Handle a BLKmode that needs shifting. */ |
9206d736 | 2158 | if (nregs == 1 && size < UNITS_PER_WORD |
03ca1672 UW |
2159 | #ifdef BLOCK_REG_PADDING |
2160 | && args[i].locate.where_pad == downward | |
2161 | #else | |
2162 | && BYTES_BIG_ENDIAN | |
2163 | #endif | |
984b2054 | 2164 | ) |
6e985040 | 2165 | { |
984b2054 | 2166 | rtx dest = gen_rtx_REG (word_mode, REGNO (reg)); |
6e985040 | 2167 | int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; |
984b2054 AM |
2168 | enum tree_code dir = (BYTES_BIG_ENDIAN |
2169 | ? RSHIFT_EXPR : LSHIFT_EXPR); | |
2170 | rtx x; | |
6e985040 | 2171 | |
984b2054 AM |
2172 | x = expand_shift (dir, word_mode, dest, shift, dest, 1); |
2173 | if (x != dest) | |
2174 | emit_move_insn (dest, x); | |
6e985040 | 2175 | } |
6e985040 | 2176 | } |
21a3b983 | 2177 | |
0cdca92b DJ |
2178 | /* When a parameter is a block, and perhaps in other cases, it is |
2179 | possible that it did a load from an argument slot that was | |
32dd366d | 2180 | already clobbered. */ |
0cdca92b DJ |
2181 | if (is_sibcall |
2182 | && check_sibcall_argument_overlap (before_arg, &args[i], 0)) | |
2183 | *sibcall_failure = 1; | |
2184 | ||
21a3b983 JL |
2185 | /* Handle calls that pass values in multiple non-contiguous |
2186 | locations. The Irix 6 ABI has examples of this. */ | |
2187 | if (GET_CODE (reg) == PARALLEL) | |
2188 | use_group_regs (call_fusage, reg); | |
2189 | else if (nregs == -1) | |
7d810276 JJ |
2190 | use_reg_mode (call_fusage, reg, |
2191 | TYPE_MODE (TREE_TYPE (args[i].tree_value))); | |
faa00334 AO |
2192 | else if (nregs > 0) |
2193 | use_regs (call_fusage, REGNO (reg), nregs); | |
21a3b983 JL |
2194 | } |
2195 | } | |
2196 | } | |
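
/* Sketch of the block-move path above with hypothetical values: a
   10-byte BLKmode argument on a target with UNITS_PER_WORD == 4 gives
   nregs == (10 + 3) / 4 == 3.  Since 10 % 4 != 0 (and assuming the
   MEM is not word-aligned), two full words are block-moved and the
   remaining 16 bits are fetched with extract_bit_field into the third
   register, shifted left into place on a BYTES_BIG_ENDIAN target.  */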
2197 | ||
739fb049 MM |
2198 | /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments |
2199 | wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY | |
2200 | bytes, then we would need to push some additional bytes to pad the | |
ce48579b RH |
2201 | arguments. So, we compute an adjustment to the stack pointer for an | |
2202 | amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE | |
2203 | bytes. Then, when the arguments are pushed, the stack will be perfectly | |
2204 | aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should | |
2205 | be popped after the call. Returns the adjustment. */ | |
739fb049 | 2206 | |
ce48579b | 2207 | static int |
d329e058 AJ |
2208 | combine_pending_stack_adjustment_and_call (int unadjusted_args_size, |
2209 | struct args_size *args_size, | |
95899b34 | 2210 | unsigned int preferred_unit_stack_boundary) |
739fb049 MM |
2211 | { |
2212 | /* The number of bytes to pop so that the stack will be | |
2213 | under-aligned by UNADJUSTED_ARGS_SIZE bytes. */ | |
2214 | HOST_WIDE_INT adjustment; | |
2215 | /* The alignment of the stack after the arguments are pushed, if we | |
2216 | just pushed the arguments without adjusting the stack here. */ | |
95899b34 | 2217 | unsigned HOST_WIDE_INT unadjusted_alignment; |
739fb049 | 2218 | |
f725a3ec | 2219 | unadjusted_alignment |
739fb049 MM |
2220 | = ((stack_pointer_delta + unadjusted_args_size) |
2221 | % preferred_unit_stack_boundary); | |
2222 | ||
2223 | /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes | |
2224 | as possible -- leaving just enough left to cancel out the | |
2225 | UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the | |
2226 | PENDING_STACK_ADJUST is non-negative, and congruent to | |
2227 | -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */ | |
2228 | ||
2229 | /* Begin by trying to pop all the bytes. */ | |
f725a3ec KH |
2230 | unadjusted_alignment |
2231 | = (unadjusted_alignment | |
739fb049 MM |
2232 | - (pending_stack_adjust % preferred_unit_stack_boundary)); |
2233 | adjustment = pending_stack_adjust; | |
2234 | /* Push enough additional bytes that the stack will be aligned | |
2235 | after the arguments are pushed. */ | |
e079dcdb HB |
2236 | if (preferred_unit_stack_boundary > 1) |
2237 | { | |
3e555c7d | 2238 | if (unadjusted_alignment > 0) |
f725a3ec | 2239 | adjustment -= preferred_unit_stack_boundary - unadjusted_alignment; |
e079dcdb | 2240 | else |
f725a3ec | 2241 | adjustment += unadjusted_alignment; |
e079dcdb | 2242 | } |
f725a3ec | 2243 | |
739fb049 MM |
2244 | /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of | |
2245 | bytes after the call. The right number is the entire | |
2246 | PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required | |
2247 | by the arguments in the first place. */ | |
f725a3ec | 2248 | args_size->constant |
739fb049 MM |
2249 | = pending_stack_adjust - adjustment + unadjusted_args_size; |
2250 | ||
ce48579b | 2251 | return adjustment; |
739fb049 MM |
2252 | } |
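
/* Worked example (hypothetical numbers): with stack_pointer_delta == 0,
   unadjusted_args_size == 4, preferred_unit_stack_boundary == 16 and
   pending_stack_adjust == 20, unadjusted_alignment starts as
   (0 + 4) % 16 == 4 and then becomes 4 - (20 % 16) == 0, so the full
   20 bytes are popped now.  ARGS_SIZE->CONSTANT becomes
   20 - 20 + 4 == 4: pushing the 4 bytes of arguments restores perfect
   alignment, and those 4 bytes are popped after the call.  */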
2253 | ||
c67846f2 JJ |
2254 | /* Scan expression X to see whether it dereferences any argument slots | |
2255 | we already clobbered by tail call arguments (as noted in stored_args_map | |
2256 | bitmap). | |
da7d8304 | 2257 | Return nonzero if X expression dereferences such argument slots, |
c67846f2 JJ |
2258 | zero otherwise. */ |
2259 | ||
2260 | static int | |
d329e058 | 2261 | check_sibcall_argument_overlap_1 (rtx x) |
c67846f2 JJ |
2262 | { |
2263 | RTX_CODE code; | |
2264 | int i, j; | |
c67846f2 JJ |
2265 | const char *fmt; |
2266 | ||
2267 | if (x == NULL_RTX) | |
2268 | return 0; | |
2269 | ||
2270 | code = GET_CODE (x); | |
2271 | ||
6c3cb698 KY |
2272 | /* We need not check the operands of the CALL expression itself. */ |
2273 | if (code == CALL) | |
2274 | return 0; | |
2275 | ||
c67846f2 | 2276 | if (code == MEM) |
07eef816 KH |
2277 | return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0), |
2278 | GET_MODE_SIZE (GET_MODE (x))); | |
c67846f2 | 2279 | |
f725a3ec | 2280 | /* Scan all subexpressions. */ |
c67846f2 JJ |
2281 | fmt = GET_RTX_FORMAT (code); |
2282 | for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) | |
2283 | { | |
2284 | if (*fmt == 'e') | |
f725a3ec KH |
2285 | { |
2286 | if (check_sibcall_argument_overlap_1 (XEXP (x, i))) | |
2287 | return 1; | |
2288 | } | |
c67846f2 | 2289 | else if (*fmt == 'E') |
f725a3ec KH |
2290 | { |
2291 | for (j = 0; j < XVECLEN (x, i); j++) | |
2292 | if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j))) | |
2293 | return 1; | |
2294 | } | |
c67846f2 JJ |
2295 | } |
2296 | return 0; | |
c67846f2 JJ |
2297 | } |
2298 | ||
2299 | /* Scan the sequence after INSN to see whether it dereferences any argument slots | |
2300 | we already clobbered by tail call arguments (as noted in stored_args_map | |
0cdca92b DJ |
2301 | bitmap). If MARK_STORED_ARGS_MAP, add stack slots for ARG to |
2302 | stored_args_map bitmap afterwards (when ARG is a register, MARK_STORED_ARGS_MAP | |
2303 | should be 0). Return nonzero if the sequence after INSN dereferences such argument | |
2304 | slots, zero otherwise. */ | |
c67846f2 JJ |
2305 | |
2306 | static int | |
48810515 DM |
2307 | check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg, |
2308 | int mark_stored_args_map) | |
f725a3ec | 2309 | { |
c67846f2 JJ |
2310 | int low, high; |
2311 | ||
2312 | if (insn == NULL_RTX) | |
2313 | insn = get_insns (); | |
2314 | else | |
2315 | insn = NEXT_INSN (insn); | |
2316 | ||
2317 | for (; insn; insn = NEXT_INSN (insn)) | |
f725a3ec KH |
2318 | if (INSN_P (insn) |
2319 | && check_sibcall_argument_overlap_1 (PATTERN (insn))) | |
c67846f2 JJ |
2320 | break; |
2321 | ||
0cdca92b DJ |
2322 | if (mark_stored_args_map) |
2323 | { | |
6dad9361 TS |
2324 | if (ARGS_GROW_DOWNWARD) |
2325 | low = -arg->locate.slot_offset.constant - arg->locate.size.constant; | |
2326 | else | |
2327 | low = arg->locate.slot_offset.constant; | |
d60eab50 | 2328 | |
e7949876 | 2329 | for (high = low + arg->locate.size.constant; low < high; low++) |
d7c028c0 | 2330 | bitmap_set_bit (stored_args_map, low); |
0cdca92b | 2331 | } |
c67846f2 JJ |
2332 | return insn != NULL_RTX; |
2333 | } | |
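
/* Marking sketch (hypothetical numbers): for an argument whose slot
   has constant offset 8 and size 4 on a target where arguments grow
   upward, the loop above sets stored_args_map bits [8, 12); any later
   load that check_sibcall_argument_overlap_1 finds touching those
   bytes is then reported as an overlap.  */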
2334 | ||
bef5d8b6 RS |
2335 | /* Given that a function returns a value of mode MODE at the most |
2336 | significant end of hard register VALUE, shift VALUE left or right | |
2337 | as specified by LEFT_P. Return true if some action was needed. */ | |
c988af2b | 2338 | |
bef5d8b6 | 2339 | bool |
ef4bddc2 | 2340 | shift_return_value (machine_mode mode, bool left_p, rtx value) |
c988af2b | 2341 | { |
bef5d8b6 RS |
2342 | HOST_WIDE_INT shift; |
2343 | ||
2344 | gcc_assert (REG_P (value) && HARD_REGISTER_P (value)); | |
2345 | shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode); | |
2346 | if (shift == 0) | |
2347 | return false; | |
2348 | ||
2349 | /* Use ashr rather than lshr for right shifts. This is for the benefit | |
2350 | of the MIPS port, which requires SImode values to be sign-extended | |
2351 | when stored in 64-bit registers. */ | |
2352 | if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab, | |
2353 | value, GEN_INT (shift), value, 1, OPTAB_WIDEN)) | |
2354 | gcc_unreachable (); | |
2355 | return true; | |
c988af2b RS |
2356 | } |
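
/* Example of the shift amount above: an SImode value returned at the
   most significant end of a 64-bit hard register gives
   shift == 64 - 32 == 32, emitted as ashl when LEFT_P and as ashr
   otherwise; the arithmetic right shift keeps MIPS-style
   sign-extension of SImode values in 64-bit registers intact.  */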
2357 | ||
3fb30019 RS |
2358 | /* If X is a likely-spilled register value, copy it to a pseudo |
2359 | register and return that register. Return X otherwise. */ | |
2360 | ||
2361 | static rtx | |
2362 | avoid_likely_spilled_reg (rtx x) | |
2363 | { | |
82d6e6fc | 2364 | rtx new_rtx; |
3fb30019 RS |
2365 | |
2366 | if (REG_P (x) | |
2367 | && HARD_REGISTER_P (x) | |
07b8f0a8 | 2368 | && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x)))) |
3fb30019 RS |
2369 | { |
2370 | /* Make sure that we generate a REG rather than a CONCAT. | |
2371 | Moves into CONCATs can need nontrivial instructions, | |
2372 | and the whole point of this function is to avoid | |
2373 | using the hard register directly in such a situation. */ | |
2374 | generating_concat_p = 0; | |
82d6e6fc | 2375 | new_rtx = gen_reg_rtx (GET_MODE (x)); |
3fb30019 | 2376 | generating_concat_p = 1; |
82d6e6fc KG |
2377 | emit_move_insn (new_rtx, x); |
2378 | return new_rtx; | |
3fb30019 RS |
2379 | } |
2380 | return x; | |
2381 | } | |
2382 | ||
5039610b | 2383 | /* Generate all the code for a CALL_EXPR exp |
51bbfa0c RS |
2384 | and return an rtx for its value. |
2385 | Store the value in TARGET (specified as an rtx) if convenient. | |
2386 | If the value is stored in TARGET then TARGET is returned. | |
2387 | If IGNORE is nonzero, then we ignore the value of the function call. */ | |
2388 | ||
2389 | rtx | |
d329e058 | 2390 | expand_call (tree exp, rtx target, int ignore) |
51bbfa0c | 2391 | { |
0a1c58a2 JL |
2392 | /* Nonzero if we are currently expanding a call. */ |
2393 | static int currently_expanding_call = 0; | |
2394 | ||
51bbfa0c RS |
2395 | /* RTX for the function to be called. */ |
2396 | rtx funexp; | |
0a1c58a2 | 2397 | /* Sequence of insns to perform a normal "call". */ |
48810515 | 2398 | rtx_insn *normal_call_insns = NULL; |
6de9cd9a | 2399 | /* Sequence of insns to perform a tail "call". */ |
48810515 | 2400 | rtx_insn *tail_call_insns = NULL; |
51bbfa0c RS |
2401 | /* Data type of the function. */ |
2402 | tree funtype; | |
ded9bf77 | 2403 | tree type_arg_types; |
28ed065e | 2404 | tree rettype; |
51bbfa0c RS |
2405 | /* Declaration of the function being called, |
2406 | or 0 if the function is computed (not known by name). */ | |
2407 | tree fndecl = 0; | |
57782ad8 MM |
2408 | /* The type of the function being called. */ |
2409 | tree fntype; | |
6de9cd9a | 2410 | bool try_tail_call = CALL_EXPR_TAILCALL (exp); |
0a1c58a2 | 2411 | int pass; |
51bbfa0c RS |
2412 | |
2413 | /* Register in which non-BLKmode value will be returned, | |
2414 | or 0 if no value or if value is BLKmode. */ | |
2415 | rtx valreg; | |
d5e254e1 IE |
2416 | /* Register(s) in which bounds are returned. */ |
2417 | rtx valbnd = NULL; | |
51bbfa0c RS |
2418 | /* Address where we should return a BLKmode value; |
2419 | 0 if value not BLKmode. */ | |
2420 | rtx structure_value_addr = 0; | |
2421 | /* Nonzero if that address is being passed by treating it as | |
2422 | an extra, implicit first parameter. Otherwise, | |
2423 | it is passed by being copied directly into struct_value_rtx. */ | |
2424 | int structure_value_addr_parm = 0; | |
078a18a4 SL |
2425 | /* Holds the value of implicit argument for the struct value. */ |
2426 | tree structure_value_addr_value = NULL_TREE; | |
51bbfa0c RS |
2427 | /* Size of aggregate value wanted, or zero if none wanted |
2428 | or if we are using the non-reentrant PCC calling convention | |
2429 | or expecting the value in registers. */ | |
e5e809f4 | 2430 | HOST_WIDE_INT struct_value_size = 0; |
51bbfa0c RS |
2431 | /* Nonzero if called function returns an aggregate in memory PCC style, |
2432 | by returning the address of where to find it. */ | |
2433 | int pcc_struct_value = 0; | |
61f71b34 | 2434 | rtx struct_value = 0; |
51bbfa0c RS |
2435 | |
2436 | /* Number of actual parameters in this call, including struct value addr. */ | |
2437 | int num_actuals; | |
2438 | /* Number of named args. Args after this are anonymous ones | |
2439 | and they must all go on the stack. */ | |
2440 | int n_named_args; | |
078a18a4 SL |
2441 | /* Number of complex actual arguments that need to be split. */ |
2442 | int num_complex_actuals = 0; | |
51bbfa0c RS |
2443 | |
2444 | /* Vector of information about each argument. | |
2445 | Arguments are numbered in the order they will be pushed, | |
2446 | not the order they are written. */ | |
2447 | struct arg_data *args; | |
2448 | ||
2449 | /* Total size in bytes of all the stack-parms scanned so far. */ | |
2450 | struct args_size args_size; | |
099e9712 | 2451 | struct args_size adjusted_args_size; |
51bbfa0c | 2452 | /* Size of arguments before any adjustments (such as rounding). */ |
599f37b6 | 2453 | int unadjusted_args_size; |
51bbfa0c | 2454 | /* Data on reg parms scanned so far. */ |
d5cc9181 JR |
2455 | CUMULATIVE_ARGS args_so_far_v; |
2456 | cumulative_args_t args_so_far; | |
51bbfa0c RS |
2457 | /* Nonzero if a reg parm has been scanned. */ |
2458 | int reg_parm_seen; | |
efd65a8b | 2459 | /* Nonzero if this is an indirect function call. */ |
51bbfa0c | 2460 | |
f725a3ec | 2461 | /* Nonzero if we must avoid push-insns in the args for this call. |
51bbfa0c RS |
2462 | If stack space is allocated for register parameters, but not by the |
2463 | caller, then it is preallocated in the fixed part of the stack frame. | |
2464 | So the entire argument block must then be preallocated (i.e., we | |
2465 | ignore PUSH_ROUNDING in that case). */ | |
2466 | ||
f73ad30e | 2467 | int must_preallocate = !PUSH_ARGS; |
51bbfa0c | 2468 | |
f72aed24 | 2469 | /* Size of the stack reserved for parameter registers. */ |
6f90e075 JW |
2470 | int reg_parm_stack_space = 0; |
2471 | ||
51bbfa0c RS |
2472 | /* Address of space preallocated for stack parms |
2473 | (on machines that lack push insns), or 0 if space not preallocated. */ | |
2474 | rtx argblock = 0; | |
2475 | ||
e384e6b5 | 2476 | /* Mask of ECF_ and ERF_ flags. */ |
f2d33f13 | 2477 | int flags = 0; |
e384e6b5 | 2478 | int return_flags = 0; |
f73ad30e | 2479 | #ifdef REG_PARM_STACK_SPACE |
51bbfa0c | 2480 | /* Define the boundary of the register parm stack space that needs to be |
b820d2b8 AM |
2481 | saved, if any. */ |
2482 | int low_to_save, high_to_save; | |
51bbfa0c RS |
2483 | rtx save_area = 0; /* Place that it is saved */ |
2484 | #endif | |
2485 | ||
51bbfa0c RS |
2486 | int initial_highest_arg_in_use = highest_outgoing_arg_in_use; |
2487 | char *initial_stack_usage_map = stack_usage_map; | |
d9725c41 | 2488 | char *stack_usage_map_buf = NULL; |
51bbfa0c | 2489 | |
38afb23f OH |
2490 | int old_stack_allocated; |
2491 | ||
2492 | /* State variables to track stack modifications. */ | |
51bbfa0c | 2493 | rtx old_stack_level = 0; |
38afb23f | 2494 | int old_stack_arg_under_construction = 0; |
79be3418 | 2495 | int old_pending_adj = 0; |
51bbfa0c | 2496 | int old_inhibit_defer_pop = inhibit_defer_pop; |
38afb23f OH |
2497 | |
2498 | /* Some stack pointer alterations we make are performed via | |
2499 | allocate_dynamic_stack_space. This modifies the stack_pointer_delta, | |
2500 | which we then also need to save/restore along the way. */ | |
a259f218 | 2501 | int old_stack_pointer_delta = 0; |
38afb23f | 2502 | |
0a1c58a2 | 2503 | rtx call_fusage; |
5039610b | 2504 | tree addr = CALL_EXPR_FN (exp); |
b3694847 | 2505 | int i; |
739fb049 | 2506 | /* The alignment of the stack, in bits. */ |
95899b34 | 2507 | unsigned HOST_WIDE_INT preferred_stack_boundary; |
739fb049 | 2508 | /* The alignment of the stack, in bytes. */ |
95899b34 | 2509 | unsigned HOST_WIDE_INT preferred_unit_stack_boundary; |
6de9cd9a DN |
2510 | /* The static chain value to use for this call. */ |
2511 | rtx static_chain_value; | |
f2d33f13 JH |
2512 | /* See if this is a "nothrow" function call. */ | |
2513 | if (TREE_NOTHROW (exp)) | |
2514 | flags |= ECF_NOTHROW; | |
2515 | ||
6de9cd9a DN |
2516 | /* See if we can find a DECL-node for the actual function, and get the |
2517 | function attributes (flags) from the function decl or type node. */ | |
39b0dce7 JM |
2518 | fndecl = get_callee_fndecl (exp); |
2519 | if (fndecl) | |
51bbfa0c | 2520 | { |
57782ad8 | 2521 | fntype = TREE_TYPE (fndecl); |
39b0dce7 | 2522 | flags |= flags_from_decl_or_type (fndecl); |
e384e6b5 | 2523 | return_flags |= decl_return_flags (fndecl); |
51bbfa0c | 2524 | } |
39b0dce7 | 2525 | else |
72954a4f | 2526 | { |
28ed065e | 2527 | fntype = TREE_TYPE (TREE_TYPE (addr)); |
57782ad8 | 2528 | flags |= flags_from_decl_or_type (fntype); |
72954a4f | 2529 | } |
28ed065e | 2530 | rettype = TREE_TYPE (exp); |
7393c642 | 2531 | |
57782ad8 | 2532 | struct_value = targetm.calls.struct_value_rtx (fntype, 0); |
61f71b34 | 2533 | |
8c6a8269 RS |
2534 | /* Warn if this value is an aggregate type, |
2535 | regardless of which calling convention we are using for it. */ | |
28ed065e | 2536 | if (AGGREGATE_TYPE_P (rettype)) |
ccf08a6e | 2537 | warning (OPT_Waggregate_return, "function call has aggregate value"); |
8c6a8269 | 2538 | |
becfd6e5 KZ |
2539 | /* If the result of a non looping pure or const function call is |
2540 | ignored (or void), and none of its arguments are volatile, we can | |
2541 | avoid expanding the call and just evaluate the arguments for | |
2542 | side-effects. */ | |
8c6a8269 | 2543 | if ((flags & (ECF_CONST | ECF_PURE)) |
becfd6e5 | 2544 | && (!(flags & ECF_LOOPING_CONST_OR_PURE)) |
8c6a8269 | 2545 | && (ignore || target == const0_rtx |
28ed065e | 2546 | || TYPE_MODE (rettype) == VOIDmode)) |
8c6a8269 RS |
2547 | { |
2548 | bool volatilep = false; | |
2549 | tree arg; | |
078a18a4 | 2550 | call_expr_arg_iterator iter; |
8c6a8269 | 2551 | |
078a18a4 SL |
2552 | FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) |
2553 | if (TREE_THIS_VOLATILE (arg)) | |
8c6a8269 RS |
2554 | { |
2555 | volatilep = true; | |
2556 | break; | |
2557 | } | |
2558 | ||
2559 | if (! volatilep) | |
2560 | { | |
078a18a4 SL |
2561 | FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) |
2562 | expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
8c6a8269 RS |
2563 | return const0_rtx; |
2564 | } | |
2565 | } | |
2566 | ||
6f90e075 | 2567 | #ifdef REG_PARM_STACK_SPACE |
5d059ed9 | 2568 | reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl); |
6f90e075 | 2569 | #endif |
6f90e075 | 2570 | |
5d059ed9 | 2571 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))) |
81464b2c | 2572 | && reg_parm_stack_space > 0 && PUSH_ARGS) |
e5e809f4 | 2573 | must_preallocate = 1; |
e5e809f4 | 2574 | |
51bbfa0c RS |
2575 | /* Set up a place to return a structure. */ |
2576 | ||
2577 | /* Cater to broken compilers. */ | |
d47d0a8d | 2578 | if (aggregate_value_p (exp, fntype)) |
51bbfa0c RS |
2579 | { |
2580 | /* This call returns a big structure. */ | |
84b8030f | 2581 | flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE); |
51bbfa0c RS |
2582 | |
2583 | #ifdef PCC_STATIC_STRUCT_RETURN | |
9e7b1d0a RS |
2584 | { |
2585 | pcc_struct_value = 1; | |
9e7b1d0a RS |
2586 | } |
2587 | #else /* not PCC_STATIC_STRUCT_RETURN */ | |
2588 | { | |
28ed065e | 2589 | struct_value_size = int_size_in_bytes (rettype); |
51bbfa0c | 2590 | |
391756ad EB |
2591 | /* Even if it is semantically safe to use the target as the return |
2592 | slot, it may be not sufficiently aligned for the return type. */ | |
2593 | if (CALL_EXPR_RETURN_SLOT_OPT (exp) | |
2594 | && target | |
2595 | && MEM_P (target) | |
2596 | && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype) | |
2597 | && SLOW_UNALIGNED_ACCESS (TYPE_MODE (rettype), | |
2598 | MEM_ALIGN (target)))) | |
9e7b1d0a RS |
2599 | structure_value_addr = XEXP (target, 0); |
2600 | else | |
2601 | { | |
9e7b1d0a RS |
2602 | /* For variable-sized objects, we must be called with a target |
2603 | specified. If we were to allocate space on the stack here, | |
2604 | we would have no way of knowing when to free it. */ | |
9474e8ab | 2605 | rtx d = assign_temp (rettype, 1, 1); |
4361b41d | 2606 | structure_value_addr = XEXP (d, 0); |
9e7b1d0a RS |
2607 | target = 0; |
2608 | } | |
2609 | } | |
2610 | #endif /* not PCC_STATIC_STRUCT_RETURN */ | |
51bbfa0c RS |
2611 | } |
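/* Illustrative sketch (hypothetical types, not from this file): for

     struct big { int a[8]; };
     struct big make_big (void);
     struct big x = make_big ();

   aggregate_value_p is true on most ABIs, so the call behaves like
   make_big (&x): structure_value_addr holds &x, or the address of a
   fresh stack temporary when TARGET is missing or under-aligned, and
   the const/pure flags are cleared because the callee now writes
   memory through that pointer.  */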
2612 | ||
099e9712 | 2613 | /* Figure out the amount to which the stack should be aligned. */ |
099e9712 | 2614 | preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; |
b255a036 JH |
2615 | if (fndecl) |
2616 | { | |
3dafb85c | 2617 | struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl); |
17b29c0a L |
2618 | /* Without automatic stack alignment, we can't increase preferred |
2619 | stack boundary. With automatic stack alignment, it is | |
2620 | unnecessary: unless we can guarantee that all callers will |
2621 | align the outgoing stack properly, the callee has to align its |
2622 | stack anyway. */ | |
2623 | if (i | |
2624 | && i->preferred_incoming_stack_boundary | |
2625 | && i->preferred_incoming_stack_boundary < preferred_stack_boundary) | |
b255a036 JH |
2626 | preferred_stack_boundary = i->preferred_incoming_stack_boundary; |
2627 | } | |
099e9712 JH |
2628 | |
2629 | /* Operand 0 is a pointer-to-function; get the type of the function. */ | |
09e2bf48 | 2630 | funtype = TREE_TYPE (addr); |
366de0ce | 2631 | gcc_assert (POINTER_TYPE_P (funtype)); |
099e9712 JH |
2632 | funtype = TREE_TYPE (funtype); |
2633 | ||
078a18a4 SL |
2634 | /* Count whether there are actual complex arguments that need to be split |
2635 | into their real and imaginary parts. Munge the type_arg_types | |
2636 | appropriately here as well. */ | |
42ba5130 | 2637 | if (targetm.calls.split_complex_arg) |
ded9bf77 | 2638 | { |
078a18a4 SL |
2639 | call_expr_arg_iterator iter; |
2640 | tree arg; | |
2641 | FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) | |
2642 | { | |
2643 | tree type = TREE_TYPE (arg); | |
2644 | if (type && TREE_CODE (type) == COMPLEX_TYPE | |
2645 | && targetm.calls.split_complex_arg (type)) | |
2646 | num_complex_actuals++; | |
2647 | } | |
ded9bf77 | 2648 | type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype)); |
ded9bf77 AH |
2649 | } |
2650 | else | |
2651 | type_arg_types = TYPE_ARG_TYPES (funtype); | |
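/* Illustrative sketch: on a target whose split_complex_arg hook
   accepts _Complex double, a call such as

     double cabs2 (_Complex double z);
     ... cabs2 (z) ...

   is laid out as two double arguments (real part, imaginary part),
   so num_complex_actuals is bumped and type_arg_types becomes
   (double, double) rather than (_Complex double).  */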
2652 | ||
099e9712 | 2653 | if (flags & ECF_MAY_BE_ALLOCA) |
e3b5732b | 2654 | cfun->calls_alloca = 1; |
099e9712 JH |
2655 | |
2656 | /* If struct_value_rtx is 0, it means pass the address | |
078a18a4 SL |
2657 | as if it were an extra parameter. Put the argument expression |
2658 | in structure_value_addr_value. */ | |
61f71b34 | 2659 | if (structure_value_addr && struct_value == 0) |
099e9712 JH |
2660 | { |
2661 | /* If structure_value_addr is a REG other than | |
2662 | virtual_outgoing_args_rtx, we can always use it. If it |
2663 | is not a REG, we must always copy it into a register. | |
2664 | If it is virtual_outgoing_args_rtx, we must copy it to another | |
2665 | register in some cases. */ | |
f8cfc6aa | 2666 | rtx temp = (!REG_P (structure_value_addr) |
099e9712 JH |
2667 | || (ACCUMULATE_OUTGOING_ARGS |
2668 | && stack_arg_under_construction | |
2669 | && structure_value_addr == virtual_outgoing_args_rtx) | |
7ae4ad28 | 2670 | ? copy_addr_to_reg (convert_memory_address |
57782ad8 | 2671 | (Pmode, structure_value_addr)) |
099e9712 JH |
2672 | : structure_value_addr); |
2673 | ||
078a18a4 SL |
2674 | structure_value_addr_value = |
2675 | make_tree (build_pointer_type (TREE_TYPE (funtype)), temp); | |
d5e254e1 | 2676 | structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1; |
099e9712 JH |
2677 | } |
2678 | ||
2679 | /* Count the arguments and set NUM_ACTUALS. */ | |
078a18a4 SL |
2680 | num_actuals = |
2681 | call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm; | |
099e9712 JH |
2682 | |
2683 | /* Compute number of named args. | |
3a4d587b AM |
2684 | First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */ |
2685 | ||
2686 | if (type_arg_types != 0) | |
2687 | n_named_args | |
2688 | = (list_length (type_arg_types) | |
2689 | /* Count the struct value address, if it is passed as a parm. */ | |
2690 | + structure_value_addr_parm); | |
2691 | else | |
2692 | /* If we know nothing, treat all args as named. */ | |
2693 | n_named_args = num_actuals; | |
2694 | ||
2695 | /* Start updating where the next arg would go. | |
2696 | ||
2697 | On some machines (such as the PA) indirect calls have a different | |
2698 | calling convention than normal calls. The fourth argument in | |
2699 | INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call | |
2700 | or not. */ | |
d5cc9181 JR |
2701 | INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args); |
2702 | args_so_far = pack_cumulative_args (&args_so_far_v); | |
3a4d587b AM |
2703 | |
2704 | /* Now possibly adjust the number of named args. | |
099e9712 | 2705 | Normally, don't include the last named arg if anonymous args follow. |
3a179764 KH |
2706 | We do include the last named arg if |
2707 | targetm.calls.strict_argument_naming() returns nonzero. | |
099e9712 JH |
2708 | (If no anonymous args follow, the result of list_length is actually |
2709 | one too large. This is harmless.) | |
2710 | ||
4ac8340c | 2711 | If targetm.calls.pretend_outgoing_varargs_named() returns |
3a179764 KH |
2712 | nonzero, and targetm.calls.strict_argument_naming() returns zero, |
2713 | this machine will be able to place unnamed args that were passed | |
2714 | in registers into the stack. So treat all args as named. This | |
2715 | allows the insns emitted for a specific argument list to be |
2716 | independent of the function declaration. | |
4ac8340c KH |
2717 | |
2718 | If targetm.calls.pretend_outgoing_varargs_named() returns zero, | |
2719 | we do not have any reliable way to pass unnamed args in | |
2720 | registers, so we must force them into memory. */ | |
099e9712 | 2721 | |
3a4d587b | 2722 | if (type_arg_types != 0 |
d5cc9181 | 2723 | && targetm.calls.strict_argument_naming (args_so_far)) |
3a4d587b AM |
2724 | ; |
2725 | else if (type_arg_types != 0 | |
d5cc9181 | 2726 | && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far)) |
3a4d587b AM |
2727 | /* Don't include the last named arg. */ |
2728 | --n_named_args; | |
099e9712 | 2729 | else |
3a4d587b | 2730 | /* Treat all args as named. */ |
099e9712 JH |
2731 | n_named_args = num_actuals; |
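/* Illustrative sketch (hypothetical callee): for

     int sum (int n, ...);
     ... sum (3, a, b, c) ...

   type_arg_types is just (int), so the raw count gave
   n_named_args == 1; the two target hooks checked above decide
   whether A, B and C are nevertheless treated as named.  */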
2732 | ||
099e9712 | 2733 | /* Make a vector to hold all the information about each arg. */ |
f883e0a7 | 2734 | args = XALLOCAVEC (struct arg_data, num_actuals); |
703ad42b | 2735 | memset (args, 0, num_actuals * sizeof (struct arg_data)); |
099e9712 | 2736 | |
d80d2d2a KH |
2737 | /* Build up entries in the ARGS array, compute the size of the |
2738 | arguments into ARGS_SIZE, etc. */ | |
099e9712 | 2739 | initialize_argument_information (num_actuals, args, &args_size, |
078a18a4 | 2740 | n_named_args, exp, |
45769134 | 2741 | structure_value_addr_value, fndecl, fntype, |
d5cc9181 | 2742 | args_so_far, reg_parm_stack_space, |
099e9712 | 2743 | &old_stack_level, &old_pending_adj, |
dd292d0a | 2744 | &must_preallocate, &flags, |
6de9cd9a | 2745 | &try_tail_call, CALL_FROM_THUNK_P (exp)); |
099e9712 JH |
2746 | |
2747 | if (args_size.var) | |
84b8030f | 2748 | must_preallocate = 1; |
099e9712 JH |
2749 | |
2750 | /* Now make final decision about preallocating stack space. */ | |
2751 | must_preallocate = finalize_must_preallocate (must_preallocate, | |
2752 | num_actuals, args, | |
2753 | &args_size); | |
2754 | ||
2755 | /* If the structure value address will reference the stack pointer, we | |
2756 | must stabilize it. We don't need to do this if we know that we are | |
2757 | not going to adjust the stack pointer in processing this call. */ | |
2758 | ||
2759 | if (structure_value_addr | |
2760 | && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr) | |
2761 | || reg_mentioned_p (virtual_outgoing_args_rtx, | |
2762 | structure_value_addr)) | |
2763 | && (args_size.var | |
2764 | || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant))) | |
2765 | structure_value_addr = copy_to_reg (structure_value_addr); | |
0a1c58a2 | 2766 | |
7ae4ad28 | 2767 | /* Tail calls can make things harder to debug, and we've traditionally |
194c7c45 | 2768 | pushed these optimizations into -O2. Don't try if we're already |
fb158467 | 2769 | expanding a call, as that means we're an argument. Don't try if |
3fbd86b1 | 2770 | there are cleanups, as we know there's code to follow the call. */ |
0a1c58a2 | 2771 | |
099e9712 JH |
2772 | if (currently_expanding_call++ != 0 |
2773 | || !flag_optimize_sibling_calls | |
6de9cd9a | 2774 | || args_size.var |
6fb5fa3c | 2775 | || dbg_cnt (tail_call) == false) |
6de9cd9a | 2776 | try_tail_call = 0; |
099e9712 JH |
2777 | |
2778 | /* Remaining reasons for tail call optimization to fail. */ |
e86a9946 RS |
2779 | if (!try_tail_call |
2780 | || !targetm.have_sibcall_epilogue () | |
099e9712 JH |
2781 | /* Doing sibling call optimization needs some work, since |
2782 | structure_value_addr can be allocated on the stack. | |
2783 | It does not seem worth the effort since few optimizable | |
2784 | sibling calls will return a structure. */ | |
2785 | || structure_value_addr != NULL_RTX | |
130423d7 | 2786 | #ifdef REG_PARM_STACK_SPACE |
0e456625 JH |
2787 | /* If outgoing reg parm stack space changes, we cannot do a sibcall. */ |
2788 | || (OUTGOING_REG_PARM_STACK_SPACE (funtype) | |
2789 | != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))) | |
deb1de67 | 2790 | || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)) |
130423d7 | 2791 | #endif |
4977bab6 ZW |
2792 | /* Check whether the target is able to optimize the call |
2793 | into a sibcall. */ | |
5fd9b178 | 2794 | || !targetm.function_ok_for_sibcall (fndecl, exp) |
4977bab6 | 2795 | /* Functions that do not return exactly once may not be sibcall |
c22cacf3 | 2796 | optimized. */ |
6e14af16 | 2797 | || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN)) |
09e2bf48 | 2798 | || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))) |
6a48df45 | 2799 | /* If the called function is nested in the current one, it might access |
c22cacf3 MS |
2800 | some of the caller's arguments, but could clobber them beforehand if |
2801 | the argument areas are shared. */ | |
6a48df45 | 2802 | || (fndecl && decl_function_context (fndecl) == current_function_decl) |
099e9712 | 2803 | /* If this function requires more stack slots than the current |
ff7f012a | 2804 | function, we cannot change it into a sibling call. |
38173d38 | 2805 | crtl->args.pretend_args_size is not part of the |
ff7f012a | 2806 | stack allocated by our caller. */ |
38173d38 JH |
2807 | || args_size.constant > (crtl->args.size |
2808 | - crtl->args.pretend_args_size) | |
099e9712 JH |
2809 | /* If the callee pops its own arguments, then it must pop exactly |
2810 | the same number of arguments as the current function. */ | |
079e7538 NF |
2811 | || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant) |
2812 | != targetm.calls.return_pops_args (current_function_decl, | |
2813 | TREE_TYPE (current_function_decl), | |
2814 | crtl->args.size)) | |
ae2bcd98 | 2815 | || !lang_hooks.decls.ok_for_sibcall (fndecl)) |
e6f64875 | 2816 | try_tail_call = 0; |
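/* Illustrative sketch: a self-contained tail call such as

     int callee (int);
     int caller (int x) { return callee (x + 1); }

   typically survives every test above, whereas returning a large
   struct (structure_value_addr != NULL_RTX), calling a function
   nested in the caller, or needing more outgoing argument space than
   the caller itself received each forces try_tail_call back to 0.  */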
497eb8c3 | 2817 | |
c69cd1f5 JJ |
2818 | /* Check if caller and callee disagree in promotion of function |
2819 | return value. */ | |
2820 | if (try_tail_call) | |
2821 | { | |
ef4bddc2 RS |
2822 | machine_mode caller_mode, caller_promoted_mode; |
2823 | machine_mode callee_mode, callee_promoted_mode; | |
c69cd1f5 JJ |
2824 | int caller_unsignedp, callee_unsignedp; |
2825 | tree caller_res = DECL_RESULT (current_function_decl); | |
2826 | ||
2827 | caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res)); | |
cde0f3fd | 2828 | caller_mode = DECL_MODE (caller_res); |
c69cd1f5 | 2829 | callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype)); |
cde0f3fd PB |
2830 | callee_mode = TYPE_MODE (TREE_TYPE (funtype)); |
2831 | caller_promoted_mode | |
2832 | = promote_function_mode (TREE_TYPE (caller_res), caller_mode, | |
2833 | &caller_unsignedp, | |
2834 | TREE_TYPE (current_function_decl), 1); | |
2835 | callee_promoted_mode | |
666e3ceb | 2836 | = promote_function_mode (TREE_TYPE (funtype), callee_mode, |
cde0f3fd | 2837 | &callee_unsignedp, |
666e3ceb | 2838 | funtype, 1); |
c69cd1f5 JJ |
2839 | if (caller_mode != VOIDmode |
2840 | && (caller_promoted_mode != callee_promoted_mode | |
2841 | || ((caller_mode != caller_promoted_mode | |
2842 | || callee_mode != callee_promoted_mode) | |
2843 | && (caller_unsignedp != callee_unsignedp | |
2844 | || GET_MODE_BITSIZE (caller_mode) | |
2845 | < GET_MODE_BITSIZE (callee_mode))))) | |
2846 | try_tail_call = 0; | |
2847 | } | |
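/* Illustrative sketch of the mismatch guarded against: if the caller
   returns short and the callee returns unsigned short, and the target
   promotes both to SImode, the promoted modes match but the
   signedness differs, so the extension already sitting in the return
   register may be wrong for our own caller; the sibcall is refused.  */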
2848 | ||
01973e26 L |
2849 | /* Ensure current function's preferred stack boundary is at least |
2850 | what we need. Stack alignment may also increase preferred stack | |
2851 | boundary. */ | |
b5f772ce | 2852 | if (crtl->preferred_stack_boundary < preferred_stack_boundary) |
cb91fab0 | 2853 | crtl->preferred_stack_boundary = preferred_stack_boundary; |
01973e26 L |
2854 | else |
2855 | preferred_stack_boundary = crtl->preferred_stack_boundary; | |
c2f8b491 | 2856 | |
099e9712 | 2857 | preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT; |
497eb8c3 | 2858 | |
0a1c58a2 JL |
2859 | /* We want to make two insn chains; one for a sibling call, the other |
2860 | for a normal call. We will select one of the two chains after | |
2861 | initial RTL generation is complete. */ | |
b820d2b8 | 2862 | for (pass = try_tail_call ? 0 : 1; pass < 2; pass++) |
0a1c58a2 JL |
2863 | { |
2864 | int sibcall_failure = 0; | |
f5143c46 | 2865 | /* We want to emit any pending stack adjustments before the tail |
0a1c58a2 | 2866 | recursion "call". That way we know any adjustment after the tail |
7ae4ad28 | 2867 | recursion call can be ignored if we indeed use the tail |
0a1c58a2 | 2868 | call expansion. */ |
7f2f0a01 | 2869 | saved_pending_stack_adjust save; |
48810515 DM |
2870 | rtx_insn *insns, *before_call, *after_args; |
2871 | rtx next_arg_reg; | |
39842893 | 2872 | |
0a1c58a2 JL |
2873 | if (pass == 0) |
2874 | { | |
0a1c58a2 JL |
2875 | /* State variables we need to save and restore between |
2876 | iterations. */ | |
7f2f0a01 | 2877 | save_pending_stack_adjust (&save); |
0a1c58a2 | 2878 | } |
f2d33f13 JH |
2879 | if (pass) |
2880 | flags &= ~ECF_SIBCALL; | |
2881 | else | |
2882 | flags |= ECF_SIBCALL; | |
51bbfa0c | 2883 | |
0a1c58a2 | 2884 | /* Other state variables that we must reinitialize each time |
f2d33f13 | 2885 | through the loop (that are not initialized by the loop itself). */ |
0a1c58a2 JL |
2886 | argblock = 0; |
2887 | call_fusage = 0; | |
fa76d9e0 | 2888 | |
f725a3ec | 2889 | /* Start a new sequence for the normal call case. |
51bbfa0c | 2890 | |
0a1c58a2 JL |
2891 | From this point on, if the sibling call fails, we want to set |
2892 | sibcall_failure instead of continuing the loop. */ | |
2893 | start_sequence (); | |
eecb6f50 | 2894 | |
0a1c58a2 JL |
2895 | /* Don't let pending stack adjusts add up to too much. |
2896 | Also, do all pending adjustments now if there is any chance | |
2897 | this might be a call to alloca or if we are expanding a sibling | |
9dd9bf80 | 2898 | call sequence. |
63579539 DJ |
2899 | Also do the adjustments before a throwing call, otherwise |
2900 | exception handling can fail; PR 19225. */ | |
0a1c58a2 | 2901 | if (pending_stack_adjust >= 32 |
b5cd4ed4 | 2902 | || (pending_stack_adjust > 0 |
9dd9bf80 | 2903 | && (flags & ECF_MAY_BE_ALLOCA)) |
63579539 DJ |
2904 | || (pending_stack_adjust > 0 |
2905 | && flag_exceptions && !(flags & ECF_NOTHROW)) | |
0a1c58a2 JL |
2906 | || pass == 0) |
2907 | do_pending_stack_adjust (); | |
51bbfa0c | 2908 | |
0a1c58a2 | 2909 | /* Precompute any arguments as needed. */ |
f8a097cd | 2910 | if (pass) |
84b8030f | 2911 | precompute_arguments (num_actuals, args); |
51bbfa0c | 2912 | |
0a1c58a2 JL |
2913 | /* Now we are about to start emitting insns that can be deleted |
2914 | if a libcall is deleted. */ | |
84b8030f | 2915 | if (pass && (flags & ECF_MALLOC)) |
0a1c58a2 | 2916 | start_sequence (); |
51bbfa0c | 2917 | |
cb91fab0 | 2918 | if (pass == 0 && crtl->stack_protect_guard) |
b755446c RH |
2919 | stack_protect_epilogue (); |
2920 | ||
099e9712 | 2921 | adjusted_args_size = args_size; |
ce48579b RH |
2922 | /* Compute the actual size of the argument block required. The variable |
2923 | and constant sizes must be combined, the size may have to be rounded, | |
2924 | and there may be a minimum required size. When generating a sibcall | |
2925 | pattern, do not round up, since we'll be re-using whatever space our | |
2926 | caller provided. */ | |
2927 | unadjusted_args_size | |
f725a3ec KH |
2928 | = compute_argument_block_size (reg_parm_stack_space, |
2929 | &adjusted_args_size, | |
5d059ed9 | 2930 | fndecl, fntype, |
ce48579b RH |
2931 | (pass == 0 ? 0 |
2932 | : preferred_stack_boundary)); | |
2933 | ||
f725a3ec | 2934 | old_stack_allocated = stack_pointer_delta - pending_stack_adjust; |
ce48579b | 2935 | |
f8a097cd | 2936 | /* The argument block when performing a sibling call is the |
c22cacf3 | 2937 | incoming argument block. */ |
f8a097cd | 2938 | if (pass == 0) |
c67846f2 | 2939 | { |
2e3f842f | 2940 | argblock = crtl->args.internal_arg_pointer; |
76e048a8 KT |
2941 | if (STACK_GROWS_DOWNWARD) |
2942 | argblock | |
2943 | = plus_constant (Pmode, argblock, crtl->args.pretend_args_size); | |
2944 | else | |
2945 | argblock | |
2946 | = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size); | |
2947 | ||
c67846f2 | 2948 | stored_args_map = sbitmap_alloc (args_size.constant); |
f61e445a | 2949 | bitmap_clear (stored_args_map); |
c67846f2 | 2950 | } |
ce48579b | 2951 | |
0a1c58a2 JL |
2952 | /* If we have no actual push instructions, or shouldn't use them, |
2953 | make space for all args right now. */ | |
099e9712 | 2954 | else if (adjusted_args_size.var != 0) |
51bbfa0c | 2955 | { |
0a1c58a2 JL |
2956 | if (old_stack_level == 0) |
2957 | { | |
9eac0f2a | 2958 | emit_stack_save (SAVE_BLOCK, &old_stack_level); |
38afb23f | 2959 | old_stack_pointer_delta = stack_pointer_delta; |
0a1c58a2 JL |
2960 | old_pending_adj = pending_stack_adjust; |
2961 | pending_stack_adjust = 0; | |
0a1c58a2 JL |
2962 | /* stack_arg_under_construction says whether a stack arg is |
2963 | being constructed at the old stack level. Pushing the stack | |
2964 | gets a clean outgoing argument block. */ | |
2965 | old_stack_arg_under_construction = stack_arg_under_construction; | |
2966 | stack_arg_under_construction = 0; | |
0a1c58a2 | 2967 | } |
099e9712 | 2968 | argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0); |
a11e0df4 | 2969 | if (flag_stack_usage_info) |
d3c12306 | 2970 | current_function_has_unbounded_dynamic_stack_size = 1; |
51bbfa0c | 2971 | } |
0a1c58a2 JL |
2972 | else |
2973 | { | |
2974 | /* Note that we must go through the motions of allocating an argument | |
2975 | block even if the size is zero because we may be storing args | |
2976 | in the area reserved for register arguments, which may be part of | |
2977 | the stack frame. */ | |
26a258fe | 2978 | |
099e9712 | 2979 | int needed = adjusted_args_size.constant; |
51bbfa0c | 2980 | |
0a1c58a2 JL |
2981 | /* Store the maximum argument space used. It will be pushed by |
2982 | the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow | |
2983 | checking). */ | |
51bbfa0c | 2984 | |
38173d38 JH |
2985 | if (needed > crtl->outgoing_args_size) |
2986 | crtl->outgoing_args_size = needed; | |
51bbfa0c | 2987 | |
0a1c58a2 JL |
2988 | if (must_preallocate) |
2989 | { | |
f73ad30e JH |
2990 | if (ACCUMULATE_OUTGOING_ARGS) |
2991 | { | |
f8a097cd JH |
2992 | /* Since the stack pointer will never be pushed, it is |
2993 | possible for the evaluation of a parm to clobber | |
2994 | something we have already written to the stack. | |
2995 | Since most function calls on RISC machines do not use | |
2996 | the stack, this is uncommon, but must work correctly. | |
26a258fe | 2997 | |
f73ad30e | 2998 | Therefore, we save any area of the stack that was already |
f8a097cd JH |
2999 | written and that we are using. Here we set up to do this |
3000 | by making a new stack usage map from the old one. The | |
f725a3ec | 3001 | actual save will be done by store_one_arg. |
26a258fe | 3002 | |
f73ad30e JH |
3003 | Another approach might be to try to reorder the argument |
3004 | evaluations to avoid this conflicting stack usage. */ | |
26a258fe | 3005 | |
f8a097cd JH |
3006 | /* Since we will be writing into the entire argument area, |
3007 | the map must be allocated for its entire size, not just | |
3008 | the part that is the responsibility of the caller. */ | |
5d059ed9 | 3009 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
ac294f0b | 3010 | needed += reg_parm_stack_space; |
51bbfa0c | 3011 | |
6dad9361 TS |
3012 | if (ARGS_GROW_DOWNWARD) |
3013 | highest_outgoing_arg_in_use | |
3014 | = MAX (initial_highest_arg_in_use, needed + 1); | |
3015 | else | |
3016 | highest_outgoing_arg_in_use | |
3017 | = MAX (initial_highest_arg_in_use, needed); | |
3018 | ||
04695783 | 3019 | free (stack_usage_map_buf); |
5ed6ace5 | 3020 | stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); |
d9725c41 | 3021 | stack_usage_map = stack_usage_map_buf; |
51bbfa0c | 3022 | |
f73ad30e | 3023 | if (initial_highest_arg_in_use) |
2e09e75a JM |
3024 | memcpy (stack_usage_map, initial_stack_usage_map, |
3025 | initial_highest_arg_in_use); | |
2f4aa534 | 3026 | |
f73ad30e | 3027 | if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) |
961192e1 | 3028 | memset (&stack_usage_map[initial_highest_arg_in_use], 0, |
f73ad30e JH |
3029 | (highest_outgoing_arg_in_use |
3030 | - initial_highest_arg_in_use)); | |
3031 | needed = 0; | |
2f4aa534 | 3032 | |
f8a097cd JH |
3033 | /* The address of the outgoing argument list must not be |
3034 | copied to a register here, because argblock would be left | |
3035 | pointing to the wrong place after the call to | |
f725a3ec | 3036 | allocate_dynamic_stack_space below. */ |
2f4aa534 | 3037 | |
f73ad30e | 3038 | argblock = virtual_outgoing_args_rtx; |
f725a3ec | 3039 | } |
f73ad30e | 3040 | else |
26a258fe | 3041 | { |
f73ad30e | 3042 | if (inhibit_defer_pop == 0) |
0a1c58a2 | 3043 | { |
f73ad30e | 3044 | /* Try to reuse some or all of the pending_stack_adjust |
ce48579b RH |
3045 | to get this space. */ |
3046 | needed | |
f725a3ec | 3047 | = (combine_pending_stack_adjustment_and_call |
ce48579b | 3048 | (unadjusted_args_size, |
099e9712 | 3049 | &adjusted_args_size, |
ce48579b RH |
3050 | preferred_unit_stack_boundary)); |
3051 | ||
3052 | /* combine_pending_stack_adjustment_and_call computes | |
3053 | an adjustment before the arguments are allocated. | |
3054 | Account for them and see whether or not the stack | |
3055 | needs to go up or down. */ | |
3056 | needed = unadjusted_args_size - needed; | |
3057 | ||
3058 | if (needed < 0) | |
f73ad30e | 3059 | { |
ce48579b RH |
3060 | /* We're releasing stack space. */ |
3061 | /* ??? We can avoid any adjustment at all if we're | |
3062 | already aligned. FIXME. */ | |
3063 | pending_stack_adjust = -needed; | |
3064 | do_pending_stack_adjust (); | |
f73ad30e JH |
3065 | needed = 0; |
3066 | } | |
f725a3ec | 3067 | else |
ce48579b RH |
3068 | /* We need to allocate space. We'll do that in |
3069 | push_block below. */ | |
3070 | pending_stack_adjust = 0; | |
0a1c58a2 | 3071 | } |
ce48579b RH |
3072 | |
3073 | /* Special case this because overhead of `push_block' in | |
3074 | this case is non-trivial. */ | |
f73ad30e JH |
3075 | if (needed == 0) |
3076 | argblock = virtual_outgoing_args_rtx; | |
0a1c58a2 | 3077 | else |
d892f288 DD |
3078 | { |
3079 | argblock = push_block (GEN_INT (needed), 0, 0); | |
6dad9361 TS |
3080 | if (ARGS_GROW_DOWNWARD) |
3081 | argblock = plus_constant (Pmode, argblock, needed); | |
d892f288 | 3082 | } |
f73ad30e | 3083 | |
f8a097cd JH |
3084 | /* We only really need to call `copy_to_reg' in the case |
3085 | where push insns are going to be used to pass ARGBLOCK | |
3086 | to a function call in ARGS. In that case, the stack | |
3087 | pointer changes value from the allocation point to the | |
3088 | call point, and hence the value of | |
3089 | VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might | |
3090 | as well always do it. */ | |
f73ad30e | 3091 | argblock = copy_to_reg (argblock); |
38afb23f OH |
3092 | } |
3093 | } | |
3094 | } | |
0a1c58a2 | 3095 | |
38afb23f OH |
3096 | if (ACCUMULATE_OUTGOING_ARGS) |
3097 | { | |
3098 | /* The save/restore code in store_one_arg handles all | |
3099 | cases except one: a constructor call (including a C | |
3100 | function returning a BLKmode struct) to initialize | |
3101 | an argument. */ | |
3102 | if (stack_arg_under_construction) | |
3103 | { | |
ac294f0b KT |
3104 | rtx push_size |
3105 | = GEN_INT (adjusted_args_size.constant | |
5d059ed9 | 3106 | + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype |
81464b2c | 3107 | : TREE_TYPE (fndecl))) ? 0 |
ac294f0b | 3108 | : reg_parm_stack_space)); |
38afb23f OH |
3109 | if (old_stack_level == 0) |
3110 | { | |
9eac0f2a | 3111 | emit_stack_save (SAVE_BLOCK, &old_stack_level); |
38afb23f OH |
3112 | old_stack_pointer_delta = stack_pointer_delta; |
3113 | old_pending_adj = pending_stack_adjust; | |
3114 | pending_stack_adjust = 0; | |
3115 | /* stack_arg_under_construction says whether a stack | |
3116 | arg is being constructed at the old stack level. | |
3117 | Pushing the stack gets a clean outgoing argument | |
3118 | block. */ | |
3119 | old_stack_arg_under_construction | |
3120 | = stack_arg_under_construction; | |
3121 | stack_arg_under_construction = 0; | |
3122 | /* Make a new map for the new argument list. */ | |
04695783 | 3123 | free (stack_usage_map_buf); |
b9eae1a9 | 3124 | stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use); |
d9725c41 | 3125 | stack_usage_map = stack_usage_map_buf; |
38afb23f | 3126 | highest_outgoing_arg_in_use = 0; |
f73ad30e | 3127 | } |
d3c12306 EB |
3128 | /* We can pass TRUE as the 4th argument because we just |
3129 | saved the stack pointer and will restore it right after | |
3130 | the call. */ | |
3a42502d RH |
3131 | allocate_dynamic_stack_space (push_size, 0, |
3132 | BIGGEST_ALIGNMENT, true); | |
0a1c58a2 | 3133 | } |
bfbf933a | 3134 | |
38afb23f OH |
3135 | /* If argument evaluation might modify the stack pointer, |
3136 | copy the address of the argument list to a register. */ | |
3137 | for (i = 0; i < num_actuals; i++) | |
3138 | if (args[i].pass_on_stack) | |
3139 | { | |
3140 | argblock = copy_addr_to_reg (argblock); | |
3141 | break; | |
3142 | } | |
3143 | } | |
d329e058 | 3144 | |
0a1c58a2 | 3145 | compute_argument_addresses (args, argblock, num_actuals); |
bfbf933a | 3146 | |
3d9684ae JG |
3147 | /* Perform stack alignment before the first push (the last arg). */ |
3148 | if (argblock == 0 | |
f830ddc2 | 3149 | && adjusted_args_size.constant > reg_parm_stack_space |
099e9712 | 3150 | && adjusted_args_size.constant != unadjusted_args_size) |
4e217aed | 3151 | { |
0a1c58a2 JL |
3152 | /* When the stack adjustment is pending, we get better code |
3153 | by combining the adjustments. */ | |
f725a3ec | 3154 | if (pending_stack_adjust |
0a1c58a2 | 3155 | && ! inhibit_defer_pop) |
ce48579b RH |
3156 | { |
3157 | pending_stack_adjust | |
f725a3ec | 3158 | = (combine_pending_stack_adjustment_and_call |
ce48579b | 3159 | (unadjusted_args_size, |
099e9712 | 3160 | &adjusted_args_size, |
ce48579b RH |
3161 | preferred_unit_stack_boundary)); |
3162 | do_pending_stack_adjust (); | |
3163 | } | |
0a1c58a2 | 3164 | else if (argblock == 0) |
099e9712 | 3165 | anti_adjust_stack (GEN_INT (adjusted_args_size.constant |
0a1c58a2 | 3166 | - unadjusted_args_size)); |
0a1c58a2 | 3167 | } |
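/* Worked example with hypothetical numbers: if
   preferred_unit_stack_boundary == 16, unadjusted_args_size == 20 and
   adjusted_args_size.constant == 32, we anti-adjust the stack by
   32 - 20 = 12 bytes first; after the 20 bytes of arguments are
   pushed, the stack pointer has moved 32 bytes in total and is again
   16-byte aligned at the call site.  */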
ebcd0b57 JH |
3168 | /* Now that the stack is properly aligned, pops can't safely |
3169 | be deferred during the evaluation of the arguments. */ | |
3170 | NO_DEFER_POP; | |
51bbfa0c | 3171 | |
d3c12306 EB |
3172 | /* Record the maximum pushed stack space size. We need to delay |
3173 | doing it until this point to take into account the optimization done |
3174 | by combine_pending_stack_adjustment_and_call. */ | |
a11e0df4 | 3175 | if (flag_stack_usage_info |
d3c12306 EB |
3176 | && !ACCUMULATE_OUTGOING_ARGS |
3177 | && pass | |
3178 | && adjusted_args_size.var == 0) | |
3179 | { | |
3180 | int pushed = adjusted_args_size.constant + pending_stack_adjust; | |
3181 | if (pushed > current_function_pushed_stack_size) | |
3182 | current_function_pushed_stack_size = pushed; | |
3183 | } | |
3184 | ||
09e2bf48 | 3185 | funexp = rtx_for_function_call (fndecl, addr); |
51bbfa0c | 3186 | |
0a1c58a2 JL |
3187 | /* Precompute all register parameters. It isn't safe to compute anything |
3188 | once we have started filling any specific hard regs. */ | |
3189 | precompute_register_parameters (num_actuals, args, ®_parm_seen); | |
51bbfa0c | 3190 | |
5039610b SL |
3191 | if (CALL_EXPR_STATIC_CHAIN (exp)) |
3192 | static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp)); | |
6de9cd9a DN |
3193 | else |
3194 | static_chain_value = 0; | |
3195 | ||
f73ad30e | 3196 | #ifdef REG_PARM_STACK_SPACE |
0a1c58a2 JL |
3197 | /* Save the fixed argument area if it's part of the caller's frame and |
3198 | is clobbered by argument setup for this call. */ | |
f8a097cd | 3199 | if (ACCUMULATE_OUTGOING_ARGS && pass) |
f73ad30e JH |
3200 | save_area = save_fixed_argument_area (reg_parm_stack_space, argblock, |
3201 | &low_to_save, &high_to_save); | |
b94301c2 | 3202 | #endif |
51bbfa0c | 3203 | |
0a1c58a2 JL |
3204 | /* Now store (and compute if necessary) all non-register parms. |
3205 | These come before register parms, since they can require block-moves, | |
3206 | which could clobber the registers used for register parms. | |
3207 | Parms which have partial registers are not stored here, | |
3208 | but we do preallocate space here if they want that. */ | |
51bbfa0c | 3209 | |
0a1c58a2 | 3210 | for (i = 0; i < num_actuals; i++) |
0196c95e | 3211 | { |
d5e254e1 IE |
3212 | /* Delay bounds until all other args are stored. */ |
3213 | if (POINTER_BOUNDS_P (args[i].tree_value)) | |
3214 | continue; | |
3215 | else if (args[i].reg == 0 || args[i].pass_on_stack) | |
0196c95e | 3216 | { |
48810515 | 3217 | rtx_insn *before_arg = get_last_insn (); |
0196c95e | 3218 | |
ddc923b5 MP |
3219 | /* We don't allow passing huge (> 2^30 B) arguments |
3220 | by value. It would cause an overflow later on. */ | |
3221 | if (adjusted_args_size.constant | |
3222 | >= (1 << (HOST_BITS_PER_INT - 2))) | |
3223 | { | |
3224 | sorry ("passing too large argument on stack"); | |
3225 | continue; | |
3226 | } | |
3227 | ||
0196c95e JJ |
3228 | if (store_one_arg (&args[i], argblock, flags, |
3229 | adjusted_args_size.var != 0, | |
3230 | reg_parm_stack_space) | |
3231 | || (pass == 0 | |
3232 | && check_sibcall_argument_overlap (before_arg, | |
3233 | &args[i], 1))) | |
3234 | sibcall_failure = 1; | |
3235 | } | |
3236 | ||
2b1c5433 | 3237 | if (args[i].stack) |
7d810276 JJ |
3238 | call_fusage |
3239 | = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)), | |
3240 | gen_rtx_USE (VOIDmode, args[i].stack), | |
3241 | call_fusage); | |
0196c95e | 3242 | } |
0a1c58a2 JL |
3243 | |
3244 | /* If we have a parm that is passed in registers but not in memory | |
3245 | and whose alignment does not permit a direct copy into registers, | |
3246 | make a group of pseudos that correspond to each register that we | |
3247 | will later fill. */ | |
3248 | if (STRICT_ALIGNMENT) | |
3249 | store_unaligned_arguments_into_pseudos (args, num_actuals); | |
3250 | ||
3251 | /* Now store any partially-in-registers parm. | |
3252 | This is the last place a block-move can happen. */ | |
3253 | if (reg_parm_seen) | |
3254 | for (i = 0; i < num_actuals; i++) | |
3255 | if (args[i].partial != 0 && ! args[i].pass_on_stack) | |
c67846f2 | 3256 | { |
48810515 | 3257 | rtx_insn *before_arg = get_last_insn (); |
c67846f2 | 3258 | |
99206968 KT |
3259 | /* On targets with weird calling conventions (e.g. PA) it's |
3260 | hard to ensure that all cases of argument overlap between | |
3261 | stack and registers work. Play it safe and bail out. */ | |
3262 | if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD) | |
3263 | { | |
3264 | sibcall_failure = 1; | |
3265 | break; | |
3266 | } | |
3267 | ||
4c6b3b2a JJ |
3268 | if (store_one_arg (&args[i], argblock, flags, |
3269 | adjusted_args_size.var != 0, | |
3270 | reg_parm_stack_space) | |
3271 | || (pass == 0 | |
3272 | && check_sibcall_argument_overlap (before_arg, | |
0cdca92b | 3273 | &args[i], 1))) |
c67846f2 JJ |
3274 | sibcall_failure = 1; |
3275 | } | |
51bbfa0c | 3276 | |
2f21e1ba BS |
3277 | bool any_regs = false; |
3278 | for (i = 0; i < num_actuals; i++) | |
3279 | if (args[i].reg != NULL_RTX) | |
3280 | { | |
3281 | any_regs = true; | |
3282 | targetm.calls.call_args (args[i].reg, funtype); | |
3283 | } | |
3284 | if (!any_regs) | |
3285 | targetm.calls.call_args (pc_rtx, funtype); | |
3286 | ||
3287 | /* Figure out the register where the value, if any, will come back. */ | |
3288 | valreg = 0; | |
3289 | valbnd = 0; | |
3290 | if (TYPE_MODE (rettype) != VOIDmode | |
3291 | && ! structure_value_addr) | |
3292 | { | |
3293 | if (pcc_struct_value) | |
3294 | { | |
3295 | valreg = hard_function_value (build_pointer_type (rettype), | |
3296 | fndecl, NULL, (pass == 0)); | |
3297 | if (CALL_WITH_BOUNDS_P (exp)) | |
3298 | valbnd = targetm.calls. | |
3299 | chkp_function_value_bounds (build_pointer_type (rettype), | |
3300 | fndecl, (pass == 0)); | |
3301 | } | |
3302 | else | |
3303 | { | |
3304 | valreg = hard_function_value (rettype, fndecl, fntype, | |
3305 | (pass == 0)); | |
3306 | if (CALL_WITH_BOUNDS_P (exp)) | |
3307 | valbnd = targetm.calls.chkp_function_value_bounds (rettype, | |
3308 | fndecl, | |
3309 | (pass == 0)); | |
3310 | } | |
3311 | ||
3312 | /* If VALREG is a PARALLEL whose first member has a zero | |
3313 | offset, use that. This is for targets such as m68k that | |
3314 | return the same value in multiple places. */ | |
3315 | if (GET_CODE (valreg) == PARALLEL) | |
3316 | { | |
3317 | rtx elem = XVECEXP (valreg, 0, 0); | |
3318 | rtx where = XEXP (elem, 0); | |
3319 | rtx offset = XEXP (elem, 1); | |
3320 | if (offset == const0_rtx | |
3321 | && GET_MODE (where) == GET_MODE (valreg)) | |
3322 | valreg = where; | |
3323 | } | |
3324 | } | |
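/* Illustrative sketch: on m68k, where a pointer result comes back in
   both %d0 and %a0, hard_function_value yields roughly

     (parallel [(expr_list (reg:SI %d0) (const_int 0))
                (expr_list (reg:SI %a0) (const_int 0))])

   and the zero-offset test above lets VALREG collapse to %d0.  */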
3325 | ||
d5e254e1 IE |
3326 | /* Store all bounds not passed in registers. */ |
3327 | for (i = 0; i < num_actuals; i++) | |
3328 | { | |
3329 | if (POINTER_BOUNDS_P (args[i].tree_value) | |
3330 | && !args[i].reg) | |
3331 | store_bounds (&args[i], | |
3332 | args[i].pointer_arg == -1 | |
3333 | ? NULL | |
3334 | : &args[args[i].pointer_arg]); | |
3335 | } | |
3336 | ||
0a1c58a2 JL |
3337 | /* If register arguments require space on the stack and stack space |
3338 | was not preallocated, allocate stack space here for arguments | |
3339 | passed in registers. */ | |
5d059ed9 | 3340 | if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))) |
81464b2c | 3341 | && !ACCUMULATE_OUTGOING_ARGS |
f725a3ec | 3342 | && must_preallocate == 0 && reg_parm_stack_space > 0) |
0a1c58a2 | 3343 | anti_adjust_stack (GEN_INT (reg_parm_stack_space)); |
756e0e12 | 3344 | |
0a1c58a2 JL |
3345 | /* Pass the function the address in which to return a |
3346 | structure value. */ | |
3347 | if (pass != 0 && structure_value_addr && ! structure_value_addr_parm) | |
3348 | { | |
7ae4ad28 | 3349 | structure_value_addr |
5ae6cd0d | 3350 | = convert_memory_address (Pmode, structure_value_addr); |
61f71b34 | 3351 | emit_move_insn (struct_value, |
0a1c58a2 JL |
3352 | force_reg (Pmode, |
3353 | force_operand (structure_value_addr, | |
3354 | NULL_RTX))); | |
3355 | ||
f8cfc6aa | 3356 | if (REG_P (struct_value)) |
61f71b34 | 3357 | use_reg (&call_fusage, struct_value); |
0a1c58a2 | 3358 | } |
c2939b57 | 3359 | |
05e6ee93 | 3360 | after_args = get_last_insn (); |
f2d3d07e RH |
3361 | funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp, |
3362 | static_chain_value, &call_fusage, | |
3363 | reg_parm_seen, pass == 0); | |
51bbfa0c | 3364 | |
0cdca92b DJ |
3365 | load_register_parameters (args, num_actuals, &call_fusage, flags, |
3366 | pass == 0, &sibcall_failure); | |
f725a3ec | 3367 | |
0a1c58a2 JL |
3368 | /* Save a pointer to the last insn before the call, so that we can |
3369 | later safely search backwards to find the CALL_INSN. */ | |
3370 | before_call = get_last_insn (); | |
51bbfa0c | 3371 | |
7d167afd JJ |
3372 | /* Set up next argument register. For sibling calls on machines |
3373 | with register windows this should be the incoming register. */ | |
7d167afd | 3374 | if (pass == 0) |
d5cc9181 | 3375 | next_arg_reg = targetm.calls.function_incoming_arg (args_so_far, |
3c07301f NF |
3376 | VOIDmode, |
3377 | void_type_node, | |
3378 | true); | |
7d167afd | 3379 | else |
d5cc9181 | 3380 | next_arg_reg = targetm.calls.function_arg (args_so_far, |
3c07301f NF |
3381 | VOIDmode, void_type_node, |
3382 | true); | |
7d167afd | 3383 | |
e384e6b5 BS |
3384 | if (pass == 1 && (return_flags & ERF_RETURNS_ARG)) |
3385 | { | |
3386 | int arg_nr = return_flags & ERF_RETURN_ARG_MASK; | |
3d9684ae | 3387 | arg_nr = num_actuals - arg_nr - 1; |
b3681f13 TV |
3388 | if (arg_nr >= 0 |
3389 | && arg_nr < num_actuals | |
3390 | && args[arg_nr].reg | |
e384e6b5 BS |
3391 | && valreg |
3392 | && REG_P (valreg) | |
3393 | && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg)) | |
3394 | call_fusage | |
3395 | = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)), | |
f7df4a84 | 3396 | gen_rtx_SET (valreg, args[arg_nr].reg), |
e384e6b5 BS |
3397 | call_fusage); |
3398 | } | |
0a1c58a2 JL |
3399 | /* All arguments and registers used for the call must be set up by |
3400 | now! */ | |
3401 | ||
ce48579b | 3402 | /* Stack must be properly aligned now. */ |
366de0ce NS |
3403 | gcc_assert (!pass |
3404 | || !(stack_pointer_delta % preferred_unit_stack_boundary)); | |
ebcd0b57 | 3405 | |
0a1c58a2 | 3406 | /* Generate the actual call instruction. */ |
6de9cd9a | 3407 | emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size, |
099e9712 | 3408 | adjusted_args_size.constant, struct_value_size, |
7d167afd | 3409 | next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, |
d5cc9181 | 3410 | flags, args_so_far); |
0a1c58a2 | 3411 | |
1e288103 | 3412 | if (flag_ipa_ra) |
4f660b15 | 3413 | { |
48810515 DM |
3414 | rtx_call_insn *last; |
3415 | rtx datum = NULL_RTX; | |
4f660b15 RO |
3416 | if (fndecl != NULL_TREE) |
3417 | { | |
3418 | datum = XEXP (DECL_RTL (fndecl), 0); | |
3419 | gcc_assert (datum != NULL_RTX | |
3420 | && GET_CODE (datum) == SYMBOL_REF); | |
3421 | } | |
3422 | last = last_call_insn (); | |
3423 | add_reg_note (last, REG_CALL_DECL, datum); | |
3424 | } | |
3425 | ||
05e6ee93 MM |
3426 | /* If the call setup or the call itself overlaps with anything |
3427 | of the argument setup we probably clobbered our call address. | |
3428 | In that case we can't do sibcalls. */ | |
3429 | if (pass == 0 | |
3430 | && check_sibcall_argument_overlap (after_args, 0, 0)) | |
3431 | sibcall_failure = 1; | |
3432 | ||
bef5d8b6 RS |
3433 | /* If a non-BLKmode value is returned at the most significant end |
3434 | of a register, shift the register right by the appropriate amount | |
3435 | and update VALREG accordingly. BLKmode values are handled by the | |
3436 | group load/store machinery below. */ | |
3437 | if (!structure_value_addr | |
3438 | && !pcc_struct_value | |
66de4d7c | 3439 | && TYPE_MODE (rettype) != VOIDmode |
28ed065e | 3440 | && TYPE_MODE (rettype) != BLKmode |
66de4d7c | 3441 | && REG_P (valreg) |
28ed065e | 3442 | && targetm.calls.return_in_msb (rettype)) |
bef5d8b6 | 3443 | { |
28ed065e | 3444 | if (shift_return_value (TYPE_MODE (rettype), false, valreg)) |
bef5d8b6 | 3445 | sibcall_failure = 1; |
28ed065e | 3446 | valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg)); |
bef5d8b6 RS |
3447 | } |
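/* Illustrative sketch: on a big-endian target whose return_in_msb
   hook is true, an HImode value returned in a 32-bit register sits in
   the register's most significant half; shift_return_value emits a
   16-bit logical right shift so that the fresh HImode VALREG built
   above reads the value from the usual place.  */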
3448 | ||
84b8030f | 3449 | if (pass && (flags & ECF_MALLOC)) |
0a1c58a2 JL |
3450 | { |
3451 | rtx temp = gen_reg_rtx (GET_MODE (valreg)); | |
48810515 | 3452 | rtx_insn *last, *insns; |
0a1c58a2 | 3453 | |
f725a3ec | 3454 | /* The return value from a malloc-like function is a pointer. */ |
28ed065e | 3455 | if (TREE_CODE (rettype) == POINTER_TYPE) |
d154bfa2 | 3456 | mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT); |
0a1c58a2 JL |
3457 | |
3458 | emit_move_insn (temp, valreg); | |
3459 | ||
3460 | /* The return value from a malloc-like function cannot alias |
3461 | anything else. */ | |
3462 | last = get_last_insn (); | |
65c5f2a6 | 3463 | add_reg_note (last, REG_NOALIAS, temp); |
0a1c58a2 JL |
3464 | |
3465 | /* Write out the sequence. */ | |
3466 | insns = get_insns (); | |
3467 | end_sequence (); | |
2f937369 | 3468 | emit_insn (insns); |
0a1c58a2 JL |
3469 | valreg = temp; |
3470 | } | |
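/* Illustrative sketch: for p = my_alloc (n), where my_alloc carries
   __attribute__ ((malloc)) and hence ECF_MALLOC, the REG_NOALIAS note
   added above tells alias analysis that the pointer in TEMP cannot
   point into any object that existed before the call.  */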
51bbfa0c | 3471 | |
6fb5fa3c DB |
3472 | /* For calls to `setjmp', etc., inform |
3473 | function.c:setjmp_warnings that it should complain if | |
3474 | nonvolatile values are live. For functions that cannot | |
3475 | return, inform flow that control does not fall through. */ | |
51bbfa0c | 3476 | |
6e14af16 | 3477 | if ((flags & ECF_NORETURN) || pass == 0) |
c2939b57 | 3478 | { |
570a98eb | 3479 | /* The barrier must be emitted |
0a1c58a2 JL |
3480 | immediately after the CALL_INSN. Some ports emit more |
3481 | than just a CALL_INSN above, so we must search for it here. */ | |
51bbfa0c | 3482 | |
48810515 | 3483 | rtx_insn *last = get_last_insn (); |
4b4bf941 | 3484 | while (!CALL_P (last)) |
0a1c58a2 JL |
3485 | { |
3486 | last = PREV_INSN (last); | |
3487 | /* There was no CALL_INSN? */ | |
366de0ce | 3488 | gcc_assert (last != before_call); |
0a1c58a2 | 3489 | } |
51bbfa0c | 3490 | |
570a98eb | 3491 | emit_barrier_after (last); |
8af61113 | 3492 | |
f451eeef JS |
3493 | /* Stack adjustments after a noreturn call are dead code. |
3494 | However when NO_DEFER_POP is in effect, we must preserve | |
3495 | stack_pointer_delta. */ | |
3496 | if (inhibit_defer_pop == 0) | |
3497 | { | |
3498 | stack_pointer_delta = old_stack_allocated; | |
3499 | pending_stack_adjust = 0; | |
3500 | } | |
0a1c58a2 | 3501 | } |
51bbfa0c | 3502 | |
0a1c58a2 | 3503 | /* If value type not void, return an rtx for the value. */ |
51bbfa0c | 3504 | |
28ed065e | 3505 | if (TYPE_MODE (rettype) == VOIDmode |
0a1c58a2 | 3506 | || ignore) |
b5cd4ed4 | 3507 | target = const0_rtx; |
0a1c58a2 JL |
3508 | else if (structure_value_addr) |
3509 | { | |
3c0cb5de | 3510 | if (target == 0 || !MEM_P (target)) |
0a1c58a2 | 3511 | { |
3bdf5ad1 | 3512 | target |
28ed065e MM |
3513 | = gen_rtx_MEM (TYPE_MODE (rettype), |
3514 | memory_address (TYPE_MODE (rettype), | |
3bdf5ad1 | 3515 | structure_value_addr)); |
28ed065e | 3516 | set_mem_attributes (target, rettype, 1); |
0a1c58a2 JL |
3517 | } |
3518 | } | |
3519 | else if (pcc_struct_value) | |
cacbd532 | 3520 | { |
0a1c58a2 JL |
3521 | /* This is the special C++ case where we need to |
3522 | know what the true target was. We take care to | |
3523 | never use this value more than once in one expression. */ | |
28ed065e | 3524 | target = gen_rtx_MEM (TYPE_MODE (rettype), |
0a1c58a2 | 3525 | copy_to_reg (valreg)); |
28ed065e | 3526 | set_mem_attributes (target, rettype, 1); |
cacbd532 | 3527 | } |
0a1c58a2 JL |
3528 | /* Handle calls that return values in multiple non-contiguous locations. |
3529 | The Irix 6 ABI has examples of this. */ | |
3530 | else if (GET_CODE (valreg) == PARALLEL) | |
3531 | { | |
6de9cd9a | 3532 | if (target == 0) |
5ef0b50d | 3533 | target = emit_group_move_into_temps (valreg); |
1d1b7dc4 RS |
3534 | else if (rtx_equal_p (target, valreg)) |
3535 | ; | |
3536 | else if (GET_CODE (target) == PARALLEL) | |
3537 | /* Handle the result of an emit_group_move_into_temps |
3538 | call in the previous pass. */ | |
3539 | emit_group_move (target, valreg); | |
3540 | else | |
28ed065e MM |
3541 | emit_group_store (target, valreg, rettype, |
3542 | int_size_in_bytes (rettype)); | |
0a1c58a2 JL |
3543 | } |
3544 | else if (target | |
28ed065e | 3545 | && GET_MODE (target) == TYPE_MODE (rettype) |
0a1c58a2 JL |
3546 | && GET_MODE (target) == GET_MODE (valreg)) |
3547 | { | |
51caaefe EB |
3548 | bool may_overlap = false; |
3549 | ||
f2d18690 KK |
3550 | /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard |
3551 | reg to a plain register. */ | |
3fb30019 RS |
3552 | if (!REG_P (target) || HARD_REGISTER_P (target)) |
3553 | valreg = avoid_likely_spilled_reg (valreg); | |
f2d18690 | 3554 | |
51caaefe EB |
3555 | /* If TARGET is a MEM in the argument area, and we have |
3556 | saved part of the argument area, then we can't store | |
3557 | directly into TARGET as it may get overwritten when we | |
3558 | restore the argument save area below. Don't work too | |
3559 | hard though and simply force TARGET to a register if it | |
3560 | is a MEM; the optimizer is quite likely to sort it out. */ | |
3561 | if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target)) | |
3562 | for (i = 0; i < num_actuals; i++) | |
3563 | if (args[i].save_area) | |
3564 | { | |
3565 | may_overlap = true; | |
3566 | break; | |
3567 | } | |
0219237c | 3568 | |
51caaefe EB |
3569 | if (may_overlap) |
3570 | target = copy_to_reg (valreg); | |
3571 | else | |
3572 | { | |
3573 | /* TARGET and VALREG cannot be equal at this point | |
3574 | because the latter would not have | |
3575 | REG_FUNCTION_VALUE_P true, while the former would if | |
3576 | it were referring to the same register. | |
3577 | ||
3578 | If they refer to the same register, this move will be | |
3579 | a no-op, except when function inlining is being | |
3580 | done. */ | |
3581 | emit_move_insn (target, valreg); | |
3582 | ||
3583 | /* If we are setting a MEM, this code must be executed. | |
3584 | Since it is emitted after the call insn, sibcall | |
3585 | optimization cannot be performed in that case. */ | |
3586 | if (MEM_P (target)) | |
3587 | sibcall_failure = 1; | |
3588 | } | |
0a1c58a2 | 3589 | } |
0a1c58a2 | 3590 | else |
3fb30019 | 3591 | target = copy_to_reg (avoid_likely_spilled_reg (valreg)); |
51bbfa0c | 3592 | |
cde0f3fd PB |
3593 | /* If we promoted this return value, make the proper SUBREG. |
3594 | TARGET might be const0_rtx here, so be careful. */ | |
3595 | if (REG_P (target) | |
28ed065e MM |
3596 | && TYPE_MODE (rettype) != BLKmode |
3597 | && GET_MODE (target) != TYPE_MODE (rettype)) | |
61f71b34 | 3598 | { |
28ed065e | 3599 | tree type = rettype; |
cde0f3fd PB |
3600 | int unsignedp = TYPE_UNSIGNED (type); |
3601 | int offset = 0; | |
ef4bddc2 | 3602 | machine_mode pmode; |
cde0f3fd PB |
3603 | |
3604 | /* Ensure we promote as expected, and get the new unsignedness. */ | |
3605 | pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, | |
3606 | funtype, 1); | |
3607 | gcc_assert (GET_MODE (target) == pmode); | |
3608 | ||
3609 | if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN) | |
3610 | && (GET_MODE_SIZE (GET_MODE (target)) | |
3611 | > GET_MODE_SIZE (TYPE_MODE (type)))) | |
366de0ce | 3612 | { |
cde0f3fd PB |
3613 | offset = GET_MODE_SIZE (GET_MODE (target)) |
3614 | - GET_MODE_SIZE (TYPE_MODE (type)); | |
3615 | if (! BYTES_BIG_ENDIAN) | |
3616 | offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD; | |
3617 | else if (! WORDS_BIG_ENDIAN) | |
3618 | offset %= UNITS_PER_WORD; | |
366de0ce | 3619 | } |
cde0f3fd PB |
3620 | |
3621 | target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset); | |
3622 | SUBREG_PROMOTED_VAR_P (target) = 1; | |
362d42dc | 3623 | SUBREG_PROMOTED_SET (target, unsignedp); |
61f71b34 | 3624 | } |
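/* Illustrative sketch: if the callee returns short but the ABI
   promotes return values to SImode, TARGET becomes something like

     (subreg:HI (reg:SI <retval>) 0)

   with SUBREG_PROMOTED_VAR_P set, recording that the full SImode
   register already holds a properly extended copy of the value.  */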
84b55618 | 3625 | |
0a1c58a2 JL |
3626 | /* If size of args is variable or this was a constructor call for a stack |
3627 | argument, restore saved stack-pointer value. */ | |
51bbfa0c | 3628 | |
9dd9bf80 | 3629 | if (old_stack_level) |
0a1c58a2 | 3630 | { |
48810515 | 3631 | rtx_insn *prev = get_last_insn (); |
9a08d230 | 3632 | |
9eac0f2a | 3633 | emit_stack_restore (SAVE_BLOCK, old_stack_level); |
38afb23f | 3634 | stack_pointer_delta = old_stack_pointer_delta; |
9a08d230 | 3635 | |
faf7a23d | 3636 | fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta); |
9a08d230 | 3637 | |
0a1c58a2 | 3638 | pending_stack_adjust = old_pending_adj; |
d25cee4d | 3639 | old_stack_allocated = stack_pointer_delta - pending_stack_adjust; |
0a1c58a2 JL |
3640 | stack_arg_under_construction = old_stack_arg_under_construction; |
3641 | highest_outgoing_arg_in_use = initial_highest_arg_in_use; | |
3642 | stack_usage_map = initial_stack_usage_map; | |
0a1c58a2 JL |
3643 | sibcall_failure = 1; |
3644 | } | |
f8a097cd | 3645 | else if (ACCUMULATE_OUTGOING_ARGS && pass) |
0a1c58a2 | 3646 | { |
51bbfa0c | 3647 | #ifdef REG_PARM_STACK_SPACE |
0a1c58a2 | 3648 | if (save_area) |
b820d2b8 AM |
3649 | restore_fixed_argument_area (save_area, argblock, |
3650 | high_to_save, low_to_save); | |
b94301c2 | 3651 | #endif |
51bbfa0c | 3652 | |
0a1c58a2 JL |
3653 | /* If we saved any argument areas, restore them. */ |
3654 | for (i = 0; i < num_actuals; i++) | |
3655 | if (args[i].save_area) | |
3656 | { | |
ef4bddc2 | 3657 | machine_mode save_mode = GET_MODE (args[i].save_area); |
0a1c58a2 JL |
3658 | rtx stack_area |
3659 | = gen_rtx_MEM (save_mode, | |
3660 | memory_address (save_mode, | |
3661 | XEXP (args[i].stack_slot, 0))); | |
3662 | ||
3663 | if (save_mode != BLKmode) | |
3664 | emit_move_insn (stack_area, args[i].save_area); | |
3665 | else | |
44bb111a | 3666 | emit_block_move (stack_area, args[i].save_area, |
e7949876 | 3667 | GEN_INT (args[i].locate.size.constant), |
44bb111a | 3668 | BLOCK_OP_CALL_PARM); |
0a1c58a2 | 3669 | } |
51bbfa0c | 3670 | |
0a1c58a2 JL |
3671 | highest_outgoing_arg_in_use = initial_highest_arg_in_use; |
3672 | stack_usage_map = initial_stack_usage_map; | |
3673 | } | |
51bbfa0c | 3674 | |
d33606c3 EB |
3675 | /* If this was alloca, record the new stack level. */ |
3676 | if (flags & ECF_MAY_BE_ALLOCA) | |
3677 | record_new_stack_level (); | |
51bbfa0c | 3678 | |
0a1c58a2 JL |
3679 | /* Free up storage we no longer need. */ |
3680 | for (i = 0; i < num_actuals; ++i) | |
04695783 | 3681 | free (args[i].aligned_regs); |
0a1c58a2 | 3682 | |
2f21e1ba BS |
3683 | targetm.calls.end_call_args (); |
3684 | ||
0a1c58a2 JL |
3685 | insns = get_insns (); |
3686 | end_sequence (); | |
3687 | ||
3688 | if (pass == 0) | |
3689 | { | |
3690 | tail_call_insns = insns; | |
3691 | ||
0a1c58a2 JL |
3692 | /* Restore the pending stack adjustment now that we have |
3693 | finished generating the sibling call sequence. */ | |
1503a7ec | 3694 | |
7f2f0a01 | 3695 | restore_pending_stack_adjust (&save); |
099e9712 JH |
3696 | |
3697 | /* Prepare arg structure for next iteration. */ | |
f725a3ec | 3698 | for (i = 0; i < num_actuals; i++) |
099e9712 JH |
3699 | { |
3700 | args[i].value = 0; | |
3701 | args[i].aligned_regs = 0; | |
3702 | args[i].stack = 0; | |
3703 | } | |
c67846f2 JJ |
3704 | |
3705 | sbitmap_free (stored_args_map); | |
48810515 | 3706 | internal_arg_pointer_exp_state.scan_start = NULL; |
9771b263 | 3707 | internal_arg_pointer_exp_state.cache.release (); |
0a1c58a2 JL |
3708 | } |
3709 | else | |
38afb23f OH |
3710 | { |
3711 | normal_call_insns = insns; | |
3712 | ||
3713 | /* Verify that we've deallocated all the stack we used. */ | |
6e14af16 | 3714 | gcc_assert ((flags & ECF_NORETURN) |
366de0ce NS |
3715 | || (old_stack_allocated |
3716 | == stack_pointer_delta - pending_stack_adjust)); | |
38afb23f | 3717 | } |
fadb729c JJ |
3718 | |
3719 | /* If something prevents making this a sibling call, | |
3720 | zero out the sequence. */ | |
3721 | if (sibcall_failure) | |
48810515 | 3722 | tail_call_insns = NULL; |
6de9cd9a DN |
3723 | else |
3724 | break; | |
0a1c58a2 JL |
3725 | } |
3726 | ||
1ea7e6ad | 3727 | /* If tail call production succeeded, we need to remove REG_EQUIV notes on |
6de9cd9a DN |
3728 | arguments too, as the argument area is now clobbered by the call. */ |
3729 | if (tail_call_insns) | |
0a1c58a2 | 3730 | { |
6de9cd9a | 3731 | emit_insn (tail_call_insns); |
e3b5732b | 3732 | crtl->tail_call_emit = true; |
0a1c58a2 JL |
3733 | } |
3734 | else | |
2f937369 | 3735 | emit_insn (normal_call_insns); |
51bbfa0c | 3736 | |
0a1c58a2 | 3737 | currently_expanding_call--; |
8e6a59fe | 3738 | |
04695783 | 3739 | free (stack_usage_map_buf); |
d9725c41 | 3740 | |
d5e254e1 IE |
3741 | /* Join result with returned bounds so caller may use them if needed. */ |
3742 | target = chkp_join_splitted_slot (target, valbnd); | |
3743 | ||
51bbfa0c RS |
3744 | return target; |
3745 | } | |
ded9bf77 | 3746 | |
6de9cd9a DN |
3747 | /* A sibling call sequence invalidates any REG_EQUIV notes made for |
3748 | this function's incoming arguments. | |
3749 | ||
3750 | At the start of RTL generation we know the only REG_EQUIV notes | |
29d51cdb SB |
3751 | in the rtl chain are those for incoming arguments, so we can look |
3752 | for REG_EQUIV notes between the start of the function and the | |
3753 | NOTE_INSN_FUNCTION_BEG. | |
6de9cd9a DN |
3754 | |
3755 | This is (slight) overkill. We could keep track of the highest | |
3756 | argument we clobber and be more selective in removing notes, but it | |
3757 | does not seem to be worth the effort. */ | |
29d51cdb | 3758 | |
6de9cd9a DN |
3759 | void |
3760 | fixup_tail_calls (void) | |
3761 | { | |
48810515 | 3762 | rtx_insn *insn; |
29d51cdb SB |
3763 | |
3764 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
3765 | { | |
a31830a7 SB |
3766 | rtx note; |
3767 | ||
29d51cdb SB |
3768 | /* There are never REG_EQUIV notes for the incoming arguments |
3769 | after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */ | |
3770 | if (NOTE_P (insn) | |
a38e7aa5 | 3771 | && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG) |
29d51cdb SB |
3772 | break; |
3773 | ||
a31830a7 SB |
3774 | note = find_reg_note (insn, REG_EQUIV, 0); |
3775 | if (note) | |
3776 | remove_note (insn, note); | |
3777 | note = find_reg_note (insn, REG_EQUIV, 0); | |
3778 | gcc_assert (!note); | |
29d51cdb | 3779 | } |
6de9cd9a DN |
3780 | } |
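/* Illustrative sketch: an incoming argument loaded into a pseudo
   typically carries a note of the form

     (expr_list:REG_EQUIV (mem:SI (plus (reg:SI argp) (const_int 4))))

   Once a sibling call has stored its own outgoing arguments over that
   stack slot, the MEM no longer mirrors the pseudo, which is why the
   notes are deleted here.  */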
3781 | ||
ded9bf77 AH |
3782 | /* Traverse a list of TYPES and expand all complex types into their |
3783 | components. */ | |
2f2b4a02 | 3784 | static tree |
ded9bf77 AH |
3785 | split_complex_types (tree types) |
3786 | { | |
3787 | tree p; | |
3788 | ||
42ba5130 RH |
3789 | /* Before allocating memory, check for the common case of no complex types. */ |
3790 | for (p = types; p; p = TREE_CHAIN (p)) | |
3791 | { | |
3792 | tree type = TREE_VALUE (p); | |
3793 | if (TREE_CODE (type) == COMPLEX_TYPE | |
3794 | && targetm.calls.split_complex_arg (type)) | |
c22cacf3 | 3795 | goto found; |
42ba5130 RH |
3796 | } |
3797 | return types; | |
3798 | ||
3799 | found: | |
ded9bf77 AH |
3800 | types = copy_list (types); |
3801 | ||
3802 | for (p = types; p; p = TREE_CHAIN (p)) | |
3803 | { | |
3804 | tree complex_type = TREE_VALUE (p); | |
3805 | ||
42ba5130 RH |
3806 | if (TREE_CODE (complex_type) == COMPLEX_TYPE |
3807 | && targetm.calls.split_complex_arg (complex_type)) | |
ded9bf77 AH |
3808 | { |
3809 | tree next, imag; | |
3810 | ||
3811 | /* Rewrite complex type with component type. */ | |
3812 | TREE_VALUE (p) = TREE_TYPE (complex_type); | |
3813 | next = TREE_CHAIN (p); | |
3814 | ||
3815 | /* Add another component type for the imaginary part. */ | |
3816 | imag = build_tree_list (NULL_TREE, TREE_VALUE (p)); | |
3817 | TREE_CHAIN (p) = imag; | |
3818 | TREE_CHAIN (imag) = next; | |
3819 | ||
3820 | /* Skip the newly created node. */ | |
3821 | p = TREE_CHAIN (p); | |
3822 | } | |
3823 | } | |
3824 | ||
3825 | return types; | |
3826 | } | |
51bbfa0c | 3827 | \f |
de76b467 | 3828 | /* Output a library call to function FUN (a SYMBOL_REF rtx). |
f725a3ec | 3829 | The RETVAL parameter specifies whether the return value needs to be saved; the other |
0407c02b | 3830 | parameters are documented in the emit_library_call function below. */ |
8ac61af7 | 3831 | |
de76b467 | 3832 | static rtx |
d329e058 AJ |
3833 | emit_library_call_value_1 (int retval, rtx orgfun, rtx value, |
3834 | enum libcall_type fn_type, | |
ef4bddc2 | 3835 | machine_mode outmode, int nargs, va_list p) |
43bc5f13 | 3836 | { |
3c0fca12 RH |
3837 | /* Total size in bytes of all the stack-parms scanned so far. */ |
3838 | struct args_size args_size; | |
3839 | /* Size of arguments before any adjustments (such as rounding). */ | |
3840 | struct args_size original_args_size; | |
b3694847 | 3841 | int argnum; |
3c0fca12 | 3842 | rtx fun; |
81464b2c KT |
3843 | /* Todo, choose the correct decl type of orgfun. Sadly this information |
3844 | isn't present here, so we default to native calling abi here. */ | |
033df0b9 | 3845 | tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */ |
5d059ed9 | 3846 | tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */ |
3c0fca12 | 3847 | int count; |
3c0fca12 | 3848 | rtx argblock = 0; |
d5cc9181 JR |
3849 | CUMULATIVE_ARGS args_so_far_v; |
3850 | cumulative_args_t args_so_far; | |
f725a3ec KH |
3851 | struct arg |
3852 | { | |
3853 | rtx value; | |
ef4bddc2 | 3854 | machine_mode mode; |
f725a3ec KH |
3855 | rtx reg; |
3856 | int partial; | |
e7949876 | 3857 | struct locate_and_pad_arg_data locate; |
f725a3ec KH |
3858 | rtx save_area; |
3859 | }; | |
3c0fca12 RH |
3860 | struct arg *argvec; |
3861 | int old_inhibit_defer_pop = inhibit_defer_pop; | |
3862 | rtx call_fusage = 0; | |
3863 | rtx mem_value = 0; | |
5591ee6f | 3864 | rtx valreg; |
3c0fca12 RH |
3865 | int pcc_struct_value = 0; |
3866 | int struct_value_size = 0; | |
52a11cbf | 3867 | int flags; |
3c0fca12 | 3868 | int reg_parm_stack_space = 0; |
3c0fca12 | 3869 | int needed; |
48810515 | 3870 | rtx_insn *before_call; |
0ed4bf92 | 3871 | bool have_push_fusage; |
b0c48229 | 3872 | tree tfom; /* type_for_mode (outmode, 0) */ |
3c0fca12 | 3873 | |
f73ad30e | 3874 | #ifdef REG_PARM_STACK_SPACE |
3c0fca12 RH |
3875 | /* Define the boundary of the register parm stack space that needs to be |
3876 | save, if any. */ | |
726a989a | 3877 | int low_to_save = 0, high_to_save = 0; |
f725a3ec | 3878 | rtx save_area = 0; /* Place that it is saved. */ |
3c0fca12 RH |
3879 | #endif |
3880 | ||
3c0fca12 RH |
3881 | /* Size of the stack reserved for parameter registers. */ |
3882 | int initial_highest_arg_in_use = highest_outgoing_arg_in_use; | |
3883 | char *initial_stack_usage_map = stack_usage_map; | |
d9725c41 | 3884 | char *stack_usage_map_buf = NULL; |
3c0fca12 | 3885 | |
61f71b34 DD |
3886 | rtx struct_value = targetm.calls.struct_value_rtx (0, 0); |
3887 | ||
3c0fca12 | 3888 | #ifdef REG_PARM_STACK_SPACE |
3c0fca12 | 3889 | reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0); |
3c0fca12 RH |
3890 | #endif |
3891 | ||
9555a122 | 3892 | /* By default, library functions can not throw. */ |
52a11cbf RH |
3893 | flags = ECF_NOTHROW; |
3894 | ||
9555a122 RH |
3895 | switch (fn_type) |
3896 | { | |
3897 | case LCT_NORMAL: | |
53d4257f | 3898 | break; |
9555a122 | 3899 | case LCT_CONST: |
53d4257f JH |
3900 | flags |= ECF_CONST; |
3901 | break; | |
9555a122 | 3902 | case LCT_PURE: |
53d4257f | 3903 | flags |= ECF_PURE; |
9555a122 | 3904 | break; |
9555a122 RH |
3905 | case LCT_NORETURN: |
3906 | flags |= ECF_NORETURN; | |
3907 | break; | |
3908 | case LCT_THROW: | |
3909 | flags = ECF_NORETURN; | |
3910 | break; | |
9defc9b7 RH |
3911 | case LCT_RETURNS_TWICE: |
3912 | flags = ECF_RETURNS_TWICE; | |
3913 | break; | |
9555a122 | 3914 | } |
3c0fca12 RH |
3915 | fun = orgfun; |
3916 | ||
  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode)
    {
      tfom = lang_hooks.types.type_for_mode (outmode, 0);
      if (aggregate_value_p (tfom, 0))
        {
#ifdef PCC_STATIC_STRUCT_RETURN
          rtx pointer_reg
            = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
          mem_value = gen_rtx_MEM (outmode, pointer_reg);
          pcc_struct_value = 1;
          if (value == 0)
            value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
          struct_value_size = GET_MODE_SIZE (outmode);
          if (value != 0 && MEM_P (value))
            mem_value = value;
          else
            mem_value = assign_temp (tfom, 1, 1);
#endif
          /* This call returns a big structure.  */
          flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
        }
    }
  else
    tfom = void_type_node;

  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = XALLOCAVEC (struct arg, nargs + 1);
  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));

#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
#endif
  args_so_far = pack_cumulative_args (&args_so_far_v);

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  push_temp_slots ();

  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);

      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (addr) && !MEM_P (addr)
          && !(CONSTANT_P (addr)
               && targetm.legitimate_constant_p (Pmode, addr)))
        addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      argvec[count].reg = targetm.calls.function_arg (args_so_far,
                                                      Pmode, NULL_TREE, true);
      gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
                                                   NULL_TREE, 1) == 0);

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                           1,
#else
                           argvec[count].reg != 0,
#endif
                           reg_parm_stack_space, 0,
                           NULL_TREE, &args_size, &argvec[count].locate);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        args_size.constant += argvec[count].locate.size.constant;

      targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);

      count++;
    }

  for (; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      machine_mode mode = (machine_mode) va_arg (p, int);
      int unsigned_p = 0;

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      gcc_assert (mode != BLKmode
                  && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (val) && !MEM_P (val)
          && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
        val = force_operand (val, NULL_RTX);

      if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
        {
          rtx slot;
          int must_copy
            = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);

          /* If this was a CONST function, it is now PURE since it now
             reads memory.  */
          if (flags & ECF_CONST)
            {
              flags &= ~ECF_CONST;
              flags |= ECF_PURE;
            }

          if (MEM_P (val) && !must_copy)
            {
              tree val_expr = MEM_EXPR (val);
              if (val_expr)
                mark_addressable (val_expr);
              slot = val;
            }
          else
            {
              slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
                                  1, 1);
              emit_move_insn (slot, val);
            }

          call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                           gen_rtx_USE (VOIDmode, slot),
                                           call_fusage);
          if (must_copy)
            call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode,
                                                              slot),
                                             call_fusage);

          mode = Pmode;
          val = force_operand (XEXP (slot, 0), NULL_RTX);
        }

      mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
      argvec[count].mode = mode;
      argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
      argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
                                                      NULL_TREE, true);

      argvec[count].partial
        = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);

      if (argvec[count].reg == 0
          || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        {
          locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                               1,
#else
                               argvec[count].reg != 0,
#endif
                               reg_parm_stack_space, argvec[count].partial,
                               NULL_TREE, &args_size, &argvec[count].locate);
          args_size.constant += argvec[count].locate.size.constant;
          gcc_assert (!argvec[count].locate.size.var);
        }
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        argvec[count].locate.where_pad =
          BLOCK_REG_PADDING (mode, NULL_TREE,
                             GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
#endif

      targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
    }

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
  args_size.constant = (((args_size.constant
                          + stack_pointer_delta
                          + STACK_BYTES - 1)
                         / STACK_BYTES
                         * STACK_BYTES)
                        - stack_pointer_delta);
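
  /* Worked example (editor's illustration): with STACK_BYTES == 16,
     args_size.constant == 20 and stack_pointer_delta == 8, the
     expression rounds 20 + 8 == 28 up to 32 and subtracts the delta
     again, giving 24; pushing 24 more bytes leaves the stack pointer
     32 bytes past its base, i.e. still 16-byte aligned at the call.  */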

  args_size.constant = MAX (args_size.constant,
                            reg_parm_stack_space);

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
    args_size.constant -= reg_parm_stack_space;

  if (args_size.constant > crtl->outgoing_args_size)
    crtl->outgoing_args_size = args_size.constant;

  if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
    {
      int pushed = args_size.constant + pending_stack_adjust;
      if (pushed > current_function_pushed_stack_size)
        current_function_pushed_stack_size = pushed;
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* Since the stack pointer will never be pushed, it is possible for
         the evaluation of a parm to clobber something we have already
         written to the stack.  Since most function calls on RISC machines
         do not use the stack, this is uncommon, but must work correctly.

         Therefore, we save any area of the stack that was already written
         and that we are using.  Here we set up to do this by making a new
         stack usage map from the old one.

         Another approach might be to try to reorder the argument
         evaluations to avoid this conflicting stack usage.  */

      needed = args_size.constant;

      /* Since we will be writing into the entire argument area, the
         map must be allocated for its entire size, not just the part that
         is the responsibility of the caller.  */
      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
        needed += reg_parm_stack_space;

      if (ARGS_GROW_DOWNWARD)
        highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                           needed + 1);
      else
        highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);

      stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
      stack_usage_map = stack_usage_map_buf;

      if (initial_highest_arg_in_use)
        memcpy (stack_usage_map, initial_stack_usage_map,
                initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
        memset (&stack_usage_map[initial_highest_arg_in_use], 0,
                highest_outgoing_arg_in_use - initial_highest_arg_in_use);
      needed = 0;

      /* We must be careful to use virtual regs before they're instantiated,
         and real regs afterwards.  Loop optimization, for example, can create
         new libcalls after we've instantiated the virtual regs, and if we
         use virtuals anyway, they won't match the rtl patterns.  */

      if (virtuals_instantiated)
        argblock = plus_constant (Pmode, stack_pointer_rtx,
                                  STACK_POINTER_OFFSET);
      else
        argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
        argblock = push_block (GEN_INT (args_size.constant), 0, 0);
    }

  /* We push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));

  argnum = nargs - 1;

#ifdef REG_PARM_STACK_SPACE
  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* The argument list is the property of the called routine and it
         may clobber it.  If the fixed area has been used for previous
         parameters, we must save and restore it.  */
      save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                            &low_to_save, &high_to_save);
    }
#endif

  /* When expanding a normal call, args are stored in push order,
     which is the reverse of what we have here.  */
  bool any_regs = false;
  for (int i = nargs; i-- > 0; )
    if (argvec[i].reg != NULL_RTX)
      {
        targetm.calls.call_args (argvec[i].reg, NULL_TREE);
        any_regs = true;
      }
  if (!any_regs)
    targetm.calls.call_args (pc_rtx, NULL_TREE);

  /* Push the args that need to be pushed.  */

  have_push_fusage = false;

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum--)
    {
      machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
      unsigned int parm_align = argvec[argnum].locate.boundary;
      int lower_bound = 0, upper_bound = 0, i;

      if (! (reg != 0 && partial == 0))
        {
          rtx use;

          if (ACCUMULATE_OUTGOING_ARGS)
            {
              /* If this is being stored into a pre-allocated, fixed-size,
                 stack area, save any previous data at that location.  */

              if (ARGS_GROW_DOWNWARD)
                {
                  /* stack_slot is negative, but we want to index
                     stack_usage_map with positive values.  */
                  upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
                  lower_bound = upper_bound - argvec[argnum].locate.size.constant;
                }
              else
                {
                  lower_bound = argvec[argnum].locate.slot_offset.constant;
                  upper_bound = lower_bound + argvec[argnum].locate.size.constant;
                }
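
              /* Editor's illustration: with ARGS_GROW_DOWNWARD, a slot
                 offset of -8 for a 4-byte argument covers bytes -8..-5,
                 which the code above maps to lower_bound == 5 and
                 upper_bound == 9, so stack_usage_map is always indexed
                 with nonnegative values.  */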

              i = lower_bound;
              /* Don't worry about things in the fixed argument area;
                 it has already been saved.  */
              if (i < reg_parm_stack_space)
                i = reg_parm_stack_space;
              while (i < upper_bound && stack_usage_map[i] == 0)
                i++;

              if (i < upper_bound)
                {
                  /* We need to make a save area.  */
                  unsigned int size
                    = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
                  machine_mode save_mode
                    = mode_for_size (size, MODE_INT, 1);
                  rtx adr
                    = plus_constant (Pmode, argblock,
                                     argvec[argnum].locate.offset.constant);
                  rtx stack_area
                    = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));

                  if (save_mode == BLKmode)
                    {
                      argvec[argnum].save_area
                        = assign_stack_temp (BLKmode,
                                             argvec[argnum].locate.size.constant);

                      emit_block_move (validize_mem
                                         (copy_rtx (argvec[argnum].save_area)),
                                       stack_area,
                                       GEN_INT (argvec[argnum].locate.size.constant),
                                       BLOCK_OP_CALL_PARM);
                    }
                  else
                    {
                      argvec[argnum].save_area = gen_reg_rtx (save_mode);

                      emit_move_insn (argvec[argnum].save_area, stack_area);
                    }
                }
            }

          emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
                          partial, reg, 0, argblock,
                          GEN_INT (argvec[argnum].locate.offset.constant),
                          reg_parm_stack_space,
                          ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad),
                          false);

          /* Now mark the segment we just used.  */
          if (ACCUMULATE_OUTGOING_ARGS)
            for (i = lower_bound; i < upper_bound; i++)
              stack_usage_map[i] = 1;

          NO_DEFER_POP;

          /* Indicate argument access so that alias.c knows that these
             values are live.  */
          if (argblock)
            use = plus_constant (Pmode, argblock,
                                 argvec[argnum].locate.offset.constant);
          else if (have_push_fusage)
            continue;
          else
            {
              /* When arguments are pushed, trying to tell alias.c where
                 exactly this argument is won't work, because the
                 auto-increment causes confusion.  So we merely indicate
                 that we access something with a known mode somewhere on
                 the stack.  */
              use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                  gen_rtx_SCRATCH (Pmode));
              have_push_fusage = true;
            }
          use = gen_rtx_MEM (argvec[argnum].mode, use);
          use = gen_rtx_USE (VOIDmode, use);
          call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
        }
    }

  argnum = nargs - 1;

  fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);

  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum--)
    {
      machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
#ifdef BLOCK_REG_PADDING
      int size = 0;
#endif

      /* Handle calls that pass values in multiple non-contiguous
         locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
      else if (reg != 0 && partial == 0)
        {
          emit_move_insn (reg, val);
#ifdef BLOCK_REG_PADDING
          size = GET_MODE_SIZE (argvec[argnum].mode);

          /* Copied from load_register_parameters.  */

          /* Handle the case where we have a value that needs shifting
             up to the msb, e.g. a QImode value being padded upward on
             a BYTES_BIG_ENDIAN machine.  */
          if (size < UNITS_PER_WORD
              && (argvec[argnum].locate.where_pad
                  == (BYTES_BIG_ENDIAN ? upward : downward)))
            {
              rtx x;
              int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

              /* Assigning REG here rather than a temp makes CALL_FUSAGE
                 report the whole reg as used.  Strictly speaking, the
                 call only uses SIZE bytes at the msb end, but it doesn't
                 seem worth generating rtl to say that.  */
              reg = gen_rtx_REG (word_mode, REGNO (reg));
              x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
              if (x != reg)
                emit_move_insn (reg, x);
            }
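
          /* Editor's illustration: for a QImode value on a 64-bit
             BYTES_BIG_ENDIAN target whose padding rule answers upward,
             size == 1 and shift == (8 - 1) * BITS_PER_UNIT == 56, so
             the byte is moved to the most significant end of the word
             register, which is where the callee will look for it.  */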
#endif
        }

      NO_DEFER_POP;
    }

  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        use_group_regs (&call_fusage, reg);
      else if (reg != 0)
        {
          int partial = argvec[count].partial;
          if (partial)
            {
              int nregs;
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
              use_regs (&call_fusage, REGNO (reg), nregs);
            }
          else
            use_reg (&call_fusage, reg);
        }
    }

  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
                      force_reg (Pmode,
                                 force_operand (XEXP (mem_value, 0),
                                                NULL_RTX)));
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
    }

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
            ? hard_libcall_value (outmode, orgfun) : NULL_RTX);

  /* Stack must be properly aligned now.  */
  gcc_assert (!(stack_pointer_delta
                & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));

  before_call = get_last_insn ();

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
               get_identifier (XSTR (orgfun, 0)),
               build_function_type (tfom, NULL_TREE),
               original_args_size.constant, args_size.constant,
               struct_value_size,
               targetm.calls.function_arg (args_so_far,
                                           VOIDmode, void_type_node, true),
               valreg,
               old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);

  if (flag_ipa_ra)
    {
      rtx datum = orgfun;
      gcc_assert (GET_CODE (datum) == SYMBOL_REF);
      rtx_call_insn *last = last_call_insn ();
      add_reg_note (last, REG_CALL_DECL, datum);
    }

  /* Right-shift returned value if necessary.  */
  if (!pcc_struct_value
      && TYPE_MODE (tfom) != BLKmode
      && targetm.calls.return_in_msb (tfom))
    {
      shift_return_value (TYPE_MODE (tfom), false, valreg);
      valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
    }

  targetm.calls.end_call_args ();

  /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
     that it should complain if nonvolatile values are live.  For
     functions that cannot return, inform flow that control does not
     fall through.  */
  if (flags & ECF_NORETURN)
    {
      /* The barrier note must be emitted
         immediately after the CALL_INSN.  Some ports emit more than
         just a CALL_INSN above, so we must search for it here.  */
      rtx_insn *last = get_last_insn ();
      while (!CALL_P (last))
        {
          last = PREV_INSN (last);
          /* There was no CALL_INSN?  */
          gcc_assert (last != before_call);
        }

      emit_barrier_after (last);
    }

  /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
     and LCT_RETURNS_TWICE, cannot perform non-local gotos.  */
  if (flags & ECF_NOTHROW)
    {
      rtx_insn *last = get_last_insn ();
      while (!CALL_P (last))
        {
          last = PREV_INSN (last);
          /* There was no CALL_INSN?  */
          gcc_assert (last != before_call);
        }

      make_reg_eh_region_note_nothrow_nononlocal (last);
    }

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  pop_temp_slots ();

  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
        {
          if (value == 0)
            value = mem_value;
          if (value != mem_value)
            emit_move_insn (value, mem_value);
        }
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (value == 0)
            value = gen_reg_rtx (outmode);
          emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
        }
      else
        {
          /* Convert to the proper mode if a promotion has been active.  */
          if (GET_MODE (valreg) != outmode)
            {
              int unsignedp = TYPE_UNSIGNED (tfom);

              gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
                                                 fndecl ? TREE_TYPE (fndecl)
                                                        : fntype, 1)
                          == GET_MODE (valreg));
              valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
            }

          if (value != 0)
            emit_move_insn (value, valreg);
          else
            value = valreg;
        }
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
        restore_fixed_argument_area (save_area, argblock,
                                     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx adr = plus_constant (Pmode, argblock,
                                     argvec[count].locate.offset.constant);
            rtx stack_area = gen_rtx_MEM (save_mode,
                                          memory_address (save_mode, adr));

            if (save_mode == BLKmode)
              emit_block_move (stack_area,
                               validize_mem
                                 (copy_rtx (argvec[count].save_area)),
                               GEN_INT (argvec[count].locate.size.constant),
                               BLOCK_OP_CALL_PARM);
            else
              emit_move_insn (stack_area, argvec[count].save_area);
          }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  free (stack_usage_map_buf);

  return value;
}

/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
   `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
   other types of library calls.  */

void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
                   machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
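
/* Illustrative usage (editor's sketch; OBJECT and SIZE are hypothetical
   operands): each argument rtx is followed by the machine_mode it is
   passed in, and NARGS counts the rtx/mode pairs.  A back end clearing
   a block OBJECT of SIZE bytes through the memset libfunc might write

     emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
                        XEXP (object, 0), Pmode,
                        const0_rtx, TYPE_MODE (integer_type_node),
                        size, TYPE_MODE (sizetype));

   assuming SIZE has already been converted to sizetype's mode.  */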

/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value (rtx orgfun, rtx value,
                         enum libcall_type fn_type,
                         machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
                                      nargs, p);
  va_end (p);

  return result;
}
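
/* Illustrative usage (editor's sketch; X and Y are hypothetical SImode
   operands): to multiply X and Y through the SImode multiply libfunc
   and let the machinery pick the result location, one might write

     rtx prod = emit_library_call_value (optab_libfunc (smul_optab, SImode),
                                         NULL_RTX, LCT_CONST, SImode,
                                         2, x, SImode, y, SImode);

   PROD is then either the hard return-value register or a location
   chosen by emit_library_call_value_1.  */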

/* Store pointer bounds argument ARG into Bounds Table entry
   associated with PARM.  */

static void
store_bounds (struct arg_data *arg, struct arg_data *parm)
{
  rtx slot = NULL, ptr = NULL, addr = NULL;

  /* We may pass bounds not associated with any pointer.  */
  if (!parm)
    {
      gcc_assert (arg->special_slot);
      slot = arg->special_slot;
      ptr = const0_rtx;
    }
  /* Find pointer associated with bounds and where it is
     passed.  */
  else
    {
      if (!parm->reg)
        {
          gcc_assert (!arg->special_slot);

          addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
        }
      else if (REG_P (parm->reg))
        {
          gcc_assert (arg->special_slot);
          slot = arg->special_slot;

          if (MEM_P (parm->value))
            addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
          else if (REG_P (parm->value))
            ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
          else
            {
              gcc_assert (!arg->pointer_offset);
              ptr = parm->value;
            }
        }
      else
        {
          gcc_assert (GET_CODE (parm->reg) == PARALLEL);

          gcc_assert (arg->special_slot);
          slot = arg->special_slot;

          if (parm->parallel_value)
            ptr = chkp_get_value_with_offs (parm->parallel_value,
                                            GEN_INT (arg->pointer_offset));
          else
            gcc_unreachable ();
        }
    }

  /* Expand bounds.  */
  if (!arg->value)
    arg->value = expand_normal (arg->tree_value);

  targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
}

/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   MAY_BE_ALLOCA nonzero says this could be a call to `alloca', so we
   must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   FNDECL is the declaration of the function we are calling.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
               int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack
         area, save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
        {
          if (ARGS_GROW_DOWNWARD)
            {
              /* stack_slot is negative, but we want to index
                 stack_usage_map with positive values.  */
              if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
                upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
              else
                upper_bound = 0;

              lower_bound = upper_bound - arg->locate.size.constant;
            }
          else
            {
              if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
                lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
              else
                lower_bound = 0;

              upper_bound = lower_bound + arg->locate.size.constant;
            }

          i = lower_bound;
          /* Don't worry about things in the fixed argument area;
             it has already been saved.  */
          if (i < reg_parm_stack_space)
            i = reg_parm_stack_space;
          while (i < upper_bound && stack_usage_map[i] == 0)
            i++;

          if (i < upper_bound)
            {
              /* We need to make a save area.  */
              unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
              machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
              rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
              rtx stack_area = gen_rtx_MEM (save_mode, adr);

              if (save_mode == BLKmode)
                {
                  arg->save_area
                    = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (copy_rtx (arg->save_area)),
                                   stack_area,
                                   GEN_INT (arg->locate.size.constant),
                                   BLOCK_OP_CALL_PARM);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
    }

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
        stack_arg_under_construction++;

      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object (or if for any other reason the
         mode doesn't agree), convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }

  /* Check for overlap with the already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
                                               arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));

      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          int pad = used - size;
          if (pad)
            {
              unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, pad_align);
            }
        }
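
      /* Editor's illustration: pushing a 1-byte scalar with
         PARM_BOUNDARY == 32 gives used == 4; when the argument is
         padded downward, pad == 3 and pad_align == (3 & -3)
         * BITS_PER_UNIT == 8, so the value itself is only
         byte-aligned within its 4-byte slot.  */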
4917 | ||
51bbfa0c RS |
4918 | /* This isn't already where we want it on the stack, so put it there. |
4919 | This can either be done with push or copy insns. */ | |
99206968 | 4920 | if (!emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, |
46bd2bee | 4921 | parm_align, partial, reg, used - size, argblock, |
e7949876 | 4922 | ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, |
99206968 KT |
4923 | ARGS_SIZE_RTX (arg->locate.alignment_pad), true)) |
4924 | sibcall_failure = 1; | |
841404cd AO |
4925 | |
4926 | /* Unless this is a partially-in-register argument, the argument is now | |
4927 | in the stack. */ | |
4928 | if (partial == 0) | |
4929 | arg->value = arg->stack; | |
51bbfa0c RS |
4930 | } |
4931 | else | |
4932 | { | |
4933 | /* BLKmode, at least partly to be pushed. */ | |
4934 | ||
1b1f20ca | 4935 | unsigned int parm_align; |
b3694847 | 4936 | int excess; |
51bbfa0c RS |
4937 | rtx size_rtx; |
4938 | ||
4939 | /* Pushing a nonscalar. | |
4940 | If part is passed in registers, PARTIAL says how much | |
4941 | and emit_push_insn will take care of putting it there. */ | |
4942 | ||
4943 | /* Round its size up to a multiple | |
4944 | of the allocation unit for arguments. */ | |
4945 | ||
e7949876 | 4946 | if (arg->locate.size.var != 0) |
51bbfa0c RS |
4947 | { |
4948 | excess = 0; | |
e7949876 | 4949 | size_rtx = ARGS_SIZE_RTX (arg->locate.size); |
51bbfa0c RS |
4950 | } |
4951 | else | |
4952 | { | |
78a52f11 RH |
4953 | /* PUSH_ROUNDING has no effect on us, because emit_push_insn |
4954 | for BLKmode is careful to avoid it. */ | |
4955 | excess = (arg->locate.size.constant | |
4956 | - int_size_in_bytes (TREE_TYPE (pval)) | |
4957 | + partial); | |
db4c55f6 | 4958 | size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)), |
bbbbb16a ILT |
4959 | NULL_RTX, TYPE_MODE (sizetype), |
4960 | EXPAND_NORMAL); | |
51bbfa0c RS |
4961 | } |
4962 | ||
bfc45551 | 4963 | parm_align = arg->locate.boundary; |
1b1f20ca RH |
4964 | |
4965 | /* When an argument is padded down, the block is aligned to | |
4966 | PARM_BOUNDARY, but the actual argument isn't. */ | |
4967 | if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward) | |
4968 | { | |
e7949876 | 4969 | if (arg->locate.size.var) |
1b1f20ca RH |
4970 | parm_align = BITS_PER_UNIT; |
4971 | else if (excess) | |
4972 | { | |
97d05bfd | 4973 | unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT; |
1b1f20ca RH |
4974 | parm_align = MIN (parm_align, excess_align); |
4975 | } | |
4976 | } | |
4977 | ||
3c0cb5de | 4978 | if ((flags & ECF_SIBCALL) && MEM_P (arg->value)) |
4c6b3b2a JJ |
4979 | { |
4980 | /* emit_push_insn might not work properly if arg->value and | |
e7949876 | 4981 | argblock + arg->locate.offset areas overlap. */ |
4c6b3b2a JJ |
4982 | rtx x = arg->value; |
4983 | int i = 0; | |
4984 | ||
38173d38 | 4985 | if (XEXP (x, 0) == crtl->args.internal_arg_pointer |
4c6b3b2a JJ |
4986 | || (GET_CODE (XEXP (x, 0)) == PLUS |
4987 | && XEXP (XEXP (x, 0), 0) == | |
38173d38 | 4988 | crtl->args.internal_arg_pointer |
481683e1 | 4989 | && CONST_INT_P (XEXP (XEXP (x, 0), 1)))) |
4c6b3b2a | 4990 | { |
38173d38 | 4991 | if (XEXP (x, 0) != crtl->args.internal_arg_pointer) |
4c6b3b2a JJ |
4992 | i = INTVAL (XEXP (XEXP (x, 0), 1)); |
4993 | ||
e0a21ab9 | 4994 | /* expand_call should ensure this. */ |
366de0ce | 4995 | gcc_assert (!arg->locate.offset.var |
d6c2c77c | 4996 | && arg->locate.size.var == 0 |
481683e1 | 4997 | && CONST_INT_P (size_rtx)); |
4c6b3b2a | 4998 | |
e7949876 | 4999 | if (arg->locate.offset.constant > i) |
4c6b3b2a | 5000 | { |
e7949876 | 5001 | if (arg->locate.offset.constant < i + INTVAL (size_rtx)) |
4c6b3b2a JJ |
5002 | sibcall_failure = 1; |
5003 | } | |
e7949876 | 5004 | else if (arg->locate.offset.constant < i) |
4c6b3b2a | 5005 | { |
d6c2c77c JC |
5006 | /* Use arg->locate.size.constant instead of size_rtx |
5007 | because we only care about the part of the argument | |
5008 | on the stack. */ | |
5009 | if (i < (arg->locate.offset.constant | |
5010 | + arg->locate.size.constant)) | |
5011 | sibcall_failure = 1; | |
5012 | } | |
5013 | else | |
5014 | { | |
5015 | /* Even though they appear to be at the same location, | |
5016 | if part of the outgoing argument is in registers, | |
5017 | they aren't really at the same location. Check for | |
5018 | this by making sure that the incoming size is the | |
5019 | same as the outgoing size. */ | |
5020 | if (arg->locate.size.constant != INTVAL (size_rtx)) | |
4c6b3b2a JJ |
5021 | sibcall_failure = 1; |
5022 | } | |
5023 | } | |
5024 | } | |
5025 | ||
1efe6448 | 5026 | emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx, |
1b1f20ca | 5027 | parm_align, partial, reg, excess, argblock, |
e7949876 | 5028 | ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, |
99206968 | 5029 | ARGS_SIZE_RTX (arg->locate.alignment_pad), false); |
51bbfa0c | 5030 | |
841404cd AO |
5031 | /* Unless this is a partially-in-register argument, the argument is now |
5032 | in the stack. | |
51bbfa0c | 5033 | |
841404cd AO |
5034 | ??? Unlike the case above, in which we want the actual |
5035 | address of the data, so that we can load it directly into a | |
5036 | register, here we want the address of the stack slot, so that | |
5037 | it's properly aligned for word-by-word copying or something | |
5038 | like that. It's not clear that this is always correct. */ | |
5039 | if (partial == 0) | |
5040 | arg->value = arg->stack_slot; | |
5041 | } | |
8df3dbb7 RH |
5042 | |
5043 | if (arg->reg && GET_CODE (arg->reg) == PARALLEL) | |
5044 | { | |
5045 | tree type = TREE_TYPE (arg->tree_value); | |
5046 | arg->parallel_value | |
5047 | = emit_group_load_into_temps (arg->reg, arg->value, type, | |
5048 | int_size_in_bytes (type)); | |
5049 | } | |
51bbfa0c | 5050 | |
8403445a AM |
5051 | /* Mark all slots this store used. */ |
5052 | if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL) | |
5053 | && argblock && ! variable_size && arg->stack) | |
5054 | for (i = lower_bound; i < upper_bound; i++) | |
5055 | stack_usage_map[i] = 1; | |
5056 | ||
51bbfa0c RS |
5057 | /* Once we have pushed something, pops can't safely |
5058 | be deferred during the rest of the arguments. */ | |
5059 | NO_DEFER_POP; | |
5060 | ||
9474e8ab | 5061 | /* Free any temporary slots made in processing this argument. */ |
cc79451b | 5062 | pop_temp_slots (); |
4c6b3b2a JJ |
5063 | |
5064 | return sibcall_failure; | |
51bbfa0c | 5065 | } |

/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
                             const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}

/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
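
/* Editor's illustration: a 6-byte BLKmode structure with
   PARM_BOUNDARY == 32 has int_size_in_bytes (type) % 4 == 2, so if the
   target's padding rule answers upward on a big-endian machine (or
   downward on a little-endian one), a copy into a register would leave
   the bytes at the wrong end of the word, and the function above sends
   the argument to the stack instead.  */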