/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93-98, 1999 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;

/* Round VALUE up to the next highest integer that meets the
   alignment ALIGN (a power of two).  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
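
/* A worked example of the macro above (illustrative comment only):
   CEIL_ROUND (13, 8) == (13 + 8 - 1) & ~(8 - 1) == 20 & ~7 == 16.  */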

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
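
/* For instance (illustrative only): a two-argument function gets a
   threshold of 1 + (3 * 2) / 2 == 4 insns when optimizing for size,
   and 8 * (8 + 2) == 80 insns otherwise.  */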
\f
static rtvec initialize_for_inline	PROTO((tree));
static void adjust_copied_decl_tree	PROTO((tree));
static void note_modified_parmregs	PROTO((rtx, rtx));
static void integrate_parm_decls	PROTO((tree, struct inline_remap *,
					       rtvec));
static void integrate_decl_tree		PROTO((tree, int,
					       struct inline_remap *));
static void subst_constants		PROTO((rtx *, rtx,
					       struct inline_remap *));
static void set_block_origin_self	PROTO((tree));
static void set_decl_origin_self	PROTO((tree));
static void set_block_abstract_flags	PROTO((tree, int));
static void process_reg_param		PROTO((struct inline_remap *, rtx,
					       rtx));

void set_decl_abstract_flags		PROTO((tree, int));
static tree copy_and_set_decl_abstract_origin PROTO((tree));

/* The maximum number of instructions accepted for inlining a
   function.  Increasing values mean more aggressive inlining.
   This currently affects only functions explicitly marked as
   inline (or methods defined within the class definition for C++).
   The default value of 10000 is arbitrary but high to match the
   previously unlimited gcc capabilities.  */

int inline_max_insns = 10000;

/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the struct function for the inlined function.  */
static struct function *inlining = 0;
\f
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}

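/* Typical use, as in the CODE_LABEL case of expand_inline_function below:
   emit_label (get_label_from_map (map, CODE_LABEL_NUMBER (insn)));  */
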
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline, increase the maximum size to
     inline_max_insns (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
		  ? (inline_max_insns
		     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		  : INTEGRATE_THRESHOLD (fndecl);

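  /* E.g. (illustrative only): with the default inline_max_insns of 10000,
     an explicitly inline function with two arguments is allowed up to
     10000 + 8 * 2 == 10016 insns.  */
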
  register int ninsns = 0;
  register tree parms;
  rtx result;

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return N_("function with label addresses used in initializers cannot be inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have a BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }

  /* We will not inline a function which uses computed goto.  The addresses
     of its local labels, which may be tucked into global storage, are of
     course not constant across instantiations, which causes unexpected
     behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot be inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
	 && ! (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	return N_("function with complex parameters cannot be inline");
    }

  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return N_("inline functions not supported for this return value type");

  return 0;
}
\f
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
\f
/* Subroutine for `save_for_inline_nocopy'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}

/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}

/* Copy NODE (as with copy_node).  NODE must be a DECL.  Set the
   DECL_ABSTRACT_ORIGIN for the new node accordingly.  */

static tree
copy_and_set_decl_abstract_origin (node)
     tree node;
{
  tree copy = copy_node (node);
  if (DECL_ABSTRACT_ORIGIN (copy) != NULL_TREE)
    /* That means that NODE already had a DECL_ABSTRACT_ORIGIN.  (This
       situation occurs if we inline a function which itself made
       calls to inline functions.)  Since DECL_ABSTRACT_ORIGIN is the
       most distant ancestor, we don't have to do anything here.  */
    ;
  else
    /* The most distant ancestor must be NODE.  */
    DECL_ABSTRACT_ORIGIN (copy) = node;

  return copy;
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  argvec = initialize_for_inline (fndecl);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	/* Record what interesting things happen to our parameters.  */
	note_stores (PATTERN (insn), note_modified_parmregs);
    }

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  current_function->inl_max_label_num = max_label_num ();
  current_function->inl_last_parm_insn = current_function->x_last_parm_insn;
  current_function->original_arg_vector = argvec;
  current_function->original_decl_initial = DECL_INITIAL (fndecl);
  DECL_SAVED_INSNS (fndecl) = current_function;
}
\f
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;
\f
#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
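
/* E.g., FIXED_BASE_PLUS_P accepts an address of the form
   (plus (reg virtual-stack-vars) (const_int 8)), since the
   virtual-stack-vars register lies in the virtual register range.  */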

/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */
static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}
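
/* For example (a sketch with made-up register numbers): if the formal
   parameter lives in pseudo (reg 58) and the actual argument is
   (const_int 5), the constant is first loaded into a fresh pseudo by
   copy_to_mode_reg, that pseudo is recorded as constant-equivalent to 5,
   and map->reg_map[58] then points at it.  */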

/* Used by duplicate_eh_handlers to map labels for the exception table.  */
static struct inline_remap *eif_eh_map;

static rtx
expand_inline_function_eh_labelmap (label)
     rtx label;
{
  int index = CODE_LABEL_NUMBER (label);
  return get_label_from_map (eif_eh_map, index);
}

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
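
/* A caller-side sketch (hypothetical fragment; the real caller of this
   routine is expand_call):

     temp = expand_inline_function (fndecl, actparms, target, ignore,
                                    type, structure_value_addr);
     if (temp == (rtx) (HOST_WIDE_INT) -1)
       ... fall back to emitting an ordinary call ...  */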

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
				 int_size_in_bytes (TREE_TYPE (arg)), 1);
	  MEM_SET_IN_STRUCT_P (stack_slot,
			       AGGREGATE_TYPE_P (TREE_TYPE (arg)));

	  store_expr (arg, stack_slot, 0);

	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTE_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
			   / BITS_PER_UNIT));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) alloca (inl_max_uid * sizeof (rtx));
  bzero ((char *) map->insn_map, inl_max_uid * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
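
  /* E.g. (illustrative numbers only): inlining a two-argument callee
     into a caller with 100 pseudos, when the callee has 50, sizes the
     varray at roughly 100 + 50 + 15 * 2 + 10 == 190 entries.  */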
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  map->regno_pointer_flag = inl_f->emit->regno_pointer_flag;
  map->regno_pointer_align = inl_f->emit->regno_pointer_align;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.
	     It will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     now; we will call store_expr later.  */
	  ;
	}
      else if (GET_CODE (loc) == REG)
	process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
	{
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  process_reg_param (map, locreal, copyreal);
	  process_reg_param (map, locimag, copyimag);
	}
      else
	abort ();
    }

  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't need any special handling for
     REG_FUNCTION_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases: If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		{
		  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
					CONST_AGE_PARM);
		}
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map);
	      subst_constants (&temp, NULL_RTX, map);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode the function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
JW
983 else
984 abort ();
175160e7 985
255fe733
JM
986 /* Make a fresh binding contour that we can easily remove. Do this after
987 expanding our arguments so cleanups are properly scoped. */
988 pushlevel (0);
989 expand_start_bindings (0);
990
e5e809f4
JL
991 /* Initialize label_map. get_label_from_map will actually make
992 the labels. */
993 bzero ((char *) &map->label_map [min_labelno],
994 (max_labelno - min_labelno) * sizeof (rtx));
175160e7
MT
995
996 /* Perform postincrements before actually calling the function. */
997 emit_queue ();
998
999 /* Clean up stack so that variables might have smaller offsets. */
1000 do_pending_stack_adjust ();
1001
c68da89c
KR
1002 /* Save a copy of the location of const_equiv_varray for
1003 mark_stores, called via note_stores. */
1004 global_const_equiv_varray = map->const_equiv_varray;
175160e7 1005
136cf361
RK
1006 /* If the called function does an alloca, save and restore the
1007 stack pointer around the call. This saves stack space, but
2132517d
RK
1008 also is required if this inline is being done between two
1009 pushes. */
49ad7cfa 1010 if (inl_f->calls_alloca)
2132517d
RK
1011 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1012
  /* Now copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* If the inline fn needs eh context, make sure that
	     the current fn has one.  */
	  if (GET_CODE (pattern) == USE
	      && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
	    get_eh_context ();

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map));
	      else
		break;
	    }

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }
	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN
	      || (GET_CODE (PATTERN (insn)) == PARALLEL
		  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	    {
	      if (local_return_label == 0)
		local_return_label = gen_label_rtx ();
	      pattern = gen_jump (local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn whose branch
	     direction is now known, we must do something special.  */
	  if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (sets_cc0_p (PREV_INSN (copy)))
		delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;

	case CALL_INSN:
	  pattern = copy_rtx_and_substitute (PATTERN (insn), map);
	  copy = emit_call_insn (pattern);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */
	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;

	case CODE_LABEL:
	  copy = emit_label (get_label_from_map (map,
						 CODE_LABEL_NUMBER (insn)));
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;

	case NOTE:
	  /* It is important to discard function-end and function-beg notes,
	     so we have only one of each in the current function.
	     Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
	     deleted these in the copy used for continuing compilation,
	     not the copy used for inlining).  */
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
	      if (copy
		  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
		      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
		{
		  rtx label
		    = get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));

		  /* We have to duplicate the handlers for the original.  */
		  if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
		    {
		      /* We need to duplicate the handlers for the EH region
			 and we need to indicate where the label map is.  */
		      eif_eh_map = map;
		      duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy),
					     CODE_LABEL_NUMBER (label),
					     expand_inline_function_eh_labelmap);
		    }

		  /* We have to forward these both to match the new exception
		     region.  */
		  NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
		}
	    }
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	  break;
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }

  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& map->insn_map[INSN_UID (insn)]
	&& REG_NOTES (insn))
      {
	rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
	/* We must also do subst_constants, in case one of our parameters
	   has const type and constant value.  */
	subst_constants (&tem, NULL_RTX, map);
	apply_change_group ();
	REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
      }

  if (local_return_label)
    emit_label (local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */

  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  integrate_decl_tree (inl_f->original_decl_initial, 0, map);
  inline_function_decl = 0;

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  */

  expand_end_bindings (getdecls (), 1, 1);
  block = poplevel (1, 1, 0);
  BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
				   ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
  poplevel (0, 0, 0);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      MEM_SET_IN_STRUCT_P (target, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  if (map)
    VARRAY_FREE (map->const_equiv_varray);
  inlining = inlining_previous;

  return target;
}
\f
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
				       TREE_TYPE (tail));
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);

      DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
      /* We really should be setting DECL_INCOMING_RTL to something reasonable
	 here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* These args would always appear unused, if not for this.  */
      TREE_USED (decl) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (decl) = DECL_ORIGIN (tail);
      pushdecl (decl);
      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
    }
}

/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   LEVEL indicates how far down into the BLOCK tree is the node we are
   currently traversing.  It is always zero except for recursive calls.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static void
integrate_decl_tree (let, level, map)
     tree let;
     int level;
     struct inline_remap *map;
{
  tree t, node;

  if (level > 0)
    pushlevel (0);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      push_obstacks_nochange ();
      saveable_allocation ();
      d = copy_and_set_decl_abstract_origin (t);
      pop_obstacks ();

      if (DECL_RTL (t) != 0)
	{
	  DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
	  /* Fully instantiate the address with the equivalent form so that the
	     debugging information contains the actual register, instead of the
	     virtual register.  Do this by not passing an insn to
	     subst_constants.  */
	  subst_constants (&DECL_RTL (d), NULL_RTX, map);
	  apply_change_group ();
	}
      /* These args would always appear unused, if not for this.  */
      TREE_USED (d) = 1;

      if (DECL_LANG_SPECIFIC (d))
	copy_lang_decl (d);

      pushdecl (d);
    }

  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    integrate_decl_tree (t, level + 1, map);

  if (level > 0)
    {
      node = poplevel (1, 0, 0);
      if (node)
	{
	  TREE_USED (node) = TREE_USED (let);
	  BLOCK_ABSTRACT_ORIGIN (node) = let;
	}
    }
}
\f
/* Create a new copy of an rtx.
   Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */
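
/* Illustrative note (not from the original comments): copying, say,
   (plus:SI (reg:SI 60) (const_int 4)) yields a fresh PLUS whose REG
   operand has been translated through map->reg_map, while the CONST_INT
   operand is shared, as CONST_INTs always are.  */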

rtx
copy_rtx_and_substitute (orig, map)
     register rtx orig;
     struct inline_remap *map;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER
	  || (map->integrating
	      && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));

#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
#endif
175160e7
MT
1498 start_sequence ();
1499 loc = assign_stack_temp (BLKmode, size, 1);
1500 loc = XEXP (loc, 0);
1501#ifdef FRAME_GROWS_DOWNWARD
1502 /* In this case, virtual_stack_vars_rtx points to one byte
1503 higher than the top of the frame area. So compute the offset
3e42d56b
DE
1504 to one byte higher than our substitute frame. */
1505 loc = plus_constant (loc, size);
175160e7 1506#endif
59b2d722
RK
1507 map->reg_map[regno] = temp
1508 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2b145ea8 1509
12307ca2
RK
1510#ifdef STACK_BOUNDARY
1511 mark_reg_pointer (map->reg_map[regno],
1512 STACK_BOUNDARY / BITS_PER_UNIT);
1513#endif
1514
c68da89c 1515 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
175160e7
MT
1516
1517 seq = gen_sequence ();
1518 end_sequence ();
1519 emit_insn_after (seq, map->insns_at_start);
5c23c401 1520 return temp;
175160e7 1521 }
f83a0992
JL
1522 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1523 || (map->integrating
1524 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1525 == orig)))
175160e7
MT
1526 {
1527 /* Do the same for a block to contain any arguments referenced
0f41302f 1528 in memory. */
175160e7 1529 rtx loc, seq;
49ad7cfa 1530 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
175160e7
MT
1531
1532 start_sequence ();
1533 loc = assign_stack_temp (BLKmode, size, 1);
1534 loc = XEXP (loc, 0);
931553d8
RS
1535 /* When arguments grow downward, the virtual incoming
1536 args pointer points to the top of the argument block,
0f41302f 1537 so the remapped location better do the same. */
931553d8
RS
1538#ifdef ARGS_GROW_DOWNWARD
1539 loc = plus_constant (loc, size);
1540#endif
59b2d722
RK
1541 map->reg_map[regno] = temp
1542 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2b145ea8 1543
12307ca2
RK
1544#ifdef STACK_BOUNDARY
1545 mark_reg_pointer (map->reg_map[regno],
1546 STACK_BOUNDARY / BITS_PER_UNIT);
1547#endif
1548
c68da89c 1549 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
175160e7
MT
1550
1551 seq = gen_sequence ();
1552 end_sequence ();
1553 emit_insn_after (seq, map->insns_at_start);
5c23c401 1554 return temp;
175160e7
MT
1555 }
1556 else if (REG_FUNCTION_VALUE_P (orig))
1557 {
1558 /* This is a reference to the function return value. If
1559 the function doesn't have a return value, error. If the
c36fce9a 1560 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
175160e7
MT
1561 if (map->inline_target == 0)
1562 /* Must be unrolling loops or replicating code if we
1563 reach here, so return the register unchanged. */
1564 return orig;
60da674b
RH
1565 else if (GET_MODE (map->inline_target) != BLKmode
1566 && mode != GET_MODE (map->inline_target))
293e1467 1567 return gen_lowpart (mode, map->inline_target);
175160e7
MT
1568 else
1569 return map->inline_target;
1570 }
1571 return orig;
1572 }
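
      /* Any other pseudo of the inlined function reaches here: the first
         time we see it, allocate a fresh pseudo in the current function
         and carry over the user-variable, loop-test and pointer
         attributes.  */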
      if (map->reg_map[regno] == NULL)
        {
          map->reg_map[regno] = gen_reg_rtx (mode);
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

          if (map->regno_pointer_flag[regno])
            mark_reg_pointer (map->reg_map[regno],
                              map->regno_pointer_align[regno]);
        }
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
        return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
                               SUBREG_WORD (orig) + SUBREG_WORD (copy));
      else if (GET_CODE (copy) == CONCAT)
        {
          rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);

          if (GET_MODE (retval) == GET_MODE (orig))
            return retval;
          else
            return gen_rtx_SUBREG (GET_MODE (orig), retval,
                                   (SUBREG_WORD (orig) %
                                    (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
                                     / (unsigned) UNITS_PER_WORD)));
        }
      else
        return gen_rtx_SUBREG (GET_MODE (orig), copy,
                               SUBREG_WORD (orig));

    case ADDRESSOF:
      copy = gen_rtx_ADDRESSOF (mode,
                                copy_rtx_and_substitute (XEXP (orig, 0), map),
                                0, ADDRESSOF_DECL (orig));
      regno = ADDRESSOF_REGNO (orig);
      if (map->reg_map[regno])
        regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
        {
          temp = XEXP (orig, 0);
          map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

          if (map->regno_pointer_flag[regno])
            mark_reg_pointer (map->reg_map[regno],
                              map->regno_pointer_align[regno]);
          regno = REGNO (map->reg_map[regno]);
        }
      ADDRESSOF_REGNO (copy) = regno;
      return copy;

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
         to (use foo) if the original insn didn't have a subreg.
         Removing the subreg distorts the VAX movstrhi pattern
         by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
        copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
        = LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
        = gen_rtx_LABEL_REF
          (mode,
           LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
           : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
         it still is, so we must check if it is within the range of
         this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
        = (LABEL_REF_NONLOCAL_P (orig)
           && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
                 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
         inlined call will be referring to our nonlocal goto handler.
         So make sure we create one for this block; we normally would
         not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
        function_call_count++;

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
         pool must be modified to point to a constant pool entry for the
         remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
        {
          struct function *f = inlining ? inlining : current_function;
          rtx constant = get_pool_constant_for_function (f, orig);
          enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
          if (inlining)
            {
              rtx temp = force_const_mem (const_mode,
                                          copy_rtx_and_substitute (constant, map));
#if 0
              /* Legitimizing the address here is incorrect.

                 Since we had a SYMBOL_REF before, we can assume it is valid
                 to have one in this position in the insn.

                 Also, change_address may create new registers.  These
                 registers will not have valid reg_map entries.  This can
                 cause try_constants() to fail because it assumes that all
                 registers in the rtx have valid reg_map entries, and it may
                 end up replacing one of these new registers with junk.  */

              if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
                temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif

              temp = XEXP (temp, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
              if (GET_MODE (temp) != GET_MODE (orig))
                temp = convert_memory_address (GET_MODE (orig), temp);
#endif
              return temp;
            }
          else if (GET_CODE (constant) == LABEL_REF)
            return XEXP (force_const_mem (GET_MODE (orig),
                                          copy_rtx_and_substitute (constant,
                                                                   map)),
                         0);
        }
      else if (SYMBOL_REF_NEED_ADJUST (orig))
        {
          eif_eh_map = map;
          return rethrow_symbol_map (orig,
                                     expand_inline_function_eh_labelmap);
        }

      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
         want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
         duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (orig),
                                   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      /* Make new constant pool entry for a constant
         that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
        abort ();
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (map->orig_asm_operands_vector == XVEC (orig, 3))
        {
          copy = rtx_alloc (ASM_OPERANDS);
          copy->volatil = orig->volatil;
          XSTR (copy, 0) = XSTR (orig, 0);
          XSTR (copy, 1) = XSTR (orig, 1);
          XINT (copy, 2) = XINT (orig, 2);
          XVEC (copy, 3) = map->copy_asm_operands_vector;
          XVEC (copy, 4) = map->copy_asm_constraints_vector;
          XSTR (copy, 5) = XSTR (orig, 5);
          XINT (copy, 6) = XINT (orig, 6);
          return copy;
        }
      break;

    case CALL:
      /* This is given special treatment because the first
         operand of a CALL is a (MEM ...) which may get
         forced into a register for cse.  This is undesirable
         if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
        return
          gen_rtx_CALL
            (GET_MODE (orig),
             gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
                          copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
                                                   map)),
             copy_rtx_and_substitute (XEXP (orig, 1), map));
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
         Adjust the setting by the offset of the area we made.
         If the nonlocal goto is into the current function,
         this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
          || SET_DEST (orig) == virtual_incoming_args_rtx)
        {
          /* In case a translation hasn't occurred already, make one now.  */
          rtx equiv_reg;
          rtx equiv_loc;
          HOST_WIDE_INT loc_offset;

          copy_rtx_and_substitute (SET_DEST (orig), map);
          equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
          equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                          REGNO (equiv_reg)).rtx;
          loc_offset
            = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));

          return gen_rtx_SET (VOIDmode, SET_DEST (orig),
                              force_operand
                              (plus_constant
                               (copy_rtx_and_substitute (SET_SRC (orig), map),
                                - loc_offset),
                               NULL_RTX));
        }
      break;

    case MEM:
      if (inlining
          && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
        {
          enum machine_mode const_mode
            = get_pool_mode_for_function (inlining, XEXP (orig, 0));
          rtx constant
            = get_pool_constant_for_function (inlining, XEXP (orig, 0));
          constant = copy_rtx_and_substitute (constant, map);
          /* If this was an address of a constant pool entry that itself
             had to be placed in the constant pool, it might not be a
             valid address.  So the recursive call might have turned it
             into a register.  In that case, it isn't a constant any
             more, so return it.  This has the potential of changing a
             MEM into a REG, but we'll assume that it is safe.  */
          if (! CONSTANT_P (constant))
            return constant;
          return validize_mem (force_const_mem (const_mode, constant));
        }
      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
      MEM_COPY_ATTRIBUTES (copy, orig);
      MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);

      /* If doing function inlining, this MEM might not be const in the
         function that it is being inlined into, and thus may not be
         unchanging after function inlining.  Constant pool references are
         handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
         for them.  */
      if (! map->integrating)
        RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);

      return copy;

    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          /* Copy this through the wide int field; that's safest.  */
          X0WINT (copy, i) = X0WINT (orig, i);
          break;

        case 'e':
          XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
          break;

        case 'E':
          XVEC (copy, i) = XVEC (orig, i);
          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j)
                  = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
            }
          break;

        case 'w':
          XWINT (copy, i) = XWINT (orig, i);
          break;

        case 'i':
          XINT (copy, i) = XINT (orig, i);
          break;

        case 's':
          XSTR (copy, i) = XSTR (orig, i);
          break;

        case 't':
          XTREE (copy, i) = XTREE (orig, i);
          break;

        default:
          abort ();
        }
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = XVEC (orig, 3);
      map->copy_asm_operands_vector = XVEC (copy, 3);
      map->copy_asm_constraints_vector = XVEC (copy, 4);
    }

  return copy;
}
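
/* A minimal sketch of one way a caller might use the routine above while
   copying the body of an inline function: remap an insn's pattern, emit
   the copy, then let try_constants fold what it can.  `copy_one_pattern'
   is hypothetical and not part of this file; it assumes MAP has already
   been initialized by the inliner.  */
#if 0
static rtx
copy_one_pattern (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  /* Remap registers, labels and constant-pool references.  */
  rtx pattern = copy_rtx_and_substitute (PATTERN (insn), map);
  /* Emit the copy, then try substituting known constants into it.  */
  rtx new = emit_insn (pattern);

  try_constants (new, map);
  return new;
}
#endif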
\f
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;
  subst_constants (&PATTERN (insn), insn, map);

  /* Apply the changes if they are valid; otherwise discard them.  */
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
        {
          int regno = REGNO (map->equiv_sets[i].dest);

          MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
          if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
              /* The following clause is a hack to make the case work where
                 GNU C++ reassigns a variable to make cse work right.  */
              || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                                    regno).rtx,
                                map->equiv_sets[i].equiv))
            SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
                                  map->equiv_sets[i].equiv, map->const_age);
        }
      else if (map->equiv_sets[i].dest == pc_rtx)
        map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
        map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
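
/* For illustration: subst_constants merely *queues* each replacement by
   calling validate_change with an in-group argument of 1; nothing sticks
   until apply_change_group verifies that every changed insn still
   matches a recognizable pattern, and otherwise undoes the whole group.
   A hedged sketch, where LOC and NEW_RTX stand for any operand location
   and proposed value:  */
#if 0
  validate_change (insn, loc, new_rtx, 1);  /* Queued, not yet applied.  */
  apply_change_group ();  /* Commits all queued changes, or none.  */
#endif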
\f
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust the addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.  */

static void
subst_constants (loc, insn, map)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
{
  rtx x = *loc;
  register int i;
  register enum rtx_code code;
  register const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
         some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
         hard regs used as user variables with constants.  */
      {
        int regno = REGNO (x);
        struct const_equiv_data *p;

        if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
            && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
            && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
                p->rtx != 0)
            && p->age >= map->const_age)
          validate_change (insn, loc, p->rtx, 1);
        return;
      }

    case SUBREG:
      /* SUBREG applied to something other than a reg
         should be treated as ordinary, since that must
         be a special hack and we don't know how to treat it specially.
         Consider for example mulsidi3 in m68k.md.
         Ordinary SUBREG of a REG needs this special treatment.  */
      if (GET_CODE (SUBREG_REG (x)) == REG)
        {
          rtx inner = SUBREG_REG (x);
          rtx new = 0;

          /* We can't call subst_constants on &SUBREG_REG (x) because any
             constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
             see what is inside, try to form the new SUBREG and see if that is
             valid.  We handle two cases: extracting a full word in an
             integral mode and extracting the low part.  */
          subst_constants (&inner, NULL_RTX, map);

          if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
              && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
              && GET_MODE (SUBREG_REG (x)) != VOIDmode)
            new = operand_subword (inner, SUBREG_WORD (x), 0,
                                   GET_MODE (SUBREG_REG (x)));

          cancel_changes (num_changes);
          if (new == 0 && subreg_lowpart_p (x))
            new = gen_lowpart_common (GET_MODE (x), inner);

          if (new)
            validate_change (insn, loc, new, 1);

          return;
        }
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map);

      /* If a memory address got spoiled, change it back.  */
      if (insn != 0 && num_validated_changes () != num_changes
          && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
        cancel_changes (num_changes);
      return;

    case SET:
      {
        /* Substitute constants in our source, and in any arguments to a
           complex (e.g., ZERO_EXTRACT) destination, but not in the
           destination itself.  */
        rtx *dest_loc = &SET_DEST (x);
        rtx dest = *dest_loc;
        rtx src, tem;

        subst_constants (&SET_SRC (x), insn, map);
        src = SET_SRC (x);

        while (GET_CODE (*dest_loc) == ZERO_EXTRACT
               || GET_CODE (*dest_loc) == SUBREG
               || GET_CODE (*dest_loc) == STRICT_LOW_PART)
          {
            if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
              {
                subst_constants (&XEXP (*dest_loc, 1), insn, map);
                subst_constants (&XEXP (*dest_loc, 2), insn, map);
              }
            dest_loc = &XEXP (*dest_loc, 0);
          }

        /* Do substitute in the address of a destination in memory.  */
        if (GET_CODE (*dest_loc) == MEM)
          subst_constants (&XEXP (*dest_loc, 0), insn, map);

        /* Check for the case of DEST a SUBREG, both it and the underlying
           register are less than one word, and the SUBREG has the wider mode.
           In that case, we are really setting the underlying register to the
           source converted to the mode of DEST.  So indicate that.  */
        if (GET_CODE (dest) == SUBREG
            && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
            && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
            && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                <= GET_MODE_SIZE (GET_MODE (dest)))
            && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
                                               src)))
          src = tem, dest = SUBREG_REG (dest);

        /* If storing a recognizable value save it for later recording.  */
        if ((map->num_sets < MAX_RECOG_OPERANDS)
            && (CONSTANT_P (src)
                || (GET_CODE (src) == REG
                    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
                || (GET_CODE (src) == PLUS
                    && GET_CODE (XEXP (src, 0)) == REG
                    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
                    && CONSTANT_P (XEXP (src, 1)))
                || GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
                || dest == cc0_rtx
#endif
                || (dest == pc_rtx
                    && (src == pc_rtx || GET_CODE (src) == RETURN
                        || GET_CODE (src) == LABEL_REF))))
          {
            /* Normally, this copy won't do anything.  But, if SRC is a
               COMPARE it will cause us to save the COMPARE with any
               constants substituted, which is what we want for later.  */
            map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
            map->equiv_sets[map->num_sets++].dest = dest;
          }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          break;

        case 'e':
          if (XEXP (x, i))
            subst_constants (&XEXP (x, i), insn, map);
          break;

        case 'u':
        case 'i':
        case 's':
        case 'w':
        case 't':
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              int j;
              for (j = 0; j < XVECLEN (x, i); j++)
                subst_constants (&XVECEXP (x, i, j), insn, map);
            }
          break;

        default:
          abort ();
        }
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      if (op0_mode == MAX_MACHINE_MODE)
        abort ();
      new = simplify_unary_operation (code, GET_MODE (x),
                                      XEXP (x, 0), op0_mode);
      break;

    case '<':
      {
        enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
        if (op_mode == VOIDmode)
          op_mode = GET_MODE (XEXP (x, 1));
        new = simplify_relational_operation (code, op_mode,
                                             XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
        if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
          new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
                 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
                                                 GET_MODE (x)));
#endif
        break;
      }

    case '2':
    case 'c':
      new = simplify_binary_operation (code, GET_MODE (x),
                                       XEXP (x, 0), XEXP (x, 1));
      break;

    case 'b':
    case '3':
      if (op0_mode == MAX_MACHINE_MODE)
        abort ();
      new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
                                        XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
      break;
    }

  if (new)
    validate_change (insn, loc, new, 1);
}
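
/* For illustration: if MAP records that pseudo 60 currently equals
   (const_int 8), substituting into (plus:SI (reg:SI 60) (const_int 4))
   and folding with simplify_binary_operation yields (const_int 12),
   which is then queued via validate_change.  The register number is
   hypothetical.  */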

/* Show that registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

void
mark_stores (dest, x)
     rtx dest;
     rtx x ATTRIBUTE_UNUSED;
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
                      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
      int i;

      /* Ignore virtual stack var or virtual arg register since those
         are handled separately.  */
      if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
          && regno != VIRTUAL_STACK_VARS_REGNUM)
        for (i = regno; i <= last_reg; i++)
          if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
            VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
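
/* For illustration: after copying an insn such as
   (set (reg:SI 65) (call ...)), note_stores hands (reg:SI 65) to
   mark_stores, which forgets any constant equivalence recorded for
   pseudo 65; a multi-word hard-register destination is invalidated for
   every hard register it occupies.  The register number is
   hypothetical.  */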
\f
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
        register tree local_decl;

        for (local_decl = BLOCK_VARS (stmt);
             local_decl != NULL_TREE;
             local_decl = TREE_CHAIN (local_decl))
          set_decl_origin_self (local_decl);    /* Potential recursion.  */
      }

      {
        register tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (stmt);
             subblock != NULL_TREE;
             subblock = BLOCK_CHAIN (subblock))
          set_block_origin_self (subblock);     /* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          register tree arg;

          for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
            DECL_ABSTRACT_ORIGIN (arg) = arg;
          if (DECL_INITIAL (decl) != NULL_TREE
              && DECL_INITIAL (decl) != error_mark_node)
            set_block_origin_self (DECL_INITIAL (decl));
        }
    }
}
\f
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
        DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
          && DECL_INITIAL (decl) != error_mark_node)
        set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
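
/* For illustration: a debug-info emitter would typically bracket the
   output for an abstract inline instance with these flags, roughly as
   sketched below (a hedged sketch; the surrounding context is
   hypothetical):  */
#if 0
  set_decl_abstract_flags (fndecl, 1);  /* Mark the instance abstract.  */
  /* ... emit debugging information for the abstract instance ...  */
  set_decl_abstract_flags (fndecl, 0);  /* Restore the real flags.  */
#endif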
\f
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  struct function *curf = current_function;
  struct function *f = DECL_SAVED_INSNS (fndecl);

  current_function = f;
  current_function_decl = fndecl;
  clear_emit_caches ();

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  set_new_last_label_num (f->inl_max_label_num);

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* We can't inline this anymore.  */
  f->inlinable = 0;
  DECL_INLINE (fndecl) = 0;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function = curf;
  current_function_decl = curf ? curf->decl : 0;
}
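
/* For illustration: a compilation driver is expected to call the routine
   above once per deferred inline function at the end of translation,
   roughly along these lines.  This is a hedged sketch; `deferred_inlines'
   is a hypothetical list, and the real bookkeeping lives in the driver,
   not here.  */
#if 0
  for (fndecl = deferred_inlines; fndecl; fndecl = TREE_CHAIN (fndecl))
    if (DECL_DEFER_OUTPUT (fndecl) && DECL_SAVED_INSNS (fndecl))
      output_inline_function (fndecl);
#endif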