/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93-98, 1999, 2000 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
31 #include "insn-config.h"
32 #include "insn-flags.h"
36 #include "integrate.h"
45 #define obstack_chunk_alloc xmalloc
46 #define obstack_chunk_free free
48 extern struct obstack
*function_maybepermanent_obstack
;
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space then not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
65 static rtvec initialize_for_inline
PARAMS ((tree
));
66 static void note_modified_parmregs
PARAMS ((rtx
, rtx
, void *));
67 static void integrate_parm_decls
PARAMS ((tree
, struct inline_remap
*,
69 static tree integrate_decl_tree
PARAMS ((tree
,
70 struct inline_remap
*));
71 static void subst_constants
PARAMS ((rtx
*, rtx
,
72 struct inline_remap
*, int));
73 static void set_block_origin_self
PARAMS ((tree
));
74 static void set_decl_origin_self
PARAMS ((tree
));
75 static void set_block_abstract_flags
PARAMS ((tree
, int));
76 static void process_reg_param
PARAMS ((struct inline_remap
*, rtx
,
78 void set_decl_abstract_flags
PARAMS ((tree
, int));
79 static rtx expand_inline_function_eh_labelmap
PARAMS ((rtx
));
80 static void mark_stores
PARAMS ((rtx
, rtx
, void *));
81 static int compare_blocks
PARAMS ((const PTR
, const PTR
));
82 static int find_block
PARAMS ((const PTR
, const PTR
));
/* The maximum number of instructions accepted for inlining a
   function.  Increasing values mean more agressive inlining.
   This affects currently only functions explicitly marked as
   inline (or methods defined within the class definition for C++).
   The default value of 10000 is arbitrary but high to match the
   previously unlimited gcc capabilities.  */

int inline_max_insns = 10000;

/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (i.e. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the FUNCTION_DECL for the inlined function.  */

static struct function *inlining = 0;
99 /* Returns the Ith entry in the label_map contained in MAP. If the
100 Ith entry has not yet been set, return a fresh label. This function
101 performs a lazy initialization of label_map, thereby avoiding huge memory
102 explosions when the label_map gets very large. */
105 get_label_from_map (map
, i
)
106 struct inline_remap
*map
;
109 rtx x
= map
->label_map
[i
];
112 x
= map
->label_map
[i
] = gen_label_rtx();
117 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
118 is safe and reasonable to integrate into other functions.
119 Nonzero means value is a warning msgid with a single %s
120 for the function's name. */
123 function_cannot_inline_p (fndecl
)
124 register tree fndecl
;
127 tree last
= tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
129 /* For functions marked as inline increase the maximum size to
130 inline_max_insns (-finline-limit-<n>). For regular functions
131 use the limit given by INTEGRATE_THRESHOLD. */
133 int max_insns
= (DECL_INLINE (fndecl
))
135 + 8 * list_length (DECL_ARGUMENTS (fndecl
)))
136 : INTEGRATE_THRESHOLD (fndecl
);
138 register int ninsns
= 0;
142 /* No inlines with varargs. */
143 if ((last
&& TREE_VALUE (last
) != void_type_node
)
144 || current_function_varargs
)
145 return N_("varargs function cannot be inline");
147 if (current_function_calls_alloca
)
148 return N_("function using alloca cannot be inline");
150 if (current_function_calls_setjmp
)
151 return N_("function using setjmp cannot be inline");
153 if (current_function_contains_functions
)
154 return N_("function with nested functions cannot be inline");
158 N_("function with label addresses used in initializers cannot inline");
160 if (current_function_cannot_inline
)
161 return current_function_cannot_inline
;
163 /* If its not even close, don't even look. */
164 if (get_max_uid () > 3 * max_insns
)
165 return N_("function too large to be inline");
168 /* Don't inline functions which do not specify a function prototype and
169 have BLKmode argument or take the address of a parameter. */
170 for (parms
= DECL_ARGUMENTS (fndecl
); parms
; parms
= TREE_CHAIN (parms
))
172 if (TYPE_MODE (TREE_TYPE (parms
)) == BLKmode
)
173 TREE_ADDRESSABLE (parms
) = 1;
174 if (last
== NULL_TREE
&& TREE_ADDRESSABLE (parms
))
175 return N_("no prototype, and parameter address used; cannot be inline");
179 /* We can't inline functions that return structures
180 the old-fashioned PCC way, copying into a static block. */
181 if (current_function_returns_pcc_struct
)
182 return N_("inline functions not supported for this return value type");
184 /* We can't inline functions that return structures of varying size. */
185 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl
))) < 0)
186 return N_("function with varying-size return value cannot be inline");
188 /* Cannot inline a function with a varying size argument or one that
189 receives a transparent union. */
190 for (parms
= DECL_ARGUMENTS (fndecl
); parms
; parms
= TREE_CHAIN (parms
))
192 if (int_size_in_bytes (TREE_TYPE (parms
)) < 0)
193 return N_("function with varying-size parameter cannot be inline");
194 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms
)))
195 return N_("function with transparent unit parameter cannot be inline");
198 if (get_max_uid () > max_insns
)
200 for (ninsns
= 0, insn
= get_first_nonparm_insn ();
201 insn
&& ninsns
< max_insns
;
202 insn
= NEXT_INSN (insn
))
203 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
206 if (ninsns
>= max_insns
)
207 return N_("function too large to be inline");
210 /* We will not inline a function which uses computed goto. The addresses of
211 its local labels, which may be tucked into global storage, are of course
212 not constant across instantiations, which causes unexpected behaviour. */
213 if (current_function_has_computed_jump
)
214 return N_("function with computed jump cannot inline");
216 /* We cannot inline a nested function that jumps to a nonlocal label. */
217 if (current_function_has_nonlocal_goto
)
218 return N_("function with nonlocal goto cannot be inline");
220 /* This is a hack, until the inliner is taught about eh regions at
221 the start of the function. */
222 for (insn
= get_insns ();
224 && ! (GET_CODE (insn
) == NOTE
225 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_BEG
);
226 insn
= NEXT_INSN (insn
))
228 if (insn
&& GET_CODE (insn
) == NOTE
229 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_BEG
)
230 return N_("function with complex parameters cannot be inline");
233 /* We can't inline functions that return a PARALLEL rtx. */
234 result
= DECL_RTL (DECL_RESULT (fndecl
));
235 if (result
&& GET_CODE (result
) == PARALLEL
)
236 return N_("inline functions not supported for this return value type");
241 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
242 Zero for a reg that isn't a parm's home.
243 Only reg numbers less than max_parm_reg are mapped here. */
244 static tree
*parmdecl_map
;
246 /* In save_for_inline, nonzero if past the parm-initialization insns. */
247 static int in_nonparm_insns
;
249 /* Subroutine for `save_for_inline_nocopy'. Performs initialization
250 needed to save FNDECL's insns and info for future inline expansion. */
253 initialize_for_inline (fndecl
)
260 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
261 bzero ((char *) parmdecl_map
, max_parm_reg
* sizeof (tree
));
262 arg_vector
= rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl
)));
264 for (parms
= DECL_ARGUMENTS (fndecl
), i
= 0;
266 parms
= TREE_CHAIN (parms
), i
++)
268 rtx p
= DECL_RTL (parms
);
270 /* If we have (mem (addressof (mem ...))), use the inner MEM since
271 otherwise the copy_rtx call below will not unshare the MEM since
272 it shares ADDRESSOF. */
273 if (GET_CODE (p
) == MEM
&& GET_CODE (XEXP (p
, 0)) == ADDRESSOF
274 && GET_CODE (XEXP (XEXP (p
, 0), 0)) == MEM
)
275 p
= XEXP (XEXP (p
, 0), 0);
277 RTVEC_ELT (arg_vector
, i
) = p
;
279 if (GET_CODE (p
) == REG
)
280 parmdecl_map
[REGNO (p
)] = parms
;
281 else if (GET_CODE (p
) == CONCAT
)
283 rtx preal
= gen_realpart (GET_MODE (XEXP (p
, 0)), p
);
284 rtx pimag
= gen_imagpart (GET_MODE (preal
), p
);
286 if (GET_CODE (preal
) == REG
)
287 parmdecl_map
[REGNO (preal
)] = parms
;
288 if (GET_CODE (pimag
) == REG
)
289 parmdecl_map
[REGNO (pimag
)] = parms
;
292 /* This flag is cleared later
293 if the function ever modifies the value of the parm. */
294 TREE_READONLY (parms
) = 1;
300 /* Copy NODE (which must be a DECL, but not a PARM_DECL). The DECL
301 originally was in the FROM_FN, but now it will be in the
305 copy_decl_for_inlining (decl
, from_fn
, to_fn
)
312 /* Copy the declaration. */
313 if (TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == RESULT_DECL
)
314 /* For a parameter, we must make an equivalent VAR_DECL, not a
316 copy
= build_decl (VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
319 copy
= copy_node (decl
);
320 if (DECL_LANG_SPECIFIC (copy
))
321 copy_lang_decl (copy
);
323 /* TREE_ADDRESSABLE isn't used to indicate that a label's
324 address has been taken; it's for internal bookkeeping in
325 expand_goto_internal. */
326 if (TREE_CODE (copy
) == LABEL_DECL
)
327 TREE_ADDRESSABLE (copy
) = 0;
330 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
331 declaration inspired this copy. */
332 DECL_ABSTRACT_ORIGIN (copy
) = DECL_ORIGIN (decl
);
334 /* The new variable/label has no RTL, yet. */
335 DECL_RTL (copy
) = NULL_RTX
;
337 /* These args would always appear unused, if not for this. */
338 TREE_USED (copy
) = 1;
340 /* Set the context for the new declaration. */
341 if (!DECL_CONTEXT (decl
))
342 /* Globals stay global. */
344 else if (DECL_CONTEXT (decl
) != from_fn
)
345 /* Things that weren't in the scope of the function we're inlining
346 from aren't in the scope we're inlining too, either. */
348 else if (TREE_STATIC (decl
))
349 /* Function-scoped static variables should say in the original
353 /* Ordinary automatic local variables are now in the scope of the
355 DECL_CONTEXT (copy
) = to_fn
;
360 /* Make the insns and PARM_DECLs of the current function permanent
361 and record other information in DECL_SAVED_INSNS to allow inlining
362 of this function in subsequent calls.
364 This routine need not copy any insns because we are not going
365 to immediately compile the insns in the insn chain. There
366 are two cases when we would compile the insns for FNDECL:
367 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
368 be output at the end of other compilation, because somebody took
369 its address. In the first case, the insns of FNDECL are copied
370 as it is expanded inline, so FNDECL's saved insns are not
371 modified. In the second case, FNDECL is used for the last time,
372 so modifying the rtl is not a problem.
374 We don't have to worry about FNDECL being inline expanded by
375 other functions which are written at the end of compilation
376 because flag_no_inline is turned on when we begin writing
377 functions at the end of compilation. */
380 save_for_inline_nocopy (fndecl
)
385 rtx first_nonparm_insn
;
387 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
388 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
389 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
390 for the parms, prior to elimination of virtual registers.
391 These values are needed for substituting parms properly. */
393 parmdecl_map
= (tree
*) xmalloc (max_parm_reg
* sizeof (tree
));
395 /* Make and emit a return-label if we have not already done so. */
397 if (return_label
== 0)
399 return_label
= gen_label_rtx ();
400 emit_label (return_label
);
403 argvec
= initialize_for_inline (fndecl
);
405 /* If there are insns that copy parms from the stack into pseudo registers,
406 those insns are not copied. `expand_inline_function' must
407 emit the correct code to handle such things. */
410 if (GET_CODE (insn
) != NOTE
)
413 /* Get the insn which signals the end of parameter setup code. */
414 first_nonparm_insn
= get_first_nonparm_insn ();
416 /* Now just scan the chain of insns to see what happens to our
417 PARM_DECLs. If a PARM_DECL is used but never modified, we
418 can substitute its rtl directly when expanding inline (and
419 perform constant folding when its incoming value is constant).
420 Otherwise, we have to copy its value into a new register and track
421 the new register's life. */
422 in_nonparm_insns
= 0;
423 for (insn
= NEXT_INSN (insn
); insn
; insn
= NEXT_INSN (insn
))
425 if (insn
== first_nonparm_insn
)
426 in_nonparm_insns
= 1;
428 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
429 /* Record what interesting things happen to our parameters. */
430 note_stores (PATTERN (insn
), note_modified_parmregs
, NULL
);
433 /* We have now allocated all that needs to be allocated permanently
434 on the rtx obstack. Set our high-water mark, so that we
435 can free the rest of this when the time comes. */
439 cfun
->inl_max_label_num
= max_label_num ();
440 cfun
->inl_last_parm_insn
= cfun
->x_last_parm_insn
;
441 cfun
->original_arg_vector
= argvec
;
442 cfun
->original_decl_initial
= DECL_INITIAL (fndecl
);
443 DECL_SAVED_INSNS (fndecl
) = cfun
;
449 /* Note whether a parameter is modified or not. */
452 note_modified_parmregs (reg
, x
, data
)
454 rtx x ATTRIBUTE_UNUSED
;
455 void *data ATTRIBUTE_UNUSED
;
457 if (GET_CODE (reg
) == REG
&& in_nonparm_insns
458 && REGNO (reg
) < max_parm_reg
459 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
460 && parmdecl_map
[REGNO (reg
)] != 0)
461 TREE_READONLY (parmdecl_map
[REGNO (reg
)]) = 0;
464 /* Unfortunately, we need a global copy of const_equiv map for communication
465 with a function called from note_stores. Be *very* careful that this
466 is used properly in the presence of recursion. */
468 varray_type global_const_equiv_varray
;
470 #define FIXED_BASE_PLUS_P(X) \
471 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
472 && GET_CODE (XEXP (X, 0)) == REG \
473 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
474 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
476 /* Called to set up a mapping for the case where a parameter is in a
477 register. If it is read-only and our argument is a constant, set up the
478 constant equivalence.
480 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
483 Also, don't allow hard registers here; they might not be valid when
484 substituted into insns. */
486 process_reg_param (map
, loc
, copy
)
487 struct inline_remap
*map
;
490 if ((GET_CODE (copy
) != REG
&& GET_CODE (copy
) != SUBREG
)
491 || (GET_CODE (copy
) == REG
&& REG_USERVAR_P (loc
)
492 && ! REG_USERVAR_P (copy
))
493 || (GET_CODE (copy
) == REG
494 && REGNO (copy
) < FIRST_PSEUDO_REGISTER
))
496 rtx temp
= copy_to_mode_reg (GET_MODE (loc
), copy
);
497 REG_USERVAR_P (temp
) = REG_USERVAR_P (loc
);
498 if (CONSTANT_P (copy
) || FIXED_BASE_PLUS_P (copy
))
499 SET_CONST_EQUIV_DATA (map
, temp
, copy
, CONST_AGE_PARM
);
502 map
->reg_map
[REGNO (loc
)] = copy
;
505 /* Used by duplicate_eh_handlers to map labels for the exception table */
506 static struct inline_remap
*eif_eh_map
;
509 expand_inline_function_eh_labelmap (label
)
512 int index
= CODE_LABEL_NUMBER (label
);
513 return get_label_from_map (eif_eh_map
, index
);
516 /* Compare two BLOCKs for qsort. The key we sort on is the
517 BLOCK_ABSTRACT_ORIGIN of the blocks. */
520 compare_blocks (v1
, v2
)
524 tree b1
= *((tree
*) v1
);
525 tree b2
= *((tree
*) v2
);
527 return ((char *) BLOCK_ABSTRACT_ORIGIN (b1
)
528 - (char *) BLOCK_ABSTRACT_ORIGIN (b2
));
531 /* Compare two BLOCKs for bsearch. The first pointer corresponds to
532 an original block; the second to a remapped equivalent. */
540 tree b2
= *((tree
*) v2
);
542 return ((char *) b1
- (char *) BLOCK_ABSTRACT_ORIGIN (b2
));
545 /* Integrate the procedure defined by FNDECL. Note that this function
546 may wind up calling itself. Since the static variables are not
547 reentrant, we do not assign them until after the possibility
548 of recursion is eliminated.
550 If IGNORE is nonzero, do not produce a value.
551 Otherwise store the value in TARGET if it is nonzero and that is convenient.
554 (rtx)-1 if we could not substitute the function
555 0 if we substituted it and it does not produce a value
556 else an rtx for where the value is stored. */
559 expand_inline_function (fndecl
, parms
, target
, ignore
, type
,
560 structure_value_addr
)
565 rtx structure_value_addr
;
567 struct function
*inlining_previous
;
568 struct function
*inl_f
= DECL_SAVED_INSNS (fndecl
);
569 tree formal
, actual
, block
;
570 rtx parm_insns
= inl_f
->emit
->x_first_insn
;
571 rtx insns
= (inl_f
->inl_last_parm_insn
572 ? NEXT_INSN (inl_f
->inl_last_parm_insn
)
579 int min_labelno
= inl_f
->emit
->x_first_label_num
;
580 int max_labelno
= inl_f
->inl_max_label_num
;
582 rtx local_return_label
= 0;
586 struct inline_remap
*map
= 0;
590 rtvec arg_vector
= (rtvec
) inl_f
->original_arg_vector
;
591 rtx static_chain_value
= 0;
594 /* The pointer used to track the true location of the memory used
595 for MAP->LABEL_MAP. */
596 rtx
*real_label_map
= 0;
598 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
599 max_regno
= inl_f
->emit
->x_reg_rtx_no
+ 3;
600 if (max_regno
< FIRST_PSEUDO_REGISTER
)
603 nargs
= list_length (DECL_ARGUMENTS (fndecl
));
605 /* Check that the parms type match and that sufficient arguments were
606 passed. Since the appropriate conversions or default promotions have
607 already been applied, the machine modes should match exactly. */
609 for (formal
= DECL_ARGUMENTS (fndecl
), actual
= parms
;
611 formal
= TREE_CHAIN (formal
), actual
= TREE_CHAIN (actual
))
614 enum machine_mode mode
;
617 return (rtx
) (HOST_WIDE_INT
) -1;
619 arg
= TREE_VALUE (actual
);
620 mode
= TYPE_MODE (DECL_ARG_TYPE (formal
));
622 if (mode
!= TYPE_MODE (TREE_TYPE (arg
))
623 /* If they are block mode, the types should match exactly.
624 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
625 which could happen if the parameter has incomplete type. */
627 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg
))
628 != TYPE_MAIN_VARIANT (TREE_TYPE (formal
)))))
629 return (rtx
) (HOST_WIDE_INT
) -1;
632 /* Extra arguments are valid, but will be ignored below, so we must
633 evaluate them here for side-effects. */
634 for (; actual
; actual
= TREE_CHAIN (actual
))
635 expand_expr (TREE_VALUE (actual
), const0_rtx
,
636 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual
))), 0);
638 /* Expand the function arguments. Do this first so that any
639 new registers get created before we allocate the maps. */
641 arg_vals
= (rtx
*) xmalloc (nargs
* sizeof (rtx
));
642 arg_trees
= (tree
*) xmalloc (nargs
* sizeof (tree
));
644 for (formal
= DECL_ARGUMENTS (fndecl
), actual
= parms
, i
= 0;
646 formal
= TREE_CHAIN (formal
), actual
= TREE_CHAIN (actual
), i
++)
648 /* Actual parameter, converted to the type of the argument within the
650 tree arg
= convert (TREE_TYPE (formal
), TREE_VALUE (actual
));
651 /* Mode of the variable used within the function. */
652 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (formal
));
656 loc
= RTVEC_ELT (arg_vector
, i
);
658 /* If this is an object passed by invisible reference, we copy the
659 object into a stack slot and save its address. If this will go
660 into memory, we do nothing now. Otherwise, we just expand the
662 if (GET_CODE (loc
) == MEM
&& GET_CODE (XEXP (loc
, 0)) == REG
663 && REGNO (XEXP (loc
, 0)) > LAST_VIRTUAL_REGISTER
)
666 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg
)),
667 int_size_in_bytes (TREE_TYPE (arg
)), 1);
668 MEM_SET_IN_STRUCT_P (stack_slot
,
669 AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
671 store_expr (arg
, stack_slot
, 0);
673 arg_vals
[i
] = XEXP (stack_slot
, 0);
676 else if (GET_CODE (loc
) != MEM
)
678 if (GET_MODE (loc
) != TYPE_MODE (TREE_TYPE (arg
)))
679 /* The mode if LOC and ARG can differ if LOC was a variable
680 that had its mode promoted via PROMOTED_MODE. */
681 arg_vals
[i
] = convert_modes (GET_MODE (loc
),
682 TYPE_MODE (TREE_TYPE (arg
)),
683 expand_expr (arg
, NULL_RTX
, mode
,
685 TREE_UNSIGNED (TREE_TYPE (formal
)));
687 arg_vals
[i
] = expand_expr (arg
, NULL_RTX
, mode
, EXPAND_SUM
);
693 && (! TREE_READONLY (formal
)
694 /* If the parameter is not read-only, copy our argument through
695 a register. Also, we cannot use ARG_VALS[I] if it overlaps
696 TARGET in any way. In the inline function, they will likely
697 be two different pseudos, and `safe_from_p' will make all
698 sorts of smart assumptions about their not conflicting.
699 But if ARG_VALS[I] overlaps TARGET, these assumptions are
700 wrong, so put ARG_VALS[I] into a fresh register.
701 Don't worry about invisible references, since their stack
702 temps will never overlap the target. */
705 && (GET_CODE (arg_vals
[i
]) == REG
706 || GET_CODE (arg_vals
[i
]) == SUBREG
707 || GET_CODE (arg_vals
[i
]) == MEM
)
708 && reg_overlap_mentioned_p (arg_vals
[i
], target
))
709 /* ??? We must always copy a SUBREG into a REG, because it might
710 get substituted into an address, and not all ports correctly
711 handle SUBREGs in addresses. */
712 || (GET_CODE (arg_vals
[i
]) == SUBREG
)))
713 arg_vals
[i
] = copy_to_mode_reg (GET_MODE (loc
), arg_vals
[i
]);
715 if (arg_vals
[i
] != 0 && GET_CODE (arg_vals
[i
]) == REG
716 && POINTER_TYPE_P (TREE_TYPE (formal
)))
717 mark_reg_pointer (arg_vals
[i
],
718 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal
)))
722 /* Allocate the structures we use to remap things. */
724 map
= (struct inline_remap
*) xmalloc (sizeof (struct inline_remap
));
725 map
->fndecl
= fndecl
;
727 VARRAY_TREE_INIT (map
->block_map
, 10, "block_map");
728 map
->reg_map
= (rtx
*) xcalloc (max_regno
, sizeof (rtx
));
730 /* We used to use alloca here, but the size of what it would try to
731 allocate would occasionally cause it to exceed the stack limit and
732 cause unpredictable core dumps. */
734 = (rtx
*) xmalloc ((max_labelno
) * sizeof (rtx
));
735 map
->label_map
= real_label_map
;
737 inl_max_uid
= (inl_f
->emit
->x_cur_insn_uid
+ 1);
738 map
->insn_map
= (rtx
*) xcalloc (inl_max_uid
, sizeof (rtx
));
740 map
->max_insnno
= inl_max_uid
;
742 map
->integrating
= 1;
744 /* const_equiv_varray maps pseudos in our routine to constants, so
745 it needs to be large enough for all our pseudos. This is the
746 number we are currently using plus the number in the called
747 routine, plus 15 for each arg, five to compute the virtual frame
748 pointer, and five for the return value. This should be enough
749 for most cases. We do not reference entries outside the range of
752 ??? These numbers are quite arbitrary and were obtained by
753 experimentation. At some point, we should try to allocate the
754 table after all the parameters are set up so we an more accurately
755 estimate the number of pseudos we will need. */
757 VARRAY_CONST_EQUIV_INIT (map
->const_equiv_varray
,
759 + (max_regno
- FIRST_PSEUDO_REGISTER
)
762 "expand_inline_function");
765 /* Record the current insn in case we have to set up pointers to frame
766 and argument memory blocks. If there are no insns yet, add a dummy
767 insn that can be used as an insertion point. */
768 map
->insns_at_start
= get_last_insn ();
769 if (map
->insns_at_start
== 0)
770 map
->insns_at_start
= emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
772 map
->regno_pointer_flag
= inl_f
->emit
->regno_pointer_flag
;
773 map
->regno_pointer_align
= inl_f
->emit
->regno_pointer_align
;
775 /* Update the outgoing argument size to allow for those in the inlined
777 if (inl_f
->outgoing_args_size
> current_function_outgoing_args_size
)
778 current_function_outgoing_args_size
= inl_f
->outgoing_args_size
;
780 /* If the inline function needs to make PIC references, that means
781 that this function's PIC offset table must be used. */
782 if (inl_f
->uses_pic_offset_table
)
783 current_function_uses_pic_offset_table
= 1;
785 /* If this function needs a context, set it up. */
786 if (inl_f
->needs_context
)
787 static_chain_value
= lookup_static_chain (fndecl
);
789 if (GET_CODE (parm_insns
) == NOTE
790 && NOTE_LINE_NUMBER (parm_insns
) > 0)
792 rtx note
= emit_note (NOTE_SOURCE_FILE (parm_insns
),
793 NOTE_LINE_NUMBER (parm_insns
));
795 RTX_INTEGRATED_P (note
) = 1;
798 /* Process each argument. For each, set up things so that the function's
799 reference to the argument will refer to the argument being passed.
800 We only replace REG with REG here. Any simplifications are done
803 We make two passes: In the first, we deal with parameters that will
804 be placed into registers, since we need to ensure that the allocated
805 register number fits in const_equiv_map. Then we store all non-register
806 parameters into their memory location. */
808 /* Don't try to free temp stack slots here, because we may put one of the
809 parameters into a temp stack slot. */
811 for (i
= 0; i
< nargs
; i
++)
813 rtx copy
= arg_vals
[i
];
815 loc
= RTVEC_ELT (arg_vector
, i
);
817 /* There are three cases, each handled separately. */
818 if (GET_CODE (loc
) == MEM
&& GET_CODE (XEXP (loc
, 0)) == REG
819 && REGNO (XEXP (loc
, 0)) > LAST_VIRTUAL_REGISTER
)
821 /* This must be an object passed by invisible reference (it could
822 also be a variable-sized object, but we forbid inlining functions
823 with variable-sized arguments). COPY is the address of the
824 actual value (this computation will cause it to be copied). We
825 map that address for the register, noting the actual address as
826 an equivalent in case it can be substituted into the insns. */
828 if (GET_CODE (copy
) != REG
)
830 temp
= copy_addr_to_reg (copy
);
831 if (CONSTANT_P (copy
) || FIXED_BASE_PLUS_P (copy
))
832 SET_CONST_EQUIV_DATA (map
, temp
, copy
, CONST_AGE_PARM
);
835 map
->reg_map
[REGNO (XEXP (loc
, 0))] = copy
;
837 else if (GET_CODE (loc
) == MEM
)
839 /* This is the case of a parameter that lives in memory. It
840 will live in the block we allocate in the called routine's
841 frame that simulates the incoming argument area. Do nothing
842 with the parameter now; we will call store_expr later. In
843 this case, however, we must ensure that the virtual stack and
844 incoming arg rtx values are expanded now so that we can be
845 sure we have enough slots in the const equiv map since the
846 store_expr call can easily blow the size estimate. */
847 if (DECL_FRAME_SIZE (fndecl
) != 0)
848 copy_rtx_and_substitute (virtual_stack_vars_rtx
, map
, 0);
850 if (DECL_SAVED_INSNS (fndecl
)->args_size
!= 0)
851 copy_rtx_and_substitute (virtual_incoming_args_rtx
, map
, 0);
853 else if (GET_CODE (loc
) == REG
)
854 process_reg_param (map
, loc
, copy
);
855 else if (GET_CODE (loc
) == CONCAT
)
857 rtx locreal
= gen_realpart (GET_MODE (XEXP (loc
, 0)), loc
);
858 rtx locimag
= gen_imagpart (GET_MODE (XEXP (loc
, 0)), loc
);
859 rtx copyreal
= gen_realpart (GET_MODE (locreal
), copy
);
860 rtx copyimag
= gen_imagpart (GET_MODE (locimag
), copy
);
862 process_reg_param (map
, locreal
, copyreal
);
863 process_reg_param (map
, locimag
, copyimag
);
869 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
870 specially. This function can be called recursively, so we need to
871 save the previous value. */
872 inlining_previous
= inlining
;
875 /* Now do the parameters that will be placed in memory. */
877 for (formal
= DECL_ARGUMENTS (fndecl
), i
= 0;
878 formal
; formal
= TREE_CHAIN (formal
), i
++)
880 loc
= RTVEC_ELT (arg_vector
, i
);
882 if (GET_CODE (loc
) == MEM
883 /* Exclude case handled above. */
884 && ! (GET_CODE (XEXP (loc
, 0)) == REG
885 && REGNO (XEXP (loc
, 0)) > LAST_VIRTUAL_REGISTER
))
887 rtx note
= emit_note (DECL_SOURCE_FILE (formal
),
888 DECL_SOURCE_LINE (formal
));
890 RTX_INTEGRATED_P (note
) = 1;
892 /* Compute the address in the area we reserved and store the
894 temp
= copy_rtx_and_substitute (loc
, map
, 1);
895 subst_constants (&temp
, NULL_RTX
, map
, 1);
896 apply_change_group ();
897 if (! memory_address_p (GET_MODE (temp
), XEXP (temp
, 0)))
898 temp
= change_address (temp
, VOIDmode
, XEXP (temp
, 0));
899 store_expr (arg_trees
[i
], temp
, 0);
903 /* Deal with the places that the function puts its result.
904 We are driven by what is placed into DECL_RESULT.
906 Initially, we assume that we don't have anything special handling for
907 REG_FUNCTION_RETURN_VALUE_P. */
909 map
->inline_target
= 0;
910 loc
= DECL_RTL (DECL_RESULT (fndecl
));
912 if (TYPE_MODE (type
) == VOIDmode
)
913 /* There is no return value to worry about. */
915 else if (GET_CODE (loc
) == MEM
)
917 if (GET_CODE (XEXP (loc
, 0)) == ADDRESSOF
)
919 temp
= copy_rtx_and_substitute (loc
, map
, 1);
920 subst_constants (&temp
, NULL_RTX
, map
, 1);
921 apply_change_group ();
926 if (! structure_value_addr
927 || ! aggregate_value_p (DECL_RESULT (fndecl
)))
930 /* Pass the function the address in which to return a structure
931 value. Note that a constructor can cause someone to call us
932 with STRUCTURE_VALUE_ADDR, but the initialization takes place
933 via the first parameter, rather than the struct return address.
935 We have two cases: If the address is a simple register
936 indirect, use the mapping mechanism to point that register to
937 our structure return address. Otherwise, store the structure
938 return value into the place that it will be referenced from. */
940 if (GET_CODE (XEXP (loc
, 0)) == REG
)
942 temp
= force_operand (structure_value_addr
, NULL_RTX
);
943 temp
= force_reg (Pmode
, temp
);
944 map
->reg_map
[REGNO (XEXP (loc
, 0))] = temp
;
946 if (CONSTANT_P (structure_value_addr
)
947 || GET_CODE (structure_value_addr
) == ADDRESSOF
948 || (GET_CODE (structure_value_addr
) == PLUS
949 && (XEXP (structure_value_addr
, 0)
950 == virtual_stack_vars_rtx
)
951 && (GET_CODE (XEXP (structure_value_addr
, 1))
954 SET_CONST_EQUIV_DATA (map
, temp
, structure_value_addr
,
960 temp
= copy_rtx_and_substitute (loc
, map
, 1);
961 subst_constants (&temp
, NULL_RTX
, map
, 0);
962 apply_change_group ();
963 emit_move_insn (temp
, structure_value_addr
);
968 /* We will ignore the result value, so don't look at its structure.
969 Note that preparations for an aggregate return value
970 do need to be made (above) even if it will be ignored. */
972 else if (GET_CODE (loc
) == REG
)
974 /* The function returns an object in a register and we use the return
975 value. Set up our target for remapping. */
977 /* Machine mode function was declared to return. */
978 enum machine_mode departing_mode
= TYPE_MODE (type
);
979 /* (Possibly wider) machine mode it actually computes
980 (for the sake of callers that fail to declare it right).
981 We have to use the mode of the result's RTL, rather than
982 its type, since expand_function_start may have promoted it. */
983 enum machine_mode arriving_mode
984 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl
)));
987 /* Don't use MEMs as direct targets because on some machines
988 substituting a MEM for a REG makes invalid insns.
989 Let the combiner substitute the MEM if that is valid. */
990 if (target
== 0 || GET_CODE (target
) != REG
991 || GET_MODE (target
) != departing_mode
)
993 /* Don't make BLKmode registers. If this looks like
994 a BLKmode object being returned in a register, get
995 the mode from that, otherwise abort. */
996 if (departing_mode
== BLKmode
)
998 if (REG
== GET_CODE (DECL_RTL (DECL_RESULT (fndecl
))))
1000 departing_mode
= GET_MODE (DECL_RTL (DECL_RESULT (fndecl
)));
1001 arriving_mode
= departing_mode
;
1007 target
= gen_reg_rtx (departing_mode
);
1010 /* If function's value was promoted before return,
1011 avoid machine mode mismatch when we substitute INLINE_TARGET.
1012 But TARGET is what we will return to the caller. */
1013 if (arriving_mode
!= departing_mode
)
1015 /* Avoid creating a paradoxical subreg wider than
1016 BITS_PER_WORD, since that is illegal. */
1017 if (GET_MODE_BITSIZE (arriving_mode
) > BITS_PER_WORD
)
1019 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode
),
1020 GET_MODE_BITSIZE (arriving_mode
)))
1021 /* Maybe could be handled by using convert_move () ? */
1023 reg_to_map
= gen_reg_rtx (arriving_mode
);
1024 target
= gen_lowpart (departing_mode
, reg_to_map
);
1027 reg_to_map
= gen_rtx_SUBREG (arriving_mode
, target
, 0);
1030 reg_to_map
= target
;
1032 /* Usually, the result value is the machine's return register.
1033 Sometimes it may be a pseudo. Handle both cases. */
1034 if (REG_FUNCTION_VALUE_P (loc
))
1035 map
->inline_target
= reg_to_map
;
1037 map
->reg_map
[REGNO (loc
)] = reg_to_map
;
1042 /* Initialize label_map. get_label_from_map will actually make
1044 bzero ((char *) &map
->label_map
[min_labelno
],
1045 (max_labelno
- min_labelno
) * sizeof (rtx
));
1047 /* Make copies of the decls of the symbols in the inline function, so that
1048 the copies of the variables get declared in the current function. Set
1049 up things so that lookup_static_chain knows that to interpret registers
1050 in SAVE_EXPRs for TYPE_SIZEs as local. */
1051 inline_function_decl
= fndecl
;
1052 integrate_parm_decls (DECL_ARGUMENTS (fndecl
), map
, arg_vector
);
1053 block
= integrate_decl_tree (inl_f
->original_decl_initial
, map
);
1054 BLOCK_ABSTRACT_ORIGIN (block
) = DECL_ORIGIN (fndecl
);
1055 inline_function_decl
= 0;
1057 /* Make a fresh binding contour that we can easily remove. Do this after
1058 expanding our arguments so cleanups are properly scoped. */
1059 expand_start_bindings_and_block (0, block
);
1061 /* Sort the block-map so that it will be easy to find remapped
1063 qsort (&VARRAY_TREE (map
->block_map
, 0),
1064 map
->block_map
->elements_used
,
1068 /* Perform postincrements before actually calling the function. */
1071 /* Clean up stack so that variables might have smaller offsets. */
1072 do_pending_stack_adjust ();
1074 /* Save a copy of the location of const_equiv_varray for
1075 mark_stores, called via note_stores. */
1076 global_const_equiv_varray
= map
->const_equiv_varray
;
1078 /* If the called function does an alloca, save and restore the
1079 stack pointer around the call. This saves stack space, but
1080 also is required if this inline is being done between two
1082 if (inl_f
->calls_alloca
)
1083 emit_stack_save (SAVE_BLOCK
, &stack_save
, NULL_RTX
);
1085 /* Now copy the insns one by one. Do this in two passes, first the insns and
1086 then their REG_NOTES, just like save_for_inline. */
1088 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1090 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1092 rtx copy
, pattern
, set
;
1094 map
->orig_asm_operands_vector
= 0;
1096 switch (GET_CODE (insn
))
1099 pattern
= PATTERN (insn
);
1100 set
= single_set (insn
);
1102 if (GET_CODE (pattern
) == USE
1103 && GET_CODE (XEXP (pattern
, 0)) == REG
1104 && REG_FUNCTION_VALUE_P (XEXP (pattern
, 0)))
1105 /* The (USE (REG n)) at return from the function should
1106 be ignored since we are changing (REG n) into
1110 /* If the inline fn needs eh context, make sure that
1111 the current fn has one. */
1112 if (GET_CODE (pattern
) == USE
1113 && find_reg_note (insn
, REG_EH_CONTEXT
, 0) != 0)
1116 /* Ignore setting a function value that we don't want to use. */
1117 if (map
->inline_target
== 0
1119 && GET_CODE (SET_DEST (set
)) == REG
1120 && REG_FUNCTION_VALUE_P (SET_DEST (set
)))
1122 if (volatile_refs_p (SET_SRC (set
)))
1126 /* If we must not delete the source,
1127 load it into a new temporary. */
1128 copy
= emit_insn (copy_rtx_and_substitute (pattern
, map
, 0));
1130 new_set
= single_set (copy
);
1135 = gen_reg_rtx (GET_MODE (SET_DEST (new_set
)));
1137 /* If the source and destination are the same and it
1138 has a note on it, keep the insn. */
1139 else if (rtx_equal_p (SET_DEST (set
), SET_SRC (set
))
1140 && REG_NOTES (insn
) != 0)
1141 copy
= emit_insn (copy_rtx_and_substitute (pattern
, map
, 0));
1146 /* If this is setting the static chain rtx, omit it. */
1147 else if (static_chain_value
!= 0
1149 && GET_CODE (SET_DEST (set
)) == REG
1150 && rtx_equal_p (SET_DEST (set
),
1151 static_chain_incoming_rtx
))
1154 /* If this is setting the static chain pseudo, set it from
1155 the value we want to give it instead. */
1156 else if (static_chain_value
!= 0
1158 && rtx_equal_p (SET_SRC (set
),
1159 static_chain_incoming_rtx
))
1161 rtx newdest
= copy_rtx_and_substitute (SET_DEST (set
), map
, 1);
1163 copy
= emit_move_insn (newdest
, static_chain_value
);
1164 static_chain_value
= 0;
1167 /* If this is setting the virtual stack vars register, this must
1168 be the code at the handler for a builtin longjmp. The value
1169 saved in the setjmp buffer will be the address of the frame
1170 we've made for this inlined instance within our frame. But we
1171 know the offset of that value so we can use it to reconstruct
1172 our virtual stack vars register from that value. If we are
1173 copying it from the stack pointer, leave it unchanged. */
1175 && rtx_equal_p (SET_DEST (set
), virtual_stack_vars_rtx
))
1177 HOST_WIDE_INT offset
;
1178 temp
= map
->reg_map
[REGNO (SET_DEST (set
))];
1179 temp
= VARRAY_CONST_EQUIV (map
->const_equiv_varray
,
1182 if (rtx_equal_p (temp
, virtual_stack_vars_rtx
))
1184 else if (GET_CODE (temp
) == PLUS
1185 && rtx_equal_p (XEXP (temp
, 0), virtual_stack_vars_rtx
)
1186 && GET_CODE (XEXP (temp
, 1)) == CONST_INT
)
1187 offset
= INTVAL (XEXP (temp
, 1));
1191 if (rtx_equal_p (SET_SRC (set
), stack_pointer_rtx
))
1192 temp
= SET_SRC (set
);
1194 temp
= force_operand (plus_constant (SET_SRC (set
),
1198 copy
= emit_move_insn (virtual_stack_vars_rtx
, temp
);
1202 copy
= emit_insn (copy_rtx_and_substitute (pattern
, map
, 0));
1203 /* REG_NOTES will be copied later. */
1206 /* If this insn is setting CC0, it may need to look at
1207 the insn that uses CC0 to see what type of insn it is.
1208 In that case, the call to recog via validate_change will
1209 fail. So don't substitute constants here. Instead,
1210 do it when we emit the following insn.
1212 For example, see the pyr.md file. That machine has signed and
1213 unsigned compares. The compare patterns must check the
1214 following branch insn to see which what kind of compare to
1217 If the previous insn set CC0, substitute constants on it as
1219 if (sets_cc0_p (PATTERN (copy
)) != 0)
1224 try_constants (cc0_insn
, map
);
1226 try_constants (copy
, map
);
1229 try_constants (copy
, map
);
1234 if (GET_CODE (PATTERN (insn
)) == RETURN
1235 || (GET_CODE (PATTERN (insn
)) == PARALLEL
1236 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == RETURN
))
1238 if (local_return_label
== 0)
1239 local_return_label
= gen_label_rtx ();
1240 pattern
= gen_jump (local_return_label
);
1243 pattern
= copy_rtx_and_substitute (PATTERN (insn
), map
, 0);
1245 copy
= emit_jump_insn (pattern
);
1249 try_constants (cc0_insn
, map
);
1252 try_constants (copy
, map
);
1254 /* If this used to be a conditional jump insn but whose branch
1255 direction is now know, we must do something special. */
1256 if (condjump_p (insn
) && ! simplejump_p (insn
) && map
->last_pc_value
)
1259 /* If the previous insn set cc0 for us, delete it. */
1260 if (sets_cc0_p (PREV_INSN (copy
)))
1261 delete_insn (PREV_INSN (copy
));
1264 /* If this is now a no-op, delete it. */
1265 if (map
->last_pc_value
== pc_rtx
)
1271 /* Otherwise, this is unconditional jump so we must put a
1272 BARRIER after it. We could do some dead code elimination
1273 here, but jump.c will do it just as well. */
1279 pattern
= copy_rtx_and_substitute (PATTERN (insn
), map
, 0);
1280 copy
= emit_call_insn (pattern
);
1282 /* Because the USAGE information potentially contains objects other
1283 than hard registers, we need to copy it. */
1284 CALL_INSN_FUNCTION_USAGE (copy
)
1285 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn
),
1290 try_constants (cc0_insn
, map
);
1293 try_constants (copy
, map
);
1295 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1296 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1297 VARRAY_CONST_EQUIV (map
->const_equiv_varray
, i
).rtx
= 0;
1301 copy
= emit_label (get_label_from_map (map
,
1302 CODE_LABEL_NUMBER (insn
)));
1303 LABEL_NAME (copy
) = LABEL_NAME (insn
);
1308 copy
= emit_barrier ();
1312 /* It is important to discard function-end and function-beg notes,
1313 so we have only one of each in the current function.
1314 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1315 deleted these in the copy used for continuing compilation,
1316 not the copy used for inlining). */
1317 if (NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
1318 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_BEG
1319 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_DELETED
)
1321 copy
= emit_note (NOTE_SOURCE_FILE (insn
),
1322 NOTE_LINE_NUMBER (insn
));
1324 && (NOTE_LINE_NUMBER (copy
) == NOTE_INSN_EH_REGION_BEG
1325 || NOTE_LINE_NUMBER (copy
) == NOTE_INSN_EH_REGION_END
))
1328 = get_label_from_map (map
, NOTE_EH_HANDLER (copy
));
1330 /* we have to duplicate the handlers for the original */
1331 if (NOTE_LINE_NUMBER (copy
) == NOTE_INSN_EH_REGION_BEG
)
1333 /* We need to duplicate the handlers for the EH region
1334 and we need to indicate where the label map is */
1336 duplicate_eh_handlers (NOTE_EH_HANDLER (copy
),
1337 CODE_LABEL_NUMBER (label
),
1338 expand_inline_function_eh_labelmap
);
1341 /* We have to forward these both to match the new exception
1343 NOTE_EH_HANDLER (copy
) = CODE_LABEL_NUMBER (label
);
1346 && (NOTE_LINE_NUMBER (copy
) == NOTE_INSN_BLOCK_BEG
1347 || NOTE_LINE_NUMBER (copy
) == NOTE_INSN_BLOCK_END
)
1348 && NOTE_BLOCK (insn
))
1350 tree
*mapped_block_p
;
1353 = (tree
*) bsearch (NOTE_BLOCK (insn
),
1354 &VARRAY_TREE (map
->block_map
, 0),
1355 map
->block_map
->elements_used
,
1359 if (!mapped_block_p
)
1362 NOTE_BLOCK (copy
) = *mapped_block_p
;
1374 RTX_INTEGRATED_P (copy
) = 1;
1376 map
->insn_map
[INSN_UID (insn
)] = copy
;
1379 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1380 from parameters can be substituted in. These are the only ones that
1381 are valid across the entire function. */
1383 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1384 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
1385 && map
->insn_map
[INSN_UID (insn
)]
1386 && REG_NOTES (insn
))
1388 rtx tem
= copy_rtx_and_substitute (REG_NOTES (insn
), map
, 0);
1390 /* We must also do subst_constants, in case one of our parameters
1391 has const type and constant value. */
1392 subst_constants (&tem
, NULL_RTX
, map
, 0);
1393 apply_change_group ();
1394 REG_NOTES (map
->insn_map
[INSN_UID (insn
)]) = tem
;
1397 if (local_return_label
)
1398 emit_label (local_return_label
);
1400 /* Restore the stack pointer if we saved it above. */
1401 if (inl_f
->calls_alloca
)
1402 emit_stack_restore (SAVE_BLOCK
, stack_save
, NULL_RTX
);
1404 if (! cfun
->x_whole_function_mode_p
)
1405 /* In statement-at-a-time mode, we just tell the front-end to add
1406 this block to the list of blocks at this binding level. We
1407 can't do it the way it's done for function-at-a-time mode the
1408 superblocks have not been created yet. */
1409 insert_block (block
);
1413 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl
));
1414 BLOCK_CHAIN (DECL_INITIAL (current_function_decl
)) = block
;
1417 /* End the scope containing the copied formal parameter variables
1418 and copied LABEL_DECLs. We pass NULL_TREE for the variables list
1419 here so that expand_end_bindings will not check for unused
1420 variables. That's already been checked for when the inlined
1421 function was defined. */
1422 expand_end_bindings (NULL_TREE
, 1, 1);
1424 /* Must mark the line number note after inlined functions as a repeat, so
1425 that the test coverage code can avoid counting the call twice. This
1426 just tells the code to ignore the immediately following line note, since
1427 there already exists a copy of this note before the expanded inline call.
1428 This line number note is still needed for debugging though, so we can't
1430 if (flag_test_coverage
)
1431 emit_note (0, NOTE_REPEATED_LINE_NUMBER
);
1433 emit_line_note (input_filename
, lineno
);
1435 /* If the function returns a BLKmode object in a register, copy it
1436 out of the temp register into a BLKmode memory object. */
1438 && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl
))) == BLKmode
1439 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl
))))
1440 target
= copy_blkmode_from_reg (0, target
, TREE_TYPE (TREE_TYPE (fndecl
)));
1442 if (structure_value_addr
)
1444 target
= gen_rtx_MEM (TYPE_MODE (type
),
1445 memory_address (TYPE_MODE (type
),
1446 structure_value_addr
));
1447 MEM_SET_IN_STRUCT_P (target
, 1);
1450 /* Make sure we free the things we explicitly allocated with xmalloc. */
1452 free (real_label_map
);
1453 VARRAY_FREE (map
->const_equiv_varray
);
1454 free (map
->reg_map
);
1455 VARRAY_FREE (map
->block_map
);
1456 free (map
->insn_map
);
1461 inlining
= inlining_previous
;
1466 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1467 push all of those decls and give each one the corresponding home. */
1470 integrate_parm_decls (args
, map
, arg_vector
)
1472 struct inline_remap
*map
;
1478 for (tail
= args
, i
= 0; tail
; tail
= TREE_CHAIN (tail
), i
++)
1480 tree decl
= copy_decl_for_inlining (tail
, map
->fndecl
,
1481 current_function_decl
);
1483 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector
, i
), map
, 1);
1485 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1486 here, but that's going to require some more work. */
1487 /* DECL_INCOMING_RTL (decl) = ?; */
1488 /* Fully instantiate the address with the equivalent form so that the
1489 debugging information contains the actual register, instead of the
1490 virtual register. Do this by not passing an insn to
1492 subst_constants (&new_decl_rtl
, NULL_RTX
, map
, 1);
1493 apply_change_group ();
1494 DECL_RTL (decl
) = new_decl_rtl
;
1498 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1499 current function a tree of contexts isomorphic to the one that is given.
1501 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1502 registers used in the DECL_RTL field should be remapped. If it is zero,
1503 no mapping is necessary. */
1506 integrate_decl_tree (let
, map
)
1508 struct inline_remap
*map
;
1514 new_block
= make_node (BLOCK
);
1515 VARRAY_PUSH_TREE (map
->block_map
, new_block
);
1516 next
= &BLOCK_VARS (new_block
);
1518 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
1522 push_obstacks_nochange ();
1523 saveable_allocation ();
1524 d
= copy_decl_for_inlining (t
, map
->fndecl
, current_function_decl
);
1527 if (DECL_RTL (t
) != 0)
1529 DECL_RTL (d
) = copy_rtx_and_substitute (DECL_RTL (t
), map
, 1);
1531 /* Fully instantiate the address with the equivalent form so that the
1532 debugging information contains the actual register, instead of the
1533 virtual register. Do this by not passing an insn to
1535 subst_constants (&DECL_RTL (d
), NULL_RTX
, map
, 1);
1536 apply_change_group ();
1539 /* Add this declaration to the list of variables in the new
1542 next
= &TREE_CHAIN (d
);
1545 next
= &BLOCK_SUBBLOCKS (new_block
);
1546 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= BLOCK_CHAIN (t
))
1548 *next
= integrate_decl_tree (t
, map
);
1549 BLOCK_SUPERCONTEXT (*next
) = new_block
;
1550 next
= &BLOCK_CHAIN (*next
);
1553 TREE_USED (new_block
) = TREE_USED (let
);
1554 BLOCK_ABSTRACT_ORIGIN (new_block
) = let
;
1559 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1560 except for those few rtx codes that are sharable.
1562 We always return an rtx that is similar to that incoming rtx, with the
1563 exception of possibly changing a REG to a SUBREG or vice versa. No
1564 rtl is ever emitted.
1566 If FOR_LHS is nonzero, if means we are processing something that will
1567 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1568 inlining since we need to be conservative in how it is set for
1571 Handle constants that need to be placed in the constant pool by
1572 calling `force_const_mem'. */
1575 copy_rtx_and_substitute (orig
, map
, for_lhs
)
1577 struct inline_remap
*map
;
1580 register rtx copy
, temp
;
1582 register RTX_CODE code
;
1583 register enum machine_mode mode
;
1584 register const char *format_ptr
;
1590 code
= GET_CODE (orig
);
1591 mode
= GET_MODE (orig
);
1596 /* If the stack pointer register shows up, it must be part of
1597 stack-adjustments (*not* because we eliminated the frame pointer!).
1598 Small hard registers are returned as-is. Pseudo-registers
1599 go through their `reg_map'. */
1600 regno
= REGNO (orig
);
1601 if (regno
<= LAST_VIRTUAL_REGISTER
1602 || (map
->integrating
1603 && DECL_SAVED_INSNS (map
->fndecl
)->internal_arg_pointer
== orig
))
1605 /* Some hard registers are also mapped,
1606 but others are not translated. */
1607 if (map
->reg_map
[regno
] != 0)
1608 return map
->reg_map
[regno
];
1610 /* If this is the virtual frame pointer, make space in current
1611 function's stack frame for the stack frame of the inline function.
1613 Copy the address of this area into a pseudo. Map
1614 virtual_stack_vars_rtx to this pseudo and set up a constant
1615 equivalence for it to be the address. This will substitute the
1616 address into insns where it can be substituted and use the new
1617 pseudo where it can't. */
1618 if (regno
== VIRTUAL_STACK_VARS_REGNUM
)
1621 int size
= get_func_frame_size (DECL_SAVED_INSNS (map
->fndecl
));
1623 #ifdef FRAME_GROWS_DOWNWARD
1624 /* In this case, virtual_stack_vars_rtx points to one byte
1625 higher than the top of the frame area. So make sure we
1626 allocate a big enough chunk to keep the frame pointer
1627 aligned like a real one. */
1628 size
= CEIL_ROUND (size
, BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
);
1631 loc
= assign_stack_temp (BLKmode
, size
, 1);
1632 loc
= XEXP (loc
, 0);
1633 #ifdef FRAME_GROWS_DOWNWARD
1634 /* In this case, virtual_stack_vars_rtx points to one byte
1635 higher than the top of the frame area. So compute the offset
1636 to one byte higher than our substitute frame. */
1637 loc
= plus_constant (loc
, size
);
1639 map
->reg_map
[regno
] = temp
1640 = force_reg (Pmode
, force_operand (loc
, NULL_RTX
));
1642 #ifdef STACK_BOUNDARY
1643 mark_reg_pointer (map
->reg_map
[regno
],
1644 STACK_BOUNDARY
/ BITS_PER_UNIT
);
1647 SET_CONST_EQUIV_DATA (map
, temp
, loc
, CONST_AGE_PARM
);
1649 seq
= gen_sequence ();
1651 emit_insn_after (seq
, map
->insns_at_start
);
1654 else if (regno
== VIRTUAL_INCOMING_ARGS_REGNUM
1655 || (map
->integrating
1656 && (DECL_SAVED_INSNS (map
->fndecl
)->internal_arg_pointer
1659 /* Do the same for a block to contain any arguments referenced
1662 int size
= DECL_SAVED_INSNS (map
->fndecl
)->args_size
;
1665 loc
= assign_stack_temp (BLKmode
, size
, 1);
1666 loc
= XEXP (loc
, 0);
1667 /* When arguments grow downward, the virtual incoming
1668 args pointer points to the top of the argument block,
1669 so the remapped location better do the same. */
1670 #ifdef ARGS_GROW_DOWNWARD
1671 loc
= plus_constant (loc
, size
);
1673 map
->reg_map
[regno
] = temp
1674 = force_reg (Pmode
, force_operand (loc
, NULL_RTX
));
1676 #ifdef STACK_BOUNDARY
1677 mark_reg_pointer (map
->reg_map
[regno
],
1678 STACK_BOUNDARY
/ BITS_PER_UNIT
);
1681 SET_CONST_EQUIV_DATA (map
, temp
, loc
, CONST_AGE_PARM
);
1683 seq
= gen_sequence ();
1685 emit_insn_after (seq
, map
->insns_at_start
);
1688 else if (REG_FUNCTION_VALUE_P (orig
))
1690 /* This is a reference to the function return value. If
1691 the function doesn't have a return value, error. If the
1692 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
1693 if (map
->inline_target
== 0)
1694 /* Must be unrolling loops or replicating code if we
1695 reach here, so return the register unchanged. */
1697 else if (GET_MODE (map
->inline_target
) != BLKmode
1698 && mode
!= GET_MODE (map
->inline_target
))
1699 return gen_lowpart (mode
, map
->inline_target
);
1701 return map
->inline_target
;
1705 if (map
->reg_map
[regno
] == NULL
)
1707 map
->reg_map
[regno
] = gen_reg_rtx (mode
);
1708 REG_USERVAR_P (map
->reg_map
[regno
]) = REG_USERVAR_P (orig
);
1709 REG_LOOP_TEST_P (map
->reg_map
[regno
]) = REG_LOOP_TEST_P (orig
);
1710 RTX_UNCHANGING_P (map
->reg_map
[regno
]) = RTX_UNCHANGING_P (orig
);
1711 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1713 if (map
->regno_pointer_flag
[regno
])
1714 mark_reg_pointer (map
->reg_map
[regno
],
1715 map
->regno_pointer_align
[regno
]);
1717 return map
->reg_map
[regno
];
1720 copy
= copy_rtx_and_substitute (SUBREG_REG (orig
), map
, for_lhs
);
1721 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1722 if (GET_CODE (copy
) == SUBREG
)
1723 return gen_rtx_SUBREG (GET_MODE (orig
), SUBREG_REG (copy
),
1724 SUBREG_WORD (orig
) + SUBREG_WORD (copy
));
1725 else if (GET_CODE (copy
) == CONCAT
)
1727 rtx retval
= subreg_realpart_p (orig
) ? XEXP (copy
, 0) : XEXP (copy
, 1);
1729 if (GET_MODE (retval
) == GET_MODE (orig
))
1732 return gen_rtx_SUBREG (GET_MODE (orig
), retval
,
1733 (SUBREG_WORD (orig
) %
1734 (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig
)))
1735 / (unsigned) UNITS_PER_WORD
)));
1738 return gen_rtx_SUBREG (GET_MODE (orig
), copy
,
1739 SUBREG_WORD (orig
));
1742 copy
= gen_rtx_ADDRESSOF (mode
,
1743 copy_rtx_and_substitute (XEXP (orig
, 0),
1745 0, ADDRESSOF_DECL(orig
));
1746 regno
= ADDRESSOF_REGNO (orig
);
1747 if (map
->reg_map
[regno
])
1748 regno
= REGNO (map
->reg_map
[regno
]);
1749 else if (regno
> LAST_VIRTUAL_REGISTER
)
1751 temp
= XEXP (orig
, 0);
1752 map
->reg_map
[regno
] = gen_reg_rtx (GET_MODE (temp
));
1753 REG_USERVAR_P (map
->reg_map
[regno
]) = REG_USERVAR_P (temp
);
1754 REG_LOOP_TEST_P (map
->reg_map
[regno
]) = REG_LOOP_TEST_P (temp
);
1755 RTX_UNCHANGING_P (map
->reg_map
[regno
]) = RTX_UNCHANGING_P (temp
);
1756 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1758 if (map
->regno_pointer_flag
[regno
])
1759 mark_reg_pointer (map
->reg_map
[regno
],
1760 map
->regno_pointer_align
[regno
]);
1761 regno
= REGNO (map
->reg_map
[regno
]);
1763 ADDRESSOF_REGNO (copy
) = regno
;
1768 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1769 to (use foo) if the original insn didn't have a subreg.
1770 Removing the subreg distorts the VAX movstrhi pattern
1771 by changing the mode of an operand. */
1772 copy
= copy_rtx_and_substitute (XEXP (orig
, 0), map
, code
== CLOBBER
);
1773 if (GET_CODE (copy
) == SUBREG
&& GET_CODE (XEXP (orig
, 0)) != SUBREG
)
1774 copy
= SUBREG_REG (copy
);
1775 return gen_rtx_fmt_e (code
, VOIDmode
, copy
);
1778 LABEL_PRESERVE_P (get_label_from_map (map
, CODE_LABEL_NUMBER (orig
)))
1779 = LABEL_PRESERVE_P (orig
);
1780 return get_label_from_map (map
, CODE_LABEL_NUMBER (orig
));
1786 LABEL_REF_NONLOCAL_P (orig
) ? XEXP (orig
, 0)
1787 : get_label_from_map (map
, CODE_LABEL_NUMBER (XEXP (orig
, 0))));
1789 LABEL_OUTSIDE_LOOP_P (copy
) = LABEL_OUTSIDE_LOOP_P (orig
);
1791 /* The fact that this label was previously nonlocal does not mean
1792 it still is, so we must check if it is within the range of
1793 this function's labels. */
1794 LABEL_REF_NONLOCAL_P (copy
)
1795 = (LABEL_REF_NONLOCAL_P (orig
)
1796 && ! (CODE_LABEL_NUMBER (XEXP (copy
, 0)) >= get_first_label_num ()
1797 && CODE_LABEL_NUMBER (XEXP (copy
, 0)) < max_label_num ()));
1799 /* If we have made a nonlocal label local, it means that this
1800 inlined call will be referring to our nonlocal goto handler.
1801 So make sure we create one for this block; we normally would
1802 not since this is not otherwise considered a "call". */
1803 if (LABEL_REF_NONLOCAL_P (orig
) && ! LABEL_REF_NONLOCAL_P (copy
))
1804 function_call_count
++;
1814 /* Symbols which represent the address of a label stored in the constant
1815 pool must be modified to point to a constant pool entry for the
1816 remapped label. Otherwise, symbols are returned unchanged. */
1817 if (CONSTANT_POOL_ADDRESS_P (orig
))
1819 struct function
*f
= inlining
? inlining
: cfun
;
1820 rtx constant
= get_pool_constant_for_function (f
, orig
);
1821 enum machine_mode const_mode
= get_pool_mode_for_function (f
, orig
);
1824 rtx temp
= force_const_mem (const_mode
,
1825 copy_rtx_and_substitute (constant
,
1829 /* Legitimizing the address here is incorrect.
1831 Since we had a SYMBOL_REF before, we can assume it is valid
1832 to have one in this position in the insn.
1834 Also, change_address may create new registers. These
1835 registers will not have valid reg_map entries. This can
1836 cause try_constants() to fail because assumes that all
1837 registers in the rtx have valid reg_map entries, and it may
1838 end up replacing one of these new registers with junk. */
1840 if (! memory_address_p (GET_MODE (temp
), XEXP (temp
, 0)))
1841 temp
= change_address (temp
, GET_MODE (temp
), XEXP (temp
, 0));
1844 temp
= XEXP (temp
, 0);
1846 #ifdef POINTERS_EXTEND_UNSIGNED
1847 if (GET_MODE (temp
) != GET_MODE (orig
))
1848 temp
= convert_memory_address (GET_MODE (orig
), temp
);
1852 else if (GET_CODE (constant
) == LABEL_REF
)
1853 return XEXP (force_const_mem
1855 copy_rtx_and_substitute (constant
, map
, for_lhs
)),
1859 if (SYMBOL_REF_NEED_ADJUST (orig
))
1862 return rethrow_symbol_map (orig
,
1863 expand_inline_function_eh_labelmap
);
1869 /* We have to make a new copy of this CONST_DOUBLE because don't want
1870 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
1871 duplicate of a CONST_DOUBLE we have already seen. */
1872 if (GET_MODE_CLASS (GET_MODE (orig
)) == MODE_FLOAT
)
1876 REAL_VALUE_FROM_CONST_DOUBLE (d
, orig
);
1877 return CONST_DOUBLE_FROM_REAL_VALUE (d
, GET_MODE (orig
));
1880 return immed_double_const (CONST_DOUBLE_LOW (orig
),
1881 CONST_DOUBLE_HIGH (orig
), VOIDmode
);
1884 /* Make new constant pool entry for a constant
1885 that was in the pool of the inline function. */
1886 if (RTX_INTEGRATED_P (orig
))
1891 /* If a single asm insn contains multiple output operands
1892 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1893 We must make sure that the copied insn continues to share it. */
1894 if (map
->orig_asm_operands_vector
== XVEC (orig
, 3))
1896 copy
= rtx_alloc (ASM_OPERANDS
);
1897 copy
->volatil
= orig
->volatil
;
1898 XSTR (copy
, 0) = XSTR (orig
, 0);
1899 XSTR (copy
, 1) = XSTR (orig
, 1);
1900 XINT (copy
, 2) = XINT (orig
, 2);
1901 XVEC (copy
, 3) = map
->copy_asm_operands_vector
;
1902 XVEC (copy
, 4) = map
->copy_asm_constraints_vector
;
1903 XSTR (copy
, 5) = XSTR (orig
, 5);
1904 XINT (copy
, 6) = XINT (orig
, 6);
1910 /* This is given special treatment because the first
1911 operand of a CALL is a (MEM ...) which may get
1912 forced into a register for cse. This is undesirable
1913 if function-address cse isn't wanted or if we won't do cse. */
1914 #ifndef NO_FUNCTION_CSE
1915 if (! (optimize
&& ! flag_no_function_cse
))
1920 gen_rtx_MEM (GET_MODE (XEXP (orig
, 0)),
1921 copy_rtx_and_substitute (XEXP (XEXP (orig
, 0), 0),
1923 copy_rtx_and_substitute (XEXP (orig
, 1), map
, 0));
1927 /* Must be ifdefed out for loop unrolling to work. */
1933 /* If this is setting fp or ap, it means that we have a nonlocal goto.
1934 Adjust the setting by the offset of the area we made.
1935 If the nonlocal goto is into the current function,
1936 this will result in unnecessarily bad code, but should work. */
1937 if (SET_DEST (orig
) == virtual_stack_vars_rtx
1938 || SET_DEST (orig
) == virtual_incoming_args_rtx
)
1940 /* In case a translation hasn't occurred already, make one now. */
1943 HOST_WIDE_INT loc_offset
;
1945 copy_rtx_and_substitute (SET_DEST (orig
), map
, for_lhs
);
1946 equiv_reg
= map
->reg_map
[REGNO (SET_DEST (orig
))];
1947 equiv_loc
= VARRAY_CONST_EQUIV (map
->const_equiv_varray
,
1948 REGNO (equiv_reg
)).rtx
;
1950 = GET_CODE (equiv_loc
) == REG
? 0 : INTVAL (XEXP (equiv_loc
, 1));
1952 return gen_rtx_SET (VOIDmode
, SET_DEST (orig
),
1955 (copy_rtx_and_substitute (SET_SRC (orig
),
1961 return gen_rtx_SET (VOIDmode
,
1962 copy_rtx_and_substitute (SET_DEST (orig
), map
, 1),
1963 copy_rtx_and_substitute (SET_SRC (orig
), map
, 0));
1968 && GET_CODE (XEXP (orig
, 0)) == SYMBOL_REF
1969 && CONSTANT_POOL_ADDRESS_P (XEXP (orig
, 0)))
1971 enum machine_mode const_mode
1972 = get_pool_mode_for_function (inlining
, XEXP (orig
, 0));
1974 = get_pool_constant_for_function (inlining
, XEXP (orig
, 0));
1976 constant
= copy_rtx_and_substitute (constant
, map
, 0);
1978 /* If this was an address of a constant pool entry that itself
1979 had to be placed in the constant pool, it might not be a
1980 valid address. So the recursive call might have turned it
1981 into a register. In that case, it isn't a constant any
1982 more, so return it. This has the potential of changing a
1983 MEM into a REG, but we'll assume that it safe. */
1984 if (! CONSTANT_P (constant
))
1987 return validize_mem (force_const_mem (const_mode
, constant
));
1990 copy
= rtx_alloc (MEM
);
1991 PUT_MODE (copy
, mode
);
1992 XEXP (copy
, 0) = copy_rtx_and_substitute (XEXP (orig
, 0), map
, 0);
1993 MEM_COPY_ATTRIBUTES (copy
, orig
);
1994 MEM_ALIAS_SET (copy
) = MEM_ALIAS_SET (orig
);
1995 RTX_UNCHANGING_P (copy
) = RTX_UNCHANGING_P (orig
);
2002 copy
= rtx_alloc (code
);
2003 PUT_MODE (copy
, mode
);
2004 copy
->in_struct
= orig
->in_struct
;
2005 copy
->volatil
= orig
->volatil
;
2006 copy
->unchanging
= orig
->unchanging
;
2008 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
2010 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
2012 switch (*format_ptr
++)
2015 /* Copy this through the wide int field; that's safest. */
2016 X0WINT (copy
, i
) = X0WINT (orig
, i
);
2021 = copy_rtx_and_substitute (XEXP (orig
, i
), map
, for_lhs
);
2025 /* Change any references to old-insns to point to the
2026 corresponding copied insns. */
2027 XEXP (copy
, i
) = map
->insn_map
[INSN_UID (XEXP (orig
, i
))];
2031 XVEC (copy
, i
) = XVEC (orig
, i
);
2032 if (XVEC (orig
, i
) != NULL
&& XVECLEN (orig
, i
) != 0)
2034 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
2035 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
2036 XVECEXP (copy
, i
, j
)
2037 = copy_rtx_and_substitute (XVECEXP (orig
, i
, j
),
2043 XWINT (copy
, i
) = XWINT (orig
, i
);
2047 XINT (copy
, i
) = XINT (orig
, i
);
2051 XSTR (copy
, i
) = XSTR (orig
, i
);
2055 XTREE (copy
, i
) = XTREE (orig
, i
);
2063 if (code
== ASM_OPERANDS
&& map
->orig_asm_operands_vector
== 0)
2065 map
->orig_asm_operands_vector
= XVEC (orig
, 3);
2066 map
->copy_asm_operands_vector
= XVEC (copy
, 3);
2067 map
->copy_asm_constraints_vector
= XVEC (copy
, 4);
/* Substitute known constant values into INSN, if that is valid.
   INSN is the instruction to update and MAP is the inline-expansion
   remapping state holding the known register/constant equivalences.
   Any substitutions that do not yield a valid insn are backed out by
   the validate_change/apply_change_group machinery.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  The SETs
     recorded by subst_constants are replayed into MAP here.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  /* Grow the equivalence table if needed, then record the
	     equivalence unless an identical one is already present.  */
	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* Following clause is a hack to make case work where GNU C++
		 reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust address of items previously addressed
   via the virtual stack variable or virtual incoming arguments registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (loc, insn, map, memonly)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
     int memonly;
{
  rtx x = *loc;
  register int i, j;
  register enum rtx_code code;
  register const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      /* Nothing inside these can be substituted.  */
      return;

#ifdef HAVE_cc0
    case CC0:
      if (! memonly)
	validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      if (! memonly)
	{
	  int regno = REGNO (x);
	  struct const_equiv_data *p;

	  if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	      && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
	      && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
		  p->rtx != 0)
	      && p->age >= map->const_age)
	    validate_change (insn, loc, p->rtx, 1);
	}
      return;

    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBEG.  Instead,
	     see what is inside, try to form the new SUBREG and see if that is
	     valid.  We handle two cases: extracting a full word in an
	     integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map, 0);

	  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	      && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
	    new = operand_subword (inner, SUBREG_WORD (x), 0,
				   GET_MODE (SUBREG_REG (x)));

	  /* Back out the tentative changes made while examining INNER;
	     only the final NEW rtx (if any) is proposed below.  */
	  cancel_changes (num_changes);
	  if (new == 0 && subreg_lowpart_p (x))
	    new = gen_lowpart_common (GET_MODE (x), inner);

	  if (new)
	    validate_change (insn, loc, new, 1);

	  return;
	}
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;

    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e..g, ZERO_EXTRACT) destination, but not in the destination
	   itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;

	subst_constants (&SET_SRC (x), insn, map, memonly);
	src = SET_SRC (x);

	/* Peel off wrappers around the real destination, substituting
	   in the position/width operands of a ZERO_EXTRACT as we go.  */
	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
		subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (GET_CODE (*dest_loc) == MEM)
	  subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

	/* Check for the case of DEST a SUBREG, both it and the underlying
	   register are less than one word, and the SUBREG has the wider mode.
	   In the case, we are really setting the underlying register to the
	   source converted to the mode of DEST.  So indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (GET_CODE (src) == REG
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && GET_CODE (XEXP (src, 0)) == REG
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
		|| dest == cc0_rtx
#endif
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a COMPARE
	       it will cause us to save the COMPARE with any constants
	       substituted, which is what we want for later.  */
	    map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
	    map->equiv_sets[map->num_sets++].dest = dest;
	  }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Recurse into each operand according to the rtx format string.  */
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case '0':
	break;

      case 'e':
	if (XEXP (x, i))
	  subst_constants (&XEXP (x, i), insn, map, memonly);
	break;

      case 'u':
      case 'i':
      case 's':
      case 'w':
      case 'n':
      case 't':
	/* Non-expression operands: nothing to substitute.  */
	break;

      case 'E':
	if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	  for (j = 0; j < XVECLEN (x, i); j++)
	    subst_constants (&XVECEXP (x, i, j), insn, map, memonly);

	break;

      default:
	abort ();
      }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case '1':
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();
	new = simplify_unary_operation (code, GET_MODE (x),
					XEXP (x, 0), op0_mode);
	break;

      case '<':
	{
	  enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

	  if (op_mode == VOIDmode)
	    op_mode = GET_MODE (XEXP (x, 1));
	  new = simplify_relational_operation (code, op_mode,
					       XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
	  if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	    {
	      enum machine_mode mode = GET_MODE (x);
	      if (new == const0_rtx)
		new = CONST0_RTX (mode);
	      else
		{
		  REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
		  new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
		}
	    }
#endif
	  break;
	}

      case '2':
      case 'c':
	new = simplify_binary_operation (code, GET_MODE (x),
					 XEXP (x, 0), XEXP (x, 1));
	break;

      case 'b':
      case '3':
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();

	new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					  XEXP (x, 0), XEXP (x, 1),
					  XEXP (x, 2));
	break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}
/* Show that register modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.
   DEST is the register (or SUBREG of a register) being stored into;
   X and DATA are unused, present to satisfy the note_stores callback
   signature.  Invalidates the global const-equiv entries for every
   hard or pseudo register covered by the store.  */

static void
mark_stores (dest, x, data)
     rtx dest;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      /* A hard-register store may cover several consecutive registers;
	 a pseudo covers only itself.  */
      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
		      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
      register int i;

      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && regno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = regno; i <= last_reg; i++)
	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
2461 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2462 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2463 that it points to the node itself, thus indicating that the node is its
2464 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2465 the given node is NULL, recursively descend the decl/block tree which
2466 it is the root of, and for each other ..._DECL or BLOCK node contained
2467 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2468 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2469 values to point to themselves. */
2472 set_block_origin_self (stmt
)
2475 if (BLOCK_ABSTRACT_ORIGIN (stmt
) == NULL_TREE
)
2477 BLOCK_ABSTRACT_ORIGIN (stmt
) = stmt
;
2480 register tree local_decl
;
2482 for (local_decl
= BLOCK_VARS (stmt
);
2483 local_decl
!= NULL_TREE
;
2484 local_decl
= TREE_CHAIN (local_decl
))
2485 set_decl_origin_self (local_decl
); /* Potential recursion. */
2489 register tree subblock
;
2491 for (subblock
= BLOCK_SUBBLOCKS (stmt
);
2492 subblock
!= NULL_TREE
;
2493 subblock
= BLOCK_CHAIN (subblock
))
2494 set_block_origin_self (subblock
); /* Recurse. */
2499 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2500 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2501 node to so that it points to the node itself, thus indicating that the
2502 node represents its own (abstract) origin. Additionally, if the
2503 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2504 the decl/block tree of which the given node is the root of, and for
2505 each other ..._DECL or BLOCK node contained therein whose
2506 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2507 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2508 point to themselves. */
2511 set_decl_origin_self (decl
)
2514 if (DECL_ABSTRACT_ORIGIN (decl
) == NULL_TREE
)
2516 DECL_ABSTRACT_ORIGIN (decl
) = decl
;
2517 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2521 for (arg
= DECL_ARGUMENTS (decl
); arg
; arg
= TREE_CHAIN (arg
))
2522 DECL_ABSTRACT_ORIGIN (arg
) = arg
;
2523 if (DECL_INITIAL (decl
) != NULL_TREE
2524 && DECL_INITIAL (decl
) != error_mark_node
)
2525 set_block_origin_self (DECL_INITIAL (decl
));
2530 /* Given a pointer to some BLOCK node, and a boolean value to set the
2531 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2532 the given block, and for all local decls and all local sub-blocks
2533 (recursively) which are contained therein. */
2536 set_block_abstract_flags (stmt
, setting
)
2538 register int setting
;
2540 register tree local_decl
;
2541 register tree subblock
;
2543 BLOCK_ABSTRACT (stmt
) = setting
;
2545 for (local_decl
= BLOCK_VARS (stmt
);
2546 local_decl
!= NULL_TREE
;
2547 local_decl
= TREE_CHAIN (local_decl
))
2548 set_decl_abstract_flags (local_decl
, setting
);
2550 for (subblock
= BLOCK_SUBBLOCKS (stmt
);
2551 subblock
!= NULL_TREE
;
2552 subblock
= BLOCK_CHAIN (subblock
))
2553 set_block_abstract_flags (subblock
, setting
);
2556 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2557 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2558 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2559 set the abstract flags for all of the parameters, local vars, local
2560 blocks and sub-blocks (recursively) to the same setting. */
2563 set_decl_abstract_flags (decl
, setting
)
2565 register int setting
;
2567 DECL_ABSTRACT (decl
) = setting
;
2568 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2572 for (arg
= DECL_ARGUMENTS (decl
); arg
; arg
= TREE_CHAIN (arg
))
2573 DECL_ABSTRACT (arg
) = setting
;
2574 if (DECL_INITIAL (decl
) != NULL_TREE
2575 && DECL_INITIAL (decl
) != error_mark_node
)
2576 set_block_abstract_flags (DECL_INITIAL (decl
), setting
);
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.
   NOTE(review): some statements here (e.g. the `cfun = f' switch-over and
   the inlinable-flag clear) were elided in the extracted source and are
   reconstructed -- confirm against the original file.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  struct function *old_cfun = cfun;
  struct function *f = DECL_SAVED_INSNS (fndecl);

  /* Switch the compiler's per-function state over to FNDECL.  */
  cfun = f;
  current_function_decl = fndecl;
  clear_emit_caches ();

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  set_new_last_label_num (f->inl_max_label_num);

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */
  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* We can't inline this anymore.  */
  f->inlinable = 0;
  DECL_INLINE (fndecl) = 0;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  /* Restore the outer function's compilation context, if any.  */
  cfun = old_cfun;
  current_function_decl = old_cfun ? old_cfun->decl : 0;
}