1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 91, 93-98, 1999 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
31 #include "insn-config.h"
32 #include "insn-flags.h"
36 #include "integrate.h"
/* Route obstack chunk management through xmalloc/free.  */
45 #define obstack_chunk_alloc xmalloc
46 #define obstack_chunk_free free
48 extern struct obstack
*function_maybepermanent_obstack
;
50 /* Similar, but round to the next highest integer that meets the
   alignment.  ALIGN must be a power of two for the mask below to work.  */
52 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
54 /* Default max number of insns a function can have and still be inline.
55 This is overridden on RISC machines. */
56 #ifndef INTEGRATE_THRESHOLD
57 /* Inlining small functions might save more space than not inlining at
58 all. Assume 1 instruction for the call and 1.5 insns per argument. */
59 #define INTEGRATE_THRESHOLD(DECL) \
61 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
62 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
65 static rtvec initialize_for_inline
PROTO((tree
));
66 static void note_modified_parmregs
PROTO((rtx
, rtx
, void *));
67 static void integrate_parm_decls
PROTO((tree
, struct inline_remap
*,
69 static tree integrate_decl_tree
PROTO((tree
,
70 struct inline_remap
*));
71 static void subst_constants
PROTO((rtx
*, rtx
,
72 struct inline_remap
*, int));
73 static void set_block_origin_self
PROTO((tree
));
74 static void set_decl_origin_self
PROTO((tree
));
75 static void set_block_abstract_flags
PROTO((tree
, int));
76 static void process_reg_param
PROTO((struct inline_remap
*, rtx
,
78 void set_decl_abstract_flags
PROTO((tree
, int));
79 static rtx expand_inline_function_eh_labelmap
PROTO((rtx
));
80 static void mark_stores
PROTO((rtx
, rtx
, void *));
81 static int compare_blocks
PROTO((const PTR
, const PTR
));
82 static int find_block
PROTO((const PTR
, const PTR
));
84 /* The maximum number of instructions accepted for inlining a
85 function. Increasing values mean more aggressive inlining.
86 This affects currently only functions explicitly marked as
87 inline (or methods defined within the class definition for C++).
88 The default value of 10000 is arbitrary but high to match the
89 previously unlimited gcc capabilities. */
91 int inline_max_insns
= 10000;
93 /* Used by copy_rtx_and_substitute; this indicates whether the function is
94 called for the purpose of inlining or some other purpose (i.e. loop
95 unrolling). This affects how constant pool references are handled.
96 This variable points to the struct function of the inlined function.  */
97 static struct function
*inlining
= 0;
99 /* Returns the Ith entry in the label_map contained in MAP. If the
100 Ith entry has not yet been set, return a fresh label. This function
101 performs a lazy initialization of label_map, thereby avoiding huge memory
102 explosions when the label_map gets very large. */
105 get_label_from_map (map
, i
)
106 struct inline_remap
*map
;
109 rtx x
= map
->label_map
[i
];
112 x
= map
->label_map
[i
] = gen_label_rtx();
117 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
118 is safe and reasonable to integrate into other functions.
119 Nonzero means value is a warning msgid with a single %s
120 for the function's name. */
123 function_cannot_inline_p (fndecl
)
124 register tree fndecl
;
127 tree last
= tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
129 /* For functions marked as inline increase the maximum size to
130 inline_max_insns (-finline-limit-<n>). For regular functions
131 use the limit given by INTEGRATE_THRESHOLD. */
133 int max_insns
= (DECL_INLINE (fndecl
))
135 + 8 * list_length (DECL_ARGUMENTS (fndecl
)))
136 : INTEGRATE_THRESHOLD (fndecl
);
138 register int ninsns
= 0;
142 /* No inlines with varargs. */
143 if ((last
&& TREE_VALUE (last
) != void_type_node
)
144 || current_function_varargs
)
145 return N_("varargs function cannot be inline");
147 if (current_function_calls_alloca
)
148 return N_("function using alloca cannot be inline");
150 if (current_function_calls_setjmp
)
151 return N_("function using setjmp cannot be inline");
153 if (current_function_contains_functions
)
154 return N_("function with nested functions cannot be inline");
158 N_("function with label addresses used in initializers cannot inline");
160 if (current_function_cannot_inline
)
161 return current_function_cannot_inline
;
163 /* If it's not even close, don't even look. */
164 if (get_max_uid () > 3 * max_insns
)
165 return N_("function too large to be inline");
168 /* Don't inline functions which do not specify a function prototype and
169 have BLKmode argument or take the address of a parameter. */
170 for (parms
= DECL_ARGUMENTS (fndecl
); parms
; parms
= TREE_CHAIN (parms
))
172 if (TYPE_MODE (TREE_TYPE (parms
)) == BLKmode
)
173 TREE_ADDRESSABLE (parms
) = 1;
174 if (last
== NULL_TREE
&& TREE_ADDRESSABLE (parms
))
175 return N_("no prototype, and parameter address used; cannot be inline");
179 /* We can't inline functions that return structures
180 the old-fashioned PCC way, copying into a static block. */
181 if (current_function_returns_pcc_struct
)
182 return N_("inline functions not supported for this return value type");
184 /* We can't inline functions that return structures of varying size. */
185 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl
))) < 0)
186 return N_("function with varying-size return value cannot be inline");
188 /* Cannot inline a function with a varying size argument or one that
189 receives a transparent union. */
190 for (parms
= DECL_ARGUMENTS (fndecl
); parms
; parms
= TREE_CHAIN (parms
))
192 if (int_size_in_bytes (TREE_TYPE (parms
)) < 0)
193 return N_("function with varying-size parameter cannot be inline");
194 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms
)))
195 return N_("function with transparent unit parameter cannot be inline");
198 if (get_max_uid () > max_insns
)
200 for (ninsns
= 0, insn
= get_first_nonparm_insn ();
201 insn
&& ninsns
< max_insns
;
202 insn
= NEXT_INSN (insn
))
203 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
206 if (ninsns
>= max_insns
)
207 return N_("function too large to be inline");
210 /* We will not inline a function which uses computed goto. The addresses of
211 its local labels, which may be tucked into global storage, are of course
212 not constant across instantiations, which causes unexpected behaviour. */
213 if (current_function_has_computed_jump
)
214 return N_("function with computed jump cannot inline");
216 /* We cannot inline a nested function that jumps to a nonlocal label. */
217 if (current_function_has_nonlocal_goto
)
218 return N_("function with nonlocal goto cannot be inline");
220 /* This is a hack, until the inliner is taught about eh regions at
221 the start of the function. */
222 for (insn
= get_insns ();
224 && ! (GET_CODE (insn
) == NOTE
225 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_BEG
);
226 insn
= NEXT_INSN (insn
))
228 if (insn
&& GET_CODE (insn
) == NOTE
229 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_BEG
)
230 return N_("function with complex parameters cannot be inline");
233 /* We can't inline functions that return a PARALLEL rtx. */
234 result
= DECL_RTL (DECL_RESULT (fndecl
));
235 if (result
&& GET_CODE (result
) == PARALLEL
)
236 return N_("inline functions not supported for this return value type");
241 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
242 Zero for a reg that isn't a parm's home.
243 Only reg numbers less than max_parm_reg are mapped here. */
244 static tree
*parmdecl_map
;
246 /* In save_for_inline_nocopy, nonzero once past the parm-initialization insns. */
247 static int in_nonparm_insns
;
249 /* Subroutine for `save_for_inline_nocopy'. Performs initialization
250 needed to save FNDECL's insns and info for future inline expansion. */
253 initialize_for_inline (fndecl
)
260 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
261 bzero ((char *) parmdecl_map
, max_parm_reg
* sizeof (tree
));
262 arg_vector
= rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl
)));
264 for (parms
= DECL_ARGUMENTS (fndecl
), i
= 0;
266 parms
= TREE_CHAIN (parms
), i
++)
268 rtx p
= DECL_RTL (parms
);
270 /* If we have (mem (addressof (mem ...))), use the inner MEM since
271 otherwise the copy_rtx call below will not unshare the MEM since
272 it shares ADDRESSOF. */
273 if (GET_CODE (p
) == MEM
&& GET_CODE (XEXP (p
, 0)) == ADDRESSOF
274 && GET_CODE (XEXP (XEXP (p
, 0), 0)) == MEM
)
275 p
= XEXP (XEXP (p
, 0), 0);
277 RTVEC_ELT (arg_vector
, i
) = p
;
279 if (GET_CODE (p
) == REG
)
280 parmdecl_map
[REGNO (p
)] = parms
;
281 else if (GET_CODE (p
) == CONCAT
)
283 rtx preal
= gen_realpart (GET_MODE (XEXP (p
, 0)), p
);
284 rtx pimag
= gen_imagpart (GET_MODE (preal
), p
);
286 if (GET_CODE (preal
) == REG
)
287 parmdecl_map
[REGNO (preal
)] = parms
;
288 if (GET_CODE (pimag
) == REG
)
289 parmdecl_map
[REGNO (pimag
)] = parms
;
292 /* This flag is cleared later
293 if the function ever modifies the value of the parm. */
294 TREE_READONLY (parms
) = 1;
300 /* Copy NODE (which must be a DECL, but not a PARM_DECL). The DECL
301 originally was in the FROM_FN, but now it will be in the TO_FN.  */
305 copy_decl_for_inlining (decl
, from_fn
, to_fn
)
312 /* Copy the declaration. */
313 if (TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == RESULT_DECL
)
314 /* For a parameter, we must make an equivalent VAR_DECL, not a new PARM_DECL.  */
316 copy
= build_decl (VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
319 copy
= copy_node (decl
);
320 if (DECL_LANG_SPECIFIC (copy
))
321 copy_lang_decl (copy
);
324 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
325 declaration inspired this copy. */
326 DECL_ABSTRACT_ORIGIN (copy
) = DECL_ORIGIN (decl
);
328 /* The new variable/label has no RTL, yet. */
329 DECL_RTL (copy
) = NULL_RTX
;
331 /* These args would always appear unused, if not for this. */
332 TREE_USED (copy
) = 1;
334 /* Set the context for the new declaration. */
335 if (!DECL_CONTEXT (decl
))
336 /* Globals stay global. */
338 else if (DECL_CONTEXT (decl
) != from_fn
)
339 /* Things that weren't in the scope of the function we're inlining
340 from aren't in the scope we're inlining to, either. */
342 else if (TREE_STATIC (decl
))
343 /* Function-scoped static variables should stay in the original function.  */
347 /* Ordinary automatic local variables are now in the scope of the
349 DECL_CONTEXT (copy
) = to_fn
;
354 /* Make the insns and PARM_DECLs of the current function permanent
355 and record other information in DECL_SAVED_INSNS to allow inlining
356 of this function in subsequent calls.
358 This routine need not copy any insns because we are not going
359 to immediately compile the insns in the insn chain. There
360 are two cases when we would compile the insns for FNDECL:
361 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
362 be output at the end of other compilation, because somebody took
363 its address. In the first case, the insns of FNDECL are copied
364 as it is expanded inline, so FNDECL's saved insns are not
365 modified. In the second case, FNDECL is used for the last time,
366 so modifying the rtl is not a problem.
368 We don't have to worry about FNDECL being inline expanded by
369 other functions which are written at the end of compilation
370 because flag_no_inline is turned on when we begin writing
371 functions at the end of compilation. */
374 save_for_inline_nocopy (fndecl
)
379 rtx first_nonparm_insn
;
381 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
382 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
383 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
384 for the parms, prior to elimination of virtual registers.
385 These values are needed for substituting parms properly. */
387 parmdecl_map
= (tree
*) xmalloc (max_parm_reg
* sizeof (tree
));
389 /* Make and emit a return-label if we have not already done so. */
391 if (return_label
== 0)
393 return_label
= gen_label_rtx ();
394 emit_label (return_label
);
397 argvec
= initialize_for_inline (fndecl
);
399 /* If there are insns that copy parms from the stack into pseudo registers,
400 those insns are not copied. `expand_inline_function' must
401 emit the correct code to handle such things. */
404 if (GET_CODE (insn
) != NOTE
)
407 /* Get the insn which signals the end of parameter setup code. */
408 first_nonparm_insn
= get_first_nonparm_insn ();
410 /* Now just scan the chain of insns to see what happens to our
411 PARM_DECLs. If a PARM_DECL is used but never modified, we
412 can substitute its rtl directly when expanding inline (and
413 perform constant folding when its incoming value is constant).
414 Otherwise, we have to copy its value into a new register and track
415 the new register's life. */
416 in_nonparm_insns
= 0;
417 for (insn
= NEXT_INSN (insn
); insn
; insn
= NEXT_INSN (insn
))
419 if (insn
== first_nonparm_insn
)
420 in_nonparm_insns
= 1;
422 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
423 /* Record what interesting things happen to our parameters. */
424 note_stores (PATTERN (insn
), note_modified_parmregs
, NULL
);
427 /* We have now allocated all that needs to be allocated permanently
428 on the rtx obstack. Set our high-water mark, so that we
429 can free the rest of this when the time comes. */
433 current_function
->inl_max_label_num
= max_label_num ();
434 current_function
->inl_last_parm_insn
= current_function
->x_last_parm_insn
;
435 current_function
->original_arg_vector
= argvec
;
436 current_function
->original_decl_initial
= DECL_INITIAL (fndecl
);
437 DECL_SAVED_INSNS (fndecl
) = current_function
;
443 /* Note whether a parameter is modified or not. */
446 note_modified_parmregs (reg
, x
, data
)
448 rtx x ATTRIBUTE_UNUSED
;
449 void *data ATTRIBUTE_UNUSED
;
451 if (GET_CODE (reg
) == REG
&& in_nonparm_insns
452 && REGNO (reg
) < max_parm_reg
453 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
454 && parmdecl_map
[REGNO (reg
)] != 0)
455 TREE_READONLY (parmdecl_map
[REGNO (reg
)]) = 0;
458 /* Unfortunately, we need a global copy of const_equiv map for communication
459 with a function called from note_stores. Be *very* careful that this
460 is used properly in the presence of recursion. */
462 varray_type global_const_equiv_varray
;
/* Nonzero if X is (plus (reg) (const_int)) where the register is one of
   the virtual base registers, i.e. a constant offset from the virtual
   frame or argument pointer.  */
464 #define FIXED_BASE_PLUS_P(X) \
465 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
466 && GET_CODE (XEXP (X, 0)) == REG \
467 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
468 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
470 /* Called to set up a mapping for the case where a parameter is in a
471 register. If it is read-only and our argument is a constant, set up the
472 constant equivalence.
474 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
477 Also, don't allow hard registers here; they might not be valid when
478 substituted into insns. */
480 process_reg_param (map
, loc
, copy
)
481 struct inline_remap
*map
;
484 if ((GET_CODE (copy
) != REG
&& GET_CODE (copy
) != SUBREG
)
485 || (GET_CODE (copy
) == REG
&& REG_USERVAR_P (loc
)
486 && ! REG_USERVAR_P (copy
))
487 || (GET_CODE (copy
) == REG
488 && REGNO (copy
) < FIRST_PSEUDO_REGISTER
))
490 rtx temp
= copy_to_mode_reg (GET_MODE (loc
), copy
);
491 REG_USERVAR_P (temp
) = REG_USERVAR_P (loc
);
492 if (CONSTANT_P (copy
) || FIXED_BASE_PLUS_P (copy
))
493 SET_CONST_EQUIV_DATA (map
, temp
, copy
, CONST_AGE_PARM
);
496 map
->reg_map
[REGNO (loc
)] = copy
;
499 /* Used by duplicate_eh_handlers to map labels for the exception table.  */
500 static struct inline_remap
*eif_eh_map
;
503 expand_inline_function_eh_labelmap (label
)
506 int index
= CODE_LABEL_NUMBER (label
);
507 return get_label_from_map (eif_eh_map
, index
);
510 /* Compare two BLOCKs for qsort. The key we sort on is the
511 BLOCK_ABSTRACT_ORIGIN of the blocks. */
514 compare_blocks (v1
, v2
)
518 tree b1
= *((tree
*) v1
);
519 tree b2
= *((tree
*) v2
);
521 return ((char *) BLOCK_ABSTRACT_ORIGIN (b1
)
522 - (char *) BLOCK_ABSTRACT_ORIGIN (b2
));
525 /* Compare two BLOCKs for bsearch. The first pointer corresponds to
526 an original block; the second to a remapped equivalent. */
534 tree b2
= *((tree
*) v2
);
536 return ((char *) b1
- (char *) BLOCK_ABSTRACT_ORIGIN (b2
));
539 /* Integrate the procedure defined by FNDECL. Note that this function
540 may wind up calling itself. Since the static variables are not
541 reentrant, we do not assign them until after the possibility
542 of recursion is eliminated.
544 If IGNORE is nonzero, do not produce a value.
545 Otherwise store the value in TARGET if it is nonzero and that is convenient.
548 (rtx)-1 if we could not substitute the function
549 0 if we substituted it and it does not produce a value
550 else an rtx for where the value is stored. */
553 expand_inline_function (fndecl
, parms
, target
, ignore
, type
,
554 structure_value_addr
)
559 rtx structure_value_addr
;
561 struct function
*inlining_previous
;
562 struct function
*inl_f
= DECL_SAVED_INSNS (fndecl
);
563 tree formal
, actual
, block
;
564 rtx parm_insns
= inl_f
->emit
->x_first_insn
;
565 rtx insns
= (inl_f
->inl_last_parm_insn
566 ? NEXT_INSN (inl_f
->inl_last_parm_insn
)
573 int min_labelno
= inl_f
->emit
->x_first_label_num
;
574 int max_labelno
= inl_f
->inl_max_label_num
;
576 rtx local_return_label
= 0;
580 struct inline_remap
*map
= 0;
584 rtvec arg_vector
= (rtvec
) inl_f
->original_arg_vector
;
585 rtx static_chain_value
= 0;
588 /* The pointer used to track the true location of the memory used
589 for MAP->LABEL_MAP. */
590 rtx
*real_label_map
= 0;
592 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
593 max_regno
= inl_f
->emit
->x_reg_rtx_no
+ 3;
594 if (max_regno
< FIRST_PSEUDO_REGISTER
)
597 nargs
= list_length (DECL_ARGUMENTS (fndecl
));
599 /* Check that the parms type match and that sufficient arguments were
600 passed. Since the appropriate conversions or default promotions have
601 already been applied, the machine modes should match exactly. */
603 for (formal
= DECL_ARGUMENTS (fndecl
), actual
= parms
;
605 formal
= TREE_CHAIN (formal
), actual
= TREE_CHAIN (actual
))
608 enum machine_mode mode
;
611 return (rtx
) (HOST_WIDE_INT
) -1;
613 arg
= TREE_VALUE (actual
);
614 mode
= TYPE_MODE (DECL_ARG_TYPE (formal
));
616 if (mode
!= TYPE_MODE (TREE_TYPE (arg
))
617 /* If they are block mode, the types should match exactly.
618 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
619 which could happen if the parameter has incomplete type. */
621 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg
))
622 != TYPE_MAIN_VARIANT (TREE_TYPE (formal
)))))
623 return (rtx
) (HOST_WIDE_INT
) -1;
626 /* Extra arguments are valid, but will be ignored below, so we must
627 evaluate them here for side-effects. */
628 for (; actual
; actual
= TREE_CHAIN (actual
))
629 expand_expr (TREE_VALUE (actual
), const0_rtx
,
630 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual
))), 0);
632 /* Expand the function arguments. Do this first so that any
633 new registers get created before we allocate the maps. */
635 arg_vals
= (rtx
*) xmalloc (nargs
* sizeof (rtx
));
636 arg_trees
= (tree
*) xmalloc (nargs
* sizeof (tree
));
638 for (formal
= DECL_ARGUMENTS (fndecl
), actual
= parms
, i
= 0;
640 formal
= TREE_CHAIN (formal
), actual
= TREE_CHAIN (actual
), i
++)
642 /* Actual parameter, converted to the type of the argument within the function.  */
644 tree arg
= convert (TREE_TYPE (formal
), TREE_VALUE (actual
));
645 /* Mode of the variable used within the function. */
646 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (formal
));
650 loc
= RTVEC_ELT (arg_vector
, i
);
652 /* If this is an object passed by invisible reference, we copy the
653 object into a stack slot and save its address. If this will go
654 into memory, we do nothing now. Otherwise, we just expand the argument.  */
656 if (GET_CODE (loc
) == MEM
&& GET_CODE (XEXP (loc
, 0)) == REG
657 && REGNO (XEXP (loc
, 0)) > LAST_VIRTUAL_REGISTER
)
660 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg
)),
661 int_size_in_bytes (TREE_TYPE (arg
)), 1);
662 MEM_SET_IN_STRUCT_P (stack_slot
,
663 AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
665 store_expr (arg
, stack_slot
, 0);
667 arg_vals
[i
] = XEXP (stack_slot
, 0);
670 else if (GET_CODE (loc
) != MEM
)
672 if (GET_MODE (loc
) != TYPE_MODE (TREE_TYPE (arg
)))
673 /* The mode of LOC and ARG can differ if LOC was a variable
674 that had its mode promoted via PROMOTED_MODE. */
675 arg_vals
[i
] = convert_modes (GET_MODE (loc
),
676 TYPE_MODE (TREE_TYPE (arg
)),
677 expand_expr (arg
, NULL_RTX
, mode
,
679 TREE_UNSIGNED (TREE_TYPE (formal
)));
681 arg_vals
[i
] = expand_expr (arg
, NULL_RTX
, mode
, EXPAND_SUM
);
687 && (! TREE_READONLY (formal
)
688 /* If the parameter is not read-only, copy our argument through
689 a register. Also, we cannot use ARG_VALS[I] if it overlaps
690 TARGET in any way. In the inline function, they will likely
691 be two different pseudos, and `safe_from_p' will make all
692 sorts of smart assumptions about their not conflicting.
693 But if ARG_VALS[I] overlaps TARGET, these assumptions are
694 wrong, so put ARG_VALS[I] into a fresh register.
695 Don't worry about invisible references, since their stack
696 temps will never overlap the target. */
699 && (GET_CODE (arg_vals
[i
]) == REG
700 || GET_CODE (arg_vals
[i
]) == SUBREG
701 || GET_CODE (arg_vals
[i
]) == MEM
)
702 && reg_overlap_mentioned_p (arg_vals
[i
], target
))
703 /* ??? We must always copy a SUBREG into a REG, because it might
704 get substituted into an address, and not all ports correctly
705 handle SUBREGs in addresses. */
706 || (GET_CODE (arg_vals
[i
]) == SUBREG
)))
707 arg_vals
[i
] = copy_to_mode_reg (GET_MODE (loc
), arg_vals
[i
]);
709 if (arg_vals
[i
] != 0 && GET_CODE (arg_vals
[i
]) == REG
710 && POINTER_TYPE_P (TREE_TYPE (formal
)))
711 mark_reg_pointer (arg_vals
[i
],
712 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal
)))
716 /* Allocate the structures we use to remap things. */
718 map
= (struct inline_remap
*) xmalloc (sizeof (struct inline_remap
));
719 map
->fndecl
= fndecl
;
721 VARRAY_TREE_INIT (map
->block_map
, 10, "block_map");
722 map
->reg_map
= (rtx
*) xcalloc (max_regno
, sizeof (rtx
));
724 /* We used to use alloca here, but the size of what it would try to
725 allocate would occasionally cause it to exceed the stack limit and
726 cause unpredictable core dumps. */
728 = (rtx
*) xmalloc ((max_labelno
) * sizeof (rtx
));
729 map
->label_map
= real_label_map
;
731 inl_max_uid
= (inl_f
->emit
->x_cur_insn_uid
+ 1);
732 map
->insn_map
= (rtx
*) xcalloc (inl_max_uid
, sizeof (rtx
));
734 map
->max_insnno
= inl_max_uid
;
736 map
->integrating
= 1;
738 /* const_equiv_varray maps pseudos in our routine to constants, so
739 it needs to be large enough for all our pseudos. This is the
740 number we are currently using plus the number in the called
741 routine, plus 15 for each arg, five to compute the virtual frame
742 pointer, and five for the return value. This should be enough
743 for most cases. We do not reference entries outside the range of
746 ??? These numbers are quite arbitrary and were obtained by
747 experimentation. At some point, we should try to allocate the
748 table after all the parameters are set up so we can more accurately
749 estimate the number of pseudos we will need. */
751 VARRAY_CONST_EQUIV_INIT (map
->const_equiv_varray
,
753 + (max_regno
- FIRST_PSEUDO_REGISTER
)
756 "expand_inline_function");
759 /* Record the current insn in case we have to set up pointers to frame
760 and argument memory blocks. If there are no insns yet, add a dummy
761 insn that can be used as an insertion point. */
762 map
->insns_at_start
= get_last_insn ();
763 if (map
->insns_at_start
== 0)
764 map
->insns_at_start
= emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
766 map
->regno_pointer_flag
= inl_f
->emit
->regno_pointer_flag
;
767 map
->regno_pointer_align
= inl_f
->emit
->regno_pointer_align
;
769 /* Update the outgoing argument size to allow for those in the inlined function.  */
771 if (inl_f
->outgoing_args_size
> current_function_outgoing_args_size
)
772 current_function_outgoing_args_size
= inl_f
->outgoing_args_size
;
774 /* If the inline function needs to make PIC references, that means
775 that this function's PIC offset table must be used. */
776 if (inl_f
->uses_pic_offset_table
)
777 current_function_uses_pic_offset_table
= 1;
779 /* If this function needs a context, set it up. */
780 if (inl_f
->needs_context
)
781 static_chain_value
= lookup_static_chain (fndecl
);
783 if (GET_CODE (parm_insns
) == NOTE
784 && NOTE_LINE_NUMBER (parm_insns
) > 0)
786 rtx note
= emit_note (NOTE_SOURCE_FILE (parm_insns
),
787 NOTE_LINE_NUMBER (parm_insns
));
789 RTX_INTEGRATED_P (note
) = 1;
792 /* Process each argument. For each, set up things so that the function's
793 reference to the argument will refer to the argument being passed.
794 We only replace REG with REG here. Any simplifications are done
797 We make two passes: In the first, we deal with parameters that will
798 be placed into registers, since we need to ensure that the allocated
799 register number fits in const_equiv_map. Then we store all non-register
800 parameters into their memory location. */
802 /* Don't try to free temp stack slots here, because we may put one of the
803 parameters into a temp stack slot. */
805 for (i
= 0; i
< nargs
; i
++)
807 rtx copy
= arg_vals
[i
];
809 loc
= RTVEC_ELT (arg_vector
, i
);
811 /* There are three cases, each handled separately. */
812 if (GET_CODE (loc
) == MEM
&& GET_CODE (XEXP (loc
, 0)) == REG
813 && REGNO (XEXP (loc
, 0)) > LAST_VIRTUAL_REGISTER
)
815 /* This must be an object passed by invisible reference (it could
816 also be a variable-sized object, but we forbid inlining functions
817 with variable-sized arguments). COPY is the address of the
818 actual value (this computation will cause it to be copied). We
819 map that address for the register, noting the actual address as
820 an equivalent in case it can be substituted into the insns. */
822 if (GET_CODE (copy
) != REG
)
824 temp
= copy_addr_to_reg (copy
);
825 if (CONSTANT_P (copy
) || FIXED_BASE_PLUS_P (copy
))
826 SET_CONST_EQUIV_DATA (map
, temp
, copy
, CONST_AGE_PARM
);
829 map
->reg_map
[REGNO (XEXP (loc
, 0))] = copy
;
831 else if (GET_CODE (loc
) == MEM
)
833 /* This is the case of a parameter that lives in memory. It
834 will live in the block we allocate in the called routine's
835 frame that simulates the incoming argument area. Do nothing
836 with the parameter now; we will call store_expr later. In
837 this case, however, we must ensure that the virtual stack and
838 incoming arg rtx values are expanded now so that we can be
839 sure we have enough slots in the const equiv map since the
840 store_expr call can easily blow the size estimate. */
841 if (DECL_FRAME_SIZE (fndecl
) != 0)
842 copy_rtx_and_substitute (virtual_stack_vars_rtx
, map
, 0);
844 if (DECL_SAVED_INSNS (fndecl
)->args_size
!= 0)
845 copy_rtx_and_substitute (virtual_incoming_args_rtx
, map
, 0);
847 else if (GET_CODE (loc
) == REG
)
848 process_reg_param (map
, loc
, copy
);
849 else if (GET_CODE (loc
) == CONCAT
)
851 rtx locreal
= gen_realpart (GET_MODE (XEXP (loc
, 0)), loc
);
852 rtx locimag
= gen_imagpart (GET_MODE (XEXP (loc
, 0)), loc
);
853 rtx copyreal
= gen_realpart (GET_MODE (locreal
), copy
);
854 rtx copyimag
= gen_imagpart (GET_MODE (locimag
), copy
);
856 process_reg_param (map
, locreal
, copyreal
);
857 process_reg_param (map
, locimag
, copyimag
);
863 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
864 specially. This function can be called recursively, so we need to
865 save the previous value. */
866 inlining_previous
= inlining
;
869 /* Now do the parameters that will be placed in memory. */
871 for (formal
= DECL_ARGUMENTS (fndecl
), i
= 0;
872 formal
; formal
= TREE_CHAIN (formal
), i
++)
874 loc
= RTVEC_ELT (arg_vector
, i
);
876 if (GET_CODE (loc
) == MEM
877 /* Exclude case handled above. */
878 && ! (GET_CODE (XEXP (loc
, 0)) == REG
879 && REGNO (XEXP (loc
, 0)) > LAST_VIRTUAL_REGISTER
))
881 rtx note
= emit_note (DECL_SOURCE_FILE (formal
),
882 DECL_SOURCE_LINE (formal
));
884 RTX_INTEGRATED_P (note
) = 1;
886 /* Compute the address in the area we reserved and store the value there.  */
888 temp
= copy_rtx_and_substitute (loc
, map
, 1);
889 subst_constants (&temp
, NULL_RTX
, map
, 1);
890 apply_change_group ();
891 if (! memory_address_p (GET_MODE (temp
), XEXP (temp
, 0)))
892 temp
= change_address (temp
, VOIDmode
, XEXP (temp
, 0));
893 store_expr (arg_trees
[i
], temp
, 0);
897 /* Deal with the places that the function puts its result.
898 We are driven by what is placed into DECL_RESULT.
900 Initially, we assume that we don't have any special handling for
901 REG_FUNCTION_RETURN_VALUE_P. */
903 map
->inline_target
= 0;
904 loc
= DECL_RTL (DECL_RESULT (fndecl
));
906 if (TYPE_MODE (type
) == VOIDmode
)
907 /* There is no return value to worry about. */
909 else if (GET_CODE (loc
) == MEM
)
911 if (GET_CODE (XEXP (loc
, 0)) == ADDRESSOF
)
913 temp
= copy_rtx_and_substitute (loc
, map
, 1);
914 subst_constants (&temp
, NULL_RTX
, map
, 1);
915 apply_change_group ();
920 if (! structure_value_addr
921 || ! aggregate_value_p (DECL_RESULT (fndecl
)))
924 /* Pass the function the address in which to return a structure
925 value. Note that a constructor can cause someone to call us
926 with STRUCTURE_VALUE_ADDR, but the initialization takes place
927 via the first parameter, rather than the struct return address.
929 We have two cases: If the address is a simple register
930 indirect, use the mapping mechanism to point that register to
931 our structure return address. Otherwise, store the structure
932 return value into the place that it will be referenced from. */
934 if (GET_CODE (XEXP (loc
, 0)) == REG
)
936 temp
= force_operand (structure_value_addr
, NULL_RTX
);
937 temp
= force_reg (Pmode
, temp
);
938 map
->reg_map
[REGNO (XEXP (loc
, 0))] = temp
;
940 if (CONSTANT_P (structure_value_addr
)
941 || GET_CODE (structure_value_addr
) == ADDRESSOF
942 || (GET_CODE (structure_value_addr
) == PLUS
943 && (XEXP (structure_value_addr
, 0)
944 == virtual_stack_vars_rtx
)
945 && (GET_CODE (XEXP (structure_value_addr
, 1))
948 SET_CONST_EQUIV_DATA (map
, temp
, structure_value_addr
,
954 temp
= copy_rtx_and_substitute (loc
, map
, 1);
955 subst_constants (&temp
, NULL_RTX
, map
, 0);
956 apply_change_group ();
957 emit_move_insn (temp
, structure_value_addr
);
962 /* We will ignore the result value, so don't look at its structure.
963 Note that preparations for an aggregate return value
964 do need to be made (above) even if it will be ignored. */
966 else if (GET_CODE (loc
) == REG
)
968 /* The function returns an object in a register and we use the return
969 value. Set up our target for remapping. */
971 /* Machine mode function was declared to return. */
972 enum machine_mode departing_mode
= TYPE_MODE (type
);
973 /* (Possibly wider) machine mode it actually computes
974 (for the sake of callers that fail to declare it right).
975 We have to use the mode of the result's RTL, rather than
976 its type, since expand_function_start may have promoted it. */
977 enum machine_mode arriving_mode
978 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl
)));
981 /* Don't use MEMs as direct targets because on some machines
982 substituting a MEM for a REG makes invalid insns.
983 Let the combiner substitute the MEM if that is valid. */
984 if (target
== 0 || GET_CODE (target
) != REG
985 || GET_MODE (target
) != departing_mode
)
987 /* Don't make BLKmode registers. If this looks like
988 a BLKmode object being returned in a register, get
989 the mode from that, otherwise abort. */
990 if (departing_mode
== BLKmode
)
992 if (REG
== GET_CODE (DECL_RTL (DECL_RESULT (fndecl
))))
994 departing_mode
= GET_MODE (DECL_RTL (DECL_RESULT (fndecl
)));
995 arriving_mode
= departing_mode
;
1001 target
= gen_reg_rtx (departing_mode
);
1004 /* If function's value was promoted before return,
1005 avoid machine mode mismatch when we substitute INLINE_TARGET.
1006 But TARGET is what we will return to the caller. */
1007 if (arriving_mode
!= departing_mode
)
1009 /* Avoid creating a paradoxical subreg wider than
1010 BITS_PER_WORD, since that is illegal. */
1011 if (GET_MODE_BITSIZE (arriving_mode
) > BITS_PER_WORD
)
1013 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode
),
1014 GET_MODE_BITSIZE (arriving_mode
)))
1015 /* Maybe could be handled by using convert_move () ? */
1017 reg_to_map
= gen_reg_rtx (arriving_mode
);
1018 target
= gen_lowpart (departing_mode
, reg_to_map
);
1021 reg_to_map
= gen_rtx_SUBREG (arriving_mode
, target
, 0);
1024 reg_to_map
= target
;
1026 /* Usually, the result value is the machine's return register.
1027 Sometimes it may be a pseudo. Handle both cases. */
1028 if (REG_FUNCTION_VALUE_P (loc
))
1029 map
->inline_target
= reg_to_map
;
1031 map
->reg_map
[REGNO (loc
)] = reg_to_map
;
1036 /* Initialize label_map. get_label_from_map will actually make
1038 bzero ((char *) &map
->label_map
[min_labelno
],
1039 (max_labelno
- min_labelno
) * sizeof (rtx
));
1041 /* Make copies of the decls of the symbols in the inline function, so that
1042 the copies of the variables get declared in the current function. Set
1043 up things so that lookup_static_chain knows that to interpret registers
1044 in SAVE_EXPRs for TYPE_SIZEs as local. */
1045 inline_function_decl
= fndecl
;
1046 integrate_parm_decls (DECL_ARGUMENTS (fndecl
), map
, arg_vector
);
1047 block
= integrate_decl_tree (inl_f
->original_decl_initial
, map
);
1048 BLOCK_ABSTRACT_ORIGIN (block
) = DECL_ORIGIN (fndecl
);
1049 inline_function_decl
= 0;
1051 /* Make a fresh binding contour that we can easily remove. Do this after
1052 expanding our arguments so cleanups are properly scoped. */
1053 expand_start_bindings_and_block (0, block
);
1055 /* Sort the block-map so that it will be easy to find remapped
1057 qsort (&VARRAY_TREE (map
->block_map
, 0),
1058 map
->block_map
->elements_used
,
1062 /* Perform postincrements before actually calling the function. */
1065 /* Clean up stack so that variables might have smaller offsets. */
1066 do_pending_stack_adjust ();
1068 /* Save a copy of the location of const_equiv_varray for
1069 mark_stores, called via note_stores. */
1070 global_const_equiv_varray
= map
->const_equiv_varray
;
1072 /* If the called function does an alloca, save and restore the
1073 stack pointer around the call. This saves stack space, but
1074 also is required if this inline is being done between two
1076 if (inl_f
->calls_alloca
)
1077 emit_stack_save (SAVE_BLOCK
, &stack_save
, NULL_RTX
);
1079 /* Now copy the insns one by one. Do this in two passes, first the insns and
1080 then their REG_NOTES, just like save_for_inline. */
1082 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1084 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1086 rtx copy
, pattern
, set
;
1088 map
->orig_asm_operands_vector
= 0;
1090 switch (GET_CODE (insn
))
1093 pattern
= PATTERN (insn
);
1094 set
= single_set (insn
);
1096 if (GET_CODE (pattern
) == USE
1097 && GET_CODE (XEXP (pattern
, 0)) == REG
1098 && REG_FUNCTION_VALUE_P (XEXP (pattern
, 0)))
1099 /* The (USE (REG n)) at return from the function should
1100 be ignored since we are changing (REG n) into
1104 /* If the inline fn needs eh context, make sure that
1105 the current fn has one. */
1106 if (GET_CODE (pattern
) == USE
1107 && find_reg_note (insn
, REG_EH_CONTEXT
, 0) != 0)
1110 /* Ignore setting a function value that we don't want to use. */
1111 if (map
->inline_target
== 0
1113 && GET_CODE (SET_DEST (set
)) == REG
1114 && REG_FUNCTION_VALUE_P (SET_DEST (set
)))
1116 if (volatile_refs_p (SET_SRC (set
)))
1120 /* If we must not delete the source,
1121 load it into a new temporary. */
1122 copy
= emit_insn (copy_rtx_and_substitute (pattern
, map
, 0));
1124 new_set
= single_set (copy
);
1129 = gen_reg_rtx (GET_MODE (SET_DEST (new_set
)));
1131 /* If the source and destination are the same and it
1132 has a note on it, keep the insn. */
1133 else if (rtx_equal_p (SET_DEST (set
), SET_SRC (set
))
1134 && REG_NOTES (insn
) != 0)
1135 copy
= emit_insn (copy_rtx_and_substitute (pattern
, map
, 0));
1140 /* If this is setting the static chain rtx, omit it. */
1141 else if (static_chain_value
!= 0
1143 && GET_CODE (SET_DEST (set
)) == REG
1144 && rtx_equal_p (SET_DEST (set
),
1145 static_chain_incoming_rtx
))
1148 /* If this is setting the static chain pseudo, set it from
1149 the value we want to give it instead. */
1150 else if (static_chain_value
!= 0
1152 && rtx_equal_p (SET_SRC (set
),
1153 static_chain_incoming_rtx
))
1155 rtx newdest
= copy_rtx_and_substitute (SET_DEST (set
), map
, 1);
1157 copy
= emit_move_insn (newdest
, static_chain_value
);
1158 static_chain_value
= 0;
1161 /* If this is setting the virtual stack vars register, this must
1162 be the code at the handler for a builtin longjmp. The value
1163 saved in the setjmp buffer will be the address of the frame
1164 we've made for this inlined instance within our frame. But we
1165 know the offset of that value so we can use it to reconstruct
1166 our virtual stack vars register from that value. If we are
1167 copying it from the stack pointer, leave it unchanged. */
1169 && rtx_equal_p (SET_DEST (set
), virtual_stack_vars_rtx
))
1171 HOST_WIDE_INT offset
;
1172 temp
= map
->reg_map
[REGNO (SET_DEST (set
))];
1173 temp
= VARRAY_CONST_EQUIV (map
->const_equiv_varray
,
1176 if (rtx_equal_p (temp
, virtual_stack_vars_rtx
))
1178 else if (GET_CODE (temp
) == PLUS
1179 && rtx_equal_p (XEXP (temp
, 0), virtual_stack_vars_rtx
)
1180 && GET_CODE (XEXP (temp
, 1)) == CONST_INT
)
1181 offset
= INTVAL (XEXP (temp
, 1));
1185 if (rtx_equal_p (SET_SRC (set
), stack_pointer_rtx
))
1186 temp
= SET_SRC (set
);
1188 temp
= force_operand (plus_constant (SET_SRC (set
),
1192 copy
= emit_move_insn (virtual_stack_vars_rtx
, temp
);
1196 copy
= emit_insn (copy_rtx_and_substitute (pattern
, map
, 0));
1197 /* REG_NOTES will be copied later. */
1200 /* If this insn is setting CC0, it may need to look at
1201 the insn that uses CC0 to see what type of insn it is.
1202 In that case, the call to recog via validate_change will
1203 fail. So don't substitute constants here. Instead,
1204 do it when we emit the following insn.
1206 For example, see the pyr.md file. That machine has signed and
1207 unsigned compares. The compare patterns must check the
1208 following branch insn to see which what kind of compare to
1211 If the previous insn set CC0, substitute constants on it as
1213 if (sets_cc0_p (PATTERN (copy
)) != 0)
1218 try_constants (cc0_insn
, map
);
1220 try_constants (copy
, map
);
1223 try_constants (copy
, map
);
1228 if (GET_CODE (PATTERN (insn
)) == RETURN
1229 || (GET_CODE (PATTERN (insn
)) == PARALLEL
1230 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == RETURN
))
1232 if (local_return_label
== 0)
1233 local_return_label
= gen_label_rtx ();
1234 pattern
= gen_jump (local_return_label
);
1237 pattern
= copy_rtx_and_substitute (PATTERN (insn
), map
, 0);
1239 copy
= emit_jump_insn (pattern
);
1243 try_constants (cc0_insn
, map
);
1246 try_constants (copy
, map
);
1248 /* If this used to be a conditional jump insn but whose branch
1249 direction is now know, we must do something special. */
1250 if (condjump_p (insn
) && ! simplejump_p (insn
) && map
->last_pc_value
)
1253 /* If the previous insn set cc0 for us, delete it. */
1254 if (sets_cc0_p (PREV_INSN (copy
)))
1255 delete_insn (PREV_INSN (copy
));
1258 /* If this is now a no-op, delete it. */
1259 if (map
->last_pc_value
== pc_rtx
)
1265 /* Otherwise, this is unconditional jump so we must put a
1266 BARRIER after it. We could do some dead code elimination
1267 here, but jump.c will do it just as well. */
1273 pattern
= copy_rtx_and_substitute (PATTERN (insn
), map
, 0);
1274 copy
= emit_call_insn (pattern
);
1276 /* Because the USAGE information potentially contains objects other
1277 than hard registers, we need to copy it. */
1278 CALL_INSN_FUNCTION_USAGE (copy
)
1279 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn
),
1284 try_constants (cc0_insn
, map
);
1287 try_constants (copy
, map
);
1289 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1290 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1291 VARRAY_CONST_EQUIV (map
->const_equiv_varray
, i
).rtx
= 0;
1295 copy
= emit_label (get_label_from_map (map
,
1296 CODE_LABEL_NUMBER (insn
)));
1297 LABEL_NAME (copy
) = LABEL_NAME (insn
);
1302 copy
= emit_barrier ();
1306 /* It is important to discard function-end and function-beg notes,
1307 so we have only one of each in the current function.
1308 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1309 deleted these in the copy used for continuing compilation,
1310 not the copy used for inlining). */
1311 if (NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
1312 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_BEG
1313 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_DELETED
)
1315 copy
= emit_note (NOTE_SOURCE_FILE (insn
),
1316 NOTE_LINE_NUMBER (insn
));
1318 && (NOTE_LINE_NUMBER (copy
) == NOTE_INSN_EH_REGION_BEG
1319 || NOTE_LINE_NUMBER (copy
) == NOTE_INSN_EH_REGION_END
))
1322 = get_label_from_map (map
, NOTE_EH_HANDLER (copy
));
1324 /* we have to duplicate the handlers for the original */
1325 if (NOTE_LINE_NUMBER (copy
) == NOTE_INSN_EH_REGION_BEG
)
1327 /* We need to duplicate the handlers for the EH region
1328 and we need to indicate where the label map is */
1330 duplicate_eh_handlers (NOTE_EH_HANDLER (copy
),
1331 CODE_LABEL_NUMBER (label
),
1332 expand_inline_function_eh_labelmap
);
1335 /* We have to forward these both to match the new exception
1337 NOTE_EH_HANDLER (copy
) = CODE_LABEL_NUMBER (label
);
1340 && (NOTE_LINE_NUMBER (copy
) == NOTE_INSN_BLOCK_BEG
1341 || NOTE_LINE_NUMBER (copy
) == NOTE_INSN_BLOCK_END
)
1342 && NOTE_BLOCK (insn
))
1344 tree
*mapped_block_p
;
1347 = (tree
*) bsearch (NOTE_BLOCK (insn
),
1348 &VARRAY_TREE (map
->block_map
, 0),
1349 map
->block_map
->elements_used
,
1353 if (!mapped_block_p
)
1356 NOTE_BLOCK (copy
) = *mapped_block_p
;
1368 RTX_INTEGRATED_P (copy
) = 1;
1370 map
->insn_map
[INSN_UID (insn
)] = copy
;
1373 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1374 from parameters can be substituted in. These are the only ones that
1375 are valid across the entire function. */
1377 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1378 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
1379 && map
->insn_map
[INSN_UID (insn
)]
1380 && REG_NOTES (insn
))
1382 rtx tem
= copy_rtx_and_substitute (REG_NOTES (insn
), map
, 0);
1384 /* We must also do subst_constants, in case one of our parameters
1385 has const type and constant value. */
1386 subst_constants (&tem
, NULL_RTX
, map
, 0);
1387 apply_change_group ();
1388 REG_NOTES (map
->insn_map
[INSN_UID (insn
)]) = tem
;
1391 if (local_return_label
)
1392 emit_label (local_return_label
);
1394 /* Restore the stack pointer if we saved it above. */
1395 if (inl_f
->calls_alloca
)
1396 emit_stack_restore (SAVE_BLOCK
, stack_save
, NULL_RTX
);
1398 if (!current_function
->x_whole_function_mode_p
)
1399 /* In statement-at-a-time mode, we just tell the front-end to add
1400 this block to the list of blocks at this binding level. We
1401 can't do it the way it's done for function-at-a-time mode the
1402 superblocks have not been created yet. */
1403 insert_block (block
);
1407 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl
));
1408 BLOCK_CHAIN (DECL_INITIAL (current_function_decl
)) = block
;
1411 /* End the scope containing the copied formal parameter variables
1412 and copied LABEL_DECLs. We pass NULL_TREE for the variables list
1413 here so that expand_end_bindings will not check for unused
1414 variables. That's already been checked for when the inlined
1415 function was defined. */
1416 expand_end_bindings (NULL_TREE
, 1, 1);
1418 /* Must mark the line number note after inlined functions as a repeat, so
1419 that the test coverage code can avoid counting the call twice. This
1420 just tells the code to ignore the immediately following line note, since
1421 there already exists a copy of this note before the expanded inline call.
1422 This line number note is still needed for debugging though, so we can't
1424 if (flag_test_coverage
)
1425 emit_note (0, NOTE_REPEATED_LINE_NUMBER
);
1427 emit_line_note (input_filename
, lineno
);
1429 /* If the function returns a BLKmode object in a register, copy it
1430 out of the temp register into a BLKmode memory object. */
1432 && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl
))) == BLKmode
1433 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl
))))
1434 target
= copy_blkmode_from_reg (0, target
, TREE_TYPE (TREE_TYPE (fndecl
)));
1436 if (structure_value_addr
)
1438 target
= gen_rtx_MEM (TYPE_MODE (type
),
1439 memory_address (TYPE_MODE (type
),
1440 structure_value_addr
));
1441 MEM_SET_IN_STRUCT_P (target
, 1);
1444 /* Make sure we free the things we explicitly allocated with xmalloc. */
1446 free (real_label_map
);
1447 VARRAY_FREE (map
->const_equiv_varray
);
1448 free (map
->reg_map
);
1449 VARRAY_FREE (map
->block_map
);
1450 free (map
->insn_map
);
1455 inlining
= inlining_previous
;
1460 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1461 push all of those decls and give each one the corresponding home. */
1464 integrate_parm_decls (args
, map
, arg_vector
)
1466 struct inline_remap
*map
;
1472 for (tail
= args
, i
= 0; tail
; tail
= TREE_CHAIN (tail
), i
++)
1474 tree decl
= copy_decl_for_inlining (tail
, map
->fndecl
,
1475 current_function_decl
);
1477 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector
, i
), map
, 1);
1479 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1480 here, but that's going to require some more work. */
1481 /* DECL_INCOMING_RTL (decl) = ?; */
1482 /* Fully instantiate the address with the equivalent form so that the
1483 debugging information contains the actual register, instead of the
1484 virtual register. Do this by not passing an insn to
1486 subst_constants (&new_decl_rtl
, NULL_RTX
, map
, 1);
1487 apply_change_group ();
1488 DECL_RTL (decl
) = new_decl_rtl
;
1492 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1493 current function a tree of contexts isomorphic to the one that is given.
1495 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1496 registers used in the DECL_RTL field should be remapped. If it is zero,
1497 no mapping is necessary. */
1500 integrate_decl_tree (let
, map
)
1502 struct inline_remap
*map
;
1508 new_block
= make_node (BLOCK
);
1509 VARRAY_PUSH_TREE (map
->block_map
, new_block
);
1510 next
= &BLOCK_VARS (new_block
);
1512 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
1516 push_obstacks_nochange ();
1517 saveable_allocation ();
1518 d
= copy_decl_for_inlining (t
, map
->fndecl
, current_function_decl
);
1521 if (DECL_RTL (t
) != 0)
1523 DECL_RTL (d
) = copy_rtx_and_substitute (DECL_RTL (t
), map
, 1);
1525 /* Fully instantiate the address with the equivalent form so that the
1526 debugging information contains the actual register, instead of the
1527 virtual register. Do this by not passing an insn to
1529 subst_constants (&DECL_RTL (d
), NULL_RTX
, map
, 1);
1530 apply_change_group ();
1533 /* Add this declaration to the list of variables in the new
1536 next
= &TREE_CHAIN (d
);
1539 next
= &BLOCK_SUBBLOCKS (new_block
);
1540 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= BLOCK_CHAIN (t
))
1542 *next
= integrate_decl_tree (t
, map
);
1543 BLOCK_SUPERCONTEXT (*next
) = new_block
;
1544 next
= &BLOCK_CHAIN (*next
);
1547 TREE_USED (new_block
) = TREE_USED (let
);
1548 BLOCK_ABSTRACT_ORIGIN (new_block
) = let
;
1553 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1554 except for those few rtx codes that are sharable.
1556 We always return an rtx that is similar to that incoming rtx, with the
1557 exception of possibly changing a REG to a SUBREG or vice versa. No
1558 rtl is ever emitted.
1560 If FOR_LHS is nonzero, if means we are processing something that will
1561 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1562 inlining since we need to be conservative in how it is set for
1565 Handle constants that need to be placed in the constant pool by
1566 calling `force_const_mem'. */
1569 copy_rtx_and_substitute (orig
, map
, for_lhs
)
1571 struct inline_remap
*map
;
1574 register rtx copy
, temp
;
1576 register RTX_CODE code
;
1577 register enum machine_mode mode
;
1578 register const char *format_ptr
;
1584 code
= GET_CODE (orig
);
1585 mode
= GET_MODE (orig
);
1590 /* If the stack pointer register shows up, it must be part of
1591 stack-adjustments (*not* because we eliminated the frame pointer!).
1592 Small hard registers are returned as-is. Pseudo-registers
1593 go through their `reg_map'. */
1594 regno
= REGNO (orig
);
1595 if (regno
<= LAST_VIRTUAL_REGISTER
1596 || (map
->integrating
1597 && DECL_SAVED_INSNS (map
->fndecl
)->internal_arg_pointer
== orig
))
1599 /* Some hard registers are also mapped,
1600 but others are not translated. */
1601 if (map
->reg_map
[regno
] != 0)
1602 return map
->reg_map
[regno
];
1604 /* If this is the virtual frame pointer, make space in current
1605 function's stack frame for the stack frame of the inline function.
1607 Copy the address of this area into a pseudo. Map
1608 virtual_stack_vars_rtx to this pseudo and set up a constant
1609 equivalence for it to be the address. This will substitute the
1610 address into insns where it can be substituted and use the new
1611 pseudo where it can't. */
1612 if (regno
== VIRTUAL_STACK_VARS_REGNUM
)
1615 int size
= get_func_frame_size (DECL_SAVED_INSNS (map
->fndecl
));
1617 #ifdef FRAME_GROWS_DOWNWARD
1618 /* In this case, virtual_stack_vars_rtx points to one byte
1619 higher than the top of the frame area. So make sure we
1620 allocate a big enough chunk to keep the frame pointer
1621 aligned like a real one. */
1622 size
= CEIL_ROUND (size
, BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
);
1625 loc
= assign_stack_temp (BLKmode
, size
, 1);
1626 loc
= XEXP (loc
, 0);
1627 #ifdef FRAME_GROWS_DOWNWARD
1628 /* In this case, virtual_stack_vars_rtx points to one byte
1629 higher than the top of the frame area. So compute the offset
1630 to one byte higher than our substitute frame. */
1631 loc
= plus_constant (loc
, size
);
1633 map
->reg_map
[regno
] = temp
1634 = force_reg (Pmode
, force_operand (loc
, NULL_RTX
));
1636 #ifdef STACK_BOUNDARY
1637 mark_reg_pointer (map
->reg_map
[regno
],
1638 STACK_BOUNDARY
/ BITS_PER_UNIT
);
1641 SET_CONST_EQUIV_DATA (map
, temp
, loc
, CONST_AGE_PARM
);
1643 seq
= gen_sequence ();
1645 emit_insn_after (seq
, map
->insns_at_start
);
1648 else if (regno
== VIRTUAL_INCOMING_ARGS_REGNUM
1649 || (map
->integrating
1650 && (DECL_SAVED_INSNS (map
->fndecl
)->internal_arg_pointer
1653 /* Do the same for a block to contain any arguments referenced
1656 int size
= DECL_SAVED_INSNS (map
->fndecl
)->args_size
;
1659 loc
= assign_stack_temp (BLKmode
, size
, 1);
1660 loc
= XEXP (loc
, 0);
1661 /* When arguments grow downward, the virtual incoming
1662 args pointer points to the top of the argument block,
1663 so the remapped location better do the same. */
1664 #ifdef ARGS_GROW_DOWNWARD
1665 loc
= plus_constant (loc
, size
);
1667 map
->reg_map
[regno
] = temp
1668 = force_reg (Pmode
, force_operand (loc
, NULL_RTX
));
1670 #ifdef STACK_BOUNDARY
1671 mark_reg_pointer (map
->reg_map
[regno
],
1672 STACK_BOUNDARY
/ BITS_PER_UNIT
);
1675 SET_CONST_EQUIV_DATA (map
, temp
, loc
, CONST_AGE_PARM
);
1677 seq
= gen_sequence ();
1679 emit_insn_after (seq
, map
->insns_at_start
);
1682 else if (REG_FUNCTION_VALUE_P (orig
))
1684 /* This is a reference to the function return value. If
1685 the function doesn't have a return value, error. If the
1686 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
1687 if (map
->inline_target
== 0)
1688 /* Must be unrolling loops or replicating code if we
1689 reach here, so return the register unchanged. */
1691 else if (GET_MODE (map
->inline_target
) != BLKmode
1692 && mode
!= GET_MODE (map
->inline_target
))
1693 return gen_lowpart (mode
, map
->inline_target
);
1695 return map
->inline_target
;
1699 if (map
->reg_map
[regno
] == NULL
)
1701 map
->reg_map
[regno
] = gen_reg_rtx (mode
);
1702 REG_USERVAR_P (map
->reg_map
[regno
]) = REG_USERVAR_P (orig
);
1703 REG_LOOP_TEST_P (map
->reg_map
[regno
]) = REG_LOOP_TEST_P (orig
);
1704 RTX_UNCHANGING_P (map
->reg_map
[regno
]) = RTX_UNCHANGING_P (orig
);
1705 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1707 if (map
->regno_pointer_flag
[regno
])
1708 mark_reg_pointer (map
->reg_map
[regno
],
1709 map
->regno_pointer_align
[regno
]);
1711 return map
->reg_map
[regno
];
1714 copy
= copy_rtx_and_substitute (SUBREG_REG (orig
), map
, for_lhs
);
1715 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1716 if (GET_CODE (copy
) == SUBREG
)
1717 return gen_rtx_SUBREG (GET_MODE (orig
), SUBREG_REG (copy
),
1718 SUBREG_WORD (orig
) + SUBREG_WORD (copy
));
1719 else if (GET_CODE (copy
) == CONCAT
)
1721 rtx retval
= subreg_realpart_p (orig
) ? XEXP (copy
, 0) : XEXP (copy
, 1);
1723 if (GET_MODE (retval
) == GET_MODE (orig
))
1726 return gen_rtx_SUBREG (GET_MODE (orig
), retval
,
1727 (SUBREG_WORD (orig
) %
1728 (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig
)))
1729 / (unsigned) UNITS_PER_WORD
)));
1732 return gen_rtx_SUBREG (GET_MODE (orig
), copy
,
1733 SUBREG_WORD (orig
));
1736 copy
= gen_rtx_ADDRESSOF (mode
,
1737 copy_rtx_and_substitute (XEXP (orig
, 0),
1739 0, ADDRESSOF_DECL(orig
));
1740 regno
= ADDRESSOF_REGNO (orig
);
1741 if (map
->reg_map
[regno
])
1742 regno
= REGNO (map
->reg_map
[regno
]);
1743 else if (regno
> LAST_VIRTUAL_REGISTER
)
1745 temp
= XEXP (orig
, 0);
1746 map
->reg_map
[regno
] = gen_reg_rtx (GET_MODE (temp
));
1747 REG_USERVAR_P (map
->reg_map
[regno
]) = REG_USERVAR_P (temp
);
1748 REG_LOOP_TEST_P (map
->reg_map
[regno
]) = REG_LOOP_TEST_P (temp
);
1749 RTX_UNCHANGING_P (map
->reg_map
[regno
]) = RTX_UNCHANGING_P (temp
);
1750 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1752 if (map
->regno_pointer_flag
[regno
])
1753 mark_reg_pointer (map
->reg_map
[regno
],
1754 map
->regno_pointer_align
[regno
]);
1755 regno
= REGNO (map
->reg_map
[regno
]);
1757 ADDRESSOF_REGNO (copy
) = regno
;
1762 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1763 to (use foo) if the original insn didn't have a subreg.
1764 Removing the subreg distorts the VAX movstrhi pattern
1765 by changing the mode of an operand. */
1766 copy
= copy_rtx_and_substitute (XEXP (orig
, 0), map
, code
== CLOBBER
);
1767 if (GET_CODE (copy
) == SUBREG
&& GET_CODE (XEXP (orig
, 0)) != SUBREG
)
1768 copy
= SUBREG_REG (copy
);
1769 return gen_rtx_fmt_e (code
, VOIDmode
, copy
);
1772 LABEL_PRESERVE_P (get_label_from_map (map
, CODE_LABEL_NUMBER (orig
)))
1773 = LABEL_PRESERVE_P (orig
);
1774 return get_label_from_map (map
, CODE_LABEL_NUMBER (orig
));
1780 LABEL_REF_NONLOCAL_P (orig
) ? XEXP (orig
, 0)
1781 : get_label_from_map (map
, CODE_LABEL_NUMBER (XEXP (orig
, 0))));
1783 LABEL_OUTSIDE_LOOP_P (copy
) = LABEL_OUTSIDE_LOOP_P (orig
);
1785 /* The fact that this label was previously nonlocal does not mean
1786 it still is, so we must check if it is within the range of
1787 this function's labels. */
1788 LABEL_REF_NONLOCAL_P (copy
)
1789 = (LABEL_REF_NONLOCAL_P (orig
)
1790 && ! (CODE_LABEL_NUMBER (XEXP (copy
, 0)) >= get_first_label_num ()
1791 && CODE_LABEL_NUMBER (XEXP (copy
, 0)) < max_label_num ()));
1793 /* If we have made a nonlocal label local, it means that this
1794 inlined call will be referring to our nonlocal goto handler.
1795 So make sure we create one for this block; we normally would
1796 not since this is not otherwise considered a "call". */
1797 if (LABEL_REF_NONLOCAL_P (orig
) && ! LABEL_REF_NONLOCAL_P (copy
))
1798 function_call_count
++;
1808 /* Symbols which represent the address of a label stored in the constant
1809 pool must be modified to point to a constant pool entry for the
1810 remapped label. Otherwise, symbols are returned unchanged. */
1811 if (CONSTANT_POOL_ADDRESS_P (orig
))
1813 struct function
*f
= inlining
? inlining
: current_function
;
1814 rtx constant
= get_pool_constant_for_function (f
, orig
);
1815 enum machine_mode const_mode
= get_pool_mode_for_function (f
, orig
);
1818 rtx temp
= force_const_mem (const_mode
,
1819 copy_rtx_and_substitute (constant
,
1823 /* Legitimizing the address here is incorrect.
1825 Since we had a SYMBOL_REF before, we can assume it is valid
1826 to have one in this position in the insn.
1828 Also, change_address may create new registers. These
1829 registers will not have valid reg_map entries. This can
1830 cause try_constants() to fail because assumes that all
1831 registers in the rtx have valid reg_map entries, and it may
1832 end up replacing one of these new registers with junk. */
1834 if (! memory_address_p (GET_MODE (temp
), XEXP (temp
, 0)))
1835 temp
= change_address (temp
, GET_MODE (temp
), XEXP (temp
, 0));
1838 temp
= XEXP (temp
, 0);
1840 #ifdef POINTERS_EXTEND_UNSIGNED
1841 if (GET_MODE (temp
) != GET_MODE (orig
))
1842 temp
= convert_memory_address (GET_MODE (orig
), temp
);
1846 else if (GET_CODE (constant
) == LABEL_REF
)
1847 return XEXP (force_const_mem
1849 copy_rtx_and_substitute (constant
, map
, for_lhs
)),
1853 if (SYMBOL_REF_NEED_ADJUST (orig
))
1856 return rethrow_symbol_map (orig
,
1857 expand_inline_function_eh_labelmap
);
1863 /* We have to make a new copy of this CONST_DOUBLE because don't want
1864 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
1865 duplicate of a CONST_DOUBLE we have already seen. */
1866 if (GET_MODE_CLASS (GET_MODE (orig
)) == MODE_FLOAT
)
1870 REAL_VALUE_FROM_CONST_DOUBLE (d
, orig
);
1871 return CONST_DOUBLE_FROM_REAL_VALUE (d
, GET_MODE (orig
));
1874 return immed_double_const (CONST_DOUBLE_LOW (orig
),
1875 CONST_DOUBLE_HIGH (orig
), VOIDmode
);
1878 /* Make new constant pool entry for a constant
1879 that was in the pool of the inline function. */
1880 if (RTX_INTEGRATED_P (orig
))
1885 /* If a single asm insn contains multiple output operands
1886 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1887 We must make sure that the copied insn continues to share it. */
1888 if (map
->orig_asm_operands_vector
== XVEC (orig
, 3))
1890 copy
= rtx_alloc (ASM_OPERANDS
);
1891 copy
->volatil
= orig
->volatil
;
1892 XSTR (copy
, 0) = XSTR (orig
, 0);
1893 XSTR (copy
, 1) = XSTR (orig
, 1);
1894 XINT (copy
, 2) = XINT (orig
, 2);
1895 XVEC (copy
, 3) = map
->copy_asm_operands_vector
;
1896 XVEC (copy
, 4) = map
->copy_asm_constraints_vector
;
1897 XSTR (copy
, 5) = XSTR (orig
, 5);
1898 XINT (copy
, 6) = XINT (orig
, 6);
1904 /* This is given special treatment because the first
1905 operand of a CALL is a (MEM ...) which may get
1906 forced into a register for cse. This is undesirable
1907 if function-address cse isn't wanted or if we won't do cse. */
1908 #ifndef NO_FUNCTION_CSE
1909 if (! (optimize
&& ! flag_no_function_cse
))
1914 gen_rtx_MEM (GET_MODE (XEXP (orig
, 0)),
1915 copy_rtx_and_substitute (XEXP (XEXP (orig
, 0), 0),
1917 copy_rtx_and_substitute (XEXP (orig
, 1), map
, 0));
1921 /* Must be ifdefed out for loop unrolling to work. */
1927 /* If this is setting fp or ap, it means that we have a nonlocal goto.
1928 Adjust the setting by the offset of the area we made.
1929 If the nonlocal goto is into the current function,
1930 this will result in unnecessarily bad code, but should work. */
1931 if (SET_DEST (orig
) == virtual_stack_vars_rtx
1932 || SET_DEST (orig
) == virtual_incoming_args_rtx
)
1934 /* In case a translation hasn't occurred already, make one now. */
1937 HOST_WIDE_INT loc_offset
;
1939 copy_rtx_and_substitute (SET_DEST (orig
), map
, for_lhs
);
1940 equiv_reg
= map
->reg_map
[REGNO (SET_DEST (orig
))];
1941 equiv_loc
= VARRAY_CONST_EQUIV (map
->const_equiv_varray
,
1942 REGNO (equiv_reg
)).rtx
;
1944 = GET_CODE (equiv_loc
) == REG
? 0 : INTVAL (XEXP (equiv_loc
, 1));
1946 return gen_rtx_SET (VOIDmode
, SET_DEST (orig
),
1949 (copy_rtx_and_substitute (SET_SRC (orig
),
1955 return gen_rtx_SET (VOIDmode
,
1956 copy_rtx_and_substitute (SET_DEST (orig
), map
, 1),
1957 copy_rtx_and_substitute (SET_SRC (orig
), map
, 0));
1962 && GET_CODE (XEXP (orig
, 0)) == SYMBOL_REF
1963 && CONSTANT_POOL_ADDRESS_P (XEXP (orig
, 0)))
1965 enum machine_mode const_mode
1966 = get_pool_mode_for_function (inlining
, XEXP (orig
, 0));
1968 = get_pool_constant_for_function (inlining
, XEXP (orig
, 0));
1970 constant
= copy_rtx_and_substitute (constant
, map
, 0);
1972 /* If this was an address of a constant pool entry that itself
1973 had to be placed in the constant pool, it might not be a
1974 valid address. So the recursive call might have turned it
1975 into a register. In that case, it isn't a constant any
1976 more, so return it. This has the potential of changing a
1977 MEM into a REG, but we'll assume that it safe. */
1978 if (! CONSTANT_P (constant
))
1981 return validize_mem (force_const_mem (const_mode
, constant
));
1984 copy
= rtx_alloc (MEM
);
1985 PUT_MODE (copy
, mode
);
1986 XEXP (copy
, 0) = copy_rtx_and_substitute (XEXP (orig
, 0), map
, 0);
1987 MEM_COPY_ATTRIBUTES (copy
, orig
);
1988 MEM_ALIAS_SET (copy
) = MEM_ALIAS_SET (orig
);
1989 RTX_UNCHANGING_P (copy
) = RTX_UNCHANGING_P (orig
);
1996 copy
= rtx_alloc (code
);
1997 PUT_MODE (copy
, mode
);
1998 copy
->in_struct
= orig
->in_struct
;
1999 copy
->volatil
= orig
->volatil
;
2000 copy
->unchanging
= orig
->unchanging
;
2002 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
2004 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
2006 switch (*format_ptr
++)
2009 /* Copy this through the wide int field; that's safest. */
2010 X0WINT (copy
, i
) = X0WINT (orig
, i
);
2015 = copy_rtx_and_substitute (XEXP (orig
, i
), map
, for_lhs
);
2019 /* Change any references to old-insns to point to the
2020 corresponding copied insns. */
2021 XEXP (copy
, i
) = map
->insn_map
[INSN_UID (XEXP (orig
, i
))];
2025 XVEC (copy
, i
) = XVEC (orig
, i
);
2026 if (XVEC (orig
, i
) != NULL
&& XVECLEN (orig
, i
) != 0)
2028 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
2029 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
2030 XVECEXP (copy
, i
, j
)
2031 = copy_rtx_and_substitute (XVECEXP (orig
, i
, j
),
2037 XWINT (copy
, i
) = XWINT (orig
, i
);
2041 XINT (copy
, i
) = XINT (orig
, i
);
2045 XSTR (copy
, i
) = XSTR (orig
, i
);
2049 XTREE (copy
, i
) = XTREE (orig
, i
);
2057 if (code
== ASM_OPERANDS
&& map
->orig_asm_operands_vector
== 0)
2059 map
->orig_asm_operands_vector
= XVEC (orig
, 3);
2060 map
->copy_asm_operands_vector
= XVEC (copy
, 3);
2061 map
->copy_asm_constraints_vector
= XVEC (copy
, 4);
2067 /* Substitute known constant values into INSN, if that is valid. */
2070 try_constants (insn
, map
)
2072 struct inline_remap
*map
;
2078 /* First try just updating addresses, then other things. This is
2079 important when we have something like the store of a constant
2080 into memory and we can update the memory address but the machine
2081 does not support a constant source. */
2082 subst_constants (&PATTERN (insn
), insn
, map
, 1);
2083 apply_change_group ();
2084 subst_constants (&PATTERN (insn
), insn
, map
, 0);
2085 apply_change_group ();
2087 /* Show we don't know the value of anything stored or clobbered. */
2088 note_stores (PATTERN (insn
), mark_stores
, NULL
);
2089 map
->last_pc_value
= 0;
2091 map
->last_cc0_value
= 0;
2094 /* Set up any constant equivalences made in this insn. */
2095 for (i
= 0; i
< map
->num_sets
; i
++)
2097 if (GET_CODE (map
->equiv_sets
[i
].dest
) == REG
)
2099 int regno
= REGNO (map
->equiv_sets
[i
].dest
);
2101 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map
, regno
);
2102 if (VARRAY_CONST_EQUIV (map
->const_equiv_varray
, regno
).rtx
== 0
2103 /* Following clause is a hack to make case work where GNU C++
2104 reassigns a variable to make cse work right. */
2105 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map
->const_equiv_varray
,
2107 map
->equiv_sets
[i
].equiv
))
2108 SET_CONST_EQUIV_DATA (map
, map
->equiv_sets
[i
].dest
,
2109 map
->equiv_sets
[i
].equiv
, map
->const_age
);
2111 else if (map
->equiv_sets
[i
].dest
== pc_rtx
)
2112 map
->last_pc_value
= map
->equiv_sets
[i
].equiv
;
2114 else if (map
->equiv_sets
[i
].dest
== cc0_rtx
)
2115 map
->last_cc0_value
= map
->equiv_sets
[i
].equiv
;
2120 /* Substitute known constants for pseudo regs in the contents of LOC,
2121 which are part of INSN.
2122 If INSN is zero, the substitution should always be done (this is used to
2124 These changes are taken out by try_constants if the result is not valid.
2126 Note that we are more concerned with determining when the result of a SET
2127 is a constant, for further propagation, than actually inserting constants
2128 into insns; cse will do the latter task better.
2130 This function is also used to adjust address of items previously addressed
2131 via the virtual stack variable or virtual incoming arguments registers.
2133 If MEMONLY is nonzero, only make changes inside a MEM. */
2136 subst_constants (loc
, insn
, map
, memonly
)
2139 struct inline_remap
*map
;
2144 register enum rtx_code code
;
2145 register const char *format_ptr
;
2146 int num_changes
= num_validated_changes ();
2148 enum machine_mode op0_mode
= MAX_MACHINE_MODE
;
2150 code
= GET_CODE (x
);
2166 validate_change (insn
, loc
, map
->last_cc0_value
, 1);
2172 /* The only thing we can do with a USE or CLOBBER is possibly do
2173 some substitutions in a MEM within it. */
2174 if (GET_CODE (XEXP (x
, 0)) == MEM
)
2175 subst_constants (&XEXP (XEXP (x
, 0), 0), insn
, map
, 0);
2179 /* Substitute for parms and known constants. Don't replace
2180 hard regs used as user variables with constants. */
2183 int regno
= REGNO (x
);
2184 struct const_equiv_data
*p
;
2186 if (! (regno
< FIRST_PSEUDO_REGISTER
&& REG_USERVAR_P (x
))
2187 && (size_t) regno
< VARRAY_SIZE (map
->const_equiv_varray
)
2188 && (p
= &VARRAY_CONST_EQUIV (map
->const_equiv_varray
, regno
),
2190 && p
->age
>= map
->const_age
)
2191 validate_change (insn
, loc
, p
->rtx
, 1);
2196 /* SUBREG applied to something other than a reg
2197 should be treated as ordinary, since that must
2198 be a special hack and we don't know how to treat it specially.
2199 Consider for example mulsidi3 in m68k.md.
2200 Ordinary SUBREG of a REG needs this special treatment. */
2201 if (! memonly
&& GET_CODE (SUBREG_REG (x
)) == REG
)
2203 rtx inner
= SUBREG_REG (x
);
2206 /* We can't call subst_constants on &SUBREG_REG (x) because any
2207 constant or SUBREG wouldn't be valid inside our SUBEG. Instead,
2208 see what is inside, try to form the new SUBREG and see if that is
2209 valid. We handle two cases: extracting a full word in an
2210 integral mode and extracting the low part. */
2211 subst_constants (&inner
, NULL_RTX
, map
, 0);
2213 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
2214 && GET_MODE_SIZE (GET_MODE (x
)) == UNITS_PER_WORD
2215 && GET_MODE (SUBREG_REG (x
)) != VOIDmode
)
2216 new = operand_subword (inner
, SUBREG_WORD (x
), 0,
2217 GET_MODE (SUBREG_REG (x
)));
2219 cancel_changes (num_changes
);
2220 if (new == 0 && subreg_lowpart_p (x
))
2221 new = gen_lowpart_common (GET_MODE (x
), inner
);
2224 validate_change (insn
, loc
, new, 1);
2231 subst_constants (&XEXP (x
, 0), insn
, map
, 0);
2233 /* If a memory address got spoiled, change it back. */
2234 if (! memonly
&& insn
!= 0 && num_validated_changes () != num_changes
2235 && ! memory_address_p (GET_MODE (x
), XEXP (x
, 0)))
2236 cancel_changes (num_changes
);
2241 /* Substitute constants in our source, and in any arguments to a
2242 complex (e..g, ZERO_EXTRACT) destination, but not in the destination
2244 rtx
*dest_loc
= &SET_DEST (x
);
2245 rtx dest
= *dest_loc
;
2248 subst_constants (&SET_SRC (x
), insn
, map
, memonly
);
2251 while (GET_CODE (*dest_loc
) == ZERO_EXTRACT
2252 || GET_CODE (*dest_loc
) == SUBREG
2253 || GET_CODE (*dest_loc
) == STRICT_LOW_PART
)
2255 if (GET_CODE (*dest_loc
) == ZERO_EXTRACT
)
2257 subst_constants (&XEXP (*dest_loc
, 1), insn
, map
, memonly
);
2258 subst_constants (&XEXP (*dest_loc
, 2), insn
, map
, memonly
);
2260 dest_loc
= &XEXP (*dest_loc
, 0);
2263 /* Do substitute in the address of a destination in memory. */
2264 if (GET_CODE (*dest_loc
) == MEM
)
2265 subst_constants (&XEXP (*dest_loc
, 0), insn
, map
, 0);
2267 /* Check for the case of DEST a SUBREG, both it and the underlying
2268 register are less than one word, and the SUBREG has the wider mode.
2269 In the case, we are really setting the underlying register to the
2270 source converted to the mode of DEST. So indicate that. */
2271 if (GET_CODE (dest
) == SUBREG
2272 && GET_MODE_SIZE (GET_MODE (dest
)) <= UNITS_PER_WORD
2273 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
))) <= UNITS_PER_WORD
2274 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
2275 <= GET_MODE_SIZE (GET_MODE (dest
)))
2276 && (tem
= gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest
)),
2278 src
= tem
, dest
= SUBREG_REG (dest
);
2280 /* If storing a recognizable value save it for later recording. */
2281 if ((map
->num_sets
< MAX_RECOG_OPERANDS
)
2282 && (CONSTANT_P (src
)
2283 || (GET_CODE (src
) == REG
2284 && (REGNO (src
) == VIRTUAL_INCOMING_ARGS_REGNUM
2285 || REGNO (src
) == VIRTUAL_STACK_VARS_REGNUM
))
2286 || (GET_CODE (src
) == PLUS
2287 && GET_CODE (XEXP (src
, 0)) == REG
2288 && (REGNO (XEXP (src
, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2289 || REGNO (XEXP (src
, 0)) == VIRTUAL_STACK_VARS_REGNUM
)
2290 && CONSTANT_P (XEXP (src
, 1)))
2291 || GET_CODE (src
) == COMPARE
2296 && (src
== pc_rtx
|| GET_CODE (src
) == RETURN
2297 || GET_CODE (src
) == LABEL_REF
))))
2299 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2300 it will cause us to save the COMPARE with any constants
2301 substituted, which is what we want for later. */
2302 map
->equiv_sets
[map
->num_sets
].equiv
= copy_rtx (src
);
2303 map
->equiv_sets
[map
->num_sets
++].dest
= dest
;
2312 format_ptr
= GET_RTX_FORMAT (code
);
2314 /* If the first operand is an expression, save its mode for later. */
2315 if (*format_ptr
== 'e')
2316 op0_mode
= GET_MODE (XEXP (x
, 0));
2318 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
2320 switch (*format_ptr
++)
2327 subst_constants (&XEXP (x
, i
), insn
, map
, memonly
);
2338 if (XVEC (x
, i
) != NULL
&& XVECLEN (x
, i
) != 0)
2339 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2340 subst_constants (&XVECEXP (x
, i
, j
), insn
, map
, memonly
);
2349 /* If this is a commutative operation, move a constant to the second
2350 operand unless the second operand is already a CONST_INT. */
2352 && (GET_RTX_CLASS (code
) == 'c' || code
== NE
|| code
== EQ
)
2353 && CONSTANT_P (XEXP (x
, 0)) && GET_CODE (XEXP (x
, 1)) != CONST_INT
)
2355 rtx tem
= XEXP (x
, 0);
2356 validate_change (insn
, &XEXP (x
, 0), XEXP (x
, 1), 1);
2357 validate_change (insn
, &XEXP (x
, 1), tem
, 1);
2360 /* Simplify the expression in case we put in some constants. */
2362 switch (GET_RTX_CLASS (code
))
2365 if (op0_mode
== MAX_MACHINE_MODE
)
2367 new = simplify_unary_operation (code
, GET_MODE (x
),
2368 XEXP (x
, 0), op0_mode
);
2373 enum machine_mode op_mode
= GET_MODE (XEXP (x
, 0));
2375 if (op_mode
== VOIDmode
)
2376 op_mode
= GET_MODE (XEXP (x
, 1));
2377 new = simplify_relational_operation (code
, op_mode
,
2378 XEXP (x
, 0), XEXP (x
, 1));
2379 #ifdef FLOAT_STORE_FLAG_VALUE
2380 if (new != 0 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
)
2381 new = ((new == const0_rtx
) ? CONST0_RTX (GET_MODE (x
))
2382 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE
,
2390 new = simplify_binary_operation (code
, GET_MODE (x
),
2391 XEXP (x
, 0), XEXP (x
, 1));
2396 if (op0_mode
== MAX_MACHINE_MODE
)
2399 new = simplify_ternary_operation (code
, GET_MODE (x
), op0_mode
,
2400 XEXP (x
, 0), XEXP (x
, 1),
2406 validate_change (insn
, loc
, new, 1);
2409 /* Show that register modified no longer contain known constants. We are
2410 called from note_stores with parts of the new insn. */
2413 mark_stores (dest
, x
, data
)
2415 rtx x ATTRIBUTE_UNUSED
;
2416 void *data ATTRIBUTE_UNUSED
;
2419 enum machine_mode mode
= VOIDmode
;
2421 /* DEST is always the innermost thing set, except in the case of
2422 SUBREGs of hard registers. */
2424 if (GET_CODE (dest
) == REG
)
2425 regno
= REGNO (dest
), mode
= GET_MODE (dest
);
2426 else if (GET_CODE (dest
) == SUBREG
&& GET_CODE (SUBREG_REG (dest
)) == REG
)
2428 regno
= REGNO (SUBREG_REG (dest
)) + SUBREG_WORD (dest
);
2429 mode
= GET_MODE (SUBREG_REG (dest
));
2434 int last_reg
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
2435 : regno
+ HARD_REGNO_NREGS (regno
, mode
) - 1);
2438 /* Ignore virtual stack var or virtual arg register since those
2439 are handled separately. */
2440 if (regno
!= VIRTUAL_INCOMING_ARGS_REGNUM
2441 && regno
!= VIRTUAL_STACK_VARS_REGNUM
)
2442 for (i
= regno
; i
<= last_reg
; i
++)
2443 if ((size_t) i
< VARRAY_SIZE (global_const_equiv_varray
))
2444 VARRAY_CONST_EQUIV (global_const_equiv_varray
, i
).rtx
= 0;
2448 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2449 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2450 that it points to the node itself, thus indicating that the node is its
2451 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2452 the given node is NULL, recursively descend the decl/block tree which
2453 it is the root of, and for each other ..._DECL or BLOCK node contained
2454 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2455 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2456 values to point to themselves. */
2459 set_block_origin_self (stmt
)
2462 if (BLOCK_ABSTRACT_ORIGIN (stmt
) == NULL_TREE
)
2464 BLOCK_ABSTRACT_ORIGIN (stmt
) = stmt
;
2467 register tree local_decl
;
2469 for (local_decl
= BLOCK_VARS (stmt
);
2470 local_decl
!= NULL_TREE
;
2471 local_decl
= TREE_CHAIN (local_decl
))
2472 set_decl_origin_self (local_decl
); /* Potential recursion. */
2476 register tree subblock
;
2478 for (subblock
= BLOCK_SUBBLOCKS (stmt
);
2479 subblock
!= NULL_TREE
;
2480 subblock
= BLOCK_CHAIN (subblock
))
2481 set_block_origin_self (subblock
); /* Recurse. */
2486 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2487 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2488 node to so that it points to the node itself, thus indicating that the
2489 node represents its own (abstract) origin. Additionally, if the
2490 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2491 the decl/block tree of which the given node is the root of, and for
2492 each other ..._DECL or BLOCK node contained therein whose
2493 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2494 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2495 point to themselves. */
2498 set_decl_origin_self (decl
)
2501 if (DECL_ABSTRACT_ORIGIN (decl
) == NULL_TREE
)
2503 DECL_ABSTRACT_ORIGIN (decl
) = decl
;
2504 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2508 for (arg
= DECL_ARGUMENTS (decl
); arg
; arg
= TREE_CHAIN (arg
))
2509 DECL_ABSTRACT_ORIGIN (arg
) = arg
;
2510 if (DECL_INITIAL (decl
) != NULL_TREE
2511 && DECL_INITIAL (decl
) != error_mark_node
)
2512 set_block_origin_self (DECL_INITIAL (decl
));
2517 /* Given a pointer to some BLOCK node, and a boolean value to set the
2518 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2519 the given block, and for all local decls and all local sub-blocks
2520 (recursively) which are contained therein. */
2523 set_block_abstract_flags (stmt
, setting
)
2525 register int setting
;
2527 register tree local_decl
;
2528 register tree subblock
;
2530 BLOCK_ABSTRACT (stmt
) = setting
;
2532 for (local_decl
= BLOCK_VARS (stmt
);
2533 local_decl
!= NULL_TREE
;
2534 local_decl
= TREE_CHAIN (local_decl
))
2535 set_decl_abstract_flags (local_decl
, setting
);
2537 for (subblock
= BLOCK_SUBBLOCKS (stmt
);
2538 subblock
!= NULL_TREE
;
2539 subblock
= BLOCK_CHAIN (subblock
))
2540 set_block_abstract_flags (subblock
, setting
);
2543 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2544 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2545 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2546 set the abstract flags for all of the parameters, local vars, local
2547 blocks and sub-blocks (recursively) to the same setting. */
2550 set_decl_abstract_flags (decl
, setting
)
2552 register int setting
;
2554 DECL_ABSTRACT (decl
) = setting
;
2555 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2559 for (arg
= DECL_ARGUMENTS (decl
); arg
; arg
= TREE_CHAIN (arg
))
2560 DECL_ABSTRACT (arg
) = setting
;
2561 if (DECL_INITIAL (decl
) != NULL_TREE
2562 && DECL_INITIAL (decl
) != error_mark_node
)
2563 set_block_abstract_flags (DECL_INITIAL (decl
), setting
);
2567 /* Output the assembly language code for the function FNDECL
2568 from its DECL_SAVED_INSNS. Used for inline functions that are output
2569 at end of compilation instead of where they came in the source. */
2572 output_inline_function (fndecl
)
2575 struct function
*curf
= current_function
;
2576 struct function
*f
= DECL_SAVED_INSNS (fndecl
);
2578 current_function
= f
;
2579 current_function_decl
= fndecl
;
2580 clear_emit_caches ();
2582 /* Things we allocate from here on are part of this function, not
2584 temporary_allocation ();
2586 set_new_last_label_num (f
->inl_max_label_num
);
2588 /* We must have already output DWARF debugging information for the
2589 original (abstract) inline function declaration/definition, so
2590 we want to make sure that the debugging information we generate
2591 for this special instance of the inline function refers back to
2592 the information we already generated. To make sure that happens,
2593 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
2594 node (and for all of the local ..._DECL nodes which are its children)
2595 so that they all point to themselves. */
2597 set_decl_origin_self (fndecl
);
2599 /* We're not deferring this any longer. */
2600 DECL_DEFER_OUTPUT (fndecl
) = 0;
2602 /* We can't inline this anymore. */
2604 DECL_INLINE (fndecl
) = 0;
2606 /* Compile this function all the way down to assembly code. */
2607 rest_of_compilation (fndecl
);
2609 current_function
= curf
;
2610 current_function_decl
= curf
? curf
->decl
: 0;