/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93-98, 1999 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-config.h"
#include "insn-flags.h"
#include "integrate.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
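/* For example, CEIL_ROUND (37, 8) is (37 + 7) & ~7 == 40: adding ALIGN-1
   and then masking off the low bits rounds VALUE up to the next multiple
   of ALIGN (ALIGN must be a power of two).  */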
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
static rtvec initialize_for_inline PROTO((tree));
static void adjust_copied_decl_tree PROTO((tree));
static void note_modified_parmregs PROTO((rtx, rtx));
static void integrate_parm_decls PROTO((tree, struct inline_remap *,
					rtvec));
static void integrate_decl_tree PROTO((tree, int,
				       struct inline_remap *));
static void subst_constants PROTO((rtx *, rtx,
				   struct inline_remap *));
static void set_block_origin_self PROTO((tree));
static void set_decl_origin_self PROTO((tree));
static void set_block_abstract_flags PROTO((tree, int));
static void process_reg_param PROTO((struct inline_remap *, rtx,
				     rtx));
void set_decl_abstract_flags PROTO((tree, int));
static tree copy_and_set_decl_abstract_origin PROTO((tree));
/* The maximum number of instructions accepted for inlining a
   function.  Increasing values mean more aggressive inlining.
   This currently affects only functions explicitly marked as
   inline (or methods defined within the class definition for C++).
   The default value of 10000 is arbitrary but high to match the
   previously unlimited gcc capabilities.  */

int inline_max_insns = 10000;
/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the FUNCTION_DECL for the inlined function.  */

static struct function *inlining = 0;
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
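/* For example, the insn-copying loop below materializes a copied label only
   on its first reference, via
   emit_label (get_label_from_map (map, CODE_LABEL_NUMBER (insn)));  */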
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  /* For functions marked as inline increase the maximum size to
     inline_max_insns (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
                   ? (inline_max_insns
                      + 8 * list_length (DECL_ARGUMENTS (fndecl)))
                   : INTEGRATE_THRESHOLD (fndecl);

  register int ninsns = 0;
  register tree parms;
  rtx result;
  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return N_("no prototype, and parameter address used; cannot be inline");
    }
  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
        return N_("function with varying-size parameter cannot be inline");
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
        return N_("function with transparent union parameter cannot be inline");
    }
  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          ninsns++;

      if (ninsns >= max_insns)
        return N_("function too large to be inline");
    }
  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");
  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
         && ! (GET_CODE (insn) == NOTE
               && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
        return N_("function with complex parameters cannot be inline");
    }
  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return N_("inline functions not supported for this return value type");

  return 0;
}
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline_nocopy'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
         otherwise the copy_rtx call below will not unshare the MEM since
         it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
          && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
        p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
        {
          rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
          rtx pimag = gen_imagpart (GET_MODE (preal), p);

          if (GET_CODE (preal) == REG)
            parmdecl_map[REGNO (preal)] = parms;
          if (GET_CODE (pimag) == REG)
            parmdecl_map[REGNO (pimag)] = parms;
        }

      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}
/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}
/* Copy NODE (as with copy_node).  NODE must be a DECL.  Set the
   DECL_ABSTRACT_ORIGIN for the new node accordingly.  */

static tree
copy_and_set_decl_abstract_origin (node)
     tree node;
{
  tree copy = copy_node (node);
  if (DECL_ABSTRACT_ORIGIN (copy) != NULL_TREE)
    /* That means that NODE already had a DECL_ABSTRACT_ORIGIN.  (This
       situation occurs if we inline a function which itself made
       calls to inline functions.)  Since DECL_ABSTRACT_ORIGIN is the
       most distant ancestor, we don't have to do anything here.  */
    ;
  else
    /* The most distant ancestor must be NODE.  */
    DECL_ABSTRACT_ORIGIN (copy) = node;

  return copy;
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */
void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  argvec = initialize_for_inline (fndecl);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */
  in_nonparm_insns = 0;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        /* Record what interesting things happen to our parameters.  */
        note_stores (PATTERN (insn), note_modified_parmregs);
    }

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  current_function->inl_max_label_num = max_label_num ();
  current_function->inl_last_parm_insn = current_function->x_last_parm_insn;
  current_function->original_arg_vector = argvec;
  current_function->original_decl_initial = DECL_INITIAL (fndecl);
  DECL_SAVED_INSNS (fndecl) = current_function;
}
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
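/* In other words, FIXED_BASE_PLUS_P matches an address of the shape
   (plus (reg N) (const_int C)) where N is one of the virtual registers,
   i.e. a constant offset from the virtual frame or argument pointer.  */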
/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */

static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
          && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
          && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
        SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}
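/* For instance, when a read-only parameter's actual argument expanded to
   (const_int 5), the constant is first loaded into a fresh pseudo and a
   const_equiv entry is recorded for that pseudo, so later substitution can
   fold uses of the parameter back down to the constant.  */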
/* Used by duplicate_eh_handlers to map labels for the exception table.  */
static struct inline_remap *eif_eh_map;

static rtx
expand_inline_function_eh_labelmap (label)
     rtx label;
{
  int index = CODE_LABEL_NUMBER (label);
  return get_label_from_map (eif_eh_map, index);
}
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
rtx
expand_inline_function (fndecl, parms, target, ignore, type,
                        structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
               ? NEXT_INSN (inl_f->inl_last_parm_insn)
               : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;
  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));
  /* Check that the parms type match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
        return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
          /* If they are block mode, the types should match exactly.
             They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
             which could happen if the parameter has incomplete type.  */
          || (mode == BLKmode
              && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
                  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
        return (rtx) (HOST_WIDE_INT) -1;
    }
  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
                 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));
  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
         function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
         object into a stack slot and save its address.  If this will go
         into memory, we do nothing now.  Otherwise, we just expand the
         argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          rtx stack_slot
            = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
                                 int_size_in_bytes (TREE_TYPE (arg)), 1);
          MEM_SET_IN_STRUCT_P (stack_slot,
                               AGGREGATE_TYPE_P (TREE_TYPE (arg)));

          store_expr (arg, stack_slot, 0);

          arg_vals[i] = XEXP (stack_slot, 0);
          invisiref = 1;
        }
      else if (GET_CODE (loc) != MEM)
        {
          if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
            /* The mode of LOC and ARG can differ if LOC was a variable
               that had its mode promoted via PROMOTED_MODE.  */
            arg_vals[i] = convert_modes (GET_MODE (loc),
                                         TYPE_MODE (TREE_TYPE (arg)),
                                         expand_expr (arg, NULL_RTX, mode,
                                                      EXPAND_SUM),
                                         TREE_UNSIGNED (TREE_TYPE (formal)));
          else
            arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
        }
      else
        arg_vals[i] = 0;

      if (arg_vals[i] != 0
          && (! TREE_READONLY (formal)
              /* If the parameter is not read-only, copy our argument through
                 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
                 TARGET in any way.  In the inline function, they will likely
                 be two different pseudos, and `safe_from_p' will make all
                 sorts of smart assumptions about their not conflicting.
                 But if ARG_VALS[I] overlaps TARGET, these assumptions are
                 wrong, so put ARG_VALS[I] into a fresh register.
                 Don't worry about invisible references, since their stack
                 temps will never overlap the target.  */
              || (target != 0
                  && ! invisiref
                  && (GET_CODE (arg_vals[i]) == REG
                      || GET_CODE (arg_vals[i]) == SUBREG
                      || GET_CODE (arg_vals[i]) == MEM)
                  && reg_overlap_mentioned_p (arg_vals[i], target))
              /* ??? We must always copy a SUBREG into a REG, because it might
                 get substituted into an address, and not all ports correctly
                 handle SUBREGs in addresses.  */
              || (GET_CODE (arg_vals[i]) == SUBREG)))
        arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
          && POINTER_TYPE_P (TREE_TYPE (formal)))
        mark_reg_pointer (arg_vals[i],
                          (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
                           / BITS_PER_UNIT));
    }
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) alloca (inl_max_uid * sizeof (rtx));
  bzero ((char *) map->insn_map, inl_max_uid * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
                           (max_reg_num ()
                            + (max_regno - FIRST_PSEUDO_REGISTER)
                            + 15 * nargs
                            + 10),
                           "expand_inline_function");
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
  map->regno_pointer_flag = inl_f->emit->regno_pointer_flag;
  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;
  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);
  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
                            NOTE_LINE_NUMBER (parm_insns));
      if (note)
        RTX_INTEGRATED_P (note) = 1;
    }
  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */
  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          /* This must be an object passed by invisible reference (it could
             also be a variable-sized object, but we forbid inlining functions
             with variable-sized arguments).  COPY is the address of the
             actual value (this computation will cause it to be copied).  We
             map that address for the register, noting the actual address as
             an equivalent in case it can be substituted into the insns.  */

          if (GET_CODE (copy) != REG)
            {
              temp = copy_addr_to_reg (copy);
              if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
              copy = temp;
            }
          map->reg_map[REGNO (XEXP (loc, 0))] = copy;
        }
      else if (GET_CODE (loc) == MEM)
        {
          /* This is the case of a parameter that lives in memory.
             It will live in the block we allocate in the called routine's
             frame that simulates the incoming argument area.  Do nothing
             now; we will call store_expr later.  */
        }
      else if (GET_CODE (loc) == REG)
        process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
        {
          rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
          rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

          process_reg_param (map, locreal, copyreal);
          process_reg_param (map, locimag, copyimag);
        }
    }
  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;
  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
          /* Exclude case handled above.  */
          && ! (GET_CODE (XEXP (loc, 0)) == REG
                && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
        {
          rtx note = emit_note (DECL_SOURCE_FILE (formal),
                                DECL_SOURCE_LINE (formal));
          if (note)
            RTX_INTEGRATED_P (note) = 1;

          /* Compute the address in the area we reserved and store the
             value there.  */
          temp = copy_rtx_and_substitute (loc, map);
          subst_constants (&temp, NULL_RTX, map);
          apply_change_group ();
          if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
            temp = change_address (temp, VOIDmode, XEXP (temp, 0));
          store_expr (arg_trees[i], temp, 0);
        }
    }
  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));
  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
        {
          temp = copy_rtx_and_substitute (loc, map);
          subst_constants (&temp, NULL_RTX, map);
          apply_change_group ();
          target = temp;
        }
      else
        {
          if (! structure_value_addr
              || ! aggregate_value_p (DECL_RESULT (fndecl)))
            abort ();

          /* Pass the function the address in which to return a structure
             value.  Note that a constructor can cause someone to call us
             with STRUCTURE_VALUE_ADDR, but the initialization takes place
             via the first parameter, rather than the struct return address.

             We have two cases:  If the address is a simple register
             indirect, use the mapping mechanism to point that register to
             our structure return address.  Otherwise, store the structure
             return value into the place that it will be referenced from.  */

          if (GET_CODE (XEXP (loc, 0)) == REG)
            {
              temp = force_operand (structure_value_addr, NULL_RTX);
              temp = force_reg (Pmode, temp);
              map->reg_map[REGNO (XEXP (loc, 0))] = temp;

              if (CONSTANT_P (structure_value_addr)
                  || GET_CODE (structure_value_addr) == ADDRESSOF
                  || (GET_CODE (structure_value_addr) == PLUS
                      && (XEXP (structure_value_addr, 0)
                          == virtual_stack_vars_rtx)
                      && (GET_CODE (XEXP (structure_value_addr, 1))
                          == CONST_INT)))
                SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
                                      CONST_AGE_PARM);
            }
          else
            {
              temp = copy_rtx_and_substitute (loc, map);
              subst_constants (&temp, NULL_RTX, map);
              apply_change_group ();
              emit_move_insn (temp, structure_value_addr);
            }
        }
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
         value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
         (for the sake of callers that fail to declare it right).
         We have to use the mode of the result's RTL, rather than
         its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
        = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
         substituting a MEM for a REG makes invalid insns.
         Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
          || GET_MODE (target) != departing_mode)
        {
          /* Don't make BLKmode registers.  If this looks like
             a BLKmode object being returned in a register, get
             the mode from that, otherwise abort.  */
          if (departing_mode == BLKmode)
            {
              if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
                {
                  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
                  arriving_mode = departing_mode;
                }
              else
                abort ();
            }

          target = gen_reg_rtx (departing_mode);
        }

      /* If function's value was promoted before return,
         avoid machine mode mismatch when we substitute INLINE_TARGET.
         But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
        {
          /* Avoid creating a paradoxical subreg wider than
             BITS_PER_WORD, since that is illegal.  */
          if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
            {
              if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
                                          GET_MODE_BITSIZE (arriving_mode)))
                /* Maybe could be handled by using convert_move () ?  */
                abort ();
              reg_to_map = gen_reg_rtx (arriving_mode);
              target = gen_lowpart (departing_mode, reg_to_map);
            }
          else
            reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
        }
      else
        reg_to_map = target;

      /* Usually, the result value is the machine's return register.
         Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
        map->inline_target = reg_to_map;
      else
        map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else
    abort ();
  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  pushlevel (0);
  expand_start_bindings (0);

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  bzero ((char *) &map->label_map[min_labelno],
         (max_labelno - min_labelno) * sizeof (rtx));
  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
  /* Now copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
        {
        case INSN:
          pattern = PATTERN (insn);
          set = single_set (insn);
          copy = 0;
          if (GET_CODE (pattern) == USE
              && GET_CODE (XEXP (pattern, 0)) == REG
              && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
            /* The (USE (REG n)) at return from the function should
               be ignored since we are changing (REG n) into
               inline_target.  */
            break;

          /* If the inline fn needs eh context, make sure that
             the current fn has one.  */
          if (GET_CODE (pattern) == USE
              && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
            get_eh_context ();
          /* Ignore setting a function value that we don't want to use.  */
          if (map->inline_target == 0
              && set != 0
              && GET_CODE (SET_DEST (set)) == REG
              && REG_FUNCTION_VALUE_P (SET_DEST (set)))
            {
              if (volatile_refs_p (SET_SRC (set)))
                {
                  rtx new_set;

                  /* If we must not delete the source,
                     load it into a new temporary.  */
                  copy = emit_insn (copy_rtx_and_substitute (pattern, map));

                  new_set = single_set (copy);
                  if (new_set == 0)
                    abort ();

                  SET_DEST (new_set)
                    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
                }
              /* If the source and destination are the same and it
                 has a note on it, keep the insn.  */
              else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
                       && REG_NOTES (insn) != 0)
                copy = emit_insn (copy_rtx_and_substitute (pattern, map));
              else
                break;
            }
          /* If this is setting the static chain rtx, omit it.  */
          else if (static_chain_value != 0
                   && set != 0
                   && GET_CODE (SET_DEST (set)) == REG
                   && rtx_equal_p (SET_DEST (set),
                                   static_chain_incoming_rtx))
            break;

          /* If this is setting the static chain pseudo, set it from
             the value we want to give it instead.  */
          else if (static_chain_value != 0
                   && set != 0
                   && rtx_equal_p (SET_SRC (set),
                                   static_chain_incoming_rtx))
            {
              rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);

              copy = emit_move_insn (newdest, static_chain_value);
              static_chain_value = 0;
            }
          else
            copy = emit_insn (copy_rtx_and_substitute (pattern, map));
          /* REG_NOTES will be copied later.  */
#ifdef HAVE_cc0
          /* If this insn is setting CC0, it may need to look at
             the insn that uses CC0 to see what type of insn it is.
             In that case, the call to recog via validate_change will
             fail.  So don't substitute constants here.  Instead,
             do it when we emit the following insn.

             For example, see the pyr.md file.  That machine has signed and
             unsigned compares.  The compare patterns must check the
             following branch insn to see what kind of compare to
             emit.

             If the previous insn set CC0, substitute constants on it as
             well.  */
          if (sets_cc0_p (PATTERN (copy)) != 0)
            cc0_insn = copy;
          else
            {
              if (cc0_insn)
                try_constants (cc0_insn, map);
              cc0_insn = 0;
              try_constants (copy, map);
            }
#else
          try_constants (copy, map);
#endif
          break;
        case JUMP_INSN:
          if (GET_CODE (PATTERN (insn)) == RETURN
              || (GET_CODE (PATTERN (insn)) == PARALLEL
                  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
            {
              if (local_return_label == 0)
                local_return_label = gen_label_rtx ();
              pattern = gen_jump (local_return_label);
            }
          else
            pattern = copy_rtx_and_substitute (PATTERN (insn), map);

          copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);
          /* If this used to be a conditional jump insn but whose branch
             direction is now known, we must do something special.  */
          if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
            {
#ifdef HAVE_cc0
              /* If the previous insn set cc0 for us, delete it.  */
              if (sets_cc0_p (PREV_INSN (copy)))
                delete_insn (PREV_INSN (copy));
#endif

              /* If this is now a no-op, delete it.  */
              if (map->last_pc_value == pc_rtx)
                {
                  delete_insn (copy);
                  copy = 0;
                }
              else
                /* Otherwise, this is unconditional jump so we must put a
                   BARRIER after it.  We could do some dead code elimination
                   here, but jump.c will do it just as well.  */
                emit_barrier ();
            }
          break;
        case CALL_INSN:
          pattern = copy_rtx_and_substitute (PATTERN (insn), map);
          copy = emit_call_insn (pattern);

          /* Because the USAGE information potentially contains objects other
             than hard registers, we need to copy it.  */
          CALL_INSN_FUNCTION_USAGE (copy)
            = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);

          /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
          break;
        case CODE_LABEL:
          copy = emit_label (get_label_from_map (map,
                                                 CODE_LABEL_NUMBER (insn)));
          LABEL_NAME (copy) = LABEL_NAME (insn);
          map->const_age++;
          break;

        case BARRIER:
          copy = emit_barrier ();
          break;
        case NOTE:
          /* It is important to discard function-end and function-beg notes,
             so we have only one of each in the current function.
             Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
             deleted these in the copy used for continuing compilation,
             not the copy used for inlining).  */
          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
            {
              copy = emit_note (NOTE_SOURCE_FILE (insn),
                                NOTE_LINE_NUMBER (insn));
              if (copy
                  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
                      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
                {
                  rtx label
                    = get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));

                  /* We have to duplicate the handlers for the original.  */
                  if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
                    {
                      /* We need to duplicate the handlers for the EH region
                         and we need to indicate where the label map is.  */
                      eif_eh_map = map;
                      duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy),
                                             CODE_LABEL_NUMBER (label),
                                             expand_inline_function_eh_labelmap);
                    }

                  /* We have to forward these both to match the new exception
                     region.  */
                  NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
                }
            }
          else
            copy = 0;
          break;

        default:
          abort ();
          break;
        }

      if (copy)
        RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
        && map->insn_map[INSN_UID (insn)]
        && REG_NOTES (insn))
      {
        rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);

        /* We must also do subst_constants, in case one of our parameters
           has const type and constant value.  */
        subst_constants (&tem, NULL_RTX, map);
        apply_change_group ();
        REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
      }
  if (local_return_label)
    emit_label (local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */

  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  integrate_decl_tree (inl_f->original_decl_initial, 0, map);
  inline_function_decl = 0;

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  */

  expand_end_bindings (getdecls (), 1, 1);
  block = poplevel (1, 1, 0);
  BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
                                   ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
  poplevel (0, 0, 0);
  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);
  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
                            memory_address (TYPE_MODE (type),
                                            structure_value_addr));
      MEM_SET_IN_STRUCT_P (target, 1);
    }
  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  inlining = inlining_previous;

  return target;
}
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
                                       TREE_TYPE (tail));
      rtx new_decl_rtl
        = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);

      DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
      /* We really should be setting DECL_INCOMING_RTL to something reasonable
         here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* These args would always appear unused, if not for this.  */
      TREE_USED (decl) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (decl) = DECL_ORIGIN (tail);
      pushdecl (decl);
      /* Fully instantiate the address with the equivalent form so that the
         debugging information contains the actual register, instead of the
         virtual register.  Do this by not passing an insn to
         subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
    }
}
/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   LEVEL indicates how far down into the BLOCK tree is the node we are
   currently traversing.  It is always zero except for recursive calls.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static void
integrate_decl_tree (let, level, map)
     tree let;
     int level;
     struct inline_remap *map;
{
  tree t;
  tree node;

  if (level > 0)
    pushlevel (0);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      push_obstacks_nochange ();
      saveable_allocation ();
      d = copy_and_set_decl_abstract_origin (t);
      pop_obstacks ();

      if (DECL_RTL (t) != 0)
        {
          DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
          /* Fully instantiate the address with the equivalent form so that the
             debugging information contains the actual register, instead of the
             virtual register.  Do this by not passing an insn to
             subst_constants.  */
          subst_constants (&DECL_RTL (d), NULL_RTX, map);
          apply_change_group ();
        }
      /* These args would always appear unused, if not for this.  */
      TREE_USED (d) = 1;

      if (DECL_LANG_SPECIFIC (d))
        copy_lang_decl (d);

      pushdecl (d);
    }

  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    integrate_decl_tree (t, level + 1, map);

  if (level > 0)
    {
      node = poplevel (1, 0, 0);
      if (node)
        {
          TREE_USED (node) = TREE_USED (let);
          BLOCK_ABSTRACT_ORIGIN (node) = let;
        }
    }
}
/* Create a new copy of an rtx.
   Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */
rtx
copy_rtx_and_substitute (orig, map)
     register rtx orig;
     struct inline_remap *map;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
         stack-adjustments (*not* because we eliminated the frame pointer!).
         Small hard registers are returned as-is.  Pseudo-registers
         go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER
          || (map->integrating
              && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
        {
          /* Some hard registers are also mapped,
             but others are not translated.  */
          if (map->reg_map[regno] != 0)
            return map->reg_map[regno];
          /* If this is the virtual frame pointer, make space in current
             function's stack frame for the stack frame of the inline function.

             Copy the address of this area into a pseudo.  Map
             virtual_stack_vars_rtx to this pseudo and set up a constant
             equivalence for it to be the address.  This will substitute the
             address into insns where it can be substituted and use the new
             pseudo where it can't.  */
          if (regno == VIRTUAL_STACK_VARS_REGNUM)
            {
              rtx loc, seq;
              int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));

#ifdef FRAME_GROWS_DOWNWARD
              /* In this case, virtual_stack_vars_rtx points to one byte
                 higher than the top of the frame area.  So make sure we
                 allocate a big enough chunk to keep the frame pointer
                 aligned like a real one.  */
              size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
#endif
              start_sequence ();
              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
              /* In this case, virtual_stack_vars_rtx points to one byte
                 higher than the top of the frame area.  So compute the offset
                 to one byte higher than our substitute frame.  */
              loc = plus_constant (loc, size);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
              mark_reg_pointer (map->reg_map[regno],
                                STACK_BOUNDARY / BITS_PER_UNIT);
#endif

              SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

              seq = gen_sequence ();
              end_sequence ();
              emit_insn_after (seq, map->insns_at_start);
              return temp;
            }
          else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
                   || (map->integrating
                       && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
                           == orig)))
            {
              /* Do the same for a block to contain any arguments referenced
                 in memory.  */
              rtx loc, seq;
              int size = DECL_SAVED_INSNS (map->fndecl)->args_size;

              start_sequence ();
              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
              /* When arguments grow downward, the virtual incoming
                 args pointer points to the top of the argument block,
                 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
              loc = plus_constant (loc, size);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
              mark_reg_pointer (map->reg_map[regno],
                                STACK_BOUNDARY / BITS_PER_UNIT);
#endif

              SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

              seq = gen_sequence ();
              end_sequence ();
              emit_insn_after (seq, map->insns_at_start);
              return temp;
            }
          else if (REG_FUNCTION_VALUE_P (orig))
            {
              /* This is a reference to the function return value.  If
                 the function doesn't have a return value, error.  If the
                 mode doesn't agree, and it ain't BLKmode, make a SUBREG.  */
              if (map->inline_target == 0)
                /* Must be unrolling loops or replicating code if we
                   reach here, so return the register unchanged.  */
                return orig;
              else if (GET_MODE (map->inline_target) != BLKmode
                       && mode != GET_MODE (map->inline_target))
                return gen_lowpart (mode, map->inline_target);
              else
                return map->inline_target;
            }
          return orig;
        }
      if (map->reg_map[regno] == NULL)
        {
          map->reg_map[regno] = gen_reg_rtx (mode);
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

          if (map->regno_pointer_flag[regno])
            mark_reg_pointer (map->reg_map[regno],
                              map->regno_pointer_align[regno]);
        }
      return map->reg_map[regno];
    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
        return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
                               SUBREG_WORD (orig) + SUBREG_WORD (copy));
      else if (GET_CODE (copy) == CONCAT)
        {
          rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);

          if (GET_MODE (retval) == GET_MODE (orig))
            return retval;
          else
            return gen_rtx_SUBREG (GET_MODE (orig), retval,
                                   (SUBREG_WORD (orig) %
                                    (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
                                     / (unsigned) UNITS_PER_WORD)));
        }
      else
        return gen_rtx_SUBREG (GET_MODE (orig), copy,
                               SUBREG_WORD (orig));
    case ADDRESSOF:
      copy = gen_rtx_ADDRESSOF (mode,
                                copy_rtx_and_substitute (XEXP (orig, 0), map),
                                0, ADDRESSOF_DECL (orig));
      regno = ADDRESSOF_REGNO (orig);
      if (map->reg_map[regno])
        regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
        {
          temp = XEXP (orig, 0);
          map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

          if (map->regno_pointer_flag[regno])
            mark_reg_pointer (map->reg_map[regno],
                              map->regno_pointer_align[regno]);
          regno = REGNO (map->reg_map[regno]);
        }
      ADDRESSOF_REGNO (copy) = regno;
      return copy;
    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
         to (use foo) if the original insn didn't have a subreg.
         Removing the subreg distorts the VAX movstrhi pattern
         by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
        copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
        = LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
    case LABEL_REF:
      copy = gen_rtx_LABEL_REF (mode,
                                LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
                                : get_label_from_map (map,
                                                      CODE_LABEL_NUMBER (XEXP (orig, 0))));
      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
         it still is, so we must check if it is within the range of
         this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
        = (LABEL_REF_NONLOCAL_P (orig)
           && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
                 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
         inlined call will be referring to our nonlocal goto handler.
         So make sure we create one for this block; we normally would
         not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
        function_call_count++;

      return copy;
    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
         pool must be modified to point to a constant pool entry for the
         remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
        {
          struct function *f = inlining ? inlining : current_function;
          rtx constant = get_pool_constant_for_function (f, orig);
          enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
          if (inlining)
            {
              rtx temp = force_const_mem (const_mode,
                                          copy_rtx_and_substitute (constant, map));

#if 0
              /* Legitimizing the address here is incorrect.

                 Since we had a SYMBOL_REF before, we can assume it is valid
                 to have one in this position in the insn.

                 Also, change_address may create new registers.  These
                 registers will not have valid reg_map entries.  This can
                 cause try_constants() to fail because it assumes that all
                 registers in the rtx have valid reg_map entries, and it may
                 end up replacing one of these new registers with junk.  */

              if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
                temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif

              temp = XEXP (temp, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
              if (GET_MODE (temp) != GET_MODE (orig))
                temp = convert_memory_address (GET_MODE (orig), temp);
#endif

              return temp;
            }
          else if (GET_CODE (constant) == LABEL_REF)
            return XEXP (force_const_mem (GET_MODE (orig),
                                          copy_rtx_and_substitute (constant,
                                                                   map)),
                         0);
        }
      else if (SYMBOL_REF_NEED_ADJUST (orig))
        {
          eif_eh_map = map;
          return rethrow_symbol_map (orig,
                                     expand_inline_function_eh_labelmap);
        }

      return orig;
    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't want
         to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
         duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (orig),
                                   CONST_DOUBLE_HIGH (orig), VOIDmode);
    case CONST:
      /* Make new constant pool entry for a constant
         that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
        abort ();
      break;
    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (map->orig_asm_operands_vector == XVEC (orig, 3))
        {
          copy = rtx_alloc (ASM_OPERANDS);
          copy->volatil = orig->volatil;
          XSTR (copy, 0) = XSTR (orig, 0);
          XSTR (copy, 1) = XSTR (orig, 1);
          XINT (copy, 2) = XINT (orig, 2);
          XVEC (copy, 3) = map->copy_asm_operands_vector;
          XVEC (copy, 4) = map->copy_asm_constraints_vector;
          XSTR (copy, 5) = XSTR (orig, 5);
          XINT (copy, 6) = XINT (orig, 6);
          return copy;
        }
      break;
    case CALL:
      /* This is given special treatment because the first
         operand of a CALL is a (MEM ...) which may get
         forced into a register for cse.  This is undesirable
         if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
        return gen_rtx_CALL (GET_MODE (orig),
                             gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
                                          copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
                                                                   map)),
                             copy_rtx_and_substitute (XEXP (orig, 1), map));
      break;
#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig), map),
				- loc_offset),
			       NULL_RTX));
	}
      break;
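      /* For example (illustrative only): if the inlined function's frame was
	 placed at (plus (reg fp) (const_int -64)) within the caller, the
	 remapped register for virtual-stack-vars carries that rtx as its
	 recorded equivalence, loc_offset picks up the -64, and the SET
	 emitted above biases the stored value by it so that the
	 nonlocal-goto save area stays consistent with the merged frame
	 layout (exact signs follow the code above).  */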
    case MEM:
      if (inlining
	  && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
	{
	  enum machine_mode const_mode
	    = get_pool_mode_for_function (inlining, XEXP (orig, 0));
	  rtx constant
	    = get_pool_constant_for_function (inlining, XEXP (orig, 0));

	  constant = copy_rtx_and_substitute (constant, map);

	  /* If this was an address of a constant pool entry that itself
	     had to be placed in the constant pool, it might not be a
	     valid address.  So the recursive call might have turned it
	     into a register.  In that case, it isn't a constant any
	     more, so return it.  This has the potential of changing a
	     MEM into a REG, but we'll assume that it is safe.  */
	  if (! CONSTANT_P (constant))
	    return constant;

	  return validize_mem (force_const_mem (const_mode, constant));
	}

      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
      MEM_COPY_ATTRIBUTES (copy, orig);
      MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);

      /* If doing function inlining, this MEM might not be const in the
	 function that it is being inlined into, and thus may not be
	 unchanging after function inlining.  Constant pool references are
	 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
	 for them.  */
      if (! map->integrating)
	RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);

      return copy;

    default:
      break;
    }
  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  /* Copy this through the wide int field; that's safest.  */
	  X0WINT (copy, i) = X0WINT (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = XVEC (orig, 3);
      map->copy_asm_operands_vector = XVEC (copy, 3);
      map->copy_asm_constraints_vector = XVEC (copy, 4);
    }

  return copy;
}
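/* For reference: the rtx format codes handled in the copy loop above are
   'e' (subexpression), 'E' (vector of subexpressions), 'u' (reference to
   another insn), 'i' (integer), 'w' (HOST_WIDE_INT), 's' (string),
   't' (tree), and '0' (a slot with no generic meaning, copied bit-for-bit
   through the wide int field).  */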
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;
  subst_constants (&PATTERN (insn), insn, map);

  /* Apply the changes if they are valid; otherwise discard them.  */
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* Following clause is a hack to make the case work where GNU
		 C++ reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
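/* For example (illustrative register numbers only): given a copied insn
   (set (reg 66) (plus (reg 65) (const_int 4))) where the map already
   records (reg 65) == (const_int 8), subst_constants proposes
   (set (reg 66) (const_int 12)); if the changed insn is still valid, the
   recording loop in try_constants above then notes
   (reg 66) == (const_int 12) for use while copying later insns.  */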
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.  */
static void
subst_constants (loc, insn, map)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
{
  rtx x = *loc;
  register int i, j;
  register enum rtx_code code;
  register const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      {
	int regno = REGNO (x);
	struct const_equiv_data *p;

	if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	    && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
	    && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
		p->rtx != 0)
	    && p->age >= map->const_age)
	  validate_change (insn, loc, p->rtx, 1);
	return;
      }
    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (GET_CODE (SUBREG_REG (x)) == REG)
	{
	  rtx inner = SUBREG_REG (x);

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
	     see what is inside, try to form the new SUBREG and see if that is
	     valid.  We handle two cases: extracting a full word in an
	     integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map);

	  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	      && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
	    new = operand_subword (inner, SUBREG_WORD (x), 0,
				   GET_MODE (SUBREG_REG (x)));

	  cancel_changes (num_changes);
	  if (new == 0 && subreg_lowpart_p (x))
	    new = gen_lowpart_common (GET_MODE (x), inner);

	  if (new)
	    validate_change (insn, loc, new, 1);

	  return;
	}
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map);

      /* If a memory address got spoiled, change it back.  */
      if (insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;
    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;

	subst_constants (&SET_SRC (x), insn, map);
	src = SET_SRC (x);

	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map);
		subst_constants (&XEXP (*dest_loc, 2), insn, map);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (GET_CODE (*dest_loc) == MEM)
	  subst_constants (&XEXP (*dest_loc, 0), insn, map);

	/* Check for the case of DEST a SUBREG, both it and the underlying
	   register are less than one word, and the SUBREG has the wider
	   mode.  In that case, we are really setting the underlying register
	   to the source converted to the mode of DEST.  So indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (GET_CODE (src) == REG
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && GET_CODE (XEXP (src, 0)) == REG
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
		|| dest == cc0_rtx
#endif
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a
	       COMPARE it will cause us to save the COMPARE with any
	       constants substituted, which is what we want for later.  */
	    map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
	    map->equiv_sets[map->num_sets++].dest = dest;
	  }
      }
      return;

    default:
      break;
    }
  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case '0':
	break;

      case 'e':
	if (XEXP (x, i))
	  subst_constants (&XEXP (x, i), insn, map);
	break;

      case 'u':
      case 'i':
      case 's':
      case 'w':
      case 't':
	break;

      case 'E':
	if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	  for (j = 0; j < XVECLEN (x, i); j++)
	    subst_constants (&XVECEXP (x, i, j), insn, map);
	break;

      default:
	abort ();
      }
  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }
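  /* E.g. (plus:SI (const_int 4) (reg:SI 70)) becomes
     (plus:SI (reg:SI 70) (const_int 4)), the canonical RTL operand order.  */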
  /* Simplify the expression in case we put in some constants.  */
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      if (op0_mode == MAX_MACHINE_MODE)
	abort ();
      new = simplify_unary_operation (code, GET_MODE (x),
				      XEXP (x, 0), op0_mode);
      break;

    case '<':
      {
	enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

	if (op_mode == VOIDmode)
	  op_mode = GET_MODE (XEXP (x, 1));
	new = simplify_relational_operation (code, op_mode,
					     XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
	if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	  new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
		 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
						 GET_MODE (x)));
#endif
	break;
      }

    case '2':
    case 'c':
      new = simplify_binary_operation (code, GET_MODE (x),
				       XEXP (x, 0), XEXP (x, 1));
      break;

    case 'b':
    case '3':
      if (op0_mode == MAX_MACHINE_MODE)
	abort ();
      new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					XEXP (x, 0), XEXP (x, 1),
					XEXP (x, 2));
      break;
    }

  if (new)
    validate_change (insn, loc, new, 1);
}
/* Show that registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

static void
mark_stores (dest, x)
     rtx dest;
     rtx x ATTRIBUTE_UNUSED;
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
		      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
      register int i;

      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && regno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = regno; i <= last_reg; i++)
	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
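/* Note the HARD_REGNO_NREGS span above: a store of, say, a DImode value
   held in a pair of 32-bit hard registers must invalidate the recorded
   equivalences of both registers, not just the first one.  */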
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	register tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}
/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  register tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}
/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  struct function *curf = current_function;
  struct function *f = DECL_SAVED_INSNS (fndecl);

  current_function = f;
  current_function_decl = fndecl;
  clear_emit_caches ();

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  set_new_last_label_num (f->inl_max_label_num);

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* We can't inline this anymore.  */
  f->inlinable = 0;
  DECL_INLINE (fndecl) = 0;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function = curf;
  current_function_decl = curf ? curf->decl : 0;
}