/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93-97, 1998 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-config.h"
#include "insn-flags.h"
#include "integrate.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
extern struct obstack *function_maybepermanent_obstack;
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
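
/* A worked example of the macro above: CEIL_ROUND (13, 8) computes
   (13 + 7) & ~7 == 16.  The bit-masking trick relies on ALIGN being
   a power of two.  */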
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL)) / 2)) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
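
/* Worked numbers for the threshold: a two-argument function gets
   1 + (3 * 2) / 2 == 4 insns from the first arm and
   8 * (8 + 2) == 80 insns from the second.  */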
static rtx initialize_for_inline	PROTO((tree, int, int, int, int));
static void finish_inline		PROTO((tree, rtx));
static void adjust_copied_decl_tree	PROTO((tree));
static tree copy_decl_list		PROTO((tree));
static tree copy_decl_tree		PROTO((tree));
static void copy_decl_rtls		PROTO((tree));
static void save_constants		PROTO((rtx *));
static void note_modified_parmregs	PROTO((rtx, rtx));
static rtx copy_for_inline		PROTO((rtx));
static void integrate_parm_decls	PROTO((tree, struct inline_remap *, rtvec));
static void integrate_decl_tree	PROTO((tree, int, struct inline_remap *));
static void save_constants_in_decl_trees PROTO ((tree));
static void subst_constants		PROTO((rtx *, rtx, struct inline_remap *));
static void restore_constants		PROTO((rtx *));
static void set_block_origin_self	PROTO((tree));
static void set_decl_origin_self	PROTO((tree));
static void set_block_abstract_flags	PROTO((tree, int));
void set_decl_abstract_flags		PROTO((tree, int));
static tree copy_and_set_decl_abstract_origin PROTO((tree));
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    {
      push_obstacks_nochange ();
      end_temporary_allocation ();
      x = map->label_map[i] = gen_label_rtx ();
      pop_obstacks ();
    }

  return x;
}
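
/* Contrast this lazy scheme with save_for_inline_copying below, which
   fills its entire label_map eagerly with gen_label_rtx; here a label
   is only created the first time its index is requested.  */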
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;
  rtx result;

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return "varargs function cannot be inline";

  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";

  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return "no prototype, and parameter address used; cannot be inline";
    }

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return BLKmode structures in registers.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return "function with varying-size parameter cannot be inline";
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return "function with transparent union parameter cannot be inline";
    }

  if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  ninsns++;

      if (ninsns >= max_insns)
	return "function too large to be inline";
    }

  /* We cannot inline this function if forced_labels is non-zero.  This
     implies that a label in this function was used as an initializer.
     Because labels can not be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */
  if (forced_labels)
    return "function with label addresses used in initializers cannot inline";

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return "function with nonlocal goto cannot be inline";

  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
	 && ! (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    ;
  if (insn && GET_CODE (insn) == NOTE
      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
    return "function with complex parameters cannot be inline";

  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return "inline functions not supported for this return value type";

  return 0;
}
/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
extern int max_parm_reg;
extern rtx *parm_reg_stack_loc;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;

  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
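
  /* Each current_function_* value above is either 0 or 1, so each
     multiplication simply sets or leaves clear the matching mask bit.
     expand_inline_function later recovers a flag with a test such as
     (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE).  */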
  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);
      int copied_incoming = 0;

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      if (GET_CODE (p) == MEM && copy)
	{
	  /* Copy the rtl so that modifications of the addresses
	     later in compilation won't affect this arg_vector.
	     Virtual register instantiation can screw the address
	     of the rtl.  */
	  rtx new = copy_rtx (p);

	  /* Don't leave the old copy anywhere in this decl.  */
	  if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
	      || (GET_CODE (DECL_RTL (parms)) == MEM
		  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
		  && (XEXP (DECL_RTL (parms), 0)
		      == XEXP (DECL_INCOMING_RTL (parms), 0))))
	    DECL_INCOMING_RTL (parms) = new, copied_incoming = 1;

	  DECL_RTL (parms) = new;
	}

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;

      /* Copy DECL_INCOMING_RTL if not done already.  This can
	 happen if DECL_RTL is a reg.  */
      if (copy && ! copied_incoming)
	{
	  p = DECL_INCOMING_RTL (parms);

	  /* If we have (mem (addressof (mem ...))), use the inner MEM since
	     otherwise the copy_rtx call below will not unshare the MEM since
	     it shares ADDRESSOF.  */
	  if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	      && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	    p = XEXP (XEXP (p, 0), 0);

	  if (GET_CODE (p) == MEM)
	    DECL_INCOMING_RTL (parms) = copy_rtx (p);
	}
    }
  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;

  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers),
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the size of the incoming stack area for parameters,
     the number of bytes popped on return,
     the stack slot list,
     the labels that are forced to exist,
     some flags that are used to restore compiler globals,
     the value of current_function_outgoing_args_size,
     the original argument vector,
     the original DECL_INITIAL,
     and pointers to the table of pseudo regs, pointer flags, and alignment. */

  return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
				max_parm_reg, max_reg,
				current_function_args_size,
				current_function_pops_args,
				stack_slot_list, forced_labels, function_flags,
				current_function_outgoing_args_size,
				arg_vector, (rtx) DECL_INITIAL (fndecl),
				(rtvec) regno_reg_rtx, regno_pointer_flag,
				regno_pointer_align,
				(rtvec) parm_reg_stack_loc);
}
/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
}
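
/* The fields stored by finish_inline are the ones expand_inline_function
   reads back from the saved header, e.g. FIRST_FUNCTION_INSN (head) and
   FIRST_PARM_INSN (head), so this header is the hand-off point between
   saving a function and expanding it inline.  */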
/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  Our caller (rest_of_compilation) saves the original
   DECL_INITIAL and DECL_ARGUMENTS; here we copy them.  */

/* ??? The nonlocal_label list should be adjusted also.  However, since
   a function that contains a nested function never gets inlined currently,
   the nonlocal_label list will always be empty, so we don't worry about
   it for now.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;
  char *new, *new1;
  rtx *new_parm_reg_stack_loc;
  rtx *new2;
  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();
  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
	 will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  {
	    save_constants (&PATTERN (insn));
	    if (REG_NOTES (insn))
	      save_constants (&REG_NOTES (insn));
	  }

      /* Also scan all decls, and replace any constant pool references with the
	 actual constant.  */
      save_constants_in_decl_trees (DECL_INITIAL (fndecl));

      /* Clear out the constant pool so that we can recreate it with the
	 copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }
  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  /* Copy the chain insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */
  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;
  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
				     regno_reg_rtx[i], len);

  regno_reg_rtx = reg_map;

  /* Put copies of all the virtual register rtx into the new regno_reg_rtx.  */
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  /* Likewise each label rtx must have a unique rtx as its copy.  */

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  Some examples were > 2Mb in size.  */
  label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Likewise for parm_reg_stack_slot.  */
  new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
  for (i = 0; i < max_parm_reg; i++)
    new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);

  parm_reg_stack_loc = new_parm_reg_stack_loc;
  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero ((char *) insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();
  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
	= copy_for_inline (XEXP (regno_reg_rtx[i], 0));
  /* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
     contained in it.  */
  new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
  bcopy ((char *) parm_reg_stack_loc, (char *) new2,
	 max_parm_reg * sizeof (rtx));
  parm_reg_stack_loc = new2;
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
    if (parm_reg_stack_loc[i])
      parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the original
     subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original rtl
     which we will use for inlining.  The rtl might even contain pseudoregs
     whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
  DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* The fndecl node acts as its own progenitor, so mark it as such.  */
  DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;

  /* Now copy the chain of insns.  Do this twice: the first time copy the
     insn itself and its body; the second time copy the REG_NOTES.  This is
     because a REG_NOTE may have a forward pointer to another insn.  */
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* No need to keep these.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
	    continue;

	  copy = rtx_alloc (NOTE);
	  NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
	    NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	  else
	    {
	      NOTE_SOURCE_FILE (insn) = (char *) copy;
	      NOTE_SOURCE_FILE (copy) = 0;
	    }
	  if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
	      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
	    {
	      int new_region = CODE_LABEL_NUMBER
		(label_map[NOTE_BLOCK_NUMBER (copy)]);

	      /* we have to duplicate the handlers for the original */
	      if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
		{
		  handler_info *ptr, *temp;
		  int nr;

		  nr = new_eh_region_entry (new_region);
		  ptr = get_first_handler (NOTE_BLOCK_NUMBER (copy));
		  for ( ; ptr; ptr = ptr->next)
		    {
		      temp = get_new_handler (
			label_map[CODE_LABEL_NUMBER (ptr->handler_label)],
			ptr->handler_number);
		      add_new_handler (nr, temp);
		    }
		}

	      /* We have to forward these both to match the new exception
		 region.  */
	      NOTE_BLOCK_NUMBER (copy) = new_region;
	    }
	  RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
	  break;

	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  copy = rtx_alloc (GET_CODE (insn));

	  if (GET_CODE (insn) == CALL_INSN)
	    CALL_INSN_FUNCTION_USAGE (copy)
	      = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));

	  PATTERN (copy) = copy_for_inline (PATTERN (insn));
	  INSN_CODE (copy) = -1;
	  LOG_LINKS (copy) = NULL_RTX;
	  RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
	  break;

	case CODE_LABEL:
	  copy = label_map[CODE_LABEL_NUMBER (insn)];
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  break;

	case BARRIER:
	  copy = rtx_alloc (BARRIER);
	  break;

	default:
	  abort ();
	}
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }
  adjust_copied_decl_tree (DECL_INITIAL (fndecl));

  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
	= copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);

  /* Make new versions of the register tables.  */
  new = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
  new1 = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_align, new1, regno_pointer_flag_length);

  regno_pointer_flag = new;
  regno_pointer_align = new1;

  set_new_first_and_last_insn (first_insn, last_insn);
}
/* Copy NODE (as with copy_node).  NODE must be a DECL.  Set the
   DECL_ABSTRACT_ORIGIN for the new node accordingly.  */

static tree
copy_and_set_decl_abstract_origin (node)
     tree node;
{
  tree copy = copy_node (node);
  if (DECL_ABSTRACT_ORIGIN (copy) != NULL_TREE)
    /* That means that NODE already had a DECL_ABSTRACT_ORIGIN.  (This
       situation occurs if we inline a function which itself made
       calls to inline functions.)  Since DECL_ABSTRACT_ORIGIN is the
       most distant ancestor, we don't have to do anything here.  */
    ;
  else
    /* The most distant ancestor must be NODE.  */
    DECL_ABSTRACT_ORIGIN (copy) = node;

  return copy;
}
/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  While copying,
   set DECL_ABSTRACT_ORIGIN appropriately.  */

static tree
copy_decl_list (list)
     tree list;
{
  tree head;
  register tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_and_set_decl_abstract_origin (list);
  next = TREE_CHAIN (list);
  while (next)
    {
      register tree copy;

      copy = copy_and_set_decl_abstract_origin (next);
      TREE_CHAIN (prev) = copy;
      prev = copy;
      next = TREE_CHAIN (next);
    }
  return head;
}
/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_decl_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }

  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  /* If the BLOCK being cloned is already marked as having been instantiated
     from something else, then leave that `origin' marking alone.  Otherwise,
     mark the clone as having originated from the BLOCK we are cloning.  */
  if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
    BLOCK_ABSTRACT_ORIGIN (t) = block;

  return t;
}
/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */
void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
				max_label_num (), max_reg_num (), 0);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  if (current_function_uses_const_pool)
	    {
	      /* Replace any constant pool references with the actual constant.
		 We will put the constant back if we need to write the
		 function out after all.  */
	      save_constants (&PATTERN (insn));
	      if (REG_NOTES (insn))
		save_constants (&REG_NOTES (insn));
	    }

	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs);
	}
    }

  /* Also scan all decls, and replace any constant pool references with the
     actual constant.  */
  save_constants_in_decl_trees (DECL_INITIAL (fndecl));

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  finish_inline (fndecl, head);
}
/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS and CONST rtx which also gives the constant, its
   mode, the mode of the address, and has RTX_INTEGRATED_P set.  */

static void
save_constants (px)
     rtx *px;
{
  rtx x;
  register int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
	 were only looking at the low-order part), surround it with a
	 SUBREG so we can save both modes.  */

      if (GET_MODE (x) != const_mode)
	{
	  new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
	  RTX_INTEGRATED_P (new) = 1;
	}

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx_ADDRESS (GET_MODE (x),
			     gen_rtx_CONST (get_pool_mode (x),
					    get_pool_constant (x)));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }
  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
	{
	  switch (fmt[i])
	    {
	    case 'E':
	      for (j = 0; j < XVECLEN (x, i); j++)
		save_constants (&XVECEXP (x, i, j));
	      break;

	    case 'e':
	      if (XEXP (x, i) == 0)
		continue;
	      if (i == 0)
		{
		  /* Hack tail-recursion here.  */
		  px = &XEXP (x, 0);
		  goto again;
		}
	      save_constants (&XEXP (x, i));
	      break;
	    }
	}
    }
}
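
/* An illustration of the rewrite above, using a hypothetical
   single-precision pool entry:

	(mem:SF (symbol_ref ...))  becomes  (const:SF ...)
	(symbol_ref ...)           becomes  (address (const:SF ...))

   each with RTX_INTEGRATED_P set.  copy_for_inline below uses that
   flag to turn the value back into a constant pool reference with
   force_const_mem.  */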
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register rtx new;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
				   GET_MODE (x));

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
	return validize_mem (force_const_mem (GET_MODE (x),
					      copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
	{
	  new = force_const_mem (GET_MODE (SUBREG_REG (x)),
				 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
	  PUT_MODE (new, GET_MODE (x));
	  return validize_mem (new);
	}
      break;

    case ADDRESS:
      /* If not special for constant pool error.  Else get constant pool
	 address.  */
      if (! RTX_INTEGRATED_P (x))
	abort ();

      new = force_const_mem (GET_MODE (XEXP (x, 0)),
			     copy_for_inline (XEXP (XEXP (x, 0), 0)));
      new = XEXP (new, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (new) != GET_MODE (x))
	new = convert_memory_address (GET_MODE (x), new);
#endif

      return new;
    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
	{
	  x = rtx_alloc (ASM_OPERANDS);
	  x->volatil = orig->volatil;
	  XSTR (x, 0) = XSTR (orig, 0);
	  XSTR (x, 1) = XSTR (orig, 1);
	  XINT (x, 2) = XINT (orig, 2);
	  XVEC (x, 3) = copy_asm_operands_vector;
	  XVEC (x, 4) = copy_asm_constraints_vector;
	  XSTR (x, 5) = XSTR (orig, 5);
	  XINT (x, 6) = XINT (orig, 6);
	  return x;
	}
      break;
    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
	 or is a constant plus one of the special registers.

	 We do not allow sharing of addresses that are either a special
	 register or the sum of a constant and a special register because
	 it is possible for unshare_all_rtl to copy the address, into memory
	 that won't be saved.  Although the MEM can safely be shared, and
	 won't be copied there, the address itself cannot be shared, and may
	 need to be copied.

	 There are also two exceptions with constants: The first is if the
	 constant is a LABEL_REF or the sum of the LABEL_REF
	 and an integer.  This case can happen if we have an inline
	 function that supplies a constant operand to the call of another
	 inline function that uses it in a switch statement.  In this case,
	 we will be replacing the LABEL_REF, so we have to replace this MEM
	 as well.

	 The second case is if we have a (const (plus (address ..) ...)).
	 In that case we need to put back the address of the constant pool
	 entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  && GET_CODE (XEXP (x, 0)) != LABEL_REF
	  && ! (GET_CODE (XEXP (x, 0)) == CONST
		&& (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
		    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			 == LABEL_REF)
			|| (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			    == ADDRESS)))))
	return x;
      break;
    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
	 Otherwise, use the new label as well.  */
      x = gen_rtx_LABEL_REF (GET_MODE (orig),
			     LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
			     : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
	return reg_map[REGNO (x)];
      else
	return x;
    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
	 clear its TREE_READONLY to prevent certain optimizations.  */
      {
	rtx dest = SET_DEST (x);

	while (GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == ZERO_EXTRACT
	       || GET_CODE (dest) == SUBREG)
	  dest = XEXP (dest, 0);

	if (GET_CODE (dest) == REG
	    && REGNO (dest) < max_parm_reg
	    && REGNO (dest) >= FIRST_PSEUDO_REGISTER
	    && parmdecl_map[REGNO (dest)] != 0
	    /* The insn to load an arg pseudo from a stack slot
	       does not count as modifying it.  */
	    && in_nonparm_insns)
	  TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
	 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
	 always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  || (XEXP (x, 1) == frame_pointer_rtx
	      || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		  && XEXP (x, 1) == arg_pointer_rtx)))
	{
	  rtx t = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = t;
	}
      break;
#endif

    default:
      break;
    }
  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy ((char *) orig, (char *) x,
	 (sizeof (*x) - sizeof (x->fld)
	  + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (x, i) = copy_for_inline (XEXP (x, i));
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    {
	      register int j;

	      XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
	      for (j = 0; j < XVECLEN (x, i); j++)
		XVECEXP (x, i, j)
		  = copy_for_inline (XVECEXP (x, i, j));
	    }
	  break;
	}
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT	\
   && GET_CODE (XEXP (X, 0)) == REG				\
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER		\
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
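
/* For example, FIXED_BASE_PLUS_P matches an rtx of the shape
	(plus (reg VIRTUAL_STACK_VARS_REGNUM) (const_int 8)),
   i.e. a constant offset from one of the virtual base registers,
   but not a PLUS whose base is an ordinary pseudo register.  */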
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx loc;
  rtx temp;
  rtx local_return_label = 0;
  struct inline_remap *map;
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
  rtx static_chain_value = 0;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));
  /* Check that the parms type match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }
  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);
  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
				 int_size_in_bytes (TREE_TYPE (arg)), 1);
	  MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));

	  store_expr (arg, stack_slot, 0);

	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
			   / BITS_PER_UNIT));
    }
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = INSN_UID (header);

  map->integrating = 1;

  /* const_equiv_map maps pseudos in our routine to constants, so it needs to
     be large enough for all our pseudos.  This is the number we are currently
     using plus the number in the called routine, plus 15 for each arg,
     five to compute the virtual frame pointer, and five for the return value.
     This should be enough for most cases.  We do not reference entries
     outside the range of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  map->const_equiv_map_size
    = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;

  map->const_equiv_map
    = (rtx *) alloca (map->const_equiv_map_size * sizeof (rtx));
  bzero ((char *) map->const_equiv_map,
	 map->const_equiv_map_size * sizeof (rtx));

  map->const_age_map
    = (unsigned *) alloca (map->const_equiv_map_size * sizeof (unsigned));
  bzero ((char *) map->const_age_map,
	 map->const_equiv_map_size * sizeof (unsigned));
  map->const_age = 0;
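
  /* A sizing illustration with made-up numbers: inlining a three-argument
     function into a caller that currently has 200 pseudos, where the callee
     was saved with a max_regno of 120, reserves
     200 + (120 - FIRST_PSEUDO_REGISTER) + 15 * 3 + 10 entries, far more
     than either function needs on its own.  */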
  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
  map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
    current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }
  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copy;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.
	     It will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     now; we will call store_expr later.  */
	  ;
	}
      else if (GET_CODE (loc) == REG)
	{
	  /* This is the good case where the parameter is in a register.
	     If it is read-only and our argument is a constant, set up the
	     constant equivalence.

	     If LOC is REG_USERVAR_P, the usual case, COPY must also have
	     that flag set if it is a register.

	     Also, don't allow hard registers here; they might not be valid
	     when substituted into insns.  */

	  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
	      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
		  && ! REG_USERVAR_P (copy))
	      || (GET_CODE (copy) == REG
		  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (loc), copy);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
	      if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copy;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copy = temp;
	    }
	  map->reg_map[REGNO (loc)] = copy;
	}
      else if (GET_CODE (loc) == CONCAT)
	{
	  /* This is the good case where the parameter is in a
	     pair of separate pseudos.
	     If it is read-only and our argument is a constant, set up the
	     constant equivalence.

	     If LOC is REG_USERVAR_P, the usual case, COPY must also have
	     that flag set if it is a register.

	     Also, don't allow hard registers here; they might not be valid
	     when substituted into insns.  */
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
	      || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
		  && ! REG_USERVAR_P (copyreal))
	      || (GET_CODE (copyreal) == REG
		  && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
	      if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copyreal;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copyreal = temp;
	    }
	  map->reg_map[REGNO (locreal)] = copyreal;

	  if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
	      || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
		  && ! REG_USERVAR_P (copyimag))
	      || (GET_CODE (copyimag) == REG
		  && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
	      if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copyimag;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copyimag = temp;
	    }
	  map->reg_map[REGNO (locimag)] = copyimag;
	}
      else
	abort ();
    }
  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }
  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));
  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
	abort ();

      /* Pass the function the address in which to return a structure value.
	 Note that a constructor can cause someone to call us with
	 STRUCTURE_VALUE_ADDR, but the initialization takes place
	 via the first parameter, rather than the struct return address.

	 We have two cases:  If the address is a simple register indirect,
	 use the mapping mechanism to point that register to our structure
	 return address.  Otherwise, store the structure return value into
	 the place that it will be referenced from.  */

      if (GET_CODE (XEXP (loc, 0)) == REG)
	{
	  temp = force_reg (Pmode,
			    force_operand (structure_value_addr, NULL_RTX));
	  map->reg_map[REGNO (XEXP (loc, 0))] = temp;
	  if ((CONSTANT_P (structure_value_addr)
	       || GET_CODE (structure_value_addr) == ADDRESSOF
	       || (GET_CODE (structure_value_addr) == PLUS
		   && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
	      && REGNO (temp) < map->const_equiv_map_size)
	    {
	      map->const_equiv_map[REGNO (temp)] = structure_value_addr;
	      map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
	    }
	}
      else
	{
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  emit_move_insn (temp, structure_value_addr);
	}
    }
1786 /* We will ignore the result value, so don't look at its structure.
1787 Note that preparations for an aggregate return value
1788 do need to be made (above) even if it will be ignored. */
1790 else if (GET_CODE (loc
) == REG
)
1792 /* The function returns an object in a register and we use the return
1793 value. Set up our target for remapping. */
1795 /* Machine mode function was declared to return. */
1796 enum machine_mode departing_mode
= TYPE_MODE (type
);
1797 /* (Possibly wider) machine mode it actually computes
1798 (for the sake of callers that fail to declare it right).
1799 We have to use the mode of the result's RTL, rather than
1800 its type, since expand_function_start may have promoted it. */
1801 enum machine_mode arriving_mode
1802 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl
)));
1805 /* Don't use MEMs as direct targets because on some machines
1806 substituting a MEM for a REG makes invalid insns.
1807 Let the combiner substitute the MEM if that is valid. */
1808 if (target
== 0 || GET_CODE (target
) != REG
1809 || GET_MODE (target
) != departing_mode
)
1810 target
= gen_reg_rtx (departing_mode
);
1812 /* If function's value was promoted before return,
1813 avoid machine mode mismatch when we substitute INLINE_TARGET.
1814 But TARGET is what we will return to the caller. */
1815 if (arriving_mode
!= departing_mode
)
1817 /* Avoid creating a paradoxical subreg wider than
1818 BITS_PER_WORD, since that is illegal. */
1819 if (GET_MODE_BITSIZE (arriving_mode
) > BITS_PER_WORD
)
1821 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode
),
1822 GET_MODE_BITSIZE (arriving_mode
)))
1823 /* Maybe could be handled by using convert_move () ? */
1825 reg_to_map
= gen_reg_rtx (arriving_mode
);
1826 target
= gen_lowpart (departing_mode
, reg_to_map
);
1829 reg_to_map
= gen_rtx_SUBREG (arriving_mode
, target
, 0);
1832 reg_to_map
= target
;
1834 /* Usually, the result value is the machine's return register.
1835 Sometimes it may be a pseudo. Handle both cases. */
1836 if (REG_FUNCTION_VALUE_P (loc
))
1837 map
->inline_target
= reg_to_map
;
1839 map
->reg_map
[REGNO (loc
)] = reg_to_map
;
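  /* An illustrative case (not tied to any one target): a function declared
     to return `short' has departing_mode HImode, but on a machine that
     promotes return values its result RTL may arrive in SImode.  The code
     above then remaps the inlined body's return register to
     (subreg:SI (reg:HI target) 0), so the caller still sees an HImode
     value in TARGET.  */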
  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  pushlevel (0);
  expand_start_bindings (0);

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  bzero ((char *) &map->label_map[min_labelno],
	 (max_labelno - min_labelno) * sizeof (rtx));

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_map for mark_stores, called
     via note_stores.  */
  global_const_equiv_map = map->const_equiv_map;
  global_const_equiv_map_size = map->const_equiv_map_size;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
  /* Now copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* If the inline fn needs eh context, make sure that
	     the current fn has one.  */
	  if (GET_CODE (pattern) == USE
	      && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
	    get_eh_context ();

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map));
	      else
		break;
	    }

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }
	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN
	      || (GET_CODE (PATTERN (insn)) == PARALLEL
		  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	    {
	      if (local_return_label == 0)
		local_return_label = gen_label_rtx ();
	      pattern = gen_jump (local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn whose branch
	     direction is now known, we must do something special.  */
	  if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* The previous insn set cc0 for us.  So delete it.  */
	      delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;

	case CALL_INSN:
	  pattern = copy_rtx_and_substitute (PATTERN (insn), map);
	  copy = emit_call_insn (pattern);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */
	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    map->const_equiv_map[i] = 0;
	  break;

	case CODE_LABEL:
	  copy = emit_label (get_label_from_map (map,
						 CODE_LABEL_NUMBER (insn)));
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;

	case NOTE:
	  /* It is important to discard function-end and function-beg notes,
	     so we have only one of each in the current function.
	     Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
	     deleted these in the copy used for continuing compilation,
	     not the copy used for inlining).  */
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
	      if (copy
		  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
		      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
		{
		  rtx label
		    = get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));

		  /* We have to duplicate the handlers for the original.  */
		  if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
		    {
		      handler_info *ptr, *temp;
		      int nr;

		      nr = new_eh_region_entry (CODE_LABEL_NUMBER (label));
		      ptr = get_first_handler (NOTE_BLOCK_NUMBER (copy));
		      for ( ; ptr; ptr = ptr->next)
			{
			  temp = get_new_handler (get_label_from_map (map,
				      CODE_LABEL_NUMBER (ptr->handler_label)),
						  ptr->type_info);
			  add_new_handler (nr, temp);
			}
		    }

		  /* We have to forward these both to match the new exception
		     region.  */
		  NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
		}
	    }
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& map->insn_map[INSN_UID (insn)]
	&& REG_NOTES (insn))
      {
	rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
	/* We must also do subst_constants, in case one of our parameters
	   has const type and constant value.  */
	subst_constants (&tem, NULL_RTX, map);
	apply_change_group ();
	REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
      }
  if (local_return_label)
    emit_label (local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */

  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
  inline_function_decl = 0;

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  */

  expand_end_bindings (getdecls (), 1, 1);
  block = poplevel (1, 1, 0);
  BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
				   ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      MEM_IN_STRUCT_P (target) = 1;
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);

  return target;
}
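/* A caller's-eye sketch of the function above (the details are a rough
   illustration of the call site in expand_call, not a definitive quote):
   after collecting the actual argument list PARMS and a possible
   STRUCTURE_VALUE_ADDR, the caller does roughly

	temp = expand_inline_function (fndecl, parms, target, ignore,
				       TREE_TYPE (TREE_TYPE (fndecl)),
				       structure_value_addr);

   and falls back to emitting a real call if the returned value signals
   that inlining failed.  */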
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
				       TREE_TYPE (tail));
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);

      DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
      /* We really should be setting DECL_INCOMING_RTL to something reasonable
	 here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* These args would always appear unused, if not for this.  */
      TREE_USED (decl) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (decl) = DECL_ORIGIN (tail);
      pushdecl (decl);
      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
    }
}
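/* For instance (a purely illustrative case): when inlining
   `inline int sq (int x)', the loop above builds a VAR_DECL also named `x'
   in the caller, whose DECL_RTL is the remapped home of the parameter, so
   a debugger can still show `x' inside the expanded body.  */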
/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   LEVEL indicates how far down into the BLOCK tree is the node we are
   currently traversing.  It is always zero except for recursive calls.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static void
integrate_decl_tree (let, level, map)
     tree let;
     int level;
     struct inline_remap *map;
{
  tree t;
  tree node;

  if (level > 0)
    pushlevel (0);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      push_obstacks_nochange ();
      saveable_allocation ();
      d = copy_and_set_decl_abstract_origin (t);
      pop_obstacks ();

      if (DECL_RTL (t) != 0)
	{
	  DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
	  /* Fully instantiate the address with the equivalent form so that
	     the debugging information contains the actual register, instead
	     of the virtual register.  Do this by not passing an insn to
	     subst_constants.  */
	  subst_constants (&DECL_RTL (d), NULL_RTX, map);
	  apply_change_group ();
	}
      /* These args would always appear unused, if not for this.  */
      TREE_USED (d) = 1;

      if (DECL_LANG_SPECIFIC (d))
	copy_lang_decl (d);

      pushdecl (d);
    }

  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    integrate_decl_tree (t, level + 1, map);

  if (level > 0)
    {
      node = poplevel (1, 0, 0);
      if (node)
	{
	  TREE_USED (node) = TREE_USED (let);
	  BLOCK_ABSTRACT_ORIGIN (node) = let;
	}
    }
}
/* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
   through save_constants.  */

static void
save_constants_in_decl_trees (let)
     tree let;
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) != 0)
      save_constants (&DECL_RTL (t));

  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    save_constants_in_decl_trees (t);
}
/* Create a new copy of an rtx.
   Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */
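/* For example (the pseudo register numbers here are invented purely for
   illustration): if MAP->reg_map sends (reg:SI 60) to (reg:SI 105), then
   copying

	(plus:SI (reg:SI 60) (const_int 4))

   produces (plus:SI (reg:SI 105) (const_int 4)), allocating the fresh
   (reg:SI 105) on the first reference.  */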
rtx
copy_rtx_and_substitute (orig, map)
     register rtx orig;
     struct inline_remap *map;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size = DECL_FRAME_SIZE (map->fndecl);

#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno],
				STACK_BOUNDARY / BITS_PER_UNIT);
#endif

	      if (REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = loc;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
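	  /* A worked example of the rounding above: if SIZE is 37 and
	     BIGGEST_ALIGNMENT / BITS_PER_UNIT is 8, CEIL_ROUND computes
	     (37 + 7) & ~7 == 40, so the substitute frame stays as aligned
	     as a real frame pointer would be.  */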
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno],
				STACK_BOUNDARY / BITS_PER_UNIT);
#endif

	      if (REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = loc;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      /* This is a reference to the function return value.  If
		 the function doesn't have a return value, error.  If the
		 mode doesn't agree, make a SUBREG.  */
	      if (map->inline_target == 0)
		/* Must be unrolling loops or replicating code if we
		   reach here, so return the register unchanged.  */
		return orig;
	      else if (mode != GET_MODE (map->inline_target))
		return gen_lowpart (mode, map->inline_target);
	      else
		return map->inline_target;
	    }
	  return orig;
	}
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (map->regno_pointer_flag[regno])
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];
    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
	return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
			       SUBREG_WORD (orig) + SUBREG_WORD (copy));
      else if (GET_CODE (copy) == CONCAT)
	return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
      else
	return gen_rtx_SUBREG (GET_MODE (orig), copy,
			       SUBREG_WORD (orig));
    case ADDRESSOF:
      copy = gen_rtx_ADDRESSOF (mode,
				copy_rtx_and_substitute (XEXP (orig, 0), map),
				0);
      SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
      regno = ADDRESSOF_REGNO (orig);
      if (map->reg_map[regno])
	regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
	{
	  temp = XEXP (orig, 0);
	  map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (map->regno_pointer_flag[regno])
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	  regno = REGNO (map->reg_map[regno]);
	}
      ADDRESSOF_REGNO (copy) = regno;
      return copy;
    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movstrhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy = gen_rtx_LABEL_REF (mode,
				LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
				: get_label_from_map (map,
						      CODE_LABEL_NUMBER (XEXP (orig, 0))));
      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
	 inlined call will be referring to our nonlocal goto handler.
	 So make sure we create one for this block; we normally would
	 not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
	function_call_count++;

      return copy;
    case PC:
    case CC0:
    case CONST_INT:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  rtx constant = get_pool_constant (orig);
	  if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem (GET_MODE (orig),
					  copy_rtx_and_substitute (constant,
								   map)),
			 0);
	}

      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't want
	 to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);
    case CONST:
      /* Make new constant pool entry for a constant
	 that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
	{
	  /* If this was an address of a constant pool entry that itself
	     had to be placed in the constant pool, it might not be a
	     valid address.  So the recursive call below might turn it
	     into a register.  In that case, it isn't a constant any
	     more, so return it.  This has the potential of changing a
	     MEM into a REG, but we'll assume that it is safe.  */
	  temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
	  if (! CONSTANT_P (temp))
	    return temp;
	  return validize_mem (force_const_mem (GET_MODE (orig), temp));
	}
      break;
    case ADDRESS:
      /* If from constant pool address, make new constant pool entry and
	 return its address.  */
      if (! RTX_INTEGRATED_P (orig))
	abort ();

      temp
	= force_const_mem (GET_MODE (XEXP (orig, 0)),
			   copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						    map));

#if 0
      /* Legitimizing the address here is incorrect.

	 The only ADDRESS rtx's that can reach here are ones created by
	 save_constants.  Hence the operand of the ADDRESS is always valid
	 in this position of the instruction, since the original rtx without
	 the ADDRESS was valid.

	 The reason we don't legitimize the address here is that on the
	 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
	 This code forces the operand of the address to a register, which
	 fails because we can not take the HIGH part of a register.

	 Also, change_address may create new registers.  These registers
	 will not have valid reg_map entries.  This can cause try_constants()
	 to fail because it assumes that all registers in the rtx have valid
	 reg_map entries, and it may end up replacing one of these new
	 registers with junk.  */

      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif

      temp = XEXP (temp, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (temp) != GET_MODE (orig))
	temp = convert_memory_address (GET_MODE (orig), temp);
#endif

      return temp;
    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (map->orig_asm_operands_vector == XVEC (orig, 3))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  copy->volatil = orig->volatil;
	  XSTR (copy, 0) = XSTR (orig, 0);
	  XSTR (copy, 1) = XSTR (orig, 1);
	  XINT (copy, 2) = XINT (orig, 2);
	  XVEC (copy, 3) = map->copy_asm_operands_vector;
	  XVEC (copy, 4) = map->copy_asm_constraints_vector;
	  XSTR (copy, 5) = XSTR (orig, 5);
	  XINT (copy, 6) = XINT (orig, 6);
	  return copy;
	}
      break;
    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	return gen_rtx_CALL (GET_MODE (orig),
			     gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
					  copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
			     copy_rtx_and_substitute (XEXP (orig, 1), map));
      break;
#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = map->const_equiv_map[REGNO (equiv_reg)];
	  loc_offset
	    = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig), map),
				- loc_offset),
			       NULL_RTX));
	}
      break;
    case MEM:
      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
      MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
      MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
      MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);

      /* If doing function inlining, this MEM might not be const in the
	 function that it is being inlined into, and thus may not be
	 unchanging after function inlining.  Constant pool references are
	 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
	 for them.  */
      if (! map->integrating)
	RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);

      return copy;

    default:
      break;
    }
  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  XEXP (copy, i) = XEXP (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = XVEC (orig, 3);
      map->copy_asm_operands_vector = XVEC (copy, 3);
      map->copy_asm_constraints_vector = XVEC (copy, 4);
    }

  return copy;
}
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;
  subst_constants (&PATTERN (insn), insn, map);

  /* Apply the changes if they are valid; otherwise discard them.  */
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  if (regno < map->const_equiv_map_size
	      && (map->const_equiv_map[regno] == 0
		  /* Following clause is a hack to make case work where GNU C++
		     reassigns a variable to make cse work right.  */
		  || ! rtx_equal_p (map->const_equiv_map[regno],
				    map->equiv_sets[i].equiv)))
	    {
	      map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
	      map->const_age_map[regno] = map->const_age;
	    }
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
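/* For instance (an invented case): if the copied body contains
   (set (reg:SI 105) (const_int 42)) and a later insn uses (reg:SI 105),
   the equivalence recorded above lets subst_constants rewrite that use
   as (const_int 42), provided validate_change accepts the resulting
   insn.  */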
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust address of items previously addressed
   via the virtual stack variable or virtual incoming arguments registers.  */

static void
subst_constants (loc, insn, map)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
{
  rtx x = *loc;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif
    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      {
	int regno = REGNO (x);

	if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	    && regno < map->const_equiv_map_size
	    && map->const_equiv_map[regno] != 0
	    && map->const_age_map[regno] >= map->const_age)
	  validate_change (insn, loc, map->const_equiv_map[regno], 1);
	return;
      }
    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (GET_CODE (SUBREG_REG (x)) == REG)
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
	     see what is inside, try to form the new SUBREG and see if that is
	     valid.  We handle two cases: extracting a full word in an
	     integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map);

	  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	      && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
	    new = operand_subword (inner, SUBREG_WORD (x), 0,
				   GET_MODE (SUBREG_REG (x)));

	  cancel_changes (num_changes);
	  if (new == 0 && subreg_lowpart_p (x))
	    new = gen_lowpart_common (GET_MODE (x), inner);

	  if (new)
	    validate_change (insn, loc, new, 1);

	  return;
	}
      break;
    case MEM:
      subst_constants (&XEXP (x, 0), insn, map);

      /* If a memory address got spoiled, change it back.  */
      if (insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;
    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;

	subst_constants (&SET_SRC (x), insn, map);
	src = SET_SRC (x);

	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map);
		subst_constants (&XEXP (*dest_loc, 2), insn, map);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (GET_CODE (*dest_loc) == MEM)
	  subst_constants (&XEXP (*dest_loc, 0), insn, map);

	/* Check for the case of DEST a SUBREG, both it and the underlying
	   register are less than one word, and the SUBREG has the wider mode.
	   In that case, we are really setting the underlying register to the
	   source converted to the mode of DEST.  So indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (GET_CODE (src) == REG
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && GET_CODE (XEXP (src, 0)) == REG
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
		|| dest == cc0_rtx
#endif
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a COMPARE
	       it will cause us to save the COMPARE with any constants
	       substituted, which is what we want for later.  */
	    map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
	    map->equiv_sets[map->num_sets++].dest = dest;
	  }

	return;
      }
  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  if (XEXP (x, i))
	    subst_constants (&XEXP (x, i), insn, map);
	  break;

	case 'u':
	case 'i':
	case 's':
	case 'w':
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    {
	      int j;
	      for (j = 0; j < XVECLEN (x, i); j++)
		subst_constants (&XVECEXP (x, i, j), insn, map);
	    }
	  break;

	default:
	  abort ();
	}
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      new = simplify_unary_operation (code, GET_MODE (x),
				      XEXP (x, 0), op0_mode);
      break;

    case '<':
      {
	enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
	if (op_mode == VOIDmode)
	  op_mode = GET_MODE (XEXP (x, 1));
	new = simplify_relational_operation (code, op_mode,
					     XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
	if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	  new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
		 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
						 GET_MODE (x)));
#endif
	break;
      }

    case '2':
    case 'c':
      new = simplify_binary_operation (code, GET_MODE (x),
				       XEXP (x, 0), XEXP (x, 1));
      break;

    case 'b':
    case '3':
      new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
      break;
    }

  if (new)
    validate_change (insn, loc, new, 1);
}
/* Show that registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

void
mark_stores (dest, x)
     rtx dest;
     rtx x ATTRIBUTE_UNUSED;
{
  int regno = -1;
  enum machine_mode mode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
		      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
      int i;

      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && regno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = regno; i <= last_reg; i++)
	  if (i < global_const_equiv_map_size)
	    global_const_equiv_map[i] = 0;
    }
}
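/* For example (an invented case): a store to a two-word hard register pair
   starting at regno 4, with HARD_REGNO_NREGS of 2, clears both
   global_const_equiv_map[4] and global_const_equiv_map[5], since either
   word may have changed.  */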
/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
   pointed to by PX, they represent constants in the constant pool.
   Replace these with a new memory reference obtained from force_const_mem.
   Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
   address of a constant pool entry.  Replace them with the address of
   a new constant pool entry obtained from force_const_mem.  */

static void
restore_constants (px)
     rtx *px;
{
  rtx x = *px;
  int i, j;
  char *fmt;

  if (x == 0)
    return;

  if (GET_CODE (x) == CONST_DOUBLE)
    {
      /* We have to make a new CONST_DOUBLE to ensure that we account for
	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      else
	*px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
				  VOIDmode);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
    {
      restore_constants (&XEXP (x, 0));
      *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
    {
      /* This must be (subreg/i:M1 (const/i:M2 ...) 0).  */
      rtx new = XEXP (SUBREG_REG (x), 0);

      restore_constants (&new);
      new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
      PUT_MODE (new, GET_MODE (x));
      *px = validize_mem (new);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
    {
      rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
				       XEXP (XEXP (x, 0), 0)),
		      0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (new) != GET_MODE (x))
	new = convert_memory_address (GET_MODE (x), new);
#endif

      *px = new;
    }
  else
    {
      fmt = GET_RTX_FORMAT (GET_CODE (x));
      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
	{
	  switch (*fmt++)
	    {
	    case 'E':
	      for (j = 0; j < XVECLEN (x, i); j++)
		restore_constants (&XVECEXP (x, i, j));
	      break;

	    case 'e':
	      restore_constants (&XEXP (x, i));
	      break;
	    }
	}
    }
}
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	register tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}
/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  register tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}
/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
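/* A typical use, sketched here for illustration (the debug-output code
   brackets emission of an abstract instance roughly like this; consult the
   debug back ends for the definitive call sites):

	set_decl_abstract_flags (decl, 1);
	... emit debugging information for DECL ...
	set_decl_abstract_flags (decl, 0);

   so the nodes read as abstract only for the duration.  */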
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  rtx head;
  rtx last;

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  head = DECL_SAVED_INSNS (fndecl);
  current_function_decl = fndecl;

  /* This call is only used to initialize global variables.  */
  init_function_start (fndecl, "lossage", 1);

  /* Redo parameter determinations in case the FUNCTION_...
     macros took machine-specific actions that need to be redone.  */
  assign_parms (fndecl, 1);

  /* Set stack frame size.  */
  assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);

  /* The first is a bit of a lie (the array may be larger), but doesn't
     matter too much and it isn't worth saving the actual bound.  */
  reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
  regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
  regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
  regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
  max_parm_reg = MAX_PARMREG (head);
  parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);

  stack_slot_list = STACK_SLOT_LIST (head);
  forced_labels = FORCED_LABELS (head);

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
    current_function_calls_alloca = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
    current_function_calls_setjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
    current_function_calls_longjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
    current_function_returns_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
    current_function_returns_pcc_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    current_function_needs_context = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
    current_function_has_nonlocal_label = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
    current_function_returns_pointer = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
    current_function_uses_const_pool = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
  current_function_pops_args = POPS_ARGS (head);

  /* This is the only thing that the expand_function_end call which used to
     be here actually did, and that call can cause problems.  */
  immediate_size_expand--;

  /* Find last insn and rebuild the constant pool.  */
  for (last = FIRST_PARM_INSN (head);
       NEXT_INSN (last); last = NEXT_INSN (last))
    {
      if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
	{
	  restore_constants (&PATTERN (last));
	  restore_constants (&REG_NOTES (last));
	}
    }

  set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
  set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* We can't inline this anymore.  */
  DECL_INLINE (fndecl) = 0;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function_decl = 0;
}