/* Procedure integration for GNU CC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-config.h"
#include "insn-flags.h"
#include "integrate.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;

extern tree pushdecl ();
extern tree poplevel ();
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
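/* For example, CEIL_ROUND (13, 8) is 16; ALIGN must be a power of two
   for the mask arithmetic above to be correct.  */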
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
#define INTEGRATE_THRESHOLD(DECL) \
  (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
#endif
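/* Under this default, a function taking two arguments may have up to
   8 * (8 + 2) = 80 insns before it is judged too large to inline.  */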
static rtx initialize_for_inline	PROTO((tree, int, int, int, int));
static void finish_inline		PROTO((tree, rtx));
static void adjust_copied_decl_tree	PROTO((tree));
static tree copy_decl_list		PROTO((tree));
static tree copy_decl_tree		PROTO((tree));
static void copy_decl_rtls		PROTO((tree));
static void save_constants		PROTO((rtx *));
static void note_modified_parmregs	PROTO((rtx, rtx));
static rtx copy_for_inline		PROTO((rtx));
static void integrate_parm_decls	PROTO((tree, struct inline_remap *, rtvec));
static void integrate_decl_tree	PROTO((tree, int, struct inline_remap *));
static void subst_constants		PROTO((rtx *, rtx, struct inline_remap *));
static void restore_constants		PROTO((rtx *));
static void set_block_origin_self	PROTO((tree));
static void set_decl_origin_self	PROTO((tree));
static void set_block_abstract_flags	PROTO((tree, int));

void set_decl_abstract_flags		PROTO((tree, int));
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */
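/* A caller such as rest_of_compilation can treat the result as a
   printf-style template, roughly like this (an illustrative sketch,
   not the exact caller code):

	char *lose = function_cannot_inline_p (fndecl);
	if (lose != 0)
	  warning_with_decl (fndecl, lose);
 */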
char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;

  /* No inlines with varargs.  `grokdeclarator' gives a warning
     message about that if `inline' is specified.  This code
     is put in to catch the volunteers.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return "varargs function cannot be inline";

  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";

  /* If it's not even close, don't even look.  */
  if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";

#if 0
  /* Large stacks are OK now that inlined functions can share them.  */
  /* Don't inline functions with large stack usage,
     since they can make other recursive functions burn up stack.  */
  if (!DECL_INLINE (fndecl) && get_frame_size () > 100)
    return "function stack frame for inlining";
#endif
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return "no prototype, and parameter address used; cannot be inline";
    }

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return BLKmode structures in registers.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return "function with varying-size parameter cannot be inline";
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return "function with transparent union parameter cannot be inline";
    }

  if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  ninsns++;

      if (ninsns >= max_insns)
	return "function too large to be inline";
    }

  /* We cannot inline this function if forced_labels is non-zero.  This
     implies that a label in this function was used as an initializer.
     Because labels can not be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */
  if (forced_labels)
    return "function with label addresses used in initializers cannot inline";

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return "function with nonlocal goto cannot be inline";

  return 0;
}
/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
static int max_parm_reg;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;

  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      if (GET_CODE (p) == MEM && copy)
	{
	  /* Copy the rtl so that modifications of the addresses
	     later in compilation won't affect this arg_vector.
	     Virtual register instantiation can screw the address
	     of the rtl.  */
	  rtx new = copy_rtx (p);

	  /* Don't leave the old copy anywhere in this decl.  */
	  if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
	      || (GET_CODE (DECL_RTL (parms)) == MEM
		  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
		  && (XEXP (DECL_RTL (parms), 0)
		      == XEXP (DECL_INCOMING_RTL (parms), 0))))
	    DECL_INCOMING_RTL (parms) = new;
	  DECL_RTL (parms) = new;
	}

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;
  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers),
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the size of the incoming stack area for parameters,
     the number of bytes popped on return,
     the stack slot list,
     the labels that are forced to exist,
     some flags that are used to restore compiler globals,
     the value of current_function_outgoing_args_size,
     the original argument vector,
     and the original DECL_INITIAL.  */

  return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
				max_parm_reg, max_reg,
				current_function_args_size,
				current_function_pops_args,
				stack_slot_list, forced_labels, function_flags,
				current_function_outgoing_args_size,
				arg_vector, (rtx) DECL_INITIAL (fndecl));
}
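/* The fields of this header are read back through the accessor macros in
   integrate.h -- FIRST_FUNCTION_INSN, FIRST_PARM_INSN, FIRST_LABELNO,
   LAST_LABELNO, MAX_REGNUM, FUNCTION_FLAGS, OUTGOING_ARGS_SIZE,
   ORIGINAL_ARG_VECTOR, ORIGINAL_DECL_INITIAL and friends -- as
   expand_inline_function does below.  */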
/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  NEXT_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
}
/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  Our caller (rest_of_compilation) saves the original
   DECL_INITIAL and DECL_ARGUMENTS; here we copy them.  */

/* ??? The nonlocal_label list should be adjusted also.  However, since
   a function that contains a nested function never gets inlined currently,
   the nonlocal_label list will always be empty, so we don't worry about
   it for now.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;

  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
	 will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  {
	    save_constants (&PATTERN (insn));
	    if (REG_NOTES (insn))
	      save_constants (&REG_NOTES (insn));
	  }

      /* Clear out the constant pool so that we can recreate it with the
	 copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }

  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();
  /* Copy the chain insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;
  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
				     regno_reg_rtx[i], len);

  bcopy ((char *) (reg_map + LAST_VIRTUAL_REGISTER + 1),
	 (char *) (regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1),
	 (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));
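  /* (bcopy takes its source first, so the call above installs the fresh
     copies from reg_map back into regno_reg_rtx.)  */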
  /* Likewise each label rtx must have a unique rtx as its copy.  */

  label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  label_map -= min_labelno;

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();
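  /* (The pointer is biased by min_labelno above, so label_map[i] is valid
     exactly for min_labelno <= i < max_labelno.)  */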
  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero ((char *) insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
	= copy_for_inline (XEXP (regno_reg_rtx[i], 0));

  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the original
     subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original rtl
     which we will use for inlining.  The rtl might even contain pseudoregs
     whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
  DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* The fndecl node acts as its own progenitor, so mark it as such.  */
  DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
  /* Now copy the chain of insns.  Do this twice: the first time copy the
     insn itself and its body; the second time copy the REG_NOTES.  This is
     because a REG_NOTE may have a forward pointer to another insn.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* No need to keep these.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
	    continue;

	  copy = rtx_alloc (NOTE);
	  NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
	    NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	  else
	    {
	      NOTE_SOURCE_FILE (insn) = (char *) copy;
	      NOTE_SOURCE_FILE (copy) = 0;
	    }
	  break;

	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  copy = rtx_alloc (GET_CODE (insn));

	  if (GET_CODE (insn) == CALL_INSN)
	    CALL_INSN_FUNCTION_USAGE (copy) =
	      copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));

	  PATTERN (copy) = copy_for_inline (PATTERN (insn));
	  INSN_CODE (copy) = -1;
	  LOG_LINKS (copy) = NULL_RTX;
	  RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
	  break;

	case CODE_LABEL:
	  copy = label_map[CODE_LABEL_NUMBER (insn)];
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  break;

	case BARRIER:
	  copy = rtx_alloc (BARRIER);
	  break;

	default:
	  abort ();
	}
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  adjust_copied_decl_tree (DECL_INITIAL (fndecl));

  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
	= copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);

  set_new_first_and_last_insn (first_insn, last_insn);
}
/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  While copying,
   for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
   set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
   point to the corresponding (abstract) original node.  */

static tree
copy_decl_list (list)
     tree list;
{
  tree head;
  register tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
    DECL_ABSTRACT_ORIGIN (head) = list;
  next = TREE_CHAIN (list);
  while (next)
    {
      register tree copy;

      copy = copy_node (next);
      if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
	DECL_ABSTRACT_ORIGIN (copy) = next;
      TREE_CHAIN (prev) = copy;
      prev = copy;
      next = TREE_CHAIN (next);
    }
  return head;
}
/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_decl_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }

  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  /* If the BLOCK being cloned is already marked as having been instantiated
     from something else, then leave that `origin' marking alone.  Elsewise,
     mark the clone as having originated from the BLOCK we are cloning.  */
  if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
    BLOCK_ABSTRACT_ORIGIN (t) = block;
  return t;
}

/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   ??? Actually, we do not verify that FNDECL is not inline expanded
   by other functions which must also be written down at the end
   of compilation.  We could set flag_no_inline to nonzero when
   the time comes to write down such functions.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
				max_label_num (), max_reg_num (), 0);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  if (current_function_uses_const_pool)
	    {
	      /* Replace any constant pool references with the actual constant.
		 We will put the constant back if we need to write the
		 function out after all.  */
	      save_constants (&PATTERN (insn));
	      if (REG_NOTES (insn))
		save_constants (&REG_NOTES (insn));
	    }

	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs);
	}
    }

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}
/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS rtx which also gives the constant, mode, and has
   RTX_INTEGRATED_P set.  */
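/* Schematically, (mem:SI (symbol_ref [pool])) becomes
   (const:SI <pool constant>) and a bare pool symbol_ref becomes
   (address:<pool mode> <pool constant>), each with RTX_INTEGRATED_P set --
   an illustration of the rewrite just described.  */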
static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    ;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
	 were only looking at the low-order part), surround it with a
	 SUBREG so we can save both modes.  */
      if (GET_MODE (x) != const_mode)
	{
	  new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
	  RTX_INTEGRATED_P (new) = 1;
	}

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }
  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
	{
	  switch (fmt[i])
	    {
	    case 'E':
	      for (j = 0; j < XVECLEN (x, i); j++)
		save_constants (&XVECEXP (x, i, j));
	      break;

	    case 'e':
	      if (XEXP (x, i) == 0)
		continue;
	      if (i == 0)
		{
		  /* Hack tail-recursion here.  */
		  px = &XEXP (x, 0);
		  goto again;
		}
	      save_constants (&XEXP (x, i));
	      break;
	    }
	}
    }
}
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
				   GET_MODE (x));

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
	return validize_mem (force_const_mem (GET_MODE (x),
					      copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
	{
	  rtx new
	    = force_const_mem (GET_MODE (SUBREG_REG (x)),
			       copy_for_inline (XEXP (SUBREG_REG (x), 0)));

	  PUT_MODE (new, GET_MODE (x));
	  return validize_mem (new);
	}
      break;

    case ADDRESS:
      /* If not special for constant pool, error.  Else get constant pool
	 address.  */
      if (! RTX_INTEGRATED_P (x))
	abort ();

      return XEXP (force_const_mem (GET_MODE (x),
				    copy_for_inline (XEXP (x, 0))), 0);
    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
	{
	  x = rtx_alloc (ASM_OPERANDS);
	  x->volatil = orig->volatil;
	  XSTR (x, 0) = XSTR (orig, 0);
	  XSTR (x, 1) = XSTR (orig, 1);
	  XINT (x, 2) = XINT (orig, 2);
	  XVEC (x, 3) = copy_asm_operands_vector;
	  XVEC (x, 4) = copy_asm_constraints_vector;
	  XSTR (x, 5) = XSTR (orig, 5);
	  XINT (x, 6) = XINT (orig, 6);
	  return x;
	}
      break;
    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
	 or is a constant plus one of the special registers.

	 We do not allow sharing of addresses that are either a special
	 register or the sum of a constant and a special register because
	 it is possible for unshare_all_rtl to copy the address, into memory
	 that won't be saved.  Although the MEM can safely be shared, and
	 won't be copied there, the address itself cannot be shared, and may
	 need to be copied.

	 There are also two exceptions with constants: The first is if the
	 constant is a LABEL_REF or the sum of the LABEL_REF
	 and an integer.  This case can happen if we have an inline
	 function that supplies a constant operand to the call of another
	 inline function that uses it in a switch statement.  In this case,
	 we will be replacing the LABEL_REF, so we have to replace this MEM
	 as well.

	 The second case is if we have a (const (plus (address ..) ...)).
	 In that case we need to put back the address of the constant pool
	 entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  && GET_CODE (XEXP (x, 0)) != LABEL_REF
	  && ! (GET_CODE (XEXP (x, 0)) == CONST
		&& (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
		    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			 == LABEL_REF)
			|| (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			    == ADDRESS)))))
	return x;
      break;

    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
	 Otherwise, use the new label as well.  */
      x = gen_rtx (LABEL_REF, GET_MODE (orig),
		   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
		   : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
	return reg_map[REGNO (x)];
      else
	return x;
    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
	 clear its TREE_READONLY to prevent certain optimizations.  */
      {
	rtx dest = SET_DEST (x);

	while (GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == ZERO_EXTRACT
	       || GET_CODE (dest) == SUBREG)
	  dest = XEXP (dest, 0);

	if (GET_CODE (dest) == REG
	    && REGNO (dest) < max_parm_reg
	    && REGNO (dest) >= FIRST_PSEUDO_REGISTER
	    && parmdecl_map[REGNO (dest)] != 0
	    /* The insn to load an arg pseudo from a stack slot
	       does not count as modifying it.  */
	    && in_nonparm_insns)
	  TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
	 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
	 always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  || (XEXP (x, 1) == frame_pointer_rtx
	      || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		  && XEXP (x, 1) == arg_pointer_rtx)))
	{
	  rtx t = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = t;
	}
      break;
#endif
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy ((char *) orig, (char *) x,
	 (sizeof (*x) - sizeof (x->fld)
	  + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (x, i) = copy_for_inline (XEXP (x, i));
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    {
	      register int j;

	      XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
	      for (j = 0; j < XVECLEN (x, i); j++)
		XVECEXP (x, i, j)
		  = copy_for_inline (XVECEXP (x, i, j));
	    }
	  break;
	}
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;
#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
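/* That is, FIXED_BASE_PLUS_P matches (plus (reg <virtual base register>)
   (const_int N)) -- a fixed offset from one of the virtual frame/argument
   pointers, which remains meaningful across the substitution.  */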
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
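/* So a caller (expand_call, for instance) can proceed roughly like this --
   an illustrative sketch, not the exact caller code:

	temp = expand_inline_function (fndecl, actparms, target,
				       ignore, type, structure_value_addr);
	if (temp == (rtx) (HOST_WIDE_INT) -1)
	  ...emit an ordinary call instead...
 */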
rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map;
  rtx cc0_insn = 0;
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
  rtx static_chain_value = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));
  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal)))
	return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Make a fresh binding contour that we can easily remove.  */
  expand_start_bindings (0);
  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
				 int_size_in_bytes (TREE_TYPE (arg)), 1);
	  MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));

	  store_expr (arg, stack_slot, 0);

	  arg_vals[i] = XEXP (stack_slot, 0);
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;
[i
] != 0
1285 && (! TREE_READONLY (formal
)
1286 /* If the parameter is not read-only, copy our argument through
1287 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1288 TARGET in any way. In the inline function, they will likely
1289 be two different pseudos, and `safe_from_p' will make all
1290 sorts of smart assumptions about their not conflicting.
1291 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1292 wrong, so put ARG_VALS[I] into a fresh register.
1293 Don't worry about invisible references, since their stack
1294 temps will never overlap the target. */
1297 && (GET_CODE (arg_vals
[i
]) == REG
1298 || GET_CODE (arg_vals
[i
]) == SUBREG
1299 || GET_CODE (arg_vals
[i
]) == MEM
)
1300 && reg_overlap_mentioned_p (arg_vals
[i
], target
))
1301 /* ??? We must always copy a SUBREG into a REG, because it might
1302 get substituted into an address, and not all ports correctly
1303 handle SUBREGs in addresses. */
1304 || (GET_CODE (arg_vals
[i
]) == SUBREG
)))
1305 arg_vals
[i
] = copy_to_mode_reg (GET_MODE (loc
), arg_vals
[i
]);
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  map->label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  map->label_map -= min_labelno;

  map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = INSN_UID (header);

  map->integrating = 1;
  /* const_equiv_map maps pseudos in our routine to constants, so it needs to
     be large enough for all our pseudos.  This is the number we are currently
     using plus the number in the called routine, plus 15 for each arg,
     five to compute the virtual frame pointer, and five for the return value.
     This should be enough for most cases.  We do not reference entries
     outside the range of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  map->const_equiv_map_size
    = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;

  map->const_equiv_map
    = (rtx *) alloca (map->const_equiv_map_size * sizeof (rtx));
  bzero ((char *) map->const_equiv_map,
	 map->const_equiv_map_size * sizeof (rtx));

  map->const_age_map
    = (unsigned *) alloca (map->const_equiv_map_size * sizeof (unsigned));
  bzero ((char *) map->const_age_map,
	 map->const_equiv_map_size * sizeof (unsigned));
  map->const_age = 0;
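  /* For a two-argument callee, say, the tables get
     max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 2*15 + 10
     entries -- the "15 per arg plus 10" slack described above.  */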
  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  */
  map->insns_at_start = get_last_insn ();

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
    current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */
  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copy;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.
	     It will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     now; we will call store_expr later.  */
	  ;
	}
      else if (GET_CODE (loc) == REG)
	{
	  /* This is the good case where the parameter is in a register.
	     If it is read-only and our argument is a constant, set up the
	     constant equivalence.

	     If LOC is REG_USERVAR_P, the usual case, COPY must also have
	     that flag set if it is a register.

	     Also, don't allow hard registers here; they might not be valid
	     when substituted into insns.  */

	  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
	      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
		  && ! REG_USERVAR_P (copy))
	      || (GET_CODE (copy) == REG
		  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (loc), copy);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
	      if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copy;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copy = temp;
	    }
	  map->reg_map[REGNO (loc)] = copy;
	}
      else if (GET_CODE (loc) == CONCAT)
	{
	  /* This is the good case where the parameter is in a
	     pair of separate pseudos.
	     If it is read-only and our argument is a constant, set up the
	     constant equivalence.

	     If LOC is REG_USERVAR_P, the usual case, COPY must also have
	     that flag set if it is a register.

	     Also, don't allow hard registers here; they might not be valid
	     when substituted into insns.  */
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
	      || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
		  && ! REG_USERVAR_P (copyreal))
	      || (GET_CODE (copyreal) == REG
		  && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
	      if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copyreal;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copyreal = temp;
	    }
	  map->reg_map[REGNO (locreal)] = copyreal;

	  if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
	      || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
		  && ! REG_USERVAR_P (copyimag))
	      || (GET_CODE (copyimag) == REG
		  && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
	      if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copyimag;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copyimag = temp;
	    }
	  map->reg_map[REGNO (locimag)] = copyimag;
	}
      else
	abort ();
    }
  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }
  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));
  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
	abort ();

      /* Pass the function the address in which to return a structure value.
	 Note that a constructor can cause someone to call us with
	 STRUCTURE_VALUE_ADDR, but the initialization takes place
	 via the first parameter, rather than the struct return address.

	 We have two cases:  If the address is a simple register indirect,
	 use the mapping mechanism to point that register to our structure
	 return address.  Otherwise, store the structure return value into
	 the place that it will be referenced from.  */

      if (GET_CODE (XEXP (loc, 0)) == REG)
	{
	  temp = force_reg (Pmode, structure_value_addr);
	  map->reg_map[REGNO (XEXP (loc, 0))] = temp;
	  if ((CONSTANT_P (structure_value_addr)
	       || (GET_CODE (structure_value_addr) == PLUS
		   && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
	      && REGNO (temp) < map->const_equiv_map_size)
	    {
	      map->const_equiv_map[REGNO (temp)] = structure_value_addr;
	      map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
	    }
	}
      else
	{
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  emit_move_insn (temp, structure_value_addr);
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).  */
      enum machine_mode arriving_mode
	= TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	target = gen_reg_rtx (departing_mode);

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  /* Make new label equivalences for the labels in the called function.  */
  for (i = min_labelno; i < max_labelno; i++)
    map->label_map[i] = gen_label_rtx ();

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_map for mark_stores, called
     via note_stores.  */
  global_const_equiv_map = map->const_equiv_map;
  global_const_equiv_map_size = map->const_equiv_map_size;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
  /* Now copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      else
		break;
	    }
	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }
	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;
	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    {
	      if (local_return_label == 0)
		local_return_label = gen_label_rtx ();
	      pattern = gen_jump (local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn but whose branch
	     direction is now known, we must do something special.  */
	  if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* The previous insn set cc0 for us.  So delete it.  */
	      delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;
1801 pattern
= copy_rtx_and_substitute (PATTERN (insn
), map
);
1802 copy
= emit_call_insn (pattern
);
1804 /* Because the USAGE information potentially contains objects other
1805 than hard registers, we need to copy it. */
1806 CALL_INSN_FUNCTION_USAGE (copy
) =
1807 copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn
), map
);
1811 try_constants (cc0_insn
, map
);
1814 try_constants (copy
, map
);
1816 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1817 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1818 map
->const_equiv_map
[i
] = 0;
1822 copy
= emit_label (map
->label_map
[CODE_LABEL_NUMBER (insn
)]);
1823 LABEL_NAME (copy
) = LABEL_NAME (insn
);
1828 copy
= emit_barrier ();
1832 /* It is important to discard function-end and function-beg notes,
1833 so we have only one of each in the current function.
1834 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1835 deleted these in the copy used for continuing compilation,
1836 not the copy used for inlining). */
1837 if (NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
1838 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_BEG
1839 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_DELETED
)
1840 copy
= emit_note (NOTE_SOURCE_FILE (insn
), NOTE_LINE_NUMBER (insn
));
1851 RTX_INTEGRATED_P (copy
) = 1;
1853 map
->insn_map
[INSN_UID (insn
)] = copy
;
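  /* Illustrative note, not part of the original source: the insn_map filled
     in above is what makes the REG_NOTES pass below possible.  A note may
     refer to another insn (format code 'u'), and copy_rtx_and_substitute
     remaps such references with

	XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];

     so every insn must have its copy recorded before any note pointing at
     it is copied.  */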
  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& map->insn_map[INSN_UID (insn)]
	&& REG_NOTES (insn))
      {
	rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);

	/* We must also do subst_constants, in case one of our parameters
	   has const type and constant value.  */
	subst_constants (&tem, NULL_RTX, map);
	apply_change_group ();
	REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
      }

  if (local_return_label)
    emit_label (local_return_label);
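  /* Illustrative note, not part of the original source: each RETURN pattern
     in the body was rewritten above via gen_jump, roughly

	(jump_insn (return))  =>  (jump_insn (set (pc) (label_ref N)))

     with N being local_return_label, so emitting the label here makes every
     copied "return" resume at the code following the inline call.  */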
  /* Restore the stack pointer if we saved it above.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */

  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
  inline_function_decl = 0;

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  */

  expand_end_bindings (getdecls (), 1, 1);
  block = poplevel (1, 1, 0);
  BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
				   ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));

  emit_line_note (input_filename, lineno);

  if (structure_value_addr)
    {
      target = gen_rtx (MEM, TYPE_MODE (type),
			memory_address (TYPE_MODE (type), structure_value_addr));
      MEM_IN_STRUCT_P (target) = 1;
    }

  return target;
}
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
				       TREE_TYPE (tail));
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);

      DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
      /* We really should be setting DECL_INCOMING_RTL to something reasonable
	 here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* These args would always appear unused, if not for this.  */
      TREE_USED (decl) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (decl) = tail;
      pushdecl (decl);
      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
    }
}
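/* Illustrative sketch, not part of the original source: for an inlined
   `int f (int x)', the loop above builds approximately

	decl = build_decl (VAR_DECL, DECL_NAME (parm), TREE_TYPE (parm));

   for the PARM_DECL of `x' (here `parm' is a hypothetical stand-in for
   TAIL), and then gives that VAR_DECL the remapped rtl home of the actual
   argument via DECL_RTL.  */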
/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   LEVEL indicates how far down into the BLOCK tree the node we are
   currently traversing is.  It is always zero except for recursive calls.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static void
integrate_decl_tree (let, level, map)
     tree let;
     int level;
     struct inline_remap *map;
{
  tree t, node;

  if (level > 0)
    pushlevel (0);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      push_obstacks_nochange ();
      saveable_allocation ();
      d = copy_node (t);
      pop_obstacks ();

      if (DECL_RTL (t) != 0)
	{
	  DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
	  /* Fully instantiate the address with the equivalent form so that the
	     debugging information contains the actual register, instead of the
	     virtual register.  Do this by not passing an insn to
	     subst_constants.  */
	  subst_constants (&DECL_RTL (d), NULL_RTX, map);
	  apply_change_group ();
	}
      /* These args would always appear unused, if not for this.  */
      TREE_USED (d) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (d) = t;

      if (DECL_LANG_SPECIFIC (d))
	copy_lang_decl (d);

      pushdecl (d);
    }

  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    integrate_decl_tree (t, level + 1, map);

  if (level > 0)
    {
      node = poplevel (1, 0, 0);
      if (node)
	{
	  TREE_USED (node) = TREE_USED (let);
	  BLOCK_ABSTRACT_ORIGIN (node) = let;
	}
    }
}
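/* Illustrative sketch, not part of the original source: for an inline
   function whose body has a nested scope, the recursion above mirrors the
   BLOCK tree, e.g.

	BLOCK (outermost scope of f)
	  `-- BLOCK (nested { ... } scope)

   and each copied BLOCK gets BLOCK_ABSTRACT_ORIGIN pointed at the original
   node, which is how the debug-info writers later tie the inlined instance
   back to its abstract origin.  */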
/* Create a new copy of an rtx.
   Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (orig, map)
     register rtx orig;
     struct inline_remap *map;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size = DECL_FRAME_SIZE (map->fndecl);
	      int rounded;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.
		 Keep the fake frame pointer aligned like a real one.  */
	      rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
	      loc = plus_constant (loc, rounded);
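	      /* Worked example, not part of the original source: assuming
		 BIGGEST_ALIGNMENT is 64 and BITS_PER_UNIT is 8, the
		 alignment is 8 bytes, so a frame size of 37 gives
		 CEIL_ROUND (37, 8) == (37 + 7) & ~7 == 40, keeping the fake
		 frame pointer 8-byte aligned like a real one.  */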
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

	      if (REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = loc;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

	      if (REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = loc;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      /* This is a reference to the function return value.  If
		 the function doesn't have a return value, error.  If the
		 mode doesn't agree, make a SUBREG.  */
	      if (map->inline_target == 0)
		/* Must be unrolling loops or replicating code if we
		   reach here, so return the register unchanged.  */
		return orig;
	      else if (mode != GET_MODE (map->inline_target))
		return gen_lowpart (mode, map->inline_target);
	      else
		return map->inline_target;
	    }
	  return orig;
	}
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
	return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
			SUBREG_WORD (orig) + SUBREG_WORD (copy));
      else if (GET_CODE (copy) == CONCAT)
	return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
      else
	return gen_rtx (SUBREG, GET_MODE (orig), copy,
			SUBREG_WORD (orig));
    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movstrhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx (code, VOIDmode, copy);

    case CODE_LABEL:
      LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
	= LABEL_PRESERVE_P (orig);
      return map->label_map[CODE_LABEL_NUMBER (orig)];

    case LABEL_REF:
      copy = gen_rtx (LABEL_REF, mode,
		      LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
		      : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
	 inlined call will be referring to our nonlocal goto handler.
	 So make sure we create one for this block; we normally would
	 not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
	function_call_count++;

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  rtx constant = get_pool_constant (orig);
	  if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem (Pmode,
					  copy_rtx_and_substitute (constant,
								   map)),
			 0);
	}

      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't want
	 to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      /* Make new constant pool entry for a constant
	 that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
	{
	  /* If this was an address of a constant pool entry that itself
	     had to be placed in the constant pool, it might not be a
	     valid address.  So the recursive call below might turn it
	     into a register.  In that case, it isn't a constant any
	     more, so return it.  This has the potential of changing a
	     MEM into a REG, but we'll assume that it is safe.  */
	  temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
	  if (! CONSTANT_P (temp))
	    return temp;
	  return validize_mem (force_const_mem (GET_MODE (orig), temp));
	}
      break;

    case ADDRESS:
      /* If from constant pool address, make new constant pool entry and
	 return its address.  */
      if (! RTX_INTEGRATED_P (orig))
	abort ();

      temp = force_const_mem (GET_MODE (orig),
			      copy_rtx_and_substitute (XEXP (orig, 0), map));

#if 0
      /* Legitimizing the address here is incorrect.

	 The only ADDRESS rtx's that can reach here are ones created by
	 save_constants.  Hence the operand of the ADDRESS is always valid
	 in this position of the instruction, since the original rtx without
	 the ADDRESS was valid.

	 The reason we don't legitimize the address here is that on the
	 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
	 This code forces the operand of the address to a register, which
	 fails because we can not take the HIGH part of a register.

	 Also, change_address may create new registers.  These registers
	 will not have valid reg_map entries.  This can cause try_constants()
	 to fail because it assumes that all registers in the rtx have valid
	 reg_map entries, and it may end up replacing one of these new
	 registers with junk.  */

      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif

      return XEXP (temp, 0);

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (map->orig_asm_operands_vector == XVEC (orig, 3))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  copy->volatil = orig->volatil;
	  XSTR (copy, 0) = XSTR (orig, 0);
	  XSTR (copy, 1) = XSTR (orig, 1);
	  XINT (copy, 2) = XINT (orig, 2);
	  XVEC (copy, 3) = map->copy_asm_operands_vector;
	  XVEC (copy, 4) = map->copy_asm_constraints_vector;
	  XSTR (copy, 5) = XSTR (orig, 5);
	  XINT (copy, 6) = XINT (orig, 6);
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	return gen_rtx (CALL, GET_MODE (orig),
			gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
				 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
							  map)),
			copy_rtx_and_substitute (XEXP (orig, 1), map));
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.

	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	return gen_rtx (SET, VOIDmode, SET_DEST (orig),
			copy_rtx_and_substitute (SET_SRC (orig), map));
      break;

    case MEM:
      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
      MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
      MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);

      /* If doing function inlining, this MEM might not be const in the
	 function that it is being inlined into, and thus may not be
	 unchanging after function inlining.  Constant pool references are
	 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
	 for them.  */
      if (! map->integrating)
	RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);

      return copy;
    }
  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = XVEC (orig, 3);
      map->copy_asm_operands_vector = XVEC (copy, 3);
      map->copy_asm_constraints_vector = XVEC (copy, 4);
    }

  return copy;
}
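/* Illustrative sketch, not part of the original source: for an asm with
   two outputs, e.g.

	asm ("..." : "=r" (a), "=r" (b) : "r" (c));

   the insn contains two ASM_OPERANDS rtx's sharing one operand vector.
   The first one copied takes the generic path above and records its new
   vectors in map->copy_asm_operands_vector; the second matches
   map->orig_asm_operands_vector in the ASM_OPERANDS case earlier and
   reuses those copies, so the sharing survives in the copy.  */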
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;
  subst_constants (&PATTERN (insn), insn, map);

  /* Apply the changes if they are valid; otherwise discard them.  */
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  if (regno < map->const_equiv_map_size
	      && (map->const_equiv_map[regno] == 0
		  /* The following clause is a hack to make the case work
		     where GNU C++ reassigns a variable to make cse work
		     right.  */
		  || ! rtx_equal_p (map->const_equiv_map[regno],
				    map->equiv_sets[i].equiv)))
	    {
	      map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
	      map->const_age_map[regno] = map->const_age;
	    }
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust the addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.  */

static void
subst_constants (loc, insn, map)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
{
  rtx x = *loc;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      {
	int regno = REGNO (x);

	if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	    && regno < map->const_equiv_map_size
	    && map->const_equiv_map[regno] != 0
	    && map->const_age_map[regno] >= map->const_age)
	  validate_change (insn, loc, map->const_equiv_map[regno], 1);
	return;
      }

    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (GET_CODE (SUBREG_REG (x)) == REG)
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
	     see what is inside, try to form the new SUBREG and see if that is
	     valid.  We handle two cases: extracting a full word in an
	     integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map);

	  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	      && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
	    new = operand_subword (inner, SUBREG_WORD (x), 0,
				   GET_MODE (SUBREG_REG (x)));

	  if (new == 0 && subreg_lowpart_p (x))
	    new = gen_lowpart_common (GET_MODE (x), inner);

	  if (new)
	    validate_change (insn, loc, new, 1);

	  return;
	}
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map);

      /* If a memory address got spoiled, change it back.  */
      if (insn != 0 && num_validated_changes () != num_changes
	  && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;

    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;

	subst_constants (&SET_SRC (x), insn, map);
	src = SET_SRC (x);

	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       /* By convention, we always use ZERO_EXTRACT in the dest.  */
	       /* || GET_CODE (*dest_loc) == SIGN_EXTRACT */
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map);
		subst_constants (&XEXP (*dest_loc, 2), insn, map);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (GET_CODE (*dest_loc) == MEM)
	  subst_constants (&XEXP (*dest_loc, 0), insn, map);

	/* Check for the case of DEST a SUBREG, both it and the underlying
	   register are less than one word, and the SUBREG has the wider mode.
	   In that case, we are really setting the underlying register to the
	   source converted to the mode of DEST.  So indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (GET_CODE (src) == REG
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && GET_CODE (XEXP (src, 0)) == REG
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
		|| dest == cc0_rtx
#endif
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a COMPARE
	       it will cause us to save the COMPARE with any constants
	       substituted, which is what we want for later.  */
	    map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
	    map->equiv_sets[map->num_sets++].dest = dest;
	  }

	return;
      }
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case '0':
	break;

      case 'e':
	if (XEXP (x, i))
	  subst_constants (&XEXP (x, i), insn, map);
	break;

      case 'u':
      case 'i':
      case 's':
      case 'w':
	break;

      case 'E':
	if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	  {
	    int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      subst_constants (&XVECEXP (x, i, j), insn, map);
	  }
	break;

      default:
	abort ();
      }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      new = simplify_unary_operation (code, GET_MODE (x),
				      XEXP (x, 0), op0_mode);
      break;

    case '<':
      {
	enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
	if (op_mode == VOIDmode)
	  op_mode = GET_MODE (XEXP (x, 1));
	new = simplify_relational_operation (code, op_mode,
					     XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
	if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	  new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
		 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
						 GET_MODE (x)));
#endif
	break;
      }

    case '2':
    case 'c':
      new = simplify_binary_operation (code, GET_MODE (x),
				       XEXP (x, 0), XEXP (x, 1));
      break;

    case 'b':
    case '3':
      new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
      break;
    }

  if (new)
    validate_change (insn, loc, new, 1);
}
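/* Illustrative example, not part of the original source: if constant
   substitution turns (plus:SI (reg:SI 65) (reg:SI 66)) into
   (plus:SI (const_int 4) (reg:SI 66)), the two validate_change calls in
   the commutative case above swap the operands, giving
   (plus:SI (reg:SI 66) (const_int 4)), the canonical form that the
   simplify_*_operation routines and recog expect.  */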
/* Show that the registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

void
mark_stores (dest, x)
     rtx dest;
     rtx x;
{
  int regno = -1;
  enum machine_mode mode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
		      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
      register int i;

      for (i = regno; i <= last_reg; i++)
	if (i < global_const_equiv_map_size)
	  global_const_equiv_map[i] = 0;
    }
}
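/* Illustrative example, not part of the original source: a store into a
   two-word hard register, say regno 4 in DImode on a 32-bit target where
   HARD_REGNO_NREGS (4, DImode) == 2, makes the loop above clear both
   global_const_equiv_map[4] and global_const_equiv_map[5], while a store
   into a pseudo (regno >= FIRST_PSEUDO_REGISTER) clears exactly one
   entry.  */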
/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
   pointed to by PX, they represent constants in the constant pool.
   Replace these with a new memory reference obtained from force_const_mem.
   Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
   address of a constant pool entry.  Replace them with the address of
   a new constant pool entry obtained from force_const_mem.  */

static void
restore_constants (px)
     rtx *px;
{
  rtx x = *px;
  int i, j;
  char *fmt;

  if (x == 0)
    return;

  if (GET_CODE (x) == CONST_DOUBLE)
    {
      /* We have to make a new CONST_DOUBLE to ensure that we account for
	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      else
	*px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
				  VOIDmode);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
    {
      restore_constants (&XEXP (x, 0));
      *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
    {
      /* This must be (subreg/i:M1 (const/i:M2 ...) 0).  */
      rtx new = XEXP (SUBREG_REG (x), 0);

      restore_constants (&new);
      new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
      PUT_MODE (new, GET_MODE (x));
      *px = validize_mem (new);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
    {
      restore_constants (&XEXP (x, 0));
      *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
    }
  else
    {
      fmt = GET_RTX_FORMAT (GET_CODE (x));
      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
	{
	  switch (*fmt++)
	    {
	    case 'E':
	      for (j = 0; j < XVECLEN (x, i); j++)
		restore_constants (&XVECEXP (x, i, j));
	      break;

	    case 'e':
	      restore_constants (&XEXP (x, i));
	      break;
	    }
	}
    }
}
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	register tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}
/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  register tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  BLOCK_ABSTRACT (stmt) = setting;

  {
    register tree local_decl;

    for (local_decl = BLOCK_VARS (stmt);
	 local_decl != NULL_TREE;
	 local_decl = TREE_CHAIN (local_decl))
      set_decl_abstract_flags (local_decl, setting);
  }

  {
    register tree subblock;

    for (subblock = BLOCK_SUBBLOCKS (stmt);
	 subblock != NULL_TREE;
	 subblock = BLOCK_CHAIN (subblock))
      set_block_abstract_flags (subblock, setting);
  }
}
/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
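/* Illustrative usage sketch with an assumed caller, not shown in this
   file: a debug-info writer can bracket the output of an abstract
   instance as

	set_decl_abstract_flags (decl, 1);
	... emit debugging output for the abstract instance ...
	set_decl_abstract_flags (decl, 0);

   temporarily marking the whole decl/block tree abstract and restoring
   it afterwards.  */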
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  rtx head;
  rtx last;

  if (output_bytecode)
    {
      warning ("`inline' ignored for bytecode output");
      return;
    }

  head = DECL_SAVED_INSNS (fndecl);
  current_function_decl = fndecl;

  /* This call is only used to initialize global variables.  */
  init_function_start (fndecl, "lossage", 1);

  /* Redo parameter determinations in case the FUNCTION_...
     macros took machine-specific actions that need to be redone.  */
  assign_parms (fndecl, 1);

  /* Set stack frame size.  */
  assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);

  restore_reg_data (FIRST_PARM_INSN (head));

  stack_slot_list = STACK_SLOT_LIST (head);
  forced_labels = FORCED_LABELS (head);

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
    current_function_calls_alloca = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
    current_function_calls_setjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
    current_function_calls_longjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
    current_function_returns_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
    current_function_returns_pcc_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    current_function_needs_context = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
    current_function_has_nonlocal_label = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
    current_function_returns_pointer = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
    current_function_uses_const_pool = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
  current_function_pops_args = POPS_ARGS (head);

  /* This is the only thing the expand_function_end call that used to be
     here actually does and that call can cause problems.  */
  immediate_size_expand--;

  /* Find last insn and rebuild the constant pool.  */
  for (last = FIRST_PARM_INSN (head);
       NEXT_INSN (last); last = NEXT_INSN (last))
    {
      if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
	{
	  restore_constants (&PATTERN (last));
	  restore_constants (&REG_NOTES (last));
	}
    }

  set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
  set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function_decl = 0;
}
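/* Illustrative usage sketch with an assumed caller, not shown in this
   file: at the end of compilation a front end might emit each deferred
   inline roughly as

	if (DECL_SAVED_INSNS (fndecl) && ! TREE_ASM_WRITTEN (fndecl))
	  output_inline_function (fndecl);

   relying on the rest_of_compilation call above to take the restored
   insn chain all the way to assembly code.  */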