1 | /* Procedure integration for GNU CC. |
2 | Copyright (C) 1988-1991 Free Software Foundation, Inc. | |
3 | Contributed by Michael Tiemann (tiemann@cygnus.com) | |
4 | ||
5 | This file is part of GNU CC. | |
6 | ||
7 | GNU CC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 2, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GNU CC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GNU CC; see the file COPYING. If not, write to | |
19 | the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */ | |
20 | ||
21 | ||
22 | #include <stdio.h> | |
23 | ||
24 | #include "config.h" | |
25 | #include "rtl.h" | |
26 | #include "tree.h" | |
27 | #include "flags.h" | |
28 | #include "insn-config.h" | |
29 | #include "insn-flags.h" | |
30 | #include "expr.h" | |
31 | #include "output.h" | |
32 | #include "integrate.h" | |
33 | #include "real.h" | |
34 | #include "function.h" | |
35 | ||
36 | #include "obstack.h" | |
37 | #define obstack_chunk_alloc xmalloc | |
38 | #define obstack_chunk_free free | |
39 | extern int xmalloc (); | |
40 | extern void free (); | |
41 | ||
42 | extern struct obstack *function_maybepermanent_obstack; | |
43 | ||
44 | extern tree pushdecl (); | |
45 | extern tree poplevel (); | |
46 | ||
 47 | /* Round VALUE up to the next highest integer that meets the | |
 48 | alignment ALIGN (which must be a power of two). */ | |
49 | #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1)) | |
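/* For example, CEIL_ROUND (13, 8) is (13 + 7) & ~7 == 16, while
   CEIL_ROUND (16, 8) stays 16; ALIGN must be a power of two for the
   masking trick above to be valid.  */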
50 | ||
51 | /* Default max number of insns a function can have and still be inline. | |
52 | This is overridden on RISC machines. */ | |
53 | #ifndef INTEGRATE_THRESHOLD | |
54 | #define INTEGRATE_THRESHOLD(DECL) \ | |
55 | (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))) | |
56 | #endif | |
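/* With this default, a function taking 3 arguments may contain up to
   8 * (8 + 3) == 88 insns and still be considered for inlining; a
   target can override the limit by defining INTEGRATE_THRESHOLD.  */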
57 | \f | |
58 | /* Save any constant pool constants in an insn. */ | |
59 | static void save_constants (); | |
60 | ||
61 | /* Note when parameter registers are the destination of a SET. */ | |
62 | static void note_modified_parmregs (); | |
63 | ||
64 | /* Copy an rtx for save_for_inline_copying. */ | |
65 | static rtx copy_for_inline (); | |
66 | ||
67 | /* Make copies of MEMs in DECL_RTLs. */ | |
68 | static void copy_decl_rtls (); | |
69 | ||
70 | static tree copy_decl_tree (); | |
71 | ||
72 | /* Return the constant equivalent of a given rtx, or 0 if none. */ | |
73 | static rtx const_equiv (); | |
74 | ||
75 | static void integrate_parm_decls (); | |
76 | static void integrate_decl_tree (); | |
77 | ||
78 | static void subst_constants (); | |
79 | static rtx fold_out_const_cc0 (); | |
80 | \f | |
81 | /* Zero if the current function (whose FUNCTION_DECL is FNDECL) | |
82 | is safe and reasonable to integrate into other functions. | |
83 | Nonzero means value is a warning message with a single %s | |
84 | for the function's name. */ | |
85 | ||
86 | char * | |
87 | function_cannot_inline_p (fndecl) | |
88 | register tree fndecl; | |
89 | { | |
90 | register rtx insn; | |
91 | tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl))); | |
92 | int max_insns = INTEGRATE_THRESHOLD (fndecl); | |
93 | register int ninsns = 0; | |
94 | register tree parms; | |
95 | ||
96 | /* No inlines with varargs. `grokdeclarator' gives a warning | |
97 | message about that if `inline' is specified. This code | |
 98 | is put in to catch the volunteers. */ | |
99 | if ((last && TREE_VALUE (last) != void_type_node) | |
100 | || (DECL_ARGUMENTS (fndecl) && DECL_NAME (DECL_ARGUMENTS (fndecl)) | |
101 | && ! strcmp (IDENTIFIER_POINTER (DECL_NAME (DECL_ARGUMENTS (fndecl))), | |
102 | "__builtin_va_alist"))) | |
103 | return "varargs function cannot be inline"; | |
104 | ||
105 | if (current_function_calls_alloca) | |
106 | return "function using alloca cannot be inline"; | |
107 | ||
108 | if (current_function_contains_functions) | |
109 | return "function with nested functions cannot be inline"; | |
110 | ||
111 | /* This restriction may be eliminated sometime soon. But for now, don't | |
112 | worry about remapping the static chain. */ | |
113 | if (current_function_needs_context) | |
114 | return "nested function cannot be inline"; | |
115 | ||
 116 | /* If it's not even close, don't even look. */ | |
117 | if (!TREE_INLINE (fndecl) && get_max_uid () > 3 * max_insns) | |
118 | return "function too large to be inline"; | |
119 | ||
120 | #if 0 | |
121 | /* Large stacks are OK now that inlined functions can share them. */ | |
122 | /* Don't inline functions with large stack usage, | |
123 | since they can make other recursive functions burn up stack. */ | |
124 | if (!TREE_INLINE (fndecl) && get_frame_size () > 100) | |
125 | return "function stack frame for inlining"; | |
126 | #endif | |
127 | ||
128 | #if 0 | |
129 | /* Don't inline functions which do not specify a function prototype and | |
130 | have BLKmode argument or take the address of a parameter. */ | |
131 | for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms)) | |
132 | { | |
133 | if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode) | |
134 | TREE_ADDRESSABLE (parms) = 1; | |
135 | if (last == NULL_TREE && TREE_ADDRESSABLE (parms)) | |
136 | return "no prototype, and parameter address used; cannot be inline"; | |
137 | } | |
138 | #endif | |
139 | ||
140 | /* We can't inline functions that return structures | |
141 | the old-fashioned PCC way, copying into a static block. */ | |
142 | if (current_function_returns_pcc_struct) | |
143 | return "inline functions not supported for this return value type"; | |
144 | ||
145 | /* We can't inline functions that return structures of varying size. */ | |
146 | if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0) | |
147 | return "function with varying-size return value cannot be inline"; | |
148 | ||
149 | /* Cannot inline a function with a varying size argument. */ | |
150 | for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms)) | |
151 | if (int_size_in_bytes (TREE_TYPE (parms)) < 0) | |
152 | return "function with varying-size parameter cannot be inline"; | |
153 | ||
154 | if (!TREE_INLINE (fndecl) && get_max_uid () > max_insns) | |
155 | { | |
156 | for (ninsns = 0, insn = get_first_nonparm_insn (); insn && ninsns < max_insns; | |
157 | insn = NEXT_INSN (insn)) | |
158 | { | |
159 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') | |
160 | ninsns++; | |
161 | } | |
162 | ||
163 | if (ninsns >= max_insns) | |
164 | return "function too large to be inline"; | |
165 | } | |
166 | ||
167 | return 0; | |
168 | } | |
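/* A minimal sketch of how a caller might act on the value returned by
   function_cannot_inline_p; `warning', FNDECL and the choice of the
   save routine are assumptions here, not taken from this file.  */
#if 0
  {
    char *lose = function_cannot_inline_p (fndecl);

    if (lose)
      warning (lose, IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    else
      save_for_inline_nocopy (fndecl);
  }
#endif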
169 | \f | |
170 | /* Variables used within save_for_inline. */ | |
171 | ||
172 | /* Mapping from old pseudo-register to new pseudo-registers. | |
173 | The first element of this map is reg_map[FIRST_PSEUDO_REGISTER]. | |
174 | It is allocated in `save_for_inline' and `expand_inline_function', | |
175 | and deallocated on exit from each of those routines. */ | |
176 | static rtx *reg_map; | |
177 | ||
178 | /* Mapping from old code-labels to new code-labels. | |
179 | The first element of this map is label_map[min_labelno]. | |
180 | It is allocated in `save_for_inline' and `expand_inline_function', | |
181 | and deallocated on exit from each of those routines. */ | |
182 | static rtx *label_map; | |
183 | ||
184 | /* Mapping from old insn uid's to copied insns. | |
185 | It is allocated in `save_for_inline' and `expand_inline_function', | |
186 | and deallocated on exit from each of those routines. */ | |
187 | static rtx *insn_map; | |
188 | ||
189 | /* Map pseudo reg number into the PARM_DECL for the parm living in the reg. | |
190 | Zero for a reg that isn't a parm's home. | |
191 | Only reg numbers less than max_parm_reg are mapped here. */ | |
192 | static tree *parmdecl_map; | |
193 | ||
194 | /* Keep track of first pseudo-register beyond those that are parms. */ | |
195 | static int max_parm_reg; | |
196 | ||
197 | /* When an insn is being copied by copy_for_inline, | |
198 | this is nonzero if we have copied an ASM_OPERANDS. | |
199 | In that case, it is the original input-operand vector. */ | |
200 | static rtvec orig_asm_operands_vector; | |
201 | ||
202 | /* When an insn is being copied by copy_for_inline, | |
203 | this is nonzero if we have copied an ASM_OPERANDS. | |
204 | In that case, it is the copied input-operand vector. */ | |
205 | static rtvec copy_asm_operands_vector; | |
206 | ||
207 | /* Likewise, this is the copied constraints vector. */ | |
208 | static rtvec copy_asm_constraints_vector; | |
209 | ||
210 | /* In save_for_inline, nonzero if past the parm-initialization insns. */ | |
211 | static int in_nonparm_insns; | |
212 | \f | |
213 | /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization | |
214 | needed to save FNDECL's insns and info for future inline expansion. */ | |
215 | ||
216 | static rtx | |
217 | initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy) | |
218 | tree fndecl; | |
219 | int min_labelno; | |
220 | int max_labelno; | |
221 | int max_reg; | |
222 | int copy; | |
223 | { | |
224 | int function_flags, i; | |
225 | rtvec arg_vector; | |
226 | tree parms; | |
227 | ||
228 | /* Compute the values of any flags we must restore when inlining this. */ | |
229 | ||
230 | function_flags | |
231 | = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA | |
232 | + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP | |
233 | + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP | |
234 | + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT | |
235 | + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT | |
236 | + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT | |
237 | + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL | |
238 | + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER | |
239 | + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL | |
240 | + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE); | |
241 | ||
242 | /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */ | |
243 | bzero (parmdecl_map, max_parm_reg * sizeof (tree)); | |
244 | arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl))); | |
245 | ||
246 | for (parms = DECL_ARGUMENTS (fndecl), i = 0; | |
247 | parms; | |
248 | parms = TREE_CHAIN (parms), i++) | |
249 | { | |
250 | rtx p = DECL_RTL (parms); | |
251 | ||
252 | if (GET_CODE (p) == MEM && copy) | |
253 | /* Copy the rtl so that modifications of the address | |
254 | later in compilation won't affect this arg_vector. | |
255 | Virtual register instantiation can screw the address | |
256 | of the rtl. */ | |
257 | DECL_RTL (parms) = copy_rtx (p); | |
258 | ||
259 | RTVEC_ELT (arg_vector, i) = p; | |
260 | ||
261 | if (GET_CODE (p) == REG) | |
262 | parmdecl_map[REGNO (p)] = parms; | |
263 | TREE_READONLY (parms) = 1; | |
264 | } | |
265 | ||
266 | /* Assume we start out in the insns that set up the parameters. */ | |
267 | in_nonparm_insns = 0; | |
268 | ||
 269 | /* The list of DECL_SAVED_INSNS starts off with a header which | |
270 | contains the following information: | |
271 | ||
272 | the first insn of the function (not including the insns that copy | |
273 | parameters into registers). | |
274 | the first parameter insn of the function, | |
275 | the first label used by that function, | |
276 | the last label used by that function, | |
277 | the highest register number used for parameters, | |
278 | the total number of registers used, | |
279 | the size of the incoming stack area for parameters, | |
280 | the number of bytes popped on return, | |
281 | the stack slot list, | |
282 | some flags that are used to restore compiler globals, | |
283 | the value of current_function_outgoing_args_size, | |
284 | the original argument vector, | |
285 | and the original DECL_INITIAL. */ | |
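/* Each of those header fields is read back later in this file through
   accessor macros such as FIRST_FUNCTION_INSN, FIRST_PARM_INSN,
   FIRST_LABELNO, LAST_LABELNO, MAX_REGNUM, FUNCTION_FLAGS,
   OUTGOING_ARGS_SIZE and ORIGINAL_ARG_VECTOR (see
   expand_inline_function below).  */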
286 | ||
287 | return gen_inline_header_rtx (NULL, NULL, min_labelno, max_labelno, | |
288 | max_parm_reg, max_reg, | |
289 | current_function_args_size, | |
290 | current_function_pops_args, | |
291 | stack_slot_list, function_flags, | |
292 | current_function_outgoing_args_size, | |
293 | arg_vector, (rtx) DECL_INITIAL (fndecl)); | |
294 | } | |
295 | ||
296 | /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the | |
297 | things that must be done to make FNDECL expandable as an inline function. | |
298 | HEAD contains the chain of insns to which FNDECL will expand. */ | |
299 | ||
300 | static void | |
301 | finish_inline (fndecl, head) | |
302 | tree fndecl; | |
303 | rtx head; | |
304 | { | |
305 | NEXT_INSN (head) = get_first_nonparm_insn (); | |
306 | FIRST_PARM_INSN (head) = get_insns (); | |
307 | DECL_SAVED_INSNS (fndecl) = head; | |
308 | DECL_FRAME_SIZE (fndecl) = get_frame_size (); | |
309 | TREE_INLINE (fndecl) = 1; | |
310 | } | |
311 | ||
312 | /* Make the insns and PARM_DECLs of the current function permanent | |
313 | and record other information in DECL_SAVED_INSNS to allow inlining | |
314 | of this function in subsequent calls. | |
315 | ||
316 | This function is called when we are going to immediately compile | |
317 | the insns for FNDECL. The insns in maybepermanent_obstack cannot be | |
318 | modified by the compilation process, so we copy all of them to | |
319 | new storage and consider the new insns to be the insn chain to be | |
320 | compiled. */ | |
321 | ||
322 | void | |
323 | save_for_inline_copying (fndecl) | |
324 | tree fndecl; | |
325 | { | |
326 | rtx first_insn, last_insn, insn; | |
327 | rtx head, copy; | |
328 | int max_labelno, min_labelno, i, len; | |
329 | int max_reg; | |
330 | int max_uid; | |
331 | rtx first_nonparm_insn; | |
332 | ||
333 | /* Make and emit a return-label if we have not already done so. | |
334 | Do this before recording the bounds on label numbers. */ | |
335 | ||
336 | if (return_label == 0) | |
337 | { | |
338 | return_label = gen_label_rtx (); | |
339 | emit_label (return_label); | |
340 | } | |
341 | ||
342 | /* Get some bounds on the labels and registers used. */ | |
343 | ||
344 | max_labelno = max_label_num (); | |
345 | min_labelno = get_first_label_num (); | |
346 | max_reg = max_reg_num (); | |
347 | ||
348 | /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL. | |
349 | Later we set TREE_READONLY to 0 if the parm is modified inside the fn. | |
 350 | Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values | |
351 | for the parms, prior to elimination of virtual registers. | |
352 | These values are needed for substituting parms properly. */ | |
353 | ||
354 | max_parm_reg = max_parm_reg_num (); | |
355 | parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree)); | |
356 | ||
357 | head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1); | |
358 | ||
359 | if (current_function_uses_const_pool) | |
360 | { | |
361 | /* Replace any constant pool references with the actual constant. We | |
362 | will put the constants back in the copy made below. */ | |
363 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
364 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') | |
365 | { | |
366 | save_constants (&PATTERN (insn)); | |
367 | if (REG_NOTES (insn)) | |
368 | save_constants (®_NOTES (insn)); | |
369 | } | |
370 | ||
371 | /* Clear out the constant pool so that we can recreate it with the | |
372 | copied constants below. */ | |
373 | init_const_rtx_hash_table (); | |
374 | clear_const_double_mem (); | |
375 | } | |
376 | ||
377 | max_uid = INSN_UID (head); | |
378 | ||
379 | /* We have now allocated all that needs to be allocated permanently | |
380 | on the rtx obstack. Set our high-water mark, so that we | |
381 | can free the rest of this when the time comes. */ | |
382 | ||
383 | preserve_data (); | |
384 | ||
 385 | /* Copy the chain of insns of this function. | |
386 | Install the copied chain as the insns of this function, | |
387 | for continued compilation; | |
388 | the original chain is recorded as the DECL_SAVED_INSNS | |
389 | for inlining future calls. */ | |
390 | ||
391 | /* If there are insns that copy parms from the stack into pseudo registers, | |
392 | those insns are not copied. `expand_inline_function' must | |
393 | emit the correct code to handle such things. */ | |
394 | ||
395 | insn = get_insns (); | |
396 | if (GET_CODE (insn) != NOTE) | |
397 | abort (); | |
398 | first_insn = rtx_alloc (NOTE); | |
399 | NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn); | |
400 | NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn); | |
401 | INSN_UID (first_insn) = INSN_UID (insn); | |
402 | PREV_INSN (first_insn) = NULL; | |
403 | NEXT_INSN (first_insn) = NULL; | |
404 | last_insn = first_insn; | |
405 | ||
406 | /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy. | |
407 | Make these new rtx's now, and install them in regno_reg_rtx, so they | |
408 | will be the official pseudo-reg rtx's for the rest of compilation. */ | |
409 | ||
410 | reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx)); | |
411 | ||
412 | len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion); | |
413 | for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--) | |
414 | reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack, | |
415 | regno_reg_rtx[i], len); | |
416 | ||
417 | bcopy (reg_map + LAST_VIRTUAL_REGISTER + 1, | |
418 | regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1, | |
419 | (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx)); | |
420 | ||
421 | /* Likewise each label rtx must have a unique rtx as its copy. */ | |
422 | ||
423 | label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx)); | |
424 | label_map -= min_labelno; | |
425 | ||
426 | for (i = min_labelno; i < max_labelno; i++) | |
427 | label_map[i] = gen_label_rtx (); | |
428 | ||
429 | /* Record the mapping of old insns to copied insns. */ | |
430 | ||
431 | insn_map = (rtx *) alloca (max_uid * sizeof (rtx)); | |
432 | bzero (insn_map, max_uid * sizeof (rtx)); | |
433 | ||
434 | /* Get the insn which signals the end of parameter setup code. */ | |
435 | first_nonparm_insn = get_first_nonparm_insn (); | |
436 | ||
437 | /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM | |
438 | (the former occurs when a variable has its address taken) | |
439 | since these may be shared and can be changed by virtual | |
440 | register instantiation. DECL_RTL values for our arguments | |
441 | have already been copied by initialize_for_inline. */ | |
442 | for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++) | |
443 | if (GET_CODE (regno_reg_rtx[i]) == MEM) | |
444 | XEXP (regno_reg_rtx[i], 0) | |
445 | = copy_for_inline (XEXP (regno_reg_rtx[i], 0)); | |
446 | ||
447 | /* Copy the tree of subblocks of the function, and the decls in them. | |
448 | We will use the copy for compiling this function, then restore the original | |
449 | subblocks and decls for use when inlining this function. | |
450 | ||
451 | Several parts of the compiler modify BLOCK trees. In particular, | |
452 | instantiate_virtual_regs will instantiate any virtual regs | |
453 | mentioned in the DECL_RTLs of the decls, and loop | |
454 | unrolling will replicate any BLOCK trees inside an unrolled loop. | |
455 | ||
456 | The modified subblocks or DECL_RTLs would be incorrect for the original rtl | |
457 | which we will use for inlining. The rtl might even contain pseudoregs | |
458 | whose space has been freed. */ | |
459 | ||
460 | DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl)); | |
461 | ||
462 | /* Now copy each DECL_RTL which is a MEM, | |
463 | so it is safe to modify their addresses. */ | |
464 | copy_decl_rtls (DECL_INITIAL (fndecl)); | |
465 | ||
 466 | /* Now copy the chain of insns. Do this in two passes: first copy the insn | |
 467 | itself and its body; then copy the REG_NOTES. This is because | |
468 | a REG_NOTE may have a forward pointer to another insn. */ | |
469 | ||
470 | for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn)) | |
471 | { | |
472 | orig_asm_operands_vector = 0; | |
473 | ||
474 | if (insn == first_nonparm_insn) | |
475 | in_nonparm_insns = 1; | |
476 | ||
477 | switch (GET_CODE (insn)) | |
478 | { | |
479 | case NOTE: | |
480 | /* No need to keep these. */ | |
481 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED) | |
482 | continue; | |
483 | ||
484 | copy = rtx_alloc (NOTE); | |
485 | NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn); | |
486 | NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn); | |
487 | break; | |
488 | ||
489 | case INSN: | |
490 | case CALL_INSN: | |
491 | case JUMP_INSN: | |
492 | copy = rtx_alloc (GET_CODE (insn)); | |
493 | PATTERN (copy) = copy_for_inline (PATTERN (insn)); | |
494 | INSN_CODE (copy) = -1; | |
495 | LOG_LINKS (copy) = NULL; | |
496 | RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn); | |
497 | break; | |
498 | ||
499 | case CODE_LABEL: | |
500 | copy = label_map[CODE_LABEL_NUMBER (insn)]; | |
501 | break; | |
502 | ||
503 | case BARRIER: | |
504 | copy = rtx_alloc (BARRIER); | |
505 | break; | |
506 | ||
507 | default: | |
508 | abort (); | |
509 | } | |
510 | INSN_UID (copy) = INSN_UID (insn); | |
511 | insn_map[INSN_UID (insn)] = copy; | |
512 | NEXT_INSN (last_insn) = copy; | |
513 | PREV_INSN (copy) = last_insn; | |
514 | last_insn = copy; | |
515 | } | |
516 | ||
517 | /* Now copy the REG_NOTES. */ | |
518 | for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn)) | |
519 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i' | |
520 | && insn_map[INSN_UID(insn)]) | |
521 | REG_NOTES (insn_map[INSN_UID (insn)]) | |
522 | = copy_for_inline (REG_NOTES (insn)); | |
523 | ||
524 | NEXT_INSN (last_insn) = NULL; | |
525 | ||
526 | finish_inline (fndecl, head); | |
527 | ||
528 | set_new_first_and_last_insn (first_insn, last_insn); | |
529 | } | |
530 | ||
531 | /* Make a copy of the entire tree of blocks BLOCK, and return it. */ | |
532 | ||
533 | static tree | |
534 | copy_decl_tree (block) | |
535 | tree block; | |
536 | { | |
537 | tree t, vars, subblocks; | |
538 | ||
539 | vars = copy_list (BLOCK_VARS (block)); | |
540 | subblocks = 0; | |
541 | ||
542 | /* Process all subblocks. */ | |
543 | for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t)) | |
544 | { | |
545 | tree copy = copy_decl_tree (t); | |
546 | TREE_CHAIN (copy) = subblocks; | |
547 | subblocks = copy; | |
548 | } | |
549 | ||
550 | t = copy_node (block); | |
551 | BLOCK_VARS (t) = vars; | |
552 | BLOCK_SUBBLOCKS (t) = nreverse (subblocks); | |
553 | return t; | |
554 | } | |
555 | ||
556 | /* Copy DECL_RTLs in all decls in the given BLOCK node. */ | |
557 | ||
558 | static void | |
559 | copy_decl_rtls (block) | |
560 | tree block; | |
561 | { | |
562 | tree t; | |
563 | ||
564 | for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t)) | |
565 | if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM) | |
566 | DECL_RTL (t) = copy_for_inline (DECL_RTL (t)); | |
567 | ||
568 | /* Process all subblocks. */ | |
569 | for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t)) | |
570 | copy_decl_rtls (t); | |
571 | } | |
572 | ||
573 | /* Make the insns and PARM_DECLs of the current function permanent | |
574 | and record other information in DECL_SAVED_INSNS to allow inlining | |
575 | of this function in subsequent calls. | |
576 | ||
577 | This routine need not copy any insns because we are not going | |
578 | to immediately compile the insns in the insn chain. There | |
579 | are two cases when we would compile the insns for FNDECL: | |
580 | (1) when FNDECL is expanded inline, and (2) when FNDECL needs to | |
581 | be output at the end of other compilation, because somebody took | |
582 | its address. In the first case, the insns of FNDECL are copied | |
583 | as it is expanded inline, so FNDECL's saved insns are not | |
584 | modified. In the second case, FNDECL is used for the last time, | |
585 | so modifying the rtl is not a problem. | |
586 | ||
587 | ??? Actually, we do not verify that FNDECL is not inline expanded | |
588 | by other functions which must also be written down at the end | |
589 | of compilation. We could set flag_no_inline to nonzero when | |
590 | the time comes to write down such functions. */ | |
591 | ||
592 | void | |
593 | save_for_inline_nocopy (fndecl) | |
594 | tree fndecl; | |
595 | { | |
596 | rtx insn; | |
597 | rtx head, copy; | |
598 | tree parms; | |
599 | int max_labelno, min_labelno, i, len; | |
600 | int max_reg; | |
601 | int max_uid; | |
602 | rtx first_nonparm_insn; | |
603 | int function_flags; | |
604 | ||
605 | /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL. | |
606 | Later we set TREE_READONLY to 0 if the parm is modified inside the fn. | |
 607 | Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values | |
608 | for the parms, prior to elimination of virtual registers. | |
609 | These values are needed for substituting parms properly. */ | |
610 | ||
611 | max_parm_reg = max_parm_reg_num (); | |
612 | parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree)); | |
613 | ||
614 | /* Make and emit a return-label if we have not already done so. */ | |
615 | ||
616 | if (return_label == 0) | |
617 | { | |
618 | return_label = gen_label_rtx (); | |
619 | emit_label (return_label); | |
620 | } | |
621 | ||
622 | head = initialize_for_inline (fndecl, get_first_label_num (), | |
623 | max_label_num (), max_reg_num (), 0); | |
624 | ||
625 | /* If there are insns that copy parms from the stack into pseudo registers, | |
626 | those insns are not copied. `expand_inline_function' must | |
627 | emit the correct code to handle such things. */ | |
628 | ||
629 | insn = get_insns (); | |
630 | if (GET_CODE (insn) != NOTE) | |
631 | abort (); | |
632 | ||
633 | /* Get the insn which signals the end of parameter setup code. */ | |
634 | first_nonparm_insn = get_first_nonparm_insn (); | |
635 | ||
636 | /* Now just scan the chain of insns to see what happens to our | |
637 | PARM_DECLs. If a PARM_DECL is used but never modified, we | |
638 | can substitute its rtl directly when expanding inline (and | |
639 | perform constant folding when its incoming value is constant). | |
640 | Otherwise, we have to copy its value into a new register and track | |
641 | the new register's life. */ | |
642 | ||
643 | for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn)) | |
644 | { | |
645 | if (insn == first_nonparm_insn) | |
646 | in_nonparm_insns = 1; | |
647 | ||
648 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') | |
649 | { | |
650 | if (current_function_uses_const_pool) | |
651 | { | |
652 | /* Replace any constant pool references with the actual constant. | |
653 | We will put the constant back if we need to write the | |
654 | function out after all. */ | |
655 | save_constants (&PATTERN (insn)); | |
656 | if (REG_NOTES (insn)) | |
657 | save_constants (®_NOTES (insn)); | |
658 | } | |
659 | ||
660 | /* Record what interesting things happen to our parameters. */ | |
661 | note_stores (PATTERN (insn), note_modified_parmregs); | |
662 | } | |
663 | } | |
664 | ||
665 | /* We have now allocated all that needs to be allocated permanently | |
666 | on the rtx obstack. Set our high-water mark, so that we | |
667 | can free the rest of this when the time comes. */ | |
668 | ||
669 | preserve_data (); | |
670 | ||
671 | finish_inline (fndecl, head); | |
672 | } | |
673 | \f | |
674 | /* Given PX, a pointer into an insn, search for references to the constant | |
675 | pool. Replace each with a CONST that has the mode of the original | |
676 | constant, contains the constant, and has RTX_INTEGRATED_P set. | |
677 | Similarly, constant pool addresses not enclosed in a MEM are replaced | |
678 | with an ADDRESS rtx which also gives the constant, mode, and has | |
679 | RTX_INTEGRATED_P set. */ | |
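/* For instance, a reference to a pooled DFmode constant such as
     (mem:DF (symbol_ref ("*.LC0")))
   becomes a (const:DF ...) carrying the constant itself, with
   RTX_INTEGRATED_P set; copy_for_inline below turns such rtxs back
   into constant pool references when the saved insns are copied.
   The symbol name used here is purely illustrative.  */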
680 | ||
681 | static void | |
682 | save_constants (px) | |
683 | rtx *px; | |
684 | { | |
685 | rtx x; | |
686 | int i, j; | |
687 | ||
688 | again: | |
689 | x = *px; | |
690 | ||
691 | /* If this is a CONST_DOUBLE, don't try to fix things up in | |
 692 | CONST_DOUBLE_MEM, because that would cause infinite recursion. */ | |
693 | if (GET_CODE (x) == CONST_DOUBLE) | |
694 | return; | |
695 | else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF | |
696 | && CONSTANT_POOL_ADDRESS_P (XEXP (x,0))) | |
697 | { | |
698 | enum machine_mode const_mode = get_pool_mode (XEXP (x, 0)); | |
699 | rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0))); | |
700 | RTX_INTEGRATED_P (new) = 1; | |
701 | ||
702 | /* If the MEM was in a different mode than the constant (perhaps we | |
703 | were only looking at the low-order part), surround it with a | |
704 | SUBREG so we can save both modes. */ | |
705 | ||
706 | if (GET_MODE (x) != const_mode) | |
707 | { | |
708 | new = gen_rtx (SUBREG, GET_MODE (x), new, 0); | |
709 | RTX_INTEGRATED_P (new) = 1; | |
710 | } | |
711 | ||
712 | *px = new; | |
713 | save_constants (&XEXP (*px, 0)); | |
714 | } | |
715 | else if (GET_CODE (x) == SYMBOL_REF | |
716 | && CONSTANT_POOL_ADDRESS_P (x)) | |
717 | { | |
718 | *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x)); | |
719 | save_constants (&XEXP (*px, 0)); | |
720 | RTX_INTEGRATED_P (*px) = 1; | |
721 | } | |
722 | ||
723 | else | |
724 | { | |
725 | char *fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
726 | int len = GET_RTX_LENGTH (GET_CODE (x)); | |
727 | ||
728 | for (i = len-1; i >= 0; i--) | |
729 | { | |
730 | switch (fmt[i]) | |
731 | { | |
732 | case 'E': | |
733 | for (j = 0; j < XVECLEN (x, i); j++) | |
734 | save_constants (&XVECEXP (x, i, j)); | |
735 | break; | |
736 | ||
737 | case 'e': | |
738 | if (XEXP (x, i) == 0) | |
739 | continue; | |
740 | if (i == 0) | |
741 | { | |
742 | /* Hack tail-recursion here. */ | |
743 | px = &XEXP (x, 0); | |
744 | goto again; | |
745 | } | |
746 | save_constants (&XEXP (x, i)); | |
747 | break; | |
748 | } | |
749 | } | |
750 | } | |
751 | } | |
752 | \f | |
753 | /* Note whether a parameter is modified or not. */ | |
754 | ||
755 | static void | |
756 | note_modified_parmregs (reg, x) | |
757 | rtx reg; | |
758 | rtx x; | |
759 | { | |
760 | if (GET_CODE (reg) == REG && in_nonparm_insns | |
761 | && REGNO (reg) < max_parm_reg | |
762 | && REGNO (reg) >= FIRST_PSEUDO_REGISTER | |
763 | && parmdecl_map[REGNO (reg)] != 0) | |
764 | TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0; | |
765 | } | |
766 | ||
767 | /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels | |
768 | according to `reg_map' and `label_map'. The original rtl insns | |
769 | will be saved for inlining; this is used to make a copy | |
770 | which is used to finish compiling the inline function itself. | |
771 | ||
772 | If we find a "saved" constant pool entry, one which was replaced with | |
773 | the value of the constant, convert it back to a constant pool entry. | |
774 | Since the pool wasn't touched, this should simply restore the old | |
775 | address. | |
776 | ||
777 | All other kinds of rtx are copied except those that can never be | |
778 | changed during compilation. */ | |
779 | ||
780 | static rtx | |
781 | copy_for_inline (orig) | |
782 | rtx orig; | |
783 | { | |
784 | register rtx x = orig; | |
785 | register int i; | |
786 | register enum rtx_code code; | |
787 | register char *format_ptr; | |
788 | ||
789 | if (x == 0) | |
790 | return x; | |
791 | ||
792 | code = GET_CODE (x); | |
793 | ||
794 | /* These types may be freely shared. */ | |
795 | ||
796 | switch (code) | |
797 | { | |
798 | case QUEUED: | |
799 | case CONST_INT: | |
800 | case SYMBOL_REF: | |
801 | case PC: | |
802 | case CC0: | |
803 | return x; | |
804 | ||
805 | case CONST_DOUBLE: | |
806 | /* We have to make a new CONST_DOUBLE to ensure that we account for | |
807 | it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */ | |
808 | if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) | |
809 | { | |
810 | REAL_VALUE_TYPE d; | |
811 | ||
812 | REAL_VALUE_FROM_CONST_DOUBLE (d, x); | |
813 | return immed_real_const_1 (d, GET_MODE (x)); | |
814 | } | |
815 | else | |
816 | return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x), | |
817 | VOIDmode); | |
818 | ||
819 | case CONST: | |
820 | /* Get constant pool entry for constant in the pool. */ | |
821 | if (RTX_INTEGRATED_P (x)) | |
822 | return validize_mem (force_const_mem (GET_MODE (x), | |
823 | copy_for_inline (XEXP (x, 0)))); | |
824 | break; | |
825 | ||
826 | case SUBREG: | |
827 | /* Get constant pool entry, but access in different mode. */ | |
828 | if (RTX_INTEGRATED_P (x)) | |
829 | { | |
830 | rtx new | |
831 | = force_const_mem (GET_MODE (SUBREG_REG (x)), | |
832 | copy_for_inline (XEXP (SUBREG_REG (x), 0))); | |
833 | ||
834 | PUT_MODE (new, GET_MODE (x)); | |
835 | return validize_mem (new); | |
836 | } | |
837 | break; | |
838 | ||
839 | case ADDRESS: | |
 840 | /* If this is not a special constant-pool ADDRESS, it is an error. | |
 841 | Otherwise get the constant pool address. */ | |
842 | if (! RTX_INTEGRATED_P (x)) | |
843 | abort (); | |
844 | ||
845 | return XEXP (force_const_mem (GET_MODE (x), | |
846 | copy_for_inline (XEXP (x, 0))), 0); | |
847 | ||
848 | case ASM_OPERANDS: | |
849 | /* If a single asm insn contains multiple output operands | |
850 | then it contains multiple ASM_OPERANDS rtx's that share operand 3. | |
851 | We must make sure that the copied insn continues to share it. */ | |
852 | if (orig_asm_operands_vector == XVEC (orig, 3)) | |
853 | { | |
854 | x = rtx_alloc (ASM_OPERANDS); | |
855 | XSTR (x, 0) = XSTR (orig, 0); | |
856 | XSTR (x, 1) = XSTR (orig, 1); | |
857 | XINT (x, 2) = XINT (orig, 2); | |
858 | XVEC (x, 3) = copy_asm_operands_vector; | |
859 | XVEC (x, 4) = copy_asm_constraints_vector; | |
860 | XSTR (x, 5) = XSTR (orig, 5); | |
861 | XINT (x, 6) = XINT (orig, 6); | |
862 | return x; | |
863 | } | |
864 | break; | |
865 | ||
866 | case MEM: | |
867 | /* A MEM is usually allowed to be shared if its address is constant | |
868 | or is a constant plus one of the special registers. | |
869 | ||
870 | We do not allow sharing of addresses that are either a special | |
871 | register or the sum of a constant and a special register because | |
 872 | it is possible for unshare_all_rtl to copy the address into memory | |
873 | that won't be saved. Although the MEM can safely be shared, and | |
874 | won't be copied there, the address itself cannot be shared, and may | |
875 | need to be copied. | |
876 | ||
877 | There are also two exceptions with constants: The first is if the | |
878 | constant is a LABEL_REF or the sum of the LABEL_REF | |
879 | and an integer. This case can happen if we have an inline | |
880 | function that supplies a constant operand to the call of another | |
881 | inline function that uses it in a switch statement. In this case, | |
882 | we will be replacing the LABEL_REF, so we have to replace this MEM | |
883 | as well. | |
884 | ||
885 | The second case is if we have a (const (plus (address ..) ...)). | |
886 | In that case we need to put back the address of the constant pool | |
887 | entry. */ | |
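      /* For example, an address of the form
           (const (plus (address <pool constant>) (const_int 4)))
         is not returned shared here; it falls through to the generic
         copy at the end of this function, whose recursion reaches the
         ADDRESS case above and restores the real constant pool
         address.  */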
888 | ||
889 | if (CONSTANT_ADDRESS_P (XEXP (x, 0)) | |
890 | && GET_CODE (XEXP (x, 0)) != LABEL_REF | |
891 | && ! (GET_CODE (XEXP (x, 0)) == CONST | |
892 | && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS | |
893 | && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) | |
894 | == LABEL_REF) | |
895 | || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) | |
896 | == ADDRESS))))) | |
897 | return x; | |
898 | break; | |
899 | ||
900 | case LABEL_REF: | |
901 | { | |
902 | /* Must point to the new insn. */ | |
903 | return gen_rtx (LABEL_REF, GET_MODE (orig), | |
904 | label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]); | |
905 | } | |
906 | ||
907 | case REG: | |
908 | if (REGNO (x) > LAST_VIRTUAL_REGISTER) | |
909 | return reg_map [REGNO (x)]; | |
910 | else | |
911 | return x; | |
912 | ||
913 | case SET: | |
914 | /* If a parm that gets modified lives in a pseudo-reg, | |
915 | clear its TREE_READONLY to prevent certain optimizations. */ | |
916 | { | |
917 | rtx dest = SET_DEST (x); | |
918 | ||
919 | while (GET_CODE (dest) == STRICT_LOW_PART | |
920 | || GET_CODE (dest) == ZERO_EXTRACT | |
921 | || GET_CODE (dest) == SUBREG) | |
922 | dest = XEXP (dest, 0); | |
923 | ||
924 | if (GET_CODE (dest) == REG | |
925 | && REGNO (dest) < max_parm_reg | |
926 | && REGNO (dest) >= FIRST_PSEUDO_REGISTER | |
927 | && parmdecl_map[REGNO (dest)] != 0 | |
928 | /* The insn to load an arg pseudo from a stack slot | |
929 | does not count as modifying it. */ | |
930 | && in_nonparm_insns) | |
931 | TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0; | |
932 | } | |
933 | break; | |
934 | ||
935 | #if 0 /* This is a good idea, but here is the wrong place for it. */ | |
936 | /* Arrange that CONST_INTs always appear as the second operand | |
937 | if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx' | |
938 | always appear as the first. */ | |
939 | case PLUS: | |
940 | if (GET_CODE (XEXP (x, 0)) == CONST_INT | |
941 | || (XEXP (x, 1) == frame_pointer_rtx | |
942 | || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
943 | && XEXP (x, 1) == arg_pointer_rtx))) | |
944 | { | |
945 | rtx t = XEXP (x, 0); | |
946 | XEXP (x, 0) = XEXP (x, 1); | |
947 | XEXP (x, 1) = t; | |
948 | } | |
949 | break; | |
950 | #endif | |
951 | } | |
952 | ||
953 | /* Replace this rtx with a copy of itself. */ | |
954 | ||
955 | x = rtx_alloc (code); | |
956 | bcopy (orig, x, (sizeof (*x) - sizeof (x->fld) | |
957 | + sizeof (x->fld[0]) * GET_RTX_LENGTH (code))); | |
958 | ||
959 | /* Now scan the subexpressions recursively. | |
960 | We can store any replaced subexpressions directly into X | |
961 | since we know X is not shared! Any vectors in X | |
962 | must be copied if X was copied. */ | |
963 | ||
964 | format_ptr = GET_RTX_FORMAT (code); | |
965 | ||
966 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
967 | { | |
968 | switch (*format_ptr++) | |
969 | { | |
970 | case 'e': | |
971 | XEXP (x, i) = copy_for_inline (XEXP (x, i)); | |
972 | break; | |
973 | ||
974 | case 'u': | |
975 | /* Change any references to old-insns to point to the | |
976 | corresponding copied insns. */ | |
977 | XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))]; | |
978 | break; | |
979 | ||
980 | case 'E': | |
981 | if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0) | |
982 | { | |
983 | register int j; | |
984 | ||
985 | XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0)); | |
986 | for (j = 0; j < XVECLEN (x, i); j++) | |
987 | XVECEXP (x, i, j) | |
988 | = copy_for_inline (XVECEXP (x, i, j)); | |
989 | } | |
990 | break; | |
991 | } | |
992 | } | |
993 | ||
994 | if (code == ASM_OPERANDS && orig_asm_operands_vector == 0) | |
995 | { | |
996 | orig_asm_operands_vector = XVEC (orig, 3); | |
997 | copy_asm_operands_vector = XVEC (x, 3); | |
998 | copy_asm_constraints_vector = XVEC (x, 4); | |
999 | } | |
1000 | ||
1001 | return x; | |
1002 | } | |
1003 | ||
1004 | /* Unfortunately, we need a global copy of const_equiv map for communication | |
1005 | with a function called from note_stores. Be *very* careful that this | |
1006 | is used properly in the presence of recursion. */ | |
1007 | ||
1008 | rtx *global_const_equiv_map; | |
1009 | \f | |
1010 | #define FIXED_BASE_PLUS_P(X) \ | |
1011 | (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \ | |
1012 | && GET_CODE (XEXP (X, 0)) == REG \ | |
1013 | && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \ | |
1014 | && REGNO (XEXP (X, 0)) < LAST_VIRTUAL_REGISTER) | |
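/* For example, this matches (plus (reg virtual-stack-vars) (const_int 8)):
   a constant offset from one of the virtual frame/argument registers,
   which the code below treats as constant enough to record in
   const_equiv_map.  */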
1015 | ||
1016 | /* Integrate the procedure defined by FNDECL. Note that this function | |
1017 | may wind up calling itself. Since the static variables are not | |
1018 | reentrant, we do not assign them until after the possibility | |
 1019 | of recursion is eliminated. | |
1020 | ||
1021 | If IGNORE is nonzero, do not produce a value. | |
1022 | Otherwise store the value in TARGET if it is nonzero and that is convenient. | |
1023 | ||
1024 | Value is: | |
1025 | (rtx)-1 if we could not substitute the function | |
1026 | 0 if we substituted it and it does not produce a value | |
1027 | else an rtx for where the value is stored. */ | |
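/* A hedged sketch of that convention as a caller might use it; the
   variable names and the fallback to a normal call are assumptions,
   not taken from this file.  */
#if 0
  temp = expand_inline_function (fndecl, actparms, target, ignore,
                                 TREE_TYPE (exp), structure_value_addr);
  if (temp == (rtx) -1)
    ;                           /* could not inline: emit a real call */
  else if (temp == 0)
    ;                           /* inlined; no value produced */
  else
    return temp;                /* inlined; value lives in TEMP */
#endif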
1028 | ||
1029 | rtx | |
1030 | expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr) | |
1031 | tree fndecl, parms; | |
1032 | rtx target; | |
1033 | int ignore; | |
1034 | tree type; | |
1035 | rtx structure_value_addr; | |
1036 | { | |
1037 | tree formal, actual; | |
1038 | rtx header = DECL_SAVED_INSNS (fndecl); | |
1039 | rtx insns = FIRST_FUNCTION_INSN (header); | |
1040 | rtx parm_insns = FIRST_PARM_INSN (header); | |
1041 | tree *arg_trees; | |
1042 | rtx *arg_vals; | |
1043 | rtx insn; | |
1044 | int max_regno; | |
1045 | int equiv_map_size; | |
1046 | register int i; | |
1047 | int min_labelno = FIRST_LABELNO (header); | |
1048 | int max_labelno = LAST_LABELNO (header); | |
1049 | int nargs; | |
1050 | rtx local_return_label = 0; | |
1051 | rtx loc; | |
1052 | rtx temp; | |
1053 | struct inline_remap *map; | |
1054 | rtx cc0_insn = 0; | |
1055 | rtvec arg_vector = ORIGINAL_ARG_VECTOR (header); | |
1056 | ||
1057 | /* Allow for equivalences of the pseudos we make for virtual fp and ap. */ | |
1058 | max_regno = MAX_REGNUM (header) + 3; | |
1059 | if (max_regno < FIRST_PSEUDO_REGISTER) | |
1060 | abort (); | |
1061 | ||
1062 | nargs = list_length (DECL_ARGUMENTS (fndecl)); | |
1063 | ||
1064 | /* We expect PARMS to have the right length; don't crash if not. */ | |
1065 | if (list_length (parms) != nargs) | |
1066 | return (rtx)-1; | |
 1067 | /* Also check that the parm types match. Since the appropriate | |
1068 | conversions or default promotions have already been applied, | |
1069 | the machine modes should match exactly. */ | |
1070 | for (formal = DECL_ARGUMENTS (fndecl), | |
1071 | actual = parms; | |
1072 | formal; | |
1073 | formal = TREE_CHAIN (formal), | |
1074 | actual = TREE_CHAIN (actual)) | |
1075 | { | |
1076 | tree arg = TREE_VALUE (actual); | |
1077 | enum machine_mode mode = TYPE_MODE (DECL_ARG_TYPE (formal)); | |
1078 | if (mode != TYPE_MODE (TREE_TYPE (arg))) | |
1079 | return (rtx)-1; | |
1080 | /* If they are block mode, the types should match exactly. | |
1081 | They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE, | |
1082 | which could happen if the parameter has incomplete type. */ | |
1083 | if (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal)) | |
1084 | return (rtx)-1; | |
1085 | } | |
1086 | ||
1087 | /* Make a binding contour to keep inline cleanups called at | |
1088 | outer function-scope level from looking like they are shadowing | |
1089 | parameter declarations. */ | |
1090 | pushlevel (0); | |
1091 | ||
1092 | /* Make a fresh binding contour that we can easily remove. */ | |
1093 | pushlevel (0); | |
1094 | expand_start_bindings (0); | |
1095 | if (GET_CODE (parm_insns) == NOTE | |
1096 | && NOTE_LINE_NUMBER (parm_insns) > 0) | |
1097 | emit_note (NOTE_SOURCE_FILE (parm_insns), NOTE_LINE_NUMBER (parm_insns)); | |
1098 | ||
1099 | /* Expand the function arguments. Do this first so that any | |
1100 | new registers get created before we allocate the maps. */ | |
1101 | ||
1102 | arg_vals = (rtx *) alloca (nargs * sizeof (rtx)); | |
1103 | arg_trees = (tree *) alloca (nargs * sizeof (tree)); | |
1104 | ||
1105 | for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0; | |
1106 | formal; | |
1107 | formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++) | |
1108 | { | |
1109 | /* Actual parameter, converted to the type of the argument within the | |
1110 | function. */ | |
1111 | tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual)); | |
1112 | /* Mode of the variable used within the function. */ | |
1113 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal)); | |
1114 | /* Where parameter is located in the function. */ | |
1115 | rtx copy; | |
1116 | ||
1117 | emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal)); | |
1118 | ||
1119 | arg_trees[i] = arg; | |
1120 | loc = RTVEC_ELT (arg_vector, i); | |
1121 | ||
1122 | /* If this is an object passed by invisible reference, we copy the | |
1123 | object into a stack slot and save its address. If this will go | |
1124 | into memory, we do nothing now. Otherwise, we just expand the | |
1125 | argument. */ | |
1126 | if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG | |
1127 | && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER) | |
1128 | { | |
1129 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg)); | |
1130 | rtx stack_slot = assign_stack_temp (mode, int_size_in_bytes (TREE_TYPE (arg)), 1); | |
1131 | ||
1132 | store_expr (arg, stack_slot, 0); | |
1133 | ||
1134 | arg_vals[i] = XEXP (stack_slot, 0); | |
1135 | } | |
1136 | else if (GET_CODE (loc) != MEM) | |
1137 | arg_vals[i] = expand_expr (arg, 0, mode, EXPAND_SUM); | |
1138 | else | |
1139 | arg_vals[i] = 0; | |
1140 | ||
1141 | if (arg_vals[i] != 0 | |
1142 | && (! TREE_READONLY (formal) | |
1143 | /* If the parameter is not read-only, copy our argument through | |
1144 | a register. Also, we cannot use ARG_VALS[I] if it overlaps | |
1145 | TARGET in any way. In the inline function, they will likely | |
1146 | be two different pseudos, and `safe_from_p' will make all | |
1147 | sorts of smart assumptions about their not conflicting. | |
1148 | But if ARG_VALS[I] overlaps TARGET, these assumptions are | |
1149 | wrong, so put ARG_VALS[I] into a fresh register. */ | |
1150 | || (target != 0 | |
1151 | && (GET_CODE (arg_vals[i]) == REG | |
1152 | || GET_CODE (arg_vals[i]) == SUBREG | |
1153 | || GET_CODE (arg_vals[i]) == MEM) | |
1154 | && reg_overlap_mentioned_p (arg_vals[i], target)))) | |
1155 | arg_vals[i] = copy_to_mode_reg (mode, arg_vals[i]); | |
1156 | } | |
1157 | ||
1158 | /* Allocate the structures we use to remap things. */ | |
1159 | ||
1160 | map = (struct inline_remap *) alloca (sizeof (struct inline_remap)); | |
1161 | map->fndecl = fndecl; | |
1162 | ||
1163 | map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx)); | |
1164 | bzero (map->reg_map, max_regno * sizeof (rtx)); | |
1165 | ||
1166 | map->label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx)); | |
1167 | map->label_map -= min_labelno; | |
1168 | ||
1169 | map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx)); | |
1170 | bzero (map->insn_map, INSN_UID (header) * sizeof (rtx)); | |
1171 | map->min_insnno = 0; | |
1172 | map->max_insnno = INSN_UID (header); | |
1173 | ||
1174 | /* const_equiv_map maps pseudos in our routine to constants, so it needs to | |
1175 | be large enough for all our pseudos. This is the number we are currently | |
1176 | using plus the number in the called routine, plus one for each arg and | |
1177 | one for the return value. */ | |
1178 | equiv_map_size | |
1179 | = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + nargs + 1; | |
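  /* E.g. if the current function has 200 pseudos so far, the inlined
     function used 50 pseudos beyond FIRST_PSEUDO_REGISTER and takes
     2 arguments, this reserves 200 + 50 + 2 + 1 == 253 entries; the
     numbers are purely illustrative.  */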
1180 | ||
1181 | map->const_equiv_map = (rtx *)alloca (equiv_map_size * sizeof (rtx)); | |
1182 | bzero (map->const_equiv_map, equiv_map_size * sizeof (rtx)); | |
1183 | ||
1184 | map->const_age_map = (unsigned *)alloca (equiv_map_size * sizeof (unsigned)); | |
1185 | bzero (map->const_age_map, equiv_map_size * sizeof (unsigned)); | |
1186 | map->const_age = 0; | |
1187 | ||
1188 | /* Record the current insn in case we have to set up pointers to frame | |
1189 | and argument memory blocks. */ | |
1190 | map->insns_at_start = get_last_insn (); | |
1191 | ||
1192 | /* Update the outgoing argument size to allow for those in the inlined | |
1193 | function. */ | |
1194 | if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size) | |
1195 | current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header); | |
1196 | ||
1197 | /* If the inline function needs to make PIC references, that means | |
1198 | that this function's PIC offset table must be used. */ | |
1199 | if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE) | |
1200 | current_function_uses_pic_offset_table = 1; | |
1201 | ||
1202 | /* Process each argument. For each, set up things so that the function's | |
1203 | reference to the argument will refer to the argument being passed. | |
1204 | We only replace REG with REG here. Any simplifications are done | |
1205 | via const_equiv_map. | |
1206 | ||
1207 | We make two passes: In the first, we deal with parameters that will | |
1208 | be placed into registers, since we need to ensure that the allocated | |
1209 | register number fits in const_equiv_map. Then we store all non-register | |
1210 | parameters into their memory location. */ | |
1211 | ||
1212 | for (i = 0; i < nargs; i++) | |
1213 | { | |
1214 | rtx copy = arg_vals[i]; | |
1215 | ||
1216 | loc = RTVEC_ELT (arg_vector, i); | |
1217 | ||
1218 | /* There are three cases, each handled separately. */ | |
1219 | if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG | |
1220 | && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER) | |
1221 | { | |
1222 | /* This must be an object passed by invisible reference (it could | |
1223 | also be a variable-sized object, but we forbid inlining functions | |
1224 | with variable-sized arguments). COPY is the address of the | |
1225 | actual value (this computation will cause it to be copied). We | |
1226 | map that address for the register, noting the actual address as | |
1227 | an equivalent in case it can be substituted into the insns. */ | |
1228 | ||
1229 | if (GET_CODE (copy) != REG) | |
1230 | { | |
1231 | temp = copy_addr_to_reg (copy); | |
1232 | if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy)) | |
1233 | { | |
1234 | map->const_equiv_map[REGNO (temp)] = copy; | |
1235 | map->const_age_map[REGNO (temp)] = CONST_AGE_PARM; | |
1236 | } | |
1237 | copy = temp; | |
1238 | } | |
1239 | map->reg_map[REGNO (XEXP (loc, 0))] = copy; | |
1240 | } | |
1241 | else if (GET_CODE (loc) == MEM) | |
1242 | { | |
1243 | /* This is the case of a parameter that lives in memory. | |
1244 | It will live in the block we allocate in the called routine's | |
1245 | frame that simulates the incoming argument area. Do nothing | |
1246 | now; we will call store_expr later. */ | |
1247 | ; | |
1248 | } | |
1249 | else if (GET_CODE (loc) == REG) | |
1250 | { | |
1251 | /* This is the good case where the parameter is in a register. | |
1252 | If it is read-only and our argument is a constant, set up the | |
1253 | constant equivalence. */ | |
1254 | if (GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG) | |
1255 | { | |
1256 | temp = copy_to_mode_reg (GET_MODE (loc), copy); | |
1257 | if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy)) | |
1258 | { | |
1259 | map->const_equiv_map[REGNO (temp)] = copy; | |
1260 | map->const_age_map[REGNO (temp)] = CONST_AGE_PARM; | |
1261 | } | |
1262 | copy = temp; | |
1263 | } | |
1264 | map->reg_map[REGNO (loc)] = copy; | |
1265 | } | |
1266 | else | |
1267 | abort (); | |
1268 | ||
1269 | /* Free any temporaries we made setting up this parameter. */ | |
1270 | free_temp_slots (); | |
1271 | } | |
1272 | ||
1273 | /* Now do the parameters that will be placed in memory. */ | |
1274 | ||
1275 | for (formal = DECL_ARGUMENTS (fndecl), i = 0; | |
1276 | formal; formal = TREE_CHAIN (formal), i++) | |
1277 | { | |
1278 | rtx copy = arg_vals[i]; | |
1279 | ||
1280 | loc = RTVEC_ELT (arg_vector, i); | |
1281 | ||
1282 | if (GET_CODE (loc) == MEM | |
1283 | /* Exclude case handled above. */ | |
1284 | && ! (GET_CODE (XEXP (loc, 0)) == REG | |
1285 | && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)) | |
1286 | { | |
1287 | emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal)); | |
1288 | ||
1289 | /* Compute the address in the area we reserved and store the | |
1290 | value there. */ | |
1291 | temp = copy_rtx_and_substitute (loc, map); | |
1292 | subst_constants (&temp, 0, map); | |
1293 | apply_change_group (); | |
1294 | if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0))) | |
1295 | temp = change_address (temp, VOIDmode, XEXP (temp, 0)); | |
1296 | store_expr (arg_trees[i], temp, 0); | |
1297 | ||
1298 | /* Free any temporaries we made setting up this parameter. */ | |
1299 | free_temp_slots (); | |
1300 | } | |
1301 | } | |
1302 | ||
1303 | /* Deal with the places that the function puts its result. | |
1304 | We are driven by what is placed into DECL_RESULT. | |
1305 | ||
 1306 | Initially, we assume that we don't need any special handling for | |
1307 | REG_FUNCTION_RETURN_VALUE_P. */ | |
1308 | ||
1309 | map->inline_target = 0; | |
1310 | loc = DECL_RTL (DECL_RESULT (fndecl)); | |
1311 | if (TYPE_MODE (type) == VOIDmode) | |
1312 | /* There is no return value to worry about. */ | |
1313 | ; | |
1314 | else if (GET_CODE (loc) == MEM) | |
1315 | { | |
1316 | if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl))) | |
1317 | abort (); | |
1318 | ||
1319 | /* Pass the function the address in which to return a structure value. | |
1320 | Note that a constructor can cause someone to call us with | |
1321 | STRUCTURE_VALUE_ADDR, but the initialization takes place | |
1322 | via the first parameter, rather than the struct return address. | |
1323 | ||
1324 | We have two cases: If the address is a simple register indirect, | |
1325 | use the mapping mechanism to point that register to our structure | |
1326 | return address. Otherwise, store the structure return value into | |
1327 | the place that it will be referenced from. */ | |
1328 | ||
1329 | if (GET_CODE (XEXP (loc, 0)) == REG) | |
1330 | { | |
1331 | temp = force_reg (Pmode, structure_value_addr); | |
1332 | map->reg_map[REGNO (XEXP (loc, 0))] = temp; | |
1333 | if (CONSTANT_P (structure_value_addr) | |
1334 | || (GET_CODE (structure_value_addr) == PLUS | |
1335 | && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx | |
1336 | && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT)) | |
1337 | { | |
1338 | map->const_equiv_map[REGNO (temp)] = structure_value_addr; | |
1339 | map->const_age_map[REGNO (temp)] = CONST_AGE_PARM; | |
1340 | } | |
1341 | } | |
1342 | else | |
1343 | { | |
1344 | temp = copy_rtx_and_substitute (loc, map); | |
1345 | subst_constants (&temp, 0, map); | |
1346 | apply_change_group (); | |
1347 | emit_move_insn (temp, structure_value_addr); | |
1348 | } | |
1349 | } | |
1350 | else if (ignore) | |
1351 | /* We will ignore the result value, so don't look at its structure. | |
1352 | Note that preparations for an aggregate return value | |
1353 | do need to be made (above) even if it will be ignored. */ | |
1354 | ; | |
1355 | else if (GET_CODE (loc) == REG) | |
1356 | { | |
1357 | /* The function returns an object in a register and we use the return | |
1358 | value. Set up our target for remapping. */ | |
1359 | ||
1360 | /* Machine mode function was declared to return. */ | |
1361 | enum machine_mode departing_mode = TYPE_MODE (type); | |
1362 | /* (Possibly wider) machine mode it actually computes | |
1363 | (for the sake of callers that fail to declare it right). */ | |
1364 | enum machine_mode arriving_mode | |
1365 | = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl))); | |
1366 | rtx reg_to_map; | |
1367 | ||
1368 | /* Don't use MEMs as direct targets because on some machines | |
1369 | substituting a MEM for a REG makes invalid insns. | |
1370 | Let the combiner substitute the MEM if that is valid. */ | |
1371 | if (target == 0 || GET_CODE (target) != REG | |
1372 | || GET_MODE (target) != departing_mode) | |
1373 | target = gen_reg_rtx (departing_mode); | |
1374 | ||
1375 | /* If function's value was promoted before return, | |
1376 | avoid machine mode mismatch when we substitute INLINE_TARGET. | |
1377 | But TARGET is what we will return to the caller. */ | |
1378 | if (arriving_mode != departing_mode) | |
1379 | reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0); | |
1380 | else | |
1381 | reg_to_map = target; | |
1382 | ||
1383 | /* Usually, the result value is the machine's return register. | |
1384 | Sometimes it may be a pseudo. Handle both cases. */ | |
1385 | if (REG_FUNCTION_VALUE_P (loc)) | |
1386 | map->inline_target = reg_to_map; | |
1387 | else | |
1388 | map->reg_map[REGNO (loc)] = reg_to_map; | |
1389 | } | |
1390 | ||
1391 | /* Make new label equivalences for the labels in the called function. */ | |
1392 | for (i = min_labelno; i < max_labelno; i++) | |
1393 | map->label_map[i] = gen_label_rtx (); | |
1394 | ||
1395 | /* Perform postincrements before actually calling the function. */ | |
1396 | emit_queue (); | |
1397 | ||
1398 | /* Clean up stack so that variables might have smaller offsets. */ | |
1399 | do_pending_stack_adjust (); | |
1400 | ||
1401 | /* Save a copy of the location of const_equiv_map for mark_stores, called | |
1402 | via note_stores. */ | |
1403 | global_const_equiv_map = map->const_equiv_map; | |
1404 | ||
1405 | /* Now copy the insns one by one. Do this in two passes, first the insns and | |
1406 | then their REG_NOTES, just like save_for_inline. */ | |
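/* (A REG_NOTE can refer to an insn that appears later in the stream, so
   the insn_map must be completely filled in before any notes are
   copied.)  */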
1407 | ||
1408 | /* This loop is very similar to the loop in copy_loop_body in unroll.c. */ | |
1409 | ||
1410 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
1411 | { | |
1412 | rtx copy, pattern; | |
1413 | ||
1414 | map->orig_asm_operands_vector = 0; | |
1415 | ||
1416 | switch (GET_CODE (insn)) | |
1417 | { | |
1418 | case INSN: | |
1419 | pattern = PATTERN (insn); | |
1420 | copy = 0; | |
1421 | if (GET_CODE (pattern) == USE | |
1422 | && GET_CODE (XEXP (pattern, 0)) == REG | |
1423 | && REG_FUNCTION_VALUE_P (XEXP (pattern, 0))) | |
1424 | /* The (USE (REG n)) at return from the function should | |
1425 | be ignored since we are changing (REG n) into | |
1426 | inline_target. */ | |
1427 | break; | |
1428 | ||
1429 | /* Ignore setting a function value that we don't want to use. */ | |
1430 | if (map->inline_target == 0 | |
1431 | && GET_CODE (pattern) == SET | |
1432 | && GET_CODE (SET_DEST (pattern)) == REG | |
1433 | && REG_FUNCTION_VALUE_P (SET_DEST (pattern))) | |
1434 | break; | |
1435 | ||
1436 | copy = emit_insn (copy_rtx_and_substitute (pattern, map)); | |
1437 | /* REG_NOTES will be copied later. */ | |
1438 | ||
1439 | #ifdef HAVE_cc0 | |
1440 | /* If this insn is setting CC0, it may need to look at | |
1441 | the insn that uses CC0 to see what type of insn it is. | |
1442 | In that case, the call to recog via validate_change will | |
1443 | fail. So don't substitute constants here. Instead, | |
1444 | do it when we emit the following insn. | |
1445 | ||
1446 | For example, see the pyr.md file. That machine has signed and | |
1447 | unsigned compares. The compare patterns must check the | |
1448 | following branch insn to see what kind of compare to | |
1449 | emit. | |
1450 | ||
1451 | If the previous insn set CC0, substitute constants on it as | |
1452 | well. */ | |
1453 | if (sets_cc0_p (PATTERN (copy)) != 0) | |
1454 | cc0_insn = copy; | |
1455 | else | |
1456 | { | |
1457 | if (cc0_insn) | |
1458 | try_constants (cc0_insn, map); | |
1459 | cc0_insn = 0; | |
1460 | try_constants (copy, map); | |
1461 | } | |
1462 | #else | |
1463 | try_constants (copy, map); | |
1464 | #endif | |
1465 | break; | |
1466 | ||
1467 | case JUMP_INSN: | |
1468 | if (GET_CODE (PATTERN (insn)) == RETURN) | |
1469 | { | |
1470 | if (local_return_label == 0) | |
1471 | local_return_label = gen_label_rtx (); | |
1472 | pattern = gen_jump (local_return_label); | |
1473 | } | |
1474 | else | |
1475 | pattern = copy_rtx_and_substitute (PATTERN (insn), map); | |
1476 | ||
1477 | copy = emit_jump_insn (pattern); | |
1478 | ||
1479 | #ifdef HAVE_cc0 | |
1480 | if (cc0_insn) | |
1481 | try_constants (cc0_insn, map); | |
1482 | cc0_insn = 0; | |
1483 | #endif | |
1484 | try_constants (copy, map); | |
1485 | ||
1486 | /* If this used to be a conditional jump insn whose branch | |
1487 | direction is now known, we must do something special. */ | |
1488 | if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value) | |
1489 | { | |
1490 | #ifdef HAVE_cc0 | |
1491 | /* The previous insn set cc0 for us. So delete it. */ | |
1492 | delete_insn (PREV_INSN (copy)); | |
1493 | #endif | |
1494 | ||
1495 | /* If this is now a no-op, delete it. */ | |
1496 | if (map->last_pc_value == pc_rtx) | |
1497 | { | |
1498 | delete_insn (copy); | |
1499 | copy = 0; | |
1500 | } | |
1501 | else | |
1502 | /* Otherwise, this is an unconditional jump so we must put a | |
1503 | BARRIER after it. We could do some dead code elimination | |
1504 | here, but jump.c will do it just as well. */ | |
1505 | emit_barrier (); | |
1506 | } | |
1507 | break; | |
1508 | ||
1509 | case CALL_INSN: | |
1510 | pattern = copy_rtx_and_substitute (PATTERN (insn), map); | |
1511 | copy = emit_call_insn (pattern); | |
1512 | ||
1513 | #ifdef HAVE_cc0 | |
1514 | if (cc0_insn) | |
1515 | try_constants (cc0_insn, map); | |
1516 | cc0_insn = 0; | |
1517 | #endif | |
1518 | try_constants (copy, map); | |
1519 | ||
1520 | /* Be lazy and assume CALL_INSNs clobber all hard registers. */ | |
1521 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
1522 | map->const_equiv_map[i] = 0; | |
1523 | break; | |
1524 | ||
1525 | case CODE_LABEL: | |
1526 | copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]); | |
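/* Control can reach a label from more than one place, so constant
   equivalences recorded before this point can no longer be trusted;
   bumping const_age makes subst_constants ignore the older entries.  */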
1527 | map->const_age++; | |
1528 | break; | |
1529 | ||
1530 | case BARRIER: | |
1531 | copy = emit_barrier (); | |
1532 | break; | |
1533 | ||
1534 | case NOTE: | |
1535 | /* It is important to discard function-end and function-beg notes, | |
1536 | so we have only one of each in the current function. | |
1537 | Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline | |
1538 | deleted these in the copy used for continuing compilation, | |
1539 | not the copy used for inlining). */ | |
1540 | if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END | |
1541 | && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG | |
1542 | && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED) | |
1543 | copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn)); | |
1544 | else | |
1545 | copy = 0; | |
1546 | break; | |
1547 | ||
1548 | default: | |
1549 | abort (); | |
1550 | break; | |
1551 | } | |
1552 | ||
1553 | if (copy) | |
1554 | RTX_INTEGRATED_P (copy) = 1; | |
1555 | ||
1556 | map->insn_map[INSN_UID (insn)] = copy; | |
1557 | } | |
1558 | ||
1559 | /* Now copy the REG_NOTES. */ | |
1560 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
1561 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i' | |
1562 | && map->insn_map[INSN_UID (insn)]) | |
1563 | REG_NOTES (map->insn_map[INSN_UID (insn)]) | |
1564 | = copy_rtx_and_substitute (REG_NOTES (insn), map); | |
1565 | ||
1566 | if (local_return_label) | |
1567 | emit_label (local_return_label); | |
1568 | ||
1569 | /* Make copies of the decls of the symbols in the inline function, so that | |
1570 | the copies of the variables get declared in the current function. Set | |
1571 | up things so that lookup_static_chain knows to interpret registers | |
1572 | in SAVE_EXPRs for TYPE_SIZEs as local. */ | |
1573 | ||
1574 | inline_function_decl = fndecl; | |
1575 | integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map, 0); | |
1576 | integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector); | |
1577 | inline_function_decl = 0; | |
1578 | ||
1579 | /* End the scope containing the copied formal parameter variables. */ | |
1580 | ||
1581 | expand_end_bindings (getdecls (), 1, 1); | |
1582 | poplevel (1, 1, 0); | |
1583 | poplevel (0, 0, 0); | |
1584 | emit_line_note (input_filename, lineno); | |
1585 | ||
1586 | if (structure_value_addr) | |
1587 | return gen_rtx (MEM, TYPE_MODE (type), | |
1588 | memory_address (TYPE_MODE (type), structure_value_addr)); | |
1589 | return target; | |
1590 | } | |
1591 | \f | |
1592 | /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL, | |
1593 | push all of those decls and give each one the corresponding home. */ | |
1594 | ||
1595 | static void | |
1596 | integrate_parm_decls (args, map, arg_vector) | |
1597 | tree args; | |
1598 | struct inline_remap *map; | |
1599 | rtvec arg_vector; | |
1600 | { | |
1601 | register tree tail; | |
1602 | register int i; | |
1603 | ||
1604 | for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++) | |
1605 | { | |
1606 | register tree decl = build_decl (VAR_DECL, DECL_NAME (tail), | |
1607 | TREE_TYPE (tail)); | |
1608 | rtx new_decl_rtl | |
1609 | = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map); | |
1610 | ||
1611 | /* These args would always appear unused, if not for this. */ | |
1612 | TREE_USED (decl) = 1; | |
1613 | /* Prevent warning for shadowing with these. */ | |
1614 | DECL_FROM_INLINE (decl) = 1; | |
1615 | pushdecl (decl); | |
1616 | /* Fully instantiate the address with the equivalent form so that the | |
1617 | debugging information contains the actual register, instead of the | |
1618 | virtual register. Do this by not passing an insn to | |
1619 | subst_constants. */ | |
1620 | subst_constants (&new_decl_rtl, 0, map); | |
1621 | apply_change_group (); | |
1622 | DECL_RTL (decl) = new_decl_rtl; | |
1623 | } | |
1624 | } | |
1625 | ||
1626 | /* Given a BLOCK node LET, push decls and levels so as to construct in the | |
1627 | current function a tree of contexts isomorphic to the one that is given. | |
1628 | ||
1629 | LEVEL indicates how far down into the BLOCK tree is the node we are | |
1630 | currently traversing. It is always zero for the initial call. | |
1631 | ||
1632 | MAP, if nonzero, is a pointer to an inline_remap map which indicates how | |
1633 | registers used in the DECL_RTL field should be remapped. If it is zero, | |
1634 | no mapping is necessary. | |
1635 | ||
1636 | FUNCTIONBODY indicates whether the top level block tree corresponds to | |
1637 | a function body. This is identical in meaning to the functionbody | |
1638 | argument of poplevel. */ | |
1639 | ||
1640 | static void | |
1641 | integrate_decl_tree (let, level, map, functionbody) | |
1642 | tree let; | |
1643 | int level; | |
1644 | struct inline_remap *map; | |
1645 | int functionbody; | |
1646 | { | |
1647 | tree t, node; | |
1648 | ||
1649 | pushlevel (0); | |
1650 | ||
1651 | for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t)) | |
1652 | { | |
1653 | tree d = build_decl (TREE_CODE (t), DECL_NAME (t), TREE_TYPE (t)); | |
1654 | DECL_SOURCE_LINE (d) = DECL_SOURCE_LINE (t); | |
1655 | DECL_SOURCE_FILE (d) = DECL_SOURCE_FILE (t); | |
1656 | if (! functionbody && DECL_RTL (t) != 0) | |
1657 | { | |
1658 | DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map); | |
1659 | /* Fully instantiate the address with the equivalent form so that the | |
1660 | debugging information contains the actual register, instead of the | |
1661 | virtual register. Do this by not passing an insn to | |
1662 | subst_constants. */ | |
1663 | subst_constants (&DECL_RTL (d), 0, map); | |
1664 | apply_change_group (); | |
1665 | } | |
1666 | else if (DECL_RTL (t)) | |
1667 | DECL_RTL (d) = copy_rtx (DECL_RTL (t)); | |
1668 | TREE_EXTERNAL (d) = TREE_EXTERNAL (t); | |
1669 | TREE_STATIC (d) = TREE_STATIC (t); | |
1670 | TREE_PUBLIC (d) = TREE_PUBLIC (t); | |
1671 | TREE_CONSTANT (d) = TREE_CONSTANT (t); | |
1672 | TREE_ADDRESSABLE (d) = TREE_ADDRESSABLE (t); | |
1673 | TREE_READONLY (d) = TREE_READONLY (t); | |
1674 | TREE_SIDE_EFFECTS (d) = TREE_SIDE_EFFECTS (t); | |
1675 | /* These args would always appear unused, if not for this. */ | |
1676 | TREE_USED (d) = 1; | |
1677 | /* Prevent warning for shadowing with these. */ | |
1678 | DECL_FROM_INLINE (d) = 1; | |
1679 | pushdecl (d); | |
1680 | } | |
1681 | ||
1682 | for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t)) | |
1683 | integrate_decl_tree (t, level + 1, map, functionbody); | |
1684 | ||
1685 | node = poplevel (level > 0, 0, level == 0 && functionbody); | |
1686 | if (node) | |
1687 | TREE_USED (node) = TREE_USED (let); | |
1688 | } | |
1689 | \f | |
1690 | /* Create a new copy of an rtx. | |
1691 | Recursively copies the operands of the rtx, | |
1692 | except for those few rtx codes that are sharable. | |
1693 | ||
1694 | We always return an rtx that is similar to that incoming rtx, with the | |
1695 | exception of possibly changing a REG to a SUBREG or vice versa. No | |
1696 | rtl is ever emitted. | |
1697 | ||
1698 | Handle constants that need to be placed in the constant pool by | |
1699 | calling `force_const_mem'. */ | |
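/* Note that this routine is also used by the loop unroller (compare the
   remark about copy_loop_body above and the REG_FUNCTION_VALUE_P case
   below), so MAP need not describe an inline substitution.  */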
1700 | ||
1701 | rtx | |
1702 | copy_rtx_and_substitute (orig, map) | |
1703 | register rtx orig; | |
1704 | struct inline_remap *map; | |
1705 | { | |
1706 | register rtx copy, temp; | |
1707 | register int i, j; | |
1708 | register RTX_CODE code; | |
1709 | register enum machine_mode mode; | |
1710 | register char *format_ptr; | |
1711 | int regno; | |
1712 | ||
1713 | if (orig == 0) | |
1714 | return 0; | |
1715 | ||
1716 | code = GET_CODE (orig); | |
1717 | mode = GET_MODE (orig); | |
1718 | ||
1719 | switch (code) | |
1720 | { | |
1721 | case REG: | |
1722 | /* If the stack pointer register shows up, it must be part of | |
1723 | stack-adjustments (*not* because we eliminated the frame pointer!). | |
1724 | Small hard registers are returned as-is. Pseudo-registers | |
1725 | go through their `reg_map'. */ | |
1726 | regno = REGNO (orig); | |
1727 | if (regno <= LAST_VIRTUAL_REGISTER) | |
1728 | { | |
1729 | /* Some hard registers are also mapped, | |
1730 | but others are not translated. */ | |
1731 | if (map->reg_map[regno] != 0) | |
1732 | return map->reg_map[regno]; | |
1733 | ||
1734 | /* If this is the virtual frame pointer, make space in current | |
1735 | function's stack frame for the stack frame of the inline function. | |
1736 | ||
1737 | Copy the address of this area into a pseudo. Map | |
1738 | virtual_stack_vars_rtx to this pseudo and set up a constant | |
1739 | equivalence for it to be the address. This will substitute the | |
1740 | address into insns where it can be substituted and use the new | |
1741 | pseudo where it can't. */ | |
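/* (Illustrative only: an address such as
   (plus (reg virtual-stack-vars) (const_int 4)) in the inline function
   can then be rewritten as the corresponding offset from the block
   allocated below, while contexts where that form is not a valid
   address keep the new pseudo.)  */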
1742 | if (regno == VIRTUAL_STACK_VARS_REGNUM) | |
1743 | { | |
1744 | rtx loc, seq; | |
1745 | int size = DECL_FRAME_SIZE (map->fndecl); | |
1746 | int rounded; | |
1747 | ||
1748 | start_sequence (); | |
1749 | loc = assign_stack_temp (BLKmode, size, 1); | |
1750 | loc = XEXP (loc, 0); | |
1751 | #ifdef FRAME_GROWS_DOWNWARD | |
1752 | /* In this case, virtual_stack_vars_rtx points to one byte | |
1753 | higher than the top of the frame area. So compute the offset | |
1754 | to one byte higher than our substitute frame. | |
1755 | Keep the fake frame pointer aligned like a real one. */ | |
1756 | rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT); | |
1757 | loc = plus_constant (loc, rounded); | |
1758 | #endif | |
1759 | map->reg_map[regno] = force_operand (loc, 0); | |
1760 | map->const_equiv_map[regno] = loc; | |
1761 | map->const_age_map[regno] = CONST_AGE_PARM; | |
1762 | ||
1763 | seq = gen_sequence (); | |
1764 | end_sequence (); | |
1765 | emit_insn_after (seq, map->insns_at_start); | |
1766 | return map->reg_map[regno]; | |
1767 | } | |
1768 | else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM) | |
1769 | { | |
1770 | /* Do the same for a block to contain any arguments referenced | |
1771 | in memory. */ | |
1772 | rtx loc, seq; | |
1773 | int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl)); | |
1774 | ||
1775 | start_sequence (); | |
1776 | loc = assign_stack_temp (BLKmode, size, 1); | |
1777 | loc = XEXP (loc, 0); | |
1778 | map->reg_map[regno] = force_operand (loc, 0); | |
1779 | map->const_equiv_map[regno] = loc; | |
1780 | map->const_age_map[regno] = CONST_AGE_PARM; | |
1781 | ||
1782 | seq = gen_sequence (); | |
1783 | end_sequence (); | |
1784 | emit_insn_after (seq, map->insns_at_start); | |
1785 | return map->reg_map[regno]; | |
1786 | } | |
1787 | else if (REG_FUNCTION_VALUE_P (orig)) | |
1788 | { | |
1789 | /* This is a reference to the function return value. If | |
1790 | the function doesn't have a return value, error. If the | |
1791 | mode doesn't agree, make a SUBREG. */ | |
1792 | if (map->inline_target == 0) | |
1793 | /* Must be unrolling loops or replicating code if we | |
1794 | reach here, so return the register unchanged. */ | |
1795 | return orig; | |
1796 | else if (mode != GET_MODE (map->inline_target)) | |
1797 | return gen_rtx (SUBREG, mode, map->inline_target, 0); | |
1798 | else | |
1799 | return map->inline_target; | |
1800 | } | |
1801 | return orig; | |
1802 | } | |
1803 | if (map->reg_map[regno] == NULL) | |
1804 | { | |
1805 | map->reg_map[regno] = gen_reg_rtx (mode); | |
1806 | REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig); | |
1807 | REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig); | |
1808 | RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig); | |
1809 | /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */ | |
1810 | } | |
1811 | return map->reg_map[regno]; | |
1812 | ||
1813 | case SUBREG: | |
1814 | copy = copy_rtx_and_substitute (SUBREG_REG (orig), map); | |
1815 | /* SUBREG is ordinary, but don't make nested SUBREGs. */ | |
1816 | if (GET_CODE (copy) == SUBREG) | |
1817 | return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy), | |
1818 | SUBREG_WORD (orig) + SUBREG_WORD (copy)); | |
1819 | else | |
1820 | return gen_rtx (SUBREG, GET_MODE (orig), copy, | |
1821 | SUBREG_WORD (orig)); | |
1822 | ||
1823 | case USE: | |
1824 | case CLOBBER: | |
1825 | /* USE and CLOBBER are ordinary, but we convert (use (subreg foo)) | |
1826 | to (use foo). */ | |
1827 | copy = copy_rtx_and_substitute (XEXP (orig, 0), map); | |
1828 | if (GET_CODE (copy) == SUBREG) | |
1829 | copy = SUBREG_REG (copy); | |
1830 | return gen_rtx (code, VOIDmode, copy); | |
1831 | ||
1832 | case CODE_LABEL: | |
1833 | LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)]) | |
1834 | = LABEL_PRESERVE_P (orig); | |
1835 | return map->label_map[CODE_LABEL_NUMBER (orig)]; | |
1836 | ||
1837 | case LABEL_REF: | |
1838 | copy = rtx_alloc (LABEL_REF); | |
1839 | PUT_MODE (copy, mode); | |
1840 | XEXP (copy, 0) = map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]; | |
1841 | LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig); | |
1842 | return copy; | |
1843 | ||
1844 | case PC: | |
1845 | case CC0: | |
1846 | case CONST_INT: | |
1847 | case SYMBOL_REF: | |
1848 | return orig; | |
1849 | ||
1850 | case CONST_DOUBLE: | |
1851 | /* We have to make a new copy of this CONST_DOUBLE because we don't want | |
1852 | to use the old value of CONST_DOUBLE_MEM. Also, this may be a | |
1853 | duplicate of a CONST_DOUBLE we have already seen. */ | |
1854 | if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT) | |
1855 | { | |
1856 | REAL_VALUE_TYPE d; | |
1857 | ||
1858 | REAL_VALUE_FROM_CONST_DOUBLE (d, orig); | |
1859 | return immed_real_const_1 (d, GET_MODE (orig)); | |
1860 | } | |
1861 | else | |
1862 | return immed_double_const (CONST_DOUBLE_LOW (orig), | |
1863 | CONST_DOUBLE_HIGH (orig), VOIDmode); | |
1864 | ||
1865 | case CONST: | |
1866 | /* Make new constant pool entry for a constant | |
1867 | that was in the pool of the inline function. */ | |
1868 | if (RTX_INTEGRATED_P (orig)) | |
1869 | { | |
1870 | /* If this was an address of a constant pool entry that itself | |
1871 | had to be placed in the constant pool, it might not be a | |
1872 | valid address. So the recursive call below might turn it | |
1873 | into a register. In that case, it isn't a constant any | |
1874 | more, so return it. This has the potential of changing a | |
1875 | MEM into a REG, but we'll assume that it is safe. */ | |
1876 | temp = copy_rtx_and_substitute (XEXP (orig, 0), map); | |
1877 | if (! CONSTANT_P (temp)) | |
1878 | return temp; | |
1879 | return validize_mem (force_const_mem (GET_MODE (orig), temp)); | |
1880 | } | |
1881 | break; | |
1882 | ||
1883 | case ADDRESS: | |
1884 | /* If from constant pool address, make new constant pool entry and | |
1885 | return its address. */ | |
1886 | if (! RTX_INTEGRATED_P (orig)) | |
1887 | abort (); | |
1888 | ||
1889 | temp = force_const_mem (GET_MODE (orig), | |
1890 | copy_rtx_and_substitute (XEXP (orig, 0), map)); | |
1891 | ||
1892 | #if 0 | |
1893 | /* Legitimizing the address here is incorrect. | |
1894 | ||
1895 | The only ADDRESS rtx's that can reach here are ones created by | |
1896 | save_constants. Hence the operand of the ADDRESS is always legal | |
1897 | in this position of the instruction, since the original rtx without | |
1898 | the ADDRESS was legal. | |
1899 | ||
1900 | The reason we don't legitimize the address here is that on the | |
1901 | Sparc, the caller may have a (high ...) surrounding this ADDRESS. | |
1902 | This code forces the operand of the address to a register, which | |
1903 | fails because we can not take the HIGH part of a register. | |
1904 | ||
1905 | Also, change_address may create new registers. These registers | |
1906 | will not have valid reg_map entries. This can cause try_constants() | |
1907 | to fail because it assumes that all registers in the rtx have valid | |
1908 | reg_map entries, and it may end up replacing one of these new | |
1909 | registers with junk. */ | |
1910 | ||
1911 | if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0))) | |
1912 | temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0)); | |
1913 | #endif | |
1914 | ||
1915 | return XEXP (temp, 0); | |
1916 | ||
1917 | case ASM_OPERANDS: | |
1918 | /* If a single asm insn contains multiple output operands | |
1919 | then it contains multiple ASM_OPERANDS rtx's that share operand 3. | |
1920 | We must make sure that the copied insn continues to share it. */ | |
1921 | if (map->orig_asm_operands_vector == XVEC (orig, 3)) | |
1922 | { | |
1923 | copy = rtx_alloc (ASM_OPERANDS); | |
1924 | XSTR (copy, 0) = XSTR (orig, 0); | |
1925 | XSTR (copy, 1) = XSTR (orig, 1); | |
1926 | XINT (copy, 2) = XINT (orig, 2); | |
1927 | XVEC (copy, 3) = map->copy_asm_operands_vector; | |
1928 | XVEC (copy, 4) = map->copy_asm_constraints_vector; | |
1929 | XSTR (copy, 5) = XSTR (orig, 5); | |
1930 | XINT (copy, 6) = XINT (orig, 6); | |
1931 | return copy; | |
1932 | } | |
1933 | break; | |
1934 | ||
1935 | case CALL: | |
1936 | /* This is given special treatment because the first | |
1937 | operand of a CALL is a (MEM ...) which may get | |
1938 | forced into a register for cse. This is undesirable | |
1939 | if function-address cse isn't wanted or if we won't do cse. */ | |
1940 | #ifndef NO_FUNCTION_CSE | |
1941 | if (! (optimize && ! flag_no_function_cse)) | |
1942 | #endif | |
1943 | return gen_rtx (CALL, GET_MODE (orig), | |
1944 | gen_rtx (MEM, GET_MODE (XEXP (orig, 0)), | |
1945 | copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)), | |
1946 | copy_rtx_and_substitute (XEXP (orig, 1), map)); | |
1947 | break; | |
1948 | ||
1949 | #if 0 | |
1950 | /* Must be ifdefed out for loop unrolling to work. */ | |
1951 | case RETURN: | |
1952 | abort (); | |
1953 | #endif | |
1954 | ||
1955 | case SET: | |
1956 | /* If this is setting fp or ap, it means that we have a nonlocal goto. | |
1957 | Don't alter that. | |
1958 | If the nonlocal goto is into the current function, | |
1959 | this will result in unnecessarily bad code, but should work. */ | |
1960 | if (SET_DEST (orig) == virtual_stack_vars_rtx | |
1961 | || SET_DEST (orig) == virtual_incoming_args_rtx) | |
1962 | return gen_rtx (SET, VOIDmode, SET_DEST (orig), | |
1963 | copy_rtx_and_substitute (SET_SRC (orig), map)); | |
1964 | break; | |
1965 | ||
1966 | case MEM: | |
1967 | copy = rtx_alloc (MEM); | |
1968 | PUT_MODE (copy, mode); | |
1969 | XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map); | |
1970 | MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig); | |
1971 | MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig); | |
1972 | RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig); | |
1973 | return copy; | |
1974 | } | |
1975 | ||
1976 | copy = rtx_alloc (code); | |
1977 | PUT_MODE (copy, mode); | |
1978 | copy->in_struct = orig->in_struct; | |
1979 | copy->volatil = orig->volatil; | |
1980 | copy->unchanging = orig->unchanging; | |
1981 | ||
1982 | format_ptr = GET_RTX_FORMAT (GET_CODE (copy)); | |
1983 | ||
1984 | for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++) | |
1985 | { | |
1986 | switch (*format_ptr++) | |
1987 | { | |
1988 | case '0': | |
1989 | break; | |
1990 | ||
1991 | case 'e': | |
1992 | XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map); | |
1993 | break; | |
1994 | ||
1995 | case 'u': | |
1996 | /* Change any references to old-insns to point to the | |
1997 | corresponding copied insns. */ | |
1998 | XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))]; | |
1999 | break; | |
2000 | ||
2001 | case 'E': | |
2002 | XVEC (copy, i) = XVEC (orig, i); | |
2003 | if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0) | |
2004 | { | |
2005 | XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i)); | |
2006 | for (j = 0; j < XVECLEN (copy, i); j++) | |
2007 | XVECEXP (copy, i, j) | |
2008 | = copy_rtx_and_substitute (XVECEXP (orig, i, j), map); | |
2009 | } | |
2010 | break; | |
2011 | ||
2012 | case 'i': | |
2013 | XINT (copy, i) = XINT (orig, i); | |
2014 | break; | |
2015 | ||
2016 | case 's': | |
2017 | XSTR (copy, i) = XSTR (orig, i); | |
2018 | break; | |
2019 | ||
2020 | default: | |
2021 | abort (); | |
2022 | } | |
2023 | } | |
2024 | ||
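/* If this is the first ASM_OPERANDS copied for this insn, record the
   original and copied operand and constraint vectors so that later
   ASM_OPERANDS sharing operand 3 (multiple outputs of a single asm) are
   given the same copies; see the ASM_OPERANDS case above.  */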
2025 | if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0) | |
2026 | { | |
2027 | map->orig_asm_operands_vector = XVEC (orig, 3); | |
2028 | map->copy_asm_operands_vector = XVEC (copy, 3); | |
2029 | map->copy_asm_constraints_vector = XVEC (copy, 4); | |
2030 | } | |
2031 | ||
2032 | return copy; | |
2033 | } | |
2034 | \f | |
2035 | /* Substitute known constant values into INSN, if that is valid. */ | |
2036 | ||
2037 | void | |
2038 | try_constants (insn, map) | |
2039 | rtx insn; | |
2040 | struct inline_remap *map; | |
2041 | { | |
2042 | int i; | |
2043 | ||
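/* Forget the (dest, equivalent-value) pairs recorded for the previous
   insn; subst_constants will record any made by this insn, and they are
   entered into const_equiv_map below.  */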
2044 | map->num_sets = 0; | |
2045 | subst_constants (&PATTERN (insn), insn, map); | |
2046 | ||
2047 | /* Apply the changes if they are valid; otherwise discard them. */ | |
2048 | apply_change_group (); | |
2049 | ||
2050 | /* Show we don't know the value of anything stored or clobbered. */ | |
2051 | note_stores (PATTERN (insn), mark_stores); | |
2052 | map->last_pc_value = 0; | |
2053 | #ifdef HAVE_cc0 | |
2054 | map->last_cc0_value = 0; | |
2055 | #endif | |
2056 | ||
2057 | /* Set up any constant equivalences made in this insn. */ | |
2058 | for (i = 0; i < map->num_sets; i++) | |
2059 | { | |
2060 | if (GET_CODE (map->equiv_sets[i].dest) == REG) | |
2061 | { | |
2062 | int regno = REGNO (map->equiv_sets[i].dest); | |
2063 | ||
2064 | if (map->const_equiv_map[regno] == 0 | |
2065 | /* The following clause is a hack to make the case work where GNU C++ | |
2066 | reassigns a variable to make cse work right. */ | |
2067 | || ! rtx_equal_p (map->const_equiv_map[regno], | |
2068 | map->equiv_sets[i].equiv)) | |
2069 | { | |
2070 | map->const_equiv_map[regno] = map->equiv_sets[i].equiv; | |
2071 | map->const_age_map[regno] = map->const_age; | |
2072 | } | |
2073 | } | |
2074 | else if (map->equiv_sets[i].dest == pc_rtx) | |
2075 | map->last_pc_value = map->equiv_sets[i].equiv; | |
2076 | #ifdef HAVE_cc0 | |
2077 | else if (map->equiv_sets[i].dest == cc0_rtx) | |
2078 | map->last_cc0_value = map->equiv_sets[i].equiv; | |
2079 | #endif | |
2080 | } | |
2081 | } | |
2082 | \f | |
2083 | /* Substitute known constants for pseudo regs in the contents of LOC, | |
2084 | which are part of INSN. | |
2085 | If INSN is zero, the substitution should always be done (this is used to | |
2086 | update DECL_RTL). | |
2087 | These changes are taken out by try_constants if the result is not valid. | |
2088 | ||
2089 | Note that we are more concerned with determining when the result of a SET | |
2090 | is a constant, for further propagation, than actually inserting constants | |
2091 | into insns; cse will do the latter task better. | |
2092 | ||
2093 | This function is also used to adjust the address of items previously addressed | |
2094 | via the virtual stack variable or virtual incoming arguments registers. */ | |
2095 | ||
2096 | static void | |
2097 | subst_constants (loc, insn, map) | |
2098 | rtx *loc; | |
2099 | rtx insn; | |
2100 | struct inline_remap *map; | |
2101 | { | |
2102 | rtx x = *loc; | |
2103 | register int i; | |
2104 | register enum rtx_code code; | |
2105 | register char *format_ptr; | |
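/* Remember how many validated changes were already pending, so that the
   MEM case below can cancel just the ones made for an address that
   turns out not to be valid.  */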
2106 | int num_changes = num_validated_changes (); | |
2107 | rtx new = 0; | |
2108 | enum machine_mode op0_mode; | |
2109 | ||
2110 | code = GET_CODE (x); | |
2111 | ||
2112 | switch (code) | |
2113 | { | |
2114 | case PC: | |
2115 | case CONST_INT: | |
2116 | case CONST_DOUBLE: | |
2117 | case SYMBOL_REF: | |
2118 | case CONST: | |
2119 | case LABEL_REF: | |
2120 | case ADDRESS: | |
2121 | return; | |
2122 | ||
2123 | #ifdef HAVE_cc0 | |
2124 | case CC0: | |
2125 | validate_change (insn, loc, map->last_cc0_value, 1); | |
2126 | return; | |
2127 | #endif | |
2128 | ||
2129 | case USE: | |
2130 | case CLOBBER: | |
2131 | /* The only thing we can do with a USE or CLOBBER is possibly do | |
2132 | some substitutions in a MEM within it. */ | |
2133 | if (GET_CODE (XEXP (x, 0)) == MEM) | |
2134 | subst_constants (&XEXP (XEXP (x, 0), 0), insn, map); | |
2135 | return; | |
2136 | ||
2137 | case REG: | |
2138 | /* Substitute for parms and known constants. Don't replace | |
2139 | hard regs used as user variables with constants. */ | |
2140 | { | |
2141 | int regno = REGNO (x); | |
2142 | if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x)) | |
2143 | && map->const_equiv_map[regno] != 0 | |
2144 | && map->const_age_map[regno] >= map->const_age) | |
2145 | validate_change (insn, loc, map->const_equiv_map[regno], 1); | |
2146 | return; | |
2147 | } | |
2148 | ||
2149 | case SUBREG: | |
2150 | /* SUBREG is ordinary, but don't make nested SUBREGs, and try to simplify | |
2151 | constants. */ | |
2152 | { | |
2153 | rtx inner = SUBREG_REG (x); | |
2154 | rtx new = 0; | |
2155 | ||
2156 | /* We can't call subst_constants on &SUBREG_REG (x) because any | |
2157 | constant or SUBREG wouldn't be valid inside our SUBREG. Instead, | |
2158 | see what is inside, try to form the new SUBREG and see if that is | |
2159 | valid. We handle two cases: extracting a full word in an | |
2160 | integral mode and extracting the low part. */ | |
2161 | subst_constants (&inner, 0, map); | |
2162 | ||
2163 | if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT | |
2164 | && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD | |
2165 | && GET_MODE (SUBREG_REG (x)) != VOIDmode) | |
2166 | new = operand_subword (inner, SUBREG_WORD (x), 0, | |
2167 | GET_MODE (SUBREG_REG (x))); | |
2168 | ||
2169 | if (new == 0 && subreg_lowpart_p (x)) | |
2170 | new = gen_lowpart_common (GET_MODE (x), inner); | |
2171 | ||
2172 | if (new) | |
2173 | validate_change (insn, loc, new, 1); | |
2174 | ||
2175 | return; | |
2176 | } | |
2177 | ||
2178 | case MEM: | |
2179 | subst_constants (&XEXP (x, 0), insn, map); | |
2180 | ||
2181 | /* If a memory address got spoiled, change it back. */ | |
2182 | if (insn != 0 && num_validated_changes () != num_changes | |
2183 | && !memory_address_p (GET_MODE (x), XEXP (x, 0))) | |
2184 | cancel_changes (num_changes); | |
2185 | return; | |
2186 | ||
2187 | case SET: | |
2188 | { | |
2189 | /* Substitute constants in our source, and in any arguments to a | |
2190 | complex (e.g., ZERO_EXTRACT) destination, but not in the destination | |
2191 | itself. */ | |
2192 | rtx *dest_loc = &SET_DEST (x); | |
2193 | rtx dest = *dest_loc; | |
2194 | rtx src, tem; | |
2195 | ||
2196 | subst_constants (&SET_SRC (x), insn, map); | |
2197 | src = SET_SRC (x); | |
2198 | ||
2199 | while (GET_CODE (*dest_loc) == ZERO_EXTRACT | |
2200 | || GET_CODE (*dest_loc) == SIGN_EXTRACT | |
2201 | || GET_CODE (*dest_loc) == SUBREG | |
2202 | || GET_CODE (*dest_loc) == STRICT_LOW_PART) | |
2203 | { | |
2204 | if (GET_CODE (*dest_loc) == ZERO_EXTRACT) | |
2205 | { | |
2206 | subst_constants (&XEXP (*dest_loc, 1), insn, map); | |
2207 | subst_constants (&XEXP (*dest_loc, 2), insn, map); | |
2208 | } | |
2209 | dest_loc = &XEXP (*dest_loc, 0); | |
2210 | } | |
2211 | ||
2212 | /* Check for the case where DEST is a SUBREG, both it and the underlying | |
2213 | register are no larger than one word, and the SUBREG has the wider mode. | |
2214 | In that case, we are really setting the underlying register to the | |
2215 | source converted to the mode of DEST. So indicate that. */ | |
2216 | if (GET_CODE (dest) == SUBREG | |
2217 | && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD | |
2218 | && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD | |
2219 | && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) | |
2220 | <= GET_MODE_SIZE (GET_MODE (dest))) | |
2221 | && (tem = gen_lowpart_if_possible (GET_MODE (dest), src))) | |
2222 | src = tem, dest = SUBREG_REG (dest); | |
2223 | ||
2224 | /* If storing a recognizable value, save it for later recording. */ | |
2225 | if ((map->num_sets < MAX_RECOG_OPERANDS) | |
2226 | && (CONSTANT_P (src) | |
2227 | || (GET_CODE (src) == PLUS | |
2228 | && GET_CODE (XEXP (src, 0)) == REG | |
2229 | && REGNO (XEXP (src, 0)) >= FIRST_VIRTUAL_REGISTER | |
2230 | && REGNO (XEXP (src, 0)) <= LAST_VIRTUAL_REGISTER | |
2231 | && CONSTANT_P (XEXP (src, 1))) | |
2232 | || GET_CODE (src) == COMPARE | |
2233 | #ifdef HAVE_cc0 | |
2234 | || dest == cc0_rtx | |
2235 | #endif | |
2236 | || (dest == pc_rtx | |
2237 | && (src == pc_rtx || GET_CODE (src) == RETURN | |
2238 | || GET_CODE (src) == LABEL_REF)))) | |
2239 | { | |
2240 | /* Normally, this copy won't do anything. But, if SRC is a COMPARE | |
2241 | it will cause us to save the COMPARE with any constants | |
2242 | substituted, which is what we want for later. */ | |
2243 | map->equiv_sets[map->num_sets].equiv = copy_rtx (src); | |
2244 | map->equiv_sets[map->num_sets++].dest = dest; | |
2245 | } | |
2246 | ||
2247 | return; | |
2248 | } | |
2249 | } | |
2250 | ||
2251 | format_ptr = GET_RTX_FORMAT (code); | |
2252 | ||
2253 | /* If the first operand is an expression, save its mode for later. */ | |
2254 | if (*format_ptr == 'e') | |
2255 | op0_mode = GET_MODE (XEXP (x, 0)); | |
2256 | ||
2257 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
2258 | { | |
2259 | switch (*format_ptr++) | |
2260 | { | |
2261 | case '0': | |
2262 | break; | |
2263 | ||
2264 | case 'e': | |
2265 | if (XEXP (x, i)) | |
2266 | subst_constants (&XEXP (x, i), insn, map); | |
2267 | break; | |
2268 | ||
2269 | case 'u': | |
2270 | case 'i': | |
2271 | case 's': | |
2272 | break; | |
2273 | ||
2274 | case 'E': | |
2275 | if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0) | |
2276 | { | |
2277 | int j; | |
2278 | for (j = 0; j < XVECLEN (x, i); j++) | |
2279 | subst_constants (&XVECEXP (x, i, j), insn, map); | |
2280 | } | |
2281 | break; | |
2282 | ||
2283 | default: | |
2284 | abort (); | |
2285 | } | |
2286 | } | |
2287 | ||
2288 | /* If this is a commutative operation, move a constant to the second | |
2289 | operand unless the second operand is already a CONST_INT. */ | |
2290 | if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ) | |
2291 | && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT) | |
2292 | { | |
2293 | rtx tem = XEXP (x, 0); | |
2294 | validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1); | |
2295 | validate_change (insn, &XEXP (x, 1), tem, 1); | |
2296 | } | |
2297 | ||
2298 | /* Simplify the expression in case we put in some constants. */ | |
2299 | switch (GET_RTX_CLASS (code)) | |
2300 | { | |
2301 | case '1': | |
2302 | new = simplify_unary_operation (code, GET_MODE (x), | |
2303 | XEXP (x, 0), op0_mode); | |
2304 | break; | |
2305 | ||
2306 | case '<': | |
2307 | { | |
2308 | enum machine_mode op_mode = GET_MODE (XEXP (x, 0)); | |
2309 | if (op_mode == VOIDmode) | |
2310 | op_mode = GET_MODE (XEXP (x, 1)); | |
2311 | new = simplify_relational_operation (code, op_mode, | |
2312 | XEXP (x, 0), XEXP (x, 1)); | |
2313 | break; | |
2314 | } | |
2315 | ||
2316 | case '2': | |
2317 | case 'c': | |
2318 | new = simplify_binary_operation (code, GET_MODE (x), | |
2319 | XEXP (x, 0), XEXP (x, 1)); | |
2320 | break; | |
2321 | ||
2322 | case 'b': | |
2323 | case '3': | |
2324 | new = simplify_ternary_operation (code, GET_MODE (x), op0_mode, | |
2325 | XEXP (x, 0), XEXP (x, 1), XEXP (x, 2)); | |
2326 | break; | |
2327 | } | |
2328 | ||
2329 | if (new) | |
2330 | validate_change (insn, loc, new, 1); | |
2331 | } | |
2332 | ||
2333 | /* Show that registers modified no longer contain known constants. We are | |
2334 | called from note_stores with parts of the new insn. */ | |
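/* The map is found through global_const_equiv_map (set up before the
   copying loop) because note_stores passes its callback only the
   destination and the SET or CLOBBER body.  */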
2335 | ||
2336 | void | |
2337 | mark_stores (dest, x) | |
2338 | rtx dest; | |
2339 | rtx x; | |
2340 | { | |
2341 | if (GET_CODE (dest) == SUBREG) | |
2342 | dest = SUBREG_REG (dest); | |
2343 | ||
2344 | if (GET_CODE (dest) == REG) | |
2345 | global_const_equiv_map[REGNO (dest)] = 0; | |
2346 | } | |
2347 | \f | |
2348 | /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx | |
2349 | pointed to by PX, they represent constants in the constant pool. | |
2350 | Replace these with a new memory reference obtained from force_const_mem. | |
2351 | Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the | |
2352 | address of a constant pool entry. Replace them with the address of | |
2353 | a new constant pool entry obtained from force_const_mem. */ | |
2354 | ||
2355 | static void | |
2356 | restore_constants (px) | |
2357 | rtx *px; | |
2358 | { | |
2359 | rtx x = *px; | |
2360 | int i, j; | |
2361 | char *fmt; | |
2362 | ||
2363 | if (x == 0) | |
2364 | return; | |
2365 | ||
2366 | if (GET_CODE (x) == CONST_DOUBLE) | |
2367 | { | |
2368 | /* We have to make a new CONST_DOUBLE to ensure that we account for | |
2369 | it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */ | |
2370 | if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) | |
2371 | { | |
2372 | REAL_VALUE_TYPE d; | |
2373 | ||
2374 | REAL_VALUE_FROM_CONST_DOUBLE (d, x); | |
2375 | *px = immed_real_const_1 (d, GET_MODE (x)); | |
2376 | } | |
2377 | else | |
2378 | *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x), | |
2379 | VOIDmode); | |
2380 | } | |
2381 | ||
2382 | else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST) | |
2383 | { | |
2384 | restore_constants (&XEXP (x, 0)); | |
2385 | *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0))); | |
2386 | } | |
2387 | else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG) | |
2388 | { | |
2389 | /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */ | |
2390 | rtx new = XEXP (SUBREG_REG (x), 0); | |
2391 | ||
2392 | restore_constants (&new); | |
2393 | new = force_const_mem (GET_MODE (SUBREG_REG (x)), new); | |
2394 | PUT_MODE (new, GET_MODE (x)); | |
2395 | *px = validize_mem (new); | |
2396 | } | |
2397 | else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS) | |
2398 | { | |
2399 | restore_constants (&XEXP (x, 0)); | |
2400 | *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0); | |
2401 | } | |
2402 | else | |
2403 | { | |
2404 | fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
2405 | for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++) | |
2406 | { | |
2407 | switch (*fmt++) | |
2408 | { | |
2409 | case 'E': | |
2410 | for (j = 0; j < XVECLEN (x, i); j++) | |
2411 | restore_constants (&XVECEXP (x, i, j)); | |
2412 | break; | |
2413 | ||
2414 | case 'e': | |
2415 | restore_constants (&XEXP (x, i)); | |
2416 | break; | |
2417 | } | |
2418 | } | |
2419 | } | |
2420 | } | |
2421 | \f | |
2422 | /* Output the assembly language code for the function FNDECL | |
2423 | from its DECL_SAVED_INSNS. Used for inline functions that are output | |
2424 | at the end of compilation instead of where they appeared in the source. */ | |
2425 | ||
2426 | void | |
2427 | output_inline_function (fndecl) | |
2428 | tree fndecl; | |
2429 | { | |
2430 | rtx head = DECL_SAVED_INSNS (fndecl); | |
2431 | rtx last; | |
2432 | ||
2433 | temporary_allocation (); | |
2434 | ||
2435 | current_function_decl = fndecl; | |
2436 | ||
2437 | /* This call is only used to initialize global variables. */ | |
2438 | init_function_start (fndecl, "lossage", 1); | |
2439 | ||
2440 | /* Redo parameter determinations in case the FUNCTION_... | |
2441 | macros took machine-specific actions that need to be redone. */ | |
2442 | assign_parms (fndecl, 1); | |
2443 | ||
2444 | /* Set stack frame size. */ | |
2445 | assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0); | |
2446 | ||
2447 | restore_reg_data (FIRST_PARM_INSN (head)); | |
2448 | ||
2449 | stack_slot_list = STACK_SLOT_LIST (head); | |
2450 | ||
2451 | if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA) | |
2452 | current_function_calls_alloca = 1; | |
2453 | ||
2454 | if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP) | |
2455 | current_function_calls_setjmp = 1; | |
2456 | ||
2457 | if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP) | |
2458 | current_function_calls_longjmp = 1; | |
2459 | ||
2460 | if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT) | |
2461 | current_function_returns_struct = 1; | |
2462 | ||
2463 | if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT) | |
2464 | current_function_returns_pcc_struct = 1; | |
2465 | ||
2466 | if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT) | |
2467 | current_function_needs_context = 1; | |
2468 | ||
2469 | if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL) | |
2470 | current_function_has_nonlocal_label = 1; | |
2471 | ||
2472 | if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER) | |
2473 | current_function_returns_pointer = 1; | |
2474 | ||
2475 | if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL) | |
2476 | current_function_uses_const_pool = 1; | |
2477 | ||
2478 | if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE) | |
2479 | current_function_uses_pic_offset_table = 1; | |
2480 | ||
2481 | current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head); | |
2482 | current_function_pops_args = POPS_ARGS (head); | |
2483 | ||
2484 | /* There is no need to output a return label again. */ | |
2485 | return_label = 0; | |
2486 | ||
2487 | expand_function_end (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl)); | |
2488 | ||
2489 | /* Find last insn and rebuild the constant pool. */ | |
2490 | for (last = FIRST_PARM_INSN (head); | |
2491 | NEXT_INSN (last); last = NEXT_INSN (last)) | |
2492 | { | |
2493 | if (GET_RTX_CLASS (GET_CODE (last)) == 'i') | |
2494 | { | |
2495 | restore_constants (&PATTERN (last)); | |
2496 | restore_constants (&REG_NOTES (last)); | |
2497 | } | |
2498 | } | |
2499 | ||
2500 | set_new_first_and_last_insn (FIRST_PARM_INSN (head), last); | |
2501 | set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head)); | |
2502 | ||
2503 | /* Compile this function all the way down to assembly code. */ | |
2504 | rest_of_compilation (fndecl); | |
2505 | ||
2506 | current_function_decl = 0; | |
2507 | ||
2508 | permanent_allocation (); | |
2509 | } |