]>
Commit | Line | Data |
---|---|---|
1322177d | 1 | /* Procedure integration for GCC. |
8beccec8 | 2 | Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, |
d9221e01 | 3 | 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc. |
175160e7 MT |
4 | Contributed by Michael Tiemann (tiemann@cygnus.com) |
5 | ||
1322177d | 6 | This file is part of GCC. |
175160e7 | 7 | |
1322177d LB |
8 | GCC is free software; you can redistribute it and/or modify it under |
9 | the terms of the GNU General Public License as published by the Free | |
10 | Software Foundation; either version 2, or (at your option) any later | |
11 | version. | |
175160e7 | 12 | |
1322177d LB |
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
175160e7 MT |
17 | |
18 | You should have received a copy of the GNU General Public License | |
1322177d LB |
19 | along with GCC; see the file COPYING. If not, write to the Free |
20 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
21 | 02111-1307, USA. */ | |
175160e7 | 22 | |
175160e7 | 23 | #include "config.h" |
670ee920 | 24 | #include "system.h" |
4977bab6 ZW |
25 | #include "coretypes.h" |
26 | #include "tm.h" | |
ccd043a9 | 27 | |
175160e7 MT |
28 | #include "rtl.h" |
29 | #include "tree.h" | |
6baf1cc8 | 30 | #include "tm_p.h" |
12307ca2 | 31 | #include "regs.h" |
175160e7 | 32 | #include "flags.h" |
135d50f1 | 33 | #include "debug.h" |
175160e7 | 34 | #include "insn-config.h" |
175160e7 MT |
35 | #include "expr.h" |
36 | #include "output.h" | |
e9a25f70 | 37 | #include "recog.h" |
175160e7 MT |
38 | #include "integrate.h" |
39 | #include "real.h" | |
6adb4e3a | 40 | #include "except.h" |
175160e7 | 41 | #include "function.h" |
d6f4ec51 | 42 | #include "toplev.h" |
ab87f8c8 | 43 | #include "intl.h" |
c6d9a88c | 44 | #include "params.h" |
c0e7830f | 45 | #include "ggc.h" |
91d231cb | 46 | #include "target.h" |
63e1b1c4 | 47 | #include "langhooks.h" |
175160e7 | 48 | |
/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))

/* Private type used by {get/has}_func_hard_reg_initial_val.  One entry
   pairs a hard register with the pseudo that holds a copy of its value
   on entry to the function.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;

/* Growable table of initial_value_pair records for one function.
   NUM_ENTRIES slots of ENTRIES are in use; MAX_ENTRIES are allocated.
   The GTY length marker keeps the garbage collector scanning only the
   live portion of ENTRIES.  */
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

/* Forward declarations for this file's static helpers.  */
static void subst_constants (rtx *, rtx, struct inline_remap *, int);
static void set_block_origin_self (tree);
static void set_block_abstract_flags (tree, int);
static void mark_stores (rtx, rtx, void *);
175160e7 | 68 | \f |
1f3d3a31 | 69 | /* Returns the Ith entry in the label_map contained in MAP. If the |
e5e809f4 JL |
70 | Ith entry has not yet been set, return a fresh label. This function |
71 | performs a lazy initialization of label_map, thereby avoiding huge memory | |
72 | explosions when the label_map gets very large. */ | |
73 | ||
1f3d3a31 | 74 | rtx |
1d088dee | 75 | get_label_from_map (struct inline_remap *map, int i) |
1f3d3a31 JL |
76 | { |
77 | rtx x = map->label_map[i]; | |
78 | ||
79 | if (x == NULL_RTX) | |
00174bdf | 80 | x = map->label_map[i] = gen_label_rtx (); |
1f3d3a31 JL |
81 | |
82 | return x; | |
83 | } | |
84 | ||
91d231cb JM |
85 | /* Return false if the function FNDECL cannot be inlined on account of its |
86 | attributes, true otherwise. */ | |
588d3ade | 87 | bool |
1d088dee | 88 | function_attribute_inlinable_p (tree fndecl) |
91d231cb | 89 | { |
b9a26d09 | 90 | if (targetm.attribute_table) |
91d231cb | 91 | { |
b9a26d09 | 92 | tree a; |
91d231cb | 93 | |
b9a26d09 | 94 | for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a)) |
91d231cb | 95 | { |
b9a26d09 NB |
96 | tree name = TREE_PURPOSE (a); |
97 | int i; | |
98 | ||
99 | for (i = 0; targetm.attribute_table[i].name != NULL; i++) | |
100 | if (is_attribute_p (targetm.attribute_table[i].name, name)) | |
5fd9b178 | 101 | return targetm.function_attribute_inlinable_p (fndecl); |
91d231cb | 102 | } |
91d231cb JM |
103 | } |
104 | ||
b9a26d09 | 105 | return true; |
91d231cb | 106 | } |
175160e7 | 107 | \f |
/* Copy the declaration DECL (a *_DECL node, including PARM_DECL and
   RESULT_DECL) for inlining.  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  Returns the new declaration;
   parameters and results come back as equivalent VAR_DECLs.  */

tree
copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type;
      int invisiref = 0;

      /* See if the frontend wants to pass this by invisible reference.
	 The test matches a PARM_DECL whose DECL_ARG_TYPE is a pointer
	 to the declared type, which is the frontend's encoding for
	 pass-by-invisible-reference.  */
      if (TREE_CODE (decl) == PARM_DECL
	  && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
	  && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
	  && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
	{
	  invisiref = 1;
	  type = DECL_ARG_TYPE (decl);
	}
      else
	type = TREE_TYPE (decl);

      /* For a parameter, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      if (!invisiref)
	{
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
	  TREE_READONLY (copy) = TREE_READONLY (decl);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
	}
      else
	{
	  /* The copy holds the passed-in pointer itself: it is not
	     addressable, is effectively read-only, and not volatile.  */
	  TREE_ADDRESSABLE (copy) = 0;
	  TREE_READONLY (copy) = 1;
	  TREE_THIS_VOLATILE (copy) = 0;
	}
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	{
	  TREE_ADDRESSABLE (copy) = 0;
	  DECL_TOO_LATE (copy) = 0;
	}
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
175160e7 | 198 | \f |
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores (mark_stores takes no closure
   argument).  Be *very* careful that this is used properly in the
   presence of recursion.  */

varray_type global_const_equiv_varray;
175160e7 | 204 | |
/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  Returns 0 when ORIG is 0.  */

rtx
copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  else if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size
		= get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
	      int alignment
		= (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
		   / BITS_PER_UNIT);

	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      if (alignment)
		size = CEIL_ROUND (size, alignment);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      /* Emit the address computation at the start of the inlined
		 body so every later use of TEMP is dominated by it.  */
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      if (rtx_equal_function_value_matters)
		/* This is an ignored return value.  We must not
		   leave it in with REG_FUNCTION_VALUE_P set, since
		   that would confuse subsequent inlining of the
		   current function into a later function.  */
		return gen_rtx_REG (GET_MODE (orig), regno);
	      else
		/* Must be unrolling loops or replicating code if we
		   reach here, so return the register unchanged.  */
		return orig;
	    }
	  else
	    return orig;

	  /* All cases above return; this is unreachable.  */
	  abort ();
	}
      /* A pseudo register: allocate its replacement lazily, copying
	 the flag bits the optimizers care about.  */
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      return simplify_gen_subreg (GET_MODE (orig), copy,
				  GET_MODE (SUBREG_REG (orig)),
				  SUBREG_BYTE (orig));

    case ADDRESSOF:
      copy = gen_rtx_ADDRESSOF (mode,
				copy_rtx_and_substitute (XEXP (orig, 0),
							 map, for_lhs),
				0, ADDRESSOF_DECL (orig));
      regno = ADDRESSOF_REGNO (orig);
      if (map->reg_map[regno])
	regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
	{
	  temp = XEXP (orig, 0);
	  map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  /* Objects may initially be represented as registers, but
	     turned into a MEM if their address is taken by
	     put_var_into_stack.  Therefore, the register table may have
	     entries which are MEMs.

	     We briefly tried to clear such entries, but that ended up
	     cascading into many changes due to the optimizers not being
	     prepared for empty entries in the register table.  So we've
	     decided to allow the MEMs in the register table for now.  */
	  if (REG_P (map->x_regno_reg_rtx[regno])
	      && REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	  regno = REGNO (map->reg_map[regno]);
	}
      ADDRESSOF_REGNO (copy) = regno;
      return copy;

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movstrhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

      /* We need to handle "deleted" labels that appear in the DECL_RTL
	 of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
	break;

      /* Fall through.  */
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
	 inlined call will be referring to our nonlocal goto handler.
	 So make sure we create one for this block; we normally would
	 not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
	function_call_count++;

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_VECTOR:
      /* These codes are sharable; return them unchanged.  */
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = cfun;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}
      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
	 want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      /* Fall through to the generic operand-by-operand copy below.  */
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
	 it contains multiple ASM_OPERANDS rtx's that share the input
	 and constraint vecs.  We must make sure that the copied insn
	 continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
	  PUT_MODE (copy, GET_MODE (orig));
	  ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
	  ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
	    = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
	  ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
	  ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
	  ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
	    = map->copy_asm_constraints_vector;
	  ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
	  ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	{
	  rtx copy
	    = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
			   copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						    map, 0));

	  MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));

	  return
	    gen_rtx_CALL (GET_MODE (orig), copy,
			  copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
	}
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
      /* ??? Is this for the old or the new unroller?  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig),
							 map, 0),
				- loc_offset),
			       NULL_RTX));
	}
      else
	/* Ordinary SET: copy destination as LHS, source as RHS.  */
	return gen_rtx_SET (VOIDmode,
			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;

    case MEM:
      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
							 map, 0));
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  /* Generic case: allocate a fresh rtx of the same code and copy each
     operand according to its format letter.  */
  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  X0ANY (copy, i) = X0ANY (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i)
	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
					     map, for_lhs);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  /* Record the first ASM_OPERANDS vectors seen so later siblings of the
     same asm can share them (see the ASM_OPERANDS case above).  */
  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
	= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
657 | \f | |
/* Substitute known constant values into INSN, if that is valid.
   MAP carries the register map and constant-equivalence table for the
   current remapping operation; substitutions that do not yield a valid
   insn are handled by the validate_change/apply_change_group
   machinery used inside subst_constants.  */

void
try_constants (rtx insn, struct inline_remap *map)
{
  int i;

  /* subst_constants records new (dest, equiv) pairs here.  */
  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Enforce consistency between the addresses in the regular insn flow
     and the ones in CALL_INSN_FUNCTION_USAGE lists, if any.  */
  if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
    {
      subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
      apply_change_group ();
    }

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (REG_P (map->equiv_sets[i].dest))
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* Following clause is a hack to make case work where GNU C++
		 reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
716 | \f | |
717 | /* Substitute known constants for pseudo regs in the contents of LOC, | |
718 | which are part of INSN. | |
d45cf215 | 719 | If INSN is zero, the substitution should always be done (this is used to |
175160e7 MT |
720 | update DECL_RTL). |
721 | These changes are taken out by try_constants if the result is not valid. | |
722 | ||
723 | Note that we are more concerned with determining when the result of a SET | |
724 | is a constant, for further propagation, than actually inserting constants | |
725 | into insns; cse will do the latter task better. | |
726 | ||
727 | This function is also used to adjust address of items previously addressed | |
00174bdf | 728 | via the virtual stack variable or virtual incoming arguments registers. |
14a774a9 RK |
729 | |
730 | If MEMONLY is nonzero, only make changes inside a MEM. */ | |
175160e7 MT |
731 | |
732 | static void | |
1d088dee | 733 | subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly) |
175160e7 MT |
734 | { |
735 | rtx x = *loc; | |
b3694847 SS |
736 | int i, j; |
737 | enum rtx_code code; | |
738 | const char *format_ptr; | |
175160e7 MT |
739 | int num_changes = num_validated_changes (); |
740 | rtx new = 0; | |
a30d557c | 741 | enum machine_mode op0_mode = MAX_MACHINE_MODE; |
175160e7 MT |
742 | |
743 | code = GET_CODE (x); | |
744 | ||
745 | switch (code) | |
746 | { | |
747 | case PC: | |
748 | case CONST_INT: | |
749 | case CONST_DOUBLE: | |
69ef87e2 | 750 | case CONST_VECTOR: |
175160e7 MT |
751 | case SYMBOL_REF: |
752 | case CONST: | |
753 | case LABEL_REF: | |
754 | case ADDRESS: | |
755 | return; | |
756 | ||
757 | #ifdef HAVE_cc0 | |
758 | case CC0: | |
14a774a9 RK |
759 | if (! memonly) |
760 | validate_change (insn, loc, map->last_cc0_value, 1); | |
175160e7 MT |
761 | return; |
762 | #endif | |
763 | ||
764 | case USE: | |
765 | case CLOBBER: | |
766 | /* The only thing we can do with a USE or CLOBBER is possibly do | |
767 | some substitutions in a MEM within it. */ | |
768 | if (GET_CODE (XEXP (x, 0)) == MEM) | |
14a774a9 | 769 | subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0); |
175160e7 MT |
770 | return; |
771 | ||
772 | case REG: | |
773 | /* Substitute for parms and known constants. Don't replace | |
774 | hard regs used as user variables with constants. */ | |
14a774a9 RK |
775 | if (! memonly) |
776 | { | |
777 | int regno = REGNO (x); | |
778 | struct const_equiv_data *p; | |
779 | ||
780 | if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x)) | |
781 | && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray) | |
782 | && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno), | |
783 | p->rtx != 0) | |
784 | && p->age >= map->const_age) | |
785 | validate_change (insn, loc, p->rtx, 1); | |
786 | } | |
787 | return; | |
175160e7 MT |
788 | |
789 | case SUBREG: | |
637c5064 RS |
790 | /* SUBREG applied to something other than a reg |
791 | should be treated as ordinary, since that must | |
792 | be a special hack and we don't know how to treat it specially. | |
793 | Consider for example mulsidi3 in m68k.md. | |
794 | Ordinary SUBREG of a REG needs this special treatment. */ | |
f8cfc6aa | 795 | if (! memonly && REG_P (SUBREG_REG (x))) |
637c5064 RS |
796 | { |
797 | rtx inner = SUBREG_REG (x); | |
798 | rtx new = 0; | |
175160e7 | 799 | |
637c5064 RS |
800 | /* We can't call subst_constants on &SUBREG_REG (x) because any |
801 | constant or SUBREG wouldn't be valid inside our SUBEG. Instead, | |
802 | see what is inside, try to form the new SUBREG and see if that is | |
00174bdf | 803 | valid. We handle two cases: extracting a full word in an |
637c5064 | 804 | integral mode and extracting the low part. */ |
14a774a9 | 805 | subst_constants (&inner, NULL_RTX, map, 0); |
0631e0bf | 806 | new = simplify_gen_subreg (GET_MODE (x), inner, |
1d088dee | 807 | GET_MODE (SUBREG_REG (x)), |
0631e0bf | 808 | SUBREG_BYTE (x)); |
175160e7 | 809 | |
637c5064 RS |
810 | if (new) |
811 | validate_change (insn, loc, new, 1); | |
0631e0bf JH |
812 | else |
813 | cancel_changes (num_changes); | |
175160e7 | 814 | |
637c5064 RS |
815 | return; |
816 | } | |
817 | break; | |
175160e7 MT |
818 | |
819 | case MEM: | |
14a774a9 | 820 | subst_constants (&XEXP (x, 0), insn, map, 0); |
175160e7 MT |
821 | |
822 | /* If a memory address got spoiled, change it back. */ | |
14a774a9 RK |
823 | if (! memonly && insn != 0 && num_validated_changes () != num_changes |
824 | && ! memory_address_p (GET_MODE (x), XEXP (x, 0))) | |
175160e7 MT |
825 | cancel_changes (num_changes); |
826 | return; | |
827 | ||
828 | case SET: | |
829 | { | |
830 | /* Substitute constants in our source, and in any arguments to a | |
831 | complex (e..g, ZERO_EXTRACT) destination, but not in the destination | |
832 | itself. */ | |
833 | rtx *dest_loc = &SET_DEST (x); | |
834 | rtx dest = *dest_loc; | |
835 | rtx src, tem; | |
96e60f0c JJ |
836 | enum machine_mode compare_mode = VOIDmode; |
837 | ||
838 | /* If SET_SRC is a COMPARE which subst_constants would turn into | |
839 | COMPARE of 2 VOIDmode constants, note the mode in which comparison | |
840 | is to be done. */ | |
841 | if (GET_CODE (SET_SRC (x)) == COMPARE) | |
842 | { | |
843 | src = SET_SRC (x); | |
844 | if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC | |
8beccec8 | 845 | || CC0_P (dest)) |
96e60f0c JJ |
846 | { |
847 | compare_mode = GET_MODE (XEXP (src, 0)); | |
848 | if (compare_mode == VOIDmode) | |
849 | compare_mode = GET_MODE (XEXP (src, 1)); | |
850 | } | |
851 | } | |
175160e7 | 852 | |
14a774a9 | 853 | subst_constants (&SET_SRC (x), insn, map, memonly); |
175160e7 MT |
854 | src = SET_SRC (x); |
855 | ||
856 | while (GET_CODE (*dest_loc) == ZERO_EXTRACT | |
175160e7 MT |
857 | || GET_CODE (*dest_loc) == SUBREG |
858 | || GET_CODE (*dest_loc) == STRICT_LOW_PART) | |
859 | { | |
860 | if (GET_CODE (*dest_loc) == ZERO_EXTRACT) | |
861 | { | |
14a774a9 RK |
862 | subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly); |
863 | subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly); | |
175160e7 MT |
864 | } |
865 | dest_loc = &XEXP (*dest_loc, 0); | |
866 | } | |
867 | ||
91594e43 RS |
868 | /* Do substitute in the address of a destination in memory. */ |
869 | if (GET_CODE (*dest_loc) == MEM) | |
14a774a9 | 870 | subst_constants (&XEXP (*dest_loc, 0), insn, map, 0); |
91594e43 | 871 | |
175160e7 MT |
872 | /* Check for the case of DEST a SUBREG, both it and the underlying |
873 | register are less than one word, and the SUBREG has the wider mode. | |
874 | In the case, we are really setting the underlying register to the | |
875 | source converted to the mode of DEST. So indicate that. */ | |
876 | if (GET_CODE (dest) == SUBREG | |
877 | && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD | |
878 | && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD | |
879 | && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) | |
880 | <= GET_MODE_SIZE (GET_MODE (dest))) | |
e2eb57b7 RK |
881 | && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)), |
882 | src))) | |
175160e7 MT |
883 | src = tem, dest = SUBREG_REG (dest); |
884 | ||
885 | /* If storing a recognizable value save it for later recording. */ | |
886 | if ((map->num_sets < MAX_RECOG_OPERANDS) | |
887 | && (CONSTANT_P (src) | |
f8cfc6aa | 888 | || (REG_P (src) |
83b93f40 RK |
889 | && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM |
890 | || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM)) | |
175160e7 | 891 | || (GET_CODE (src) == PLUS |
f8cfc6aa | 892 | && REG_P (XEXP (src, 0)) |
83b93f40 RK |
893 | && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM |
894 | || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM) | |
175160e7 MT |
895 | && CONSTANT_P (XEXP (src, 1))) |
896 | || GET_CODE (src) == COMPARE | |
8beccec8 | 897 | || CC0_P (dest) |
175160e7 MT |
898 | || (dest == pc_rtx |
899 | && (src == pc_rtx || GET_CODE (src) == RETURN | |
900 | || GET_CODE (src) == LABEL_REF)))) | |
901 | { | |
902 | /* Normally, this copy won't do anything. But, if SRC is a COMPARE | |
903 | it will cause us to save the COMPARE with any constants | |
904 | substituted, which is what we want for later. */ | |
96e60f0c JJ |
905 | rtx src_copy = copy_rtx (src); |
906 | map->equiv_sets[map->num_sets].equiv = src_copy; | |
175160e7 | 907 | map->equiv_sets[map->num_sets++].dest = dest; |
96e60f0c JJ |
908 | if (compare_mode != VOIDmode |
909 | && GET_CODE (src) == COMPARE | |
910 | && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC | |
8beccec8 | 911 | || CC0_P (dest)) |
96e60f0c JJ |
912 | && GET_MODE (XEXP (src, 0)) == VOIDmode |
913 | && GET_MODE (XEXP (src, 1)) == VOIDmode) | |
914 | { | |
915 | map->compare_src = src_copy; | |
916 | map->compare_mode = compare_mode; | |
917 | } | |
175160e7 | 918 | } |
175160e7 | 919 | } |
e9a25f70 JL |
920 | return; |
921 | ||
922 | default: | |
923 | break; | |
175160e7 MT |
924 | } |
925 | ||
926 | format_ptr = GET_RTX_FORMAT (code); | |
00174bdf | 927 | |
175160e7 MT |
928 | /* If the first operand is an expression, save its mode for later. */ |
929 | if (*format_ptr == 'e') | |
930 | op0_mode = GET_MODE (XEXP (x, 0)); | |
931 | ||
932 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
933 | { | |
934 | switch (*format_ptr++) | |
935 | { | |
936 | case '0': | |
937 | break; | |
938 | ||
939 | case 'e': | |
940 | if (XEXP (x, i)) | |
14a774a9 | 941 | subst_constants (&XEXP (x, i), insn, map, memonly); |
175160e7 MT |
942 | break; |
943 | ||
944 | case 'u': | |
945 | case 'i': | |
946 | case 's': | |
02bea8a8 | 947 | case 'w': |
00174bdf | 948 | case 'n': |
8f985ec4 | 949 | case 't': |
2ff581c3 | 950 | case 'B': |
175160e7 MT |
951 | break; |
952 | ||
953 | case 'E': | |
954 | if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0) | |
14a774a9 RK |
955 | for (j = 0; j < XVECLEN (x, i); j++) |
956 | subst_constants (&XVECEXP (x, i, j), insn, map, memonly); | |
957 | ||
175160e7 MT |
958 | break; |
959 | ||
960 | default: | |
961 | abort (); | |
962 | } | |
963 | } | |
964 | ||
965 | /* If this is a commutative operation, move a constant to the second | |
966 | operand unless the second operand is already a CONST_INT. */ | |
14a774a9 | 967 | if (! memonly |
ec8e098d PB |
968 | && (GET_RTX_CLASS (code) == RTX_COMM_ARITH |
969 | || GET_RTX_CLASS (code) == RTX_COMM_COMPARE) | |
175160e7 MT |
970 | && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT) |
971 | { | |
972 | rtx tem = XEXP (x, 0); | |
973 | validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1); | |
974 | validate_change (insn, &XEXP (x, 1), tem, 1); | |
975 | } | |
976 | ||
977 | /* Simplify the expression in case we put in some constants. */ | |
14a774a9 RK |
978 | if (! memonly) |
979 | switch (GET_RTX_CLASS (code)) | |
175160e7 | 980 | { |
ec8e098d | 981 | case RTX_UNARY: |
14a774a9 RK |
982 | if (op0_mode == MAX_MACHINE_MODE) |
983 | abort (); | |
984 | new = simplify_unary_operation (code, GET_MODE (x), | |
985 | XEXP (x, 0), op0_mode); | |
986 | break; | |
987 | ||
ec8e098d PB |
988 | case RTX_COMPARE: |
989 | case RTX_COMM_COMPARE: | |
14a774a9 RK |
990 | { |
991 | enum machine_mode op_mode = GET_MODE (XEXP (x, 0)); | |
992 | ||
993 | if (op_mode == VOIDmode) | |
994 | op_mode = GET_MODE (XEXP (x, 1)); | |
c6fb08ad | 995 | |
7ce3e360 | 996 | new = simplify_relational_operation (code, GET_MODE (x), op_mode, |
14a774a9 | 997 | XEXP (x, 0), XEXP (x, 1)); |
14a774a9 | 998 | break; |
00174bdf | 999 | } |
175160e7 | 1000 | |
ec8e098d PB |
1001 | case RTX_BIN_ARITH: |
1002 | case RTX_COMM_ARITH: | |
14a774a9 RK |
1003 | new = simplify_binary_operation (code, GET_MODE (x), |
1004 | XEXP (x, 0), XEXP (x, 1)); | |
1005 | break; | |
175160e7 | 1006 | |
ec8e098d PB |
1007 | case RTX_BITFIELD_OPS: |
1008 | case RTX_TERNARY: | |
14a774a9 RK |
1009 | if (op0_mode == MAX_MACHINE_MODE) |
1010 | abort (); | |
1011 | ||
96e60f0c JJ |
1012 | if (code == IF_THEN_ELSE) |
1013 | { | |
1014 | rtx op0 = XEXP (x, 0); | |
1015 | ||
ec8e098d | 1016 | if (COMPARISON_P (op0) |
96e60f0c JJ |
1017 | && GET_MODE (op0) == VOIDmode |
1018 | && ! side_effects_p (op0) | |
1019 | && XEXP (op0, 0) == map->compare_src | |
1020 | && GET_MODE (XEXP (op0, 1)) == VOIDmode) | |
1021 | { | |
1022 | /* We have compare of two VOIDmode constants for which | |
1023 | we recorded the comparison mode. */ | |
c6fb08ad PB |
1024 | rtx tem = |
1025 | simplify_gen_relational (GET_CODE (op0), GET_MODE (op0), | |
1026 | map->compare_mode, XEXP (op0, 0), | |
1027 | XEXP (op0, 1)); | |
1028 | ||
1029 | if (GET_CODE (tem) != CONST_INT) | |
1030 | new = simplify_ternary_operation (code, GET_MODE (x), | |
1031 | op0_mode, tem, XEXP (x, 1), | |
1032 | XEXP (x, 2)); | |
1033 | else if (tem == const0_rtx) | |
96e60f0c | 1034 | new = XEXP (x, 2); |
c6fb08ad | 1035 | else |
96e60f0c JJ |
1036 | new = XEXP (x, 1); |
1037 | } | |
1038 | } | |
1039 | if (!new) | |
1040 | new = simplify_ternary_operation (code, GET_MODE (x), op0_mode, | |
1041 | XEXP (x, 0), XEXP (x, 1), | |
1042 | XEXP (x, 2)); | |
14a774a9 | 1043 | break; |
ec8e098d PB |
1044 | |
1045 | default: | |
1046 | break; | |
14a774a9 | 1047 | } |
175160e7 MT |
1048 | |
1049 | if (new) | |
1050 | validate_change (insn, loc, new, 1); | |
1051 | } | |
1052 | ||
1053 | /* Show that register modified no longer contain known constants. We are | |
1054 | called from note_stores with parts of the new insn. */ | |
1055 | ||
915b80ed | 1056 | static void |
1d088dee | 1057 | mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED) |
175160e7 | 1058 | { |
e2eb57b7 | 1059 | int regno = -1; |
6a651371 | 1060 | enum machine_mode mode = VOIDmode; |
e2eb57b7 RK |
1061 | |
1062 | /* DEST is always the innermost thing set, except in the case of | |
1063 | SUBREGs of hard registers. */ | |
175160e7 | 1064 | |
f8cfc6aa | 1065 | if (REG_P (dest)) |
e2eb57b7 | 1066 | regno = REGNO (dest), mode = GET_MODE (dest); |
f8cfc6aa | 1067 | else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest))) |
e2eb57b7 | 1068 | { |
ddef6bc7 JJ |
1069 | regno = REGNO (SUBREG_REG (dest)); |
1070 | if (regno < FIRST_PSEUDO_REGISTER) | |
1071 | regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)), | |
1072 | GET_MODE (SUBREG_REG (dest)), | |
1073 | SUBREG_BYTE (dest), | |
1074 | GET_MODE (dest)); | |
e2eb57b7 RK |
1075 | mode = GET_MODE (SUBREG_REG (dest)); |
1076 | } | |
1077 | ||
1078 | if (regno >= 0) | |
1079 | { | |
770ae6cc RK |
1080 | unsigned int uregno = regno; |
1081 | unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno | |
66fd46b6 | 1082 | : uregno + hard_regno_nregs[uregno][mode] - 1); |
770ae6cc | 1083 | unsigned int i; |
e2eb57b7 | 1084 | |
e9a25f70 JL |
1085 | /* Ignore virtual stack var or virtual arg register since those |
1086 | are handled separately. */ | |
770ae6cc RK |
1087 | if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM |
1088 | && uregno != VIRTUAL_STACK_VARS_REGNUM) | |
1089 | for (i = uregno; i <= last_reg; i++) | |
6a651371 | 1090 | if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray)) |
c68da89c | 1091 | VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0; |
e2eb57b7 | 1092 | } |
175160e7 MT |
1093 | } |
1094 | \f | |
81578142 RS |
1095 | /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the |
1096 | given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so | |
1097 | that it points to the node itself, thus indicating that the node is its | |
1098 | own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for | |
1099 | the given node is NULL, recursively descend the decl/block tree which | |
1100 | it is the root of, and for each other ..._DECL or BLOCK node contained | |
1101 | therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also | |
1102 | still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN | |
1103 | values to point to themselves. */ | |
1104 | ||
81578142 | 1105 | static void |
1d088dee | 1106 | set_block_origin_self (tree stmt) |
81578142 RS |
1107 | { |
1108 | if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE) | |
1109 | { | |
1110 | BLOCK_ABSTRACT_ORIGIN (stmt) = stmt; | |
1111 | ||
1112 | { | |
b3694847 | 1113 | tree local_decl; |
81578142 | 1114 | |
00174bdf | 1115 | for (local_decl = BLOCK_VARS (stmt); |
81578142 RS |
1116 | local_decl != NULL_TREE; |
1117 | local_decl = TREE_CHAIN (local_decl)) | |
00174bdf | 1118 | set_decl_origin_self (local_decl); /* Potential recursion. */ |
81578142 RS |
1119 | } |
1120 | ||
1121 | { | |
b3694847 | 1122 | tree subblock; |
81578142 | 1123 | |
00174bdf | 1124 | for (subblock = BLOCK_SUBBLOCKS (stmt); |
81578142 RS |
1125 | subblock != NULL_TREE; |
1126 | subblock = BLOCK_CHAIN (subblock)) | |
00174bdf | 1127 | set_block_origin_self (subblock); /* Recurse. */ |
81578142 RS |
1128 | } |
1129 | } | |
1130 | } | |
1131 | ||
1132 | /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for | |
1133 | the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the | |
1134 | node to so that it points to the node itself, thus indicating that the | |
1135 | node represents its own (abstract) origin. Additionally, if the | |
1136 | DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend | |
1137 | the decl/block tree of which the given node is the root of, and for | |
1138 | each other ..._DECL or BLOCK node contained therein whose | |
1139 | DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL, | |
1140 | set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to | |
1141 | point to themselves. */ | |
1142 | ||
1cfdcc15 | 1143 | void |
1d088dee | 1144 | set_decl_origin_self (tree decl) |
81578142 RS |
1145 | { |
1146 | if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE) | |
1147 | { | |
1148 | DECL_ABSTRACT_ORIGIN (decl) = decl; | |
1149 | if (TREE_CODE (decl) == FUNCTION_DECL) | |
1150 | { | |
b3694847 | 1151 | tree arg; |
81578142 RS |
1152 | |
1153 | for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg)) | |
1154 | DECL_ABSTRACT_ORIGIN (arg) = arg; | |
29d356fb RK |
1155 | if (DECL_INITIAL (decl) != NULL_TREE |
1156 | && DECL_INITIAL (decl) != error_mark_node) | |
81578142 RS |
1157 | set_block_origin_self (DECL_INITIAL (decl)); |
1158 | } | |
1159 | } | |
1160 | } | |
1161 | \f | |
1162 | /* Given a pointer to some BLOCK node, and a boolean value to set the | |
1163 | "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for | |
1164 | the given block, and for all local decls and all local sub-blocks | |
1165 | (recursively) which are contained therein. */ | |
1166 | ||
81578142 | 1167 | static void |
1d088dee | 1168 | set_block_abstract_flags (tree stmt, int setting) |
81578142 | 1169 | { |
b3694847 SS |
1170 | tree local_decl; |
1171 | tree subblock; | |
81578142 | 1172 | |
12307ca2 | 1173 | BLOCK_ABSTRACT (stmt) = setting; |
81578142 | 1174 | |
12307ca2 RK |
1175 | for (local_decl = BLOCK_VARS (stmt); |
1176 | local_decl != NULL_TREE; | |
1177 | local_decl = TREE_CHAIN (local_decl)) | |
1178 | set_decl_abstract_flags (local_decl, setting); | |
81578142 | 1179 | |
12307ca2 RK |
1180 | for (subblock = BLOCK_SUBBLOCKS (stmt); |
1181 | subblock != NULL_TREE; | |
1182 | subblock = BLOCK_CHAIN (subblock)) | |
1183 | set_block_abstract_flags (subblock, setting); | |
81578142 RS |
1184 | } |
1185 | ||
1186 | /* Given a pointer to some ..._DECL node, and a boolean value to set the | |
1187 | "abstract" flags to, set that value into the DECL_ABSTRACT flag for the | |
1188 | given decl, and (in the case where the decl is a FUNCTION_DECL) also | |
1189 | set the abstract flags for all of the parameters, local vars, local | |
1190 | blocks and sub-blocks (recursively) to the same setting. */ | |
1191 | ||
1192 | void | |
1d088dee | 1193 | set_decl_abstract_flags (tree decl, int setting) |
81578142 RS |
1194 | { |
1195 | DECL_ABSTRACT (decl) = setting; | |
1196 | if (TREE_CODE (decl) == FUNCTION_DECL) | |
1197 | { | |
b3694847 | 1198 | tree arg; |
81578142 RS |
1199 | |
1200 | for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg)) | |
1201 | DECL_ABSTRACT (arg) = setting; | |
29d356fb RK |
1202 | if (DECL_INITIAL (decl) != NULL_TREE |
1203 | && DECL_INITIAL (decl) != error_mark_node) | |
81578142 RS |
1204 | set_block_abstract_flags (DECL_INITIAL (decl), setting); |
1205 | } | |
1206 | } | |
c0e7830f DD |
1207 | \f |
1208 | /* Functions to keep track of the values hard regs had at the start of | |
1209 | the function. */ | |
1210 | ||
902197eb | 1211 | rtx |
1d088dee | 1212 | get_hard_reg_initial_reg (struct function *fun, rtx reg) |
902197eb DD |
1213 | { |
1214 | struct initial_value_struct *ivs = fun->hard_reg_initial_vals; | |
1215 | int i; | |
1216 | ||
1217 | if (ivs == 0) | |
1218 | return NULL_RTX; | |
1219 | ||
1220 | for (i = 0; i < ivs->num_entries; i++) | |
1221 | if (rtx_equal_p (ivs->entries[i].pseudo, reg)) | |
1222 | return ivs->entries[i].hard_reg; | |
1223 | ||
1224 | return NULL_RTX; | |
1225 | } | |
1226 | ||
c0e7830f | 1227 | rtx |
1d088dee | 1228 | has_func_hard_reg_initial_val (struct function *fun, rtx reg) |
c0e7830f DD |
1229 | { |
1230 | struct initial_value_struct *ivs = fun->hard_reg_initial_vals; | |
1231 | int i; | |
1232 | ||
1233 | if (ivs == 0) | |
1234 | return NULL_RTX; | |
1235 | ||
1236 | for (i = 0; i < ivs->num_entries; i++) | |
1237 | if (rtx_equal_p (ivs->entries[i].hard_reg, reg)) | |
1238 | return ivs->entries[i].pseudo; | |
1239 | ||
1240 | return NULL_RTX; | |
1241 | } | |
1242 | ||
1243 | rtx | |
1d088dee | 1244 | get_func_hard_reg_initial_val (struct function *fun, rtx reg) |
c0e7830f DD |
1245 | { |
1246 | struct initial_value_struct *ivs = fun->hard_reg_initial_vals; | |
1247 | rtx rv = has_func_hard_reg_initial_val (fun, reg); | |
1248 | ||
1249 | if (rv) | |
1250 | return rv; | |
1251 | ||
1252 | if (ivs == 0) | |
1253 | { | |
703ad42b | 1254 | fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct)); |
c0e7830f DD |
1255 | ivs = fun->hard_reg_initial_vals; |
1256 | ivs->num_entries = 0; | |
1257 | ivs->max_entries = 5; | |
703ad42b | 1258 | ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair)); |
c0e7830f DD |
1259 | } |
1260 | ||
1261 | if (ivs->num_entries >= ivs->max_entries) | |
1262 | { | |
1263 | ivs->max_entries += 5; | |
703ad42b KG |
1264 | ivs->entries = ggc_realloc (ivs->entries, |
1265 | ivs->max_entries | |
1266 | * sizeof (initial_value_pair)); | |
c0e7830f DD |
1267 | } |
1268 | ||
1269 | ivs->entries[ivs->num_entries].hard_reg = reg; | |
1270 | ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg)); | |
1271 | ||
1272 | return ivs->entries[ivs->num_entries++].pseudo; | |
1273 | } | |
1274 | ||
1275 | rtx | |
1d088dee | 1276 | get_hard_reg_initial_val (enum machine_mode mode, int regno) |
c0e7830f DD |
1277 | { |
1278 | return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno)); | |
1279 | } | |
1280 | ||
1281 | rtx | |
1d088dee | 1282 | has_hard_reg_initial_val (enum machine_mode mode, int regno) |
c0e7830f DD |
1283 | { |
1284 | return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno)); | |
1285 | } | |
1286 | ||
c0e7830f | 1287 | void |
1d088dee | 1288 | emit_initial_value_sets (void) |
c0e7830f DD |
1289 | { |
1290 | struct initial_value_struct *ivs = cfun->hard_reg_initial_vals; | |
1291 | int i; | |
1292 | rtx seq; | |
1293 | ||
1294 | if (ivs == 0) | |
1295 | return; | |
1296 | ||
1297 | start_sequence (); | |
1298 | for (i = 0; i < ivs->num_entries; i++) | |
1299 | emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg); | |
1300 | seq = get_insns (); | |
1301 | end_sequence (); | |
1302 | ||
2f937369 | 1303 | emit_insn_after (seq, get_insns ()); |
c0e7830f | 1304 | } |
385b6e2d R |
1305 | |
1306 | /* If the backend knows where to allocate pseudos for hard | |
1307 | register initial values, register these allocations now. */ | |
1308 | void | |
1d088dee | 1309 | allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED) |
385b6e2d R |
1310 | { |
1311 | #ifdef ALLOCATE_INITIAL_VALUE | |
1312 | struct initial_value_struct *ivs = cfun->hard_reg_initial_vals; | |
1313 | int i; | |
1314 | ||
1315 | if (ivs == 0) | |
1316 | return; | |
1317 | ||
1318 | for (i = 0; i < ivs->num_entries; i++) | |
1319 | { | |
1320 | int regno = REGNO (ivs->entries[i].pseudo); | |
1321 | rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg); | |
1322 | ||
1323 | if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1) | |
1324 | ; /* Do nothing. */ | |
1325 | else if (GET_CODE (x) == MEM) | |
1326 | reg_equiv_memory_loc[regno] = x; | |
f8cfc6aa | 1327 | else if (REG_P (x)) |
385b6e2d R |
1328 | { |
1329 | reg_renumber[regno] = REGNO (x); | |
1330 | /* Poke the regno right into regno_reg_rtx | |
1331 | so that even fixed regs are accepted. */ | |
1332 | REGNO (ivs->entries[i].pseudo) = REGNO (x); | |
1333 | } | |
1334 | else abort (); | |
1335 | } | |
1336 | #endif | |
1337 | } | |
e2500fed GK |
1338 | |
1339 | #include "gt-integrate.h" |