]> gcc.gnu.org Git - gcc.git/blame - gcc/integrate.c
[multiple changes]
[gcc.git] / gcc / integrate.c
CommitLineData
1322177d 1/* Procedure integration for GCC.
8beccec8 2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
d9221e01 3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
175160e7
MT
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
5
1322177d 6This file is part of GCC.
175160e7 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
175160e7 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
175160e7
MT
17
18You should have received a copy of the GNU General Public License
1322177d
LB
19along with GCC; see the file COPYING. If not, write to the Free
20Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2102111-1307, USA. */
175160e7 22
175160e7 23#include "config.h"
670ee920 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
ccd043a9 27
175160e7
MT
28#include "rtl.h"
29#include "tree.h"
6baf1cc8 30#include "tm_p.h"
12307ca2 31#include "regs.h"
175160e7 32#include "flags.h"
135d50f1 33#include "debug.h"
175160e7 34#include "insn-config.h"
175160e7
MT
35#include "expr.h"
36#include "output.h"
e9a25f70 37#include "recog.h"
175160e7
MT
38#include "integrate.h"
39#include "real.h"
6adb4e3a 40#include "except.h"
175160e7 41#include "function.h"
d6f4ec51 42#include "toplev.h"
ab87f8c8 43#include "intl.h"
c6d9a88c 44#include "params.h"
c0e7830f 45#include "ggc.h"
91d231cb 46#include "target.h"
63e1b1c4 47#include "langhooks.h"
175160e7 48
6de9cd9a 49/* Round to the next highest integer that meets the alignment. */
175160e7 50#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
175160e7 51\f
c0e7830f 52
/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;   /* The hard register.  */
  rtx pseudo;     /* The pseudo paired with it.  */
} initial_value_pair;
/* A growable array of such pairs; NUM_ENTRIES of the MAX_ENTRIES
   allocated slots in ENTRIES are in use (the GTY length annotation
   tells the garbage collector how many to walk).  */
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
63
1d088dee
AJ
64static void subst_constants (rtx *, rtx, struct inline_remap *, int);
65static void set_block_origin_self (tree);
66static void set_block_abstract_flags (tree, int);
1d088dee 67static void mark_stores (rtx, rtx, void *);
175160e7 68\f
1f3d3a31 69/* Returns the Ith entry in the label_map contained in MAP. If the
e5e809f4
JL
70 Ith entry has not yet been set, return a fresh label. This function
71 performs a lazy initialization of label_map, thereby avoiding huge memory
72 explosions when the label_map gets very large. */
73
1f3d3a31 74rtx
1d088dee 75get_label_from_map (struct inline_remap *map, int i)
1f3d3a31
JL
76{
77 rtx x = map->label_map[i];
78
79 if (x == NULL_RTX)
00174bdf 80 x = map->label_map[i] = gen_label_rtx ();
1f3d3a31
JL
81
82 return x;
83}
84
91d231cb
JM
85/* Return false if the function FNDECL cannot be inlined on account of its
86 attributes, true otherwise. */
588d3ade 87bool
1d088dee 88function_attribute_inlinable_p (tree fndecl)
91d231cb 89{
b9a26d09 90 if (targetm.attribute_table)
91d231cb 91 {
b9a26d09 92 tree a;
91d231cb 93
b9a26d09 94 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
91d231cb 95 {
b9a26d09
NB
96 tree name = TREE_PURPOSE (a);
97 int i;
98
99 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
100 if (is_attribute_p (targetm.attribute_table[i].name, name))
5fd9b178 101 return targetm.function_attribute_inlinable_p (fndecl);
91d231cb 102 }
91d231cb
JM
103 }
104
b9a26d09 105 return true;
91d231cb 106}
175160e7 107\f
5377d5ba
RK
108/* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
109 but now it will be in the TO_FN. */
02e24c7a 110
94755d92 111tree
1d088dee 112copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
02e24c7a 113{
94755d92
MM
114 tree copy;
115
116 /* Copy the declaration. */
117 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
a8f8d1cc 118 {
669d6ecc 119 tree type = TREE_TYPE (decl);
c246c65d 120
5377d5ba 121 /* For a parameter or result, we must make an equivalent VAR_DECL, not a
a8f8d1cc 122 new PARM_DECL. */
c246c65d 123 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
669d6ecc
JM
124 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
125 TREE_READONLY (copy) = TREE_READONLY (decl);
126 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
a8f8d1cc 127 }
94755d92
MM
128 else
129 {
130 copy = copy_node (decl);
1e7ee6ad
MM
131 /* The COPY is not abstract; it will be generated in TO_FN. */
132 DECL_ABSTRACT (copy) = 0;
ae2bcd98 133 lang_hooks.dup_lang_specific_decl (copy);
a71811fe
MM
134
135 /* TREE_ADDRESSABLE isn't used to indicate that a label's
136 address has been taken; it's for internal bookkeeping in
137 expand_goto_internal. */
138 if (TREE_CODE (copy) == LABEL_DECL)
6de9cd9a
DN
139 {
140 TREE_ADDRESSABLE (copy) = 0;
6de9cd9a 141 }
94755d92
MM
142 }
143
144 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
145 declaration inspired this copy. */
99ceae26 146 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
94755d92
MM
147
148 /* The new variable/label has no RTL, yet. */
4e8dca1c
JM
149 if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
150 SET_DECL_RTL (copy, NULL_RTX);
94755d92
MM
151
152 /* These args would always appear unused, if not for this. */
153 TREE_USED (copy) = 1;
154
155 /* Set the context for the new declaration. */
156 if (!DECL_CONTEXT (decl))
157 /* Globals stay global. */
00174bdf 158 ;
94755d92
MM
159 else if (DECL_CONTEXT (decl) != from_fn)
160 /* Things that weren't in the scope of the function we're inlining
4e8dca1c 161 from aren't in the scope we're inlining to, either. */
94755d92
MM
162 ;
163 else if (TREE_STATIC (decl))
4e8dca1c 164 /* Function-scoped static variables should stay in the original
94755d92 165 function. */
02e24c7a
MM
166 ;
167 else
94755d92
MM
168 /* Ordinary automatic local variables are now in the scope of the
169 new function. */
170 DECL_CONTEXT (copy) = to_fn;
02e24c7a
MM
171
172 return copy;
173}
175160e7 174\f
175160e7
MT
175/* Unfortunately, we need a global copy of const_equiv map for communication
176 with a function called from note_stores. Be *very* careful that this
177 is used properly in the presence of recursion. */
178
c68da89c 179varray_type global_const_equiv_varray;
175160e7 180
/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  else if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size
		= get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
	      int alignment
		= (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
		   / BITS_PER_UNIT);

	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      if (alignment)
		size = CEIL_ROUND (size, alignment);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      if (rtx_equal_function_value_matters)
		/* This is an ignored return value.  We must not
		   leave it in with REG_FUNCTION_VALUE_P set, since
		   that would confuse subsequent inlining of the
		   current function into a later function.  */
		return gen_rtx_REG (GET_MODE (orig), regno);
	      else
		/* Must be unrolling loops or replicating code if we
		   reach here, so return the register unchanged.  */
		return orig;
	    }
	  else
	    return orig;

	  /* NOTE(review): unreachable — every branch above returns.  */
	  abort ();
	}
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      return simplify_gen_subreg (GET_MODE (orig), copy,
				  GET_MODE (SUBREG_REG (orig)),
				  SUBREG_BYTE (orig));

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movmemhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

      /* We need to handle "deleted" labels that appear in the DECL_RTL
	 of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
	break;

      /* Fall through.  */
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_VECTOR:
      /* These are shareable; return them unchanged.  */
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = cfun;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}
      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because don't want
	 to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
	 it contains multiple ASM_OPERANDS rtx's that share the input
	 and constraint vecs.  We must make sure that the copied insn
	 continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
	  PUT_MODE (copy, GET_MODE (orig));
	  ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
	  ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
	    = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
	  ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
	  ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
	  ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
	    = map->copy_asm_constraints_vector;
#ifdef USE_MAPPED_LOCATION
	  ASM_OPERANDS_SOURCE_LOCATION (copy)
	    = ASM_OPERANDS_SOURCE_LOCATION (orig);
#else
	  ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
	  ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
#endif
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	{
	  rtx copy
	    = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
			   copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						    map, 0));

	  MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));

	  return
	    gen_rtx_CALL (GET_MODE (orig), copy,
			  copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
	}
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
      /* ??? Is this for the old or the new unroller?  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig),
							 map, 0),
				- loc_offset),
			       NULL_RTX));
	}
      else
	return gen_rtx_SET (VOIDmode,
			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;

    case MEM:
      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
							 map, 0));
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  /* Generic case: copy the rtx operand by operand according to its
     format string.  */
  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  X0ANY (copy, i) = X0ANY (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i)
	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
					     map, for_lhs);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
	= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
596\f
/* Substitute known constant values into INSN, if that is valid.
   MAP supplies the register/constant-equivalence state accumulated so
   far; its equiv_sets array is (re)filled with any equivalences this
   insn establishes.  */

void
try_constants (rtx insn, struct inline_remap *map)
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Enforce consistency between the addresses in the regular insn flow
     and the ones in CALL_INSN_FUNCTION_USAGE lists, if any.  */
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    {
      subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
      apply_change_group ();
    }

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (REG_P (map->equiv_sets[i].dest))
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* Following clause is a hack to make case work where GNU C++
		 reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
655\f
656/* Substitute known constants for pseudo regs in the contents of LOC,
657 which are part of INSN.
d45cf215 658 If INSN is zero, the substitution should always be done (this is used to
175160e7
MT
659 update DECL_RTL).
660 These changes are taken out by try_constants if the result is not valid.
661
662 Note that we are more concerned with determining when the result of a SET
663 is a constant, for further propagation, than actually inserting constants
664 into insns; cse will do the latter task better.
665
666 This function is also used to adjust address of items previously addressed
00174bdf 667 via the virtual stack variable or virtual incoming arguments registers.
14a774a9
RK
668
669 If MEMONLY is nonzero, only make changes inside a MEM. */
175160e7
MT
670
671static void
1d088dee 672subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
175160e7
MT
673{
674 rtx x = *loc;
b3694847
SS
675 int i, j;
676 enum rtx_code code;
677 const char *format_ptr;
175160e7
MT
678 int num_changes = num_validated_changes ();
679 rtx new = 0;
a30d557c 680 enum machine_mode op0_mode = MAX_MACHINE_MODE;
175160e7
MT
681
682 code = GET_CODE (x);
683
684 switch (code)
685 {
686 case PC:
687 case CONST_INT:
688 case CONST_DOUBLE:
69ef87e2 689 case CONST_VECTOR:
175160e7
MT
690 case SYMBOL_REF:
691 case CONST:
692 case LABEL_REF:
693 case ADDRESS:
694 return;
695
696#ifdef HAVE_cc0
697 case CC0:
14a774a9
RK
698 if (! memonly)
699 validate_change (insn, loc, map->last_cc0_value, 1);
175160e7
MT
700 return;
701#endif
702
703 case USE:
704 case CLOBBER:
705 /* The only thing we can do with a USE or CLOBBER is possibly do
706 some substitutions in a MEM within it. */
3c0cb5de 707 if (MEM_P (XEXP (x, 0)))
14a774a9 708 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
175160e7
MT
709 return;
710
711 case REG:
712 /* Substitute for parms and known constants. Don't replace
713 hard regs used as user variables with constants. */
14a774a9
RK
714 if (! memonly)
715 {
716 int regno = REGNO (x);
717 struct const_equiv_data *p;
718
719 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
720 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
721 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
722 p->rtx != 0)
723 && p->age >= map->const_age)
724 validate_change (insn, loc, p->rtx, 1);
725 }
726 return;
175160e7
MT
727
728 case SUBREG:
637c5064
RS
729 /* SUBREG applied to something other than a reg
730 should be treated as ordinary, since that must
731 be a special hack and we don't know how to treat it specially.
732 Consider for example mulsidi3 in m68k.md.
733 Ordinary SUBREG of a REG needs this special treatment. */
f8cfc6aa 734 if (! memonly && REG_P (SUBREG_REG (x)))
637c5064
RS
735 {
736 rtx inner = SUBREG_REG (x);
737 rtx new = 0;
175160e7 738
637c5064
RS
739 /* We can't call subst_constants on &SUBREG_REG (x) because any
740 constant or SUBREG wouldn't be valid inside our SUBEG. Instead,
741 see what is inside, try to form the new SUBREG and see if that is
00174bdf 742 valid. We handle two cases: extracting a full word in an
637c5064 743 integral mode and extracting the low part. */
14a774a9 744 subst_constants (&inner, NULL_RTX, map, 0);
0631e0bf 745 new = simplify_gen_subreg (GET_MODE (x), inner,
1d088dee 746 GET_MODE (SUBREG_REG (x)),
0631e0bf 747 SUBREG_BYTE (x));
175160e7 748
637c5064
RS
749 if (new)
750 validate_change (insn, loc, new, 1);
0631e0bf
JH
751 else
752 cancel_changes (num_changes);
175160e7 753
637c5064
RS
754 return;
755 }
756 break;
175160e7
MT
757
758 case MEM:
14a774a9 759 subst_constants (&XEXP (x, 0), insn, map, 0);
175160e7
MT
760
761 /* If a memory address got spoiled, change it back. */
14a774a9
RK
762 if (! memonly && insn != 0 && num_validated_changes () != num_changes
763 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
175160e7
MT
764 cancel_changes (num_changes);
765 return;
766
767 case SET:
768 {
769 /* Substitute constants in our source, and in any arguments to a
770 complex (e..g, ZERO_EXTRACT) destination, but not in the destination
771 itself. */
772 rtx *dest_loc = &SET_DEST (x);
773 rtx dest = *dest_loc;
774 rtx src, tem;
96e60f0c
JJ
775 enum machine_mode compare_mode = VOIDmode;
776
777 /* If SET_SRC is a COMPARE which subst_constants would turn into
778 COMPARE of 2 VOIDmode constants, note the mode in which comparison
779 is to be done. */
780 if (GET_CODE (SET_SRC (x)) == COMPARE)
781 {
782 src = SET_SRC (x);
783 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
8beccec8 784 || CC0_P (dest))
96e60f0c
JJ
785 {
786 compare_mode = GET_MODE (XEXP (src, 0));
787 if (compare_mode == VOIDmode)
788 compare_mode = GET_MODE (XEXP (src, 1));
789 }
790 }
175160e7 791
14a774a9 792 subst_constants (&SET_SRC (x), insn, map, memonly);
175160e7
MT
793 src = SET_SRC (x);
794
795 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
175160e7
MT
796 || GET_CODE (*dest_loc) == SUBREG
797 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
798 {
799 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
800 {
14a774a9
RK
801 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
802 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
175160e7
MT
803 }
804 dest_loc = &XEXP (*dest_loc, 0);
805 }
806
91594e43 807 /* Do substitute in the address of a destination in memory. */
3c0cb5de 808 if (MEM_P (*dest_loc))
14a774a9 809 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
91594e43 810
175160e7
MT
811 /* Check for the case of DEST a SUBREG, both it and the underlying
812 register are less than one word, and the SUBREG has the wider mode.
813 In the case, we are really setting the underlying register to the
814 source converted to the mode of DEST. So indicate that. */
815 if (GET_CODE (dest) == SUBREG
816 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
817 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
818 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
819 <= GET_MODE_SIZE (GET_MODE (dest)))
e2eb57b7
RK
820 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
821 src)))
175160e7
MT
822 src = tem, dest = SUBREG_REG (dest);
823
824 /* If storing a recognizable value save it for later recording. */
825 if ((map->num_sets < MAX_RECOG_OPERANDS)
826 && (CONSTANT_P (src)
f8cfc6aa 827 || (REG_P (src)
83b93f40
RK
828 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
829 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
175160e7 830 || (GET_CODE (src) == PLUS
f8cfc6aa 831 && REG_P (XEXP (src, 0))
83b93f40
RK
832 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
833 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
175160e7
MT
834 && CONSTANT_P (XEXP (src, 1)))
835 || GET_CODE (src) == COMPARE
8beccec8 836 || CC0_P (dest)
175160e7
MT
837 || (dest == pc_rtx
838 && (src == pc_rtx || GET_CODE (src) == RETURN
839 || GET_CODE (src) == LABEL_REF))))
840 {
841 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
842 it will cause us to save the COMPARE with any constants
843 substituted, which is what we want for later. */
96e60f0c
JJ
844 rtx src_copy = copy_rtx (src);
845 map->equiv_sets[map->num_sets].equiv = src_copy;
175160e7 846 map->equiv_sets[map->num_sets++].dest = dest;
96e60f0c
JJ
847 if (compare_mode != VOIDmode
848 && GET_CODE (src) == COMPARE
849 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
8beccec8 850 || CC0_P (dest))
96e60f0c
JJ
851 && GET_MODE (XEXP (src, 0)) == VOIDmode
852 && GET_MODE (XEXP (src, 1)) == VOIDmode)
853 {
854 map->compare_src = src_copy;
855 map->compare_mode = compare_mode;
856 }
175160e7 857 }
175160e7 858 }
e9a25f70
JL
859 return;
860
861 default:
862 break;
175160e7
MT
863 }
864
865 format_ptr = GET_RTX_FORMAT (code);
00174bdf 866
175160e7
MT
867 /* If the first operand is an expression, save its mode for later. */
868 if (*format_ptr == 'e')
869 op0_mode = GET_MODE (XEXP (x, 0));
870
871 for (i = 0; i < GET_RTX_LENGTH (code); i++)
872 {
873 switch (*format_ptr++)
874 {
875 case '0':
876 break;
877
878 case 'e':
879 if (XEXP (x, i))
14a774a9 880 subst_constants (&XEXP (x, i), insn, map, memonly);
175160e7
MT
881 break;
882
883 case 'u':
884 case 'i':
885 case 's':
02bea8a8 886 case 'w':
00174bdf 887 case 'n':
8f985ec4 888 case 't':
2ff581c3 889 case 'B':
175160e7
MT
890 break;
891
892 case 'E':
893 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
14a774a9
RK
894 for (j = 0; j < XVECLEN (x, i); j++)
895 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
896
175160e7
MT
897 break;
898
899 default:
900 abort ();
901 }
902 }
903
904 /* If this is a commutative operation, move a constant to the second
905 operand unless the second operand is already a CONST_INT. */
14a774a9 906 if (! memonly
ec8e098d
PB
907 && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
908 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
175160e7
MT
909 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
910 {
911 rtx tem = XEXP (x, 0);
912 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
913 validate_change (insn, &XEXP (x, 1), tem, 1);
914 }
915
916 /* Simplify the expression in case we put in some constants. */
14a774a9
RK
917 if (! memonly)
918 switch (GET_RTX_CLASS (code))
175160e7 919 {
ec8e098d 920 case RTX_UNARY:
14a774a9
RK
921 if (op0_mode == MAX_MACHINE_MODE)
922 abort ();
923 new = simplify_unary_operation (code, GET_MODE (x),
924 XEXP (x, 0), op0_mode);
925 break;
926
ec8e098d
PB
927 case RTX_COMPARE:
928 case RTX_COMM_COMPARE:
14a774a9
RK
929 {
930 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
931
932 if (op_mode == VOIDmode)
933 op_mode = GET_MODE (XEXP (x, 1));
c6fb08ad 934
7ce3e360 935 new = simplify_relational_operation (code, GET_MODE (x), op_mode,
14a774a9 936 XEXP (x, 0), XEXP (x, 1));
14a774a9 937 break;
00174bdf 938 }
175160e7 939
ec8e098d
PB
940 case RTX_BIN_ARITH:
941 case RTX_COMM_ARITH:
14a774a9
RK
942 new = simplify_binary_operation (code, GET_MODE (x),
943 XEXP (x, 0), XEXP (x, 1));
944 break;
175160e7 945
ec8e098d
PB
946 case RTX_BITFIELD_OPS:
947 case RTX_TERNARY:
14a774a9
RK
948 if (op0_mode == MAX_MACHINE_MODE)
949 abort ();
950
96e60f0c
JJ
951 if (code == IF_THEN_ELSE)
952 {
953 rtx op0 = XEXP (x, 0);
954
ec8e098d 955 if (COMPARISON_P (op0)
96e60f0c
JJ
956 && GET_MODE (op0) == VOIDmode
957 && ! side_effects_p (op0)
958 && XEXP (op0, 0) == map->compare_src
959 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
960 {
961 /* We have compare of two VOIDmode constants for which
962 we recorded the comparison mode. */
c6fb08ad
PB
963 rtx tem =
964 simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
965 map->compare_mode, XEXP (op0, 0),
966 XEXP (op0, 1));
967
968 if (GET_CODE (tem) != CONST_INT)
969 new = simplify_ternary_operation (code, GET_MODE (x),
970 op0_mode, tem, XEXP (x, 1),
971 XEXP (x, 2));
972 else if (tem == const0_rtx)
96e60f0c 973 new = XEXP (x, 2);
c6fb08ad 974 else
96e60f0c
JJ
975 new = XEXP (x, 1);
976 }
977 }
978 if (!new)
979 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
980 XEXP (x, 0), XEXP (x, 1),
981 XEXP (x, 2));
14a774a9 982 break;
ec8e098d
PB
983
984 default:
985 break;
14a774a9 986 }
175160e7
MT
987
988 if (new)
989 validate_change (insn, loc, new, 1);
990}
991
992/* Show that register modified no longer contain known constants. We are
993 called from note_stores with parts of the new insn. */
994
915b80ed 995static void
1d088dee 996mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
175160e7 997{
e2eb57b7 998 int regno = -1;
6a651371 999 enum machine_mode mode = VOIDmode;
e2eb57b7
RK
1000
1001 /* DEST is always the innermost thing set, except in the case of
1002 SUBREGs of hard registers. */
175160e7 1003
f8cfc6aa 1004 if (REG_P (dest))
e2eb57b7 1005 regno = REGNO (dest), mode = GET_MODE (dest);
f8cfc6aa 1006 else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
e2eb57b7 1007 {
ddef6bc7
JJ
1008 regno = REGNO (SUBREG_REG (dest));
1009 if (regno < FIRST_PSEUDO_REGISTER)
1010 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
1011 GET_MODE (SUBREG_REG (dest)),
1012 SUBREG_BYTE (dest),
1013 GET_MODE (dest));
e2eb57b7
RK
1014 mode = GET_MODE (SUBREG_REG (dest));
1015 }
1016
1017 if (regno >= 0)
1018 {
770ae6cc
RK
1019 unsigned int uregno = regno;
1020 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
66fd46b6 1021 : uregno + hard_regno_nregs[uregno][mode] - 1);
770ae6cc 1022 unsigned int i;
e2eb57b7 1023
e9a25f70
JL
1024 /* Ignore virtual stack var or virtual arg register since those
1025 are handled separately. */
770ae6cc
RK
1026 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
1027 && uregno != VIRTUAL_STACK_VARS_REGNUM)
1028 for (i = uregno; i <= last_reg; i++)
6a651371 1029 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
c68da89c 1030 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
e2eb57b7 1031 }
175160e7
MT
1032}
1033\f
81578142
RS
1034/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
1035 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
1036 that it points to the node itself, thus indicating that the node is its
1037 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
1038 the given node is NULL, recursively descend the decl/block tree which
1039 it is the root of, and for each other ..._DECL or BLOCK node contained
1040 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
1041 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
1042 values to point to themselves. */
1043
81578142 1044static void
1d088dee 1045set_block_origin_self (tree stmt)
81578142
RS
1046{
1047 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
1048 {
1049 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
1050
1051 {
b3694847 1052 tree local_decl;
81578142 1053
00174bdf 1054 for (local_decl = BLOCK_VARS (stmt);
81578142
RS
1055 local_decl != NULL_TREE;
1056 local_decl = TREE_CHAIN (local_decl))
00174bdf 1057 set_decl_origin_self (local_decl); /* Potential recursion. */
81578142
RS
1058 }
1059
1060 {
b3694847 1061 tree subblock;
81578142 1062
00174bdf 1063 for (subblock = BLOCK_SUBBLOCKS (stmt);
81578142
RS
1064 subblock != NULL_TREE;
1065 subblock = BLOCK_CHAIN (subblock))
00174bdf 1066 set_block_origin_self (subblock); /* Recurse. */
81578142
RS
1067 }
1068 }
1069}
1070
1071/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
1072 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
1073 node to so that it points to the node itself, thus indicating that the
1074 node represents its own (abstract) origin. Additionally, if the
1075 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
1076 the decl/block tree of which the given node is the root of, and for
1077 each other ..._DECL or BLOCK node contained therein whose
1078 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
1079 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
1080 point to themselves. */
1081
1cfdcc15 1082void
1d088dee 1083set_decl_origin_self (tree decl)
81578142
RS
1084{
1085 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
1086 {
1087 DECL_ABSTRACT_ORIGIN (decl) = decl;
1088 if (TREE_CODE (decl) == FUNCTION_DECL)
1089 {
b3694847 1090 tree arg;
81578142
RS
1091
1092 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1093 DECL_ABSTRACT_ORIGIN (arg) = arg;
29d356fb
RK
1094 if (DECL_INITIAL (decl) != NULL_TREE
1095 && DECL_INITIAL (decl) != error_mark_node)
81578142
RS
1096 set_block_origin_self (DECL_INITIAL (decl));
1097 }
1098 }
1099}
1100\f
1101/* Given a pointer to some BLOCK node, and a boolean value to set the
1102 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
1103 the given block, and for all local decls and all local sub-blocks
1104 (recursively) which are contained therein. */
1105
81578142 1106static void
1d088dee 1107set_block_abstract_flags (tree stmt, int setting)
81578142 1108{
b3694847
SS
1109 tree local_decl;
1110 tree subblock;
81578142 1111
12307ca2 1112 BLOCK_ABSTRACT (stmt) = setting;
81578142 1113
12307ca2
RK
1114 for (local_decl = BLOCK_VARS (stmt);
1115 local_decl != NULL_TREE;
1116 local_decl = TREE_CHAIN (local_decl))
1117 set_decl_abstract_flags (local_decl, setting);
81578142 1118
12307ca2
RK
1119 for (subblock = BLOCK_SUBBLOCKS (stmt);
1120 subblock != NULL_TREE;
1121 subblock = BLOCK_CHAIN (subblock))
1122 set_block_abstract_flags (subblock, setting);
81578142
RS
1123}
1124
1125/* Given a pointer to some ..._DECL node, and a boolean value to set the
1126 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
1127 given decl, and (in the case where the decl is a FUNCTION_DECL) also
1128 set the abstract flags for all of the parameters, local vars, local
1129 blocks and sub-blocks (recursively) to the same setting. */
1130
1131void
1d088dee 1132set_decl_abstract_flags (tree decl, int setting)
81578142
RS
1133{
1134 DECL_ABSTRACT (decl) = setting;
1135 if (TREE_CODE (decl) == FUNCTION_DECL)
1136 {
b3694847 1137 tree arg;
81578142
RS
1138
1139 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1140 DECL_ABSTRACT (arg) = setting;
29d356fb
RK
1141 if (DECL_INITIAL (decl) != NULL_TREE
1142 && DECL_INITIAL (decl) != error_mark_node)
81578142
RS
1143 set_block_abstract_flags (DECL_INITIAL (decl), setting);
1144 }
1145}
c0e7830f
DD
1146\f
1147/* Functions to keep track of the values hard regs had at the start of
1148 the function. */
1149
902197eb 1150rtx
1d088dee 1151get_hard_reg_initial_reg (struct function *fun, rtx reg)
902197eb
DD
1152{
1153 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
1154 int i;
1155
1156 if (ivs == 0)
1157 return NULL_RTX;
1158
1159 for (i = 0; i < ivs->num_entries; i++)
1160 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1161 return ivs->entries[i].hard_reg;
1162
1163 return NULL_RTX;
1164}
1165
c0e7830f 1166rtx
1d088dee 1167has_func_hard_reg_initial_val (struct function *fun, rtx reg)
c0e7830f
DD
1168{
1169 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
1170 int i;
1171
1172 if (ivs == 0)
1173 return NULL_RTX;
1174
1175 for (i = 0; i < ivs->num_entries; i++)
1176 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
1177 return ivs->entries[i].pseudo;
1178
1179 return NULL_RTX;
1180}
1181
1182rtx
1d088dee 1183get_func_hard_reg_initial_val (struct function *fun, rtx reg)
c0e7830f
DD
1184{
1185 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
1186 rtx rv = has_func_hard_reg_initial_val (fun, reg);
1187
1188 if (rv)
1189 return rv;
1190
1191 if (ivs == 0)
1192 {
703ad42b 1193 fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
c0e7830f
DD
1194 ivs = fun->hard_reg_initial_vals;
1195 ivs->num_entries = 0;
1196 ivs->max_entries = 5;
703ad42b 1197 ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
c0e7830f
DD
1198 }
1199
1200 if (ivs->num_entries >= ivs->max_entries)
1201 {
1202 ivs->max_entries += 5;
703ad42b
KG
1203 ivs->entries = ggc_realloc (ivs->entries,
1204 ivs->max_entries
1205 * sizeof (initial_value_pair));
c0e7830f
DD
1206 }
1207
1208 ivs->entries[ivs->num_entries].hard_reg = reg;
1209 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
1210
1211 return ivs->entries[ivs->num_entries++].pseudo;
1212}
1213
1214rtx
1d088dee 1215get_hard_reg_initial_val (enum machine_mode mode, int regno)
c0e7830f
DD
1216{
1217 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
1218}
1219
1220rtx
1d088dee 1221has_hard_reg_initial_val (enum machine_mode mode, int regno)
c0e7830f
DD
1222{
1223 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
1224}
1225
c0e7830f 1226void
1d088dee 1227emit_initial_value_sets (void)
c0e7830f
DD
1228{
1229 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
1230 int i;
1231 rtx seq;
1232
1233 if (ivs == 0)
1234 return;
1235
1236 start_sequence ();
1237 for (i = 0; i < ivs->num_entries; i++)
1238 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1239 seq = get_insns ();
1240 end_sequence ();
1241
91278841 1242 emit_insn_after (seq, entry_of_function ());
c0e7830f 1243}
385b6e2d
R
1244
1245/* If the backend knows where to allocate pseudos for hard
1246 register initial values, register these allocations now. */
1247void
1d088dee 1248allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
385b6e2d
R
1249{
1250#ifdef ALLOCATE_INITIAL_VALUE
1251 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
1252 int i;
1253
1254 if (ivs == 0)
1255 return;
1256
1257 for (i = 0; i < ivs->num_entries; i++)
1258 {
1259 int regno = REGNO (ivs->entries[i].pseudo);
1260 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
1261
1262 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
1263 ; /* Do nothing. */
3c0cb5de 1264 else if (MEM_P (x))
385b6e2d 1265 reg_equiv_memory_loc[regno] = x;
f8cfc6aa 1266 else if (REG_P (x))
385b6e2d
R
1267 {
1268 reg_renumber[regno] = REGNO (x);
1269 /* Poke the regno right into regno_reg_rtx
1270 so that even fixed regs are accepted. */
1271 REGNO (ivs->entries[i].pseudo) = REGNO (x);
1272 }
1273 else abort ();
1274 }
1275#endif
1276}
e2500fed
GK
1277
1278#include "gt-integrate.h"
This page took 2.488092 seconds and 5 git commands to generate.