/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"

/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
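/* For illustration: with a power-of-two ALIGN, CEIL_ROUND (13, 8)
   evaluates to (13 + 7) & ~7 == 16, while CEIL_ROUND (16, 8) stays 16.  */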
\f

/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

static void subst_constants (rtx *, rtx, struct inline_remap *, int);
static void set_block_origin_self (tree);
static void set_block_abstract_flags (tree, int);
static void mark_stores (rtx, rtx, void *);
\f
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (struct inline_remap *map, int i)
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
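/* For illustration, a caller remapping a jump target might do something
   like the following (sketch, INSN and MAP hypothetical):

     rtx old_label = JUMP_LABEL (insn);
     rtx new_label = get_label_from_map (map, CODE_LABEL_NUMBER (old_label));

   The first request for a given index allocates the label lazily; later
   requests for the same index return the same rtx.  */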

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (tree fndecl)
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  tree name = TREE_PURPOSE (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return targetm.function_attribute_inlinable_p (fndecl);
	}
    }

  return true;
}
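/* For illustration, an inliner might guard its decision with this
   predicate (sketch, FNDECL hypothetical):

     if (!function_attribute_inlinable_p (fndecl))
       return false;

   letting the target veto inlining of functions whose attributes it
   must handle specially.  */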
\f
/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type;
      int invisiref = 0;

      /* See if the frontend wants to pass this by invisible reference.  */
      if (TREE_CODE (decl) == PARM_DECL
	  && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
	  && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
	  && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
	{
	  invisiref = 1;
	  type = DECL_ARG_TYPE (decl);
	}
      else
	type = TREE_TYPE (decl);

      /* For a parameter, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      if (!invisiref)
	{
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
	  TREE_READONLY (copy) = TREE_READONLY (decl);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
	}
      else
	{
	  TREE_ADDRESSABLE (copy) = 0;
	  TREE_READONLY (copy) = 1;
	  TREE_THIS_VOLATILE (copy) = 0;
	}
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	{
	  TREE_ADDRESSABLE (copy) = 0;
	  DECL_TOO_LATE (copy) = 0;
	}
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
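/* For illustration, a tree inliner remapping a local variable might call
   (sketch, VAR and FN hypothetical):

     tree new_var = copy_decl_for_inlining (var, fn, current_function_decl);

   The copy keeps the flags of the original but gets a DECL_ABSTRACT_ORIGIN,
   a cleared DECL_RTL and the destination function as its context.  */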
\f
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  else if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size
		= get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
	      int alignment
		= (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
		   / BITS_PER_UNIT);

	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      if (alignment)
		size = CEIL_ROUND (size, alignment);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      if (rtx_equal_function_value_matters)
		/* This is an ignored return value.  We must not
		   leave it in with REG_FUNCTION_VALUE_P set, since
		   that would confuse subsequent inlining of the
		   current function into a later function.  */
		return gen_rtx_REG (GET_MODE (orig), regno);
	      else
		/* Must be unrolling loops or replicating code if we
		   reach here, so return the register unchanged.  */
		return orig;
	    }
	  else
	    return orig;

	  abort ();
	}
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      return simplify_gen_subreg (GET_MODE (orig), copy,
				  GET_MODE (SUBREG_REG (orig)),
				  SUBREG_BYTE (orig));

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movstrhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

      /* We need to handle "deleted" labels that appear in the DECL_RTL
	 of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
	break;

      /* Fall through.  */
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_VECTOR:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = cfun;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}
      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't want
	 to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
	 it contains multiple ASM_OPERANDS rtx's that share the input
	 and constraint vecs.  We must make sure that the copied insn
	 continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
	  PUT_MODE (copy, GET_MODE (orig));
	  ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
	  ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
	    = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
	  ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
	  ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
	  ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
	    = map->copy_asm_constraints_vector;
#ifdef USE_MAPPED_LOCATION
	  ASM_OPERANDS_SOURCE_LOCATION (copy)
	    = ASM_OPERANDS_SOURCE_LOCATION (orig);
#else
	  ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
	  ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
#endif
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	{
	  rtx copy
	    = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
			   copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						    map, 0));

	  MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));

	  return
	    gen_rtx_CALL (GET_MODE (orig), copy,
			  copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
	}
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
      /* ??? Is this for the old or the new unroller?  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig),
							 map, 0),
				- loc_offset),
			       NULL_RTX));
	}
      else
	return gen_rtx_SET (VOIDmode,
			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;

    case MEM:
      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
							 map, 0));
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  X0ANY (copy, i) = X0ANY (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i)
	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
					     map, for_lhs);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
	= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
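/* For illustration, a caller copying the body of an inlined function
   might process each insn with something like (sketch, INSN and MAP
   hypothetical):

     rtx pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
     rtx new_insn = emit_insn (pattern);

   passing FOR_LHS = 1 only for the destination side of a SET, as the
   SET case above does.  */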
\f
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (rtx insn, struct inline_remap *map)
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Enforce consistency between the addresses in the regular insn flow
     and the ones in CALL_INSN_FUNCTION_USAGE lists, if any.  */
  if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
    {
      subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
      apply_change_group ();
    }

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (REG_P (map->equiv_sets[i].dest))
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* Following clause is a hack to make case work where GNU C++
		 reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
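/* For illustration, the typical pattern after emitting a copied insn is
   (sketch, NEW_INSN and MAP hypothetical):

     try_constants (new_insn, map);

   which first folds constants into addresses, then into the rest of the
   pattern, and finally records any constant equivalences the insn sets
   up for use while copying later insns.  */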
\f
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust address of items previously addressed
   via the virtual stack variable or virtual incoming arguments registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
{
  rtx x = *loc;
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      if (! memonly)
	validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (MEM_P (XEXP (x, 0)))
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      if (! memonly)
	{
	  int regno = REGNO (x);
	  struct const_equiv_data *p;

	  if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	      && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
	      && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
		  p->rtx != 0)
	      && p->age >= map->const_age)
	    validate_change (insn, loc, p->rtx, 1);
	}
      return;

    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && REG_P (SUBREG_REG (x)))
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
	     see what is inside, try to form the new SUBREG and see if that is
	     valid.  We handle two cases: extracting a full word in an
	     integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map, 0);
	  new = simplify_gen_subreg (GET_MODE (x), inner,
				     GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x));

	  if (new)
	    validate_change (insn, loc, new, 1);
	  else
	    cancel_changes (num_changes);

	  return;
	}
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;

    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;
	enum machine_mode compare_mode = VOIDmode;

	/* If SET_SRC is a COMPARE which subst_constants would turn into
	   COMPARE of 2 VOIDmode constants, note the mode in which comparison
	   is to be done.  */
	if (GET_CODE (SET_SRC (x)) == COMPARE)
	  {
	    src = SET_SRC (x);
	    if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
		|| CC0_P (dest))
	      {
		compare_mode = GET_MODE (XEXP (src, 0));
		if (compare_mode == VOIDmode)
		  compare_mode = GET_MODE (XEXP (src, 1));
	      }
	  }

	subst_constants (&SET_SRC (x), insn, map, memonly);
	src = SET_SRC (x);

	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
		subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (MEM_P (*dest_loc))
	  subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

	/* Check for the case of DEST a SUBREG, both it and the underlying
	   register are less than one word, and the SUBREG has the wider mode.
	   In that case, we are really setting the underlying register to the
	   source converted to the mode of DEST.  So indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (REG_P (src)
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && REG_P (XEXP (src, 0))
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
		|| CC0_P (dest)
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a COMPARE
	       it will cause us to save the COMPARE with any constants
	       substituted, which is what we want for later.  */
	    rtx src_copy = copy_rtx (src);
	    map->equiv_sets[map->num_sets].equiv = src_copy;
	    map->equiv_sets[map->num_sets++].dest = dest;
	    if (compare_mode != VOIDmode
		&& GET_CODE (src) == COMPARE
		&& (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
		    || CC0_P (dest))
		&& GET_MODE (XEXP (src, 0)) == VOIDmode
		&& GET_MODE (XEXP (src, 1)) == VOIDmode)
	      {
		map->compare_src = src_copy;
		map->compare_mode = compare_mode;
	      }
	  }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  if (XEXP (x, i))
	    subst_constants (&XEXP (x, i), insn, map, memonly);
	  break;

	case 'u':
	case 'i':
	case 's':
	case 'w':
	case 'n':
	case 't':
	case 'B':
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    for (j = 0; j < XVECLEN (x, i); j++)
	      subst_constants (&XVECEXP (x, i, j), insn, map, memonly);

	  break;

	default:
	  abort ();
	}
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case RTX_UNARY:
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();
	new = simplify_unary_operation (code, GET_MODE (x),
					XEXP (x, 0), op0_mode);
	break;

      case RTX_COMPARE:
      case RTX_COMM_COMPARE:
	{
	  enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

	  if (op_mode == VOIDmode)
	    op_mode = GET_MODE (XEXP (x, 1));

	  new = simplify_relational_operation (code, GET_MODE (x), op_mode,
					       XEXP (x, 0), XEXP (x, 1));
	  break;
	}

      case RTX_BIN_ARITH:
      case RTX_COMM_ARITH:
	new = simplify_binary_operation (code, GET_MODE (x),
					 XEXP (x, 0), XEXP (x, 1));
	break;

      case RTX_BITFIELD_OPS:
      case RTX_TERNARY:
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();

	if (code == IF_THEN_ELSE)
	  {
	    rtx op0 = XEXP (x, 0);

	    if (COMPARISON_P (op0)
		&& GET_MODE (op0) == VOIDmode
		&& ! side_effects_p (op0)
		&& XEXP (op0, 0) == map->compare_src
		&& GET_MODE (XEXP (op0, 1)) == VOIDmode)
	      {
		/* We have compare of two VOIDmode constants for which
		   we recorded the comparison mode.  */
		rtx tem =
		  simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
					   map->compare_mode, XEXP (op0, 0),
					   XEXP (op0, 1));

		if (GET_CODE (tem) != CONST_INT)
		  new = simplify_ternary_operation (code, GET_MODE (x),
						    op0_mode, tem, XEXP (x, 1),
						    XEXP (x, 2));
		else if (tem == const0_rtx)
		  new = XEXP (x, 2);
		else
		  new = XEXP (x, 1);
	      }
	  }
	if (!new)
	  new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					    XEXP (x, 0), XEXP (x, 1),
					    XEXP (x, 2));
	break;

      default:
	break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}

/* Show that the registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

static void
mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (REG_P (dest))
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      regno = REGNO (SUBREG_REG (dest));
      if (regno < FIRST_PSEUDO_REGISTER)
	regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				      GET_MODE (SUBREG_REG (dest)),
				      SUBREG_BYTE (dest),
				      GET_MODE (dest));
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      unsigned int uregno = regno;
      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
			       : uregno + hard_regno_nregs[uregno][mode] - 1);
      unsigned int i;

      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && uregno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = uregno; i <= last_reg; i++)
	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
\f
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (tree stmt)
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

void
set_decl_origin_self (tree decl)
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
\f
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (tree stmt, int setting)
{
  tree local_decl;
  tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (tree decl, int setting)
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
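/* For illustration, a caller emitting debug information for an abstract
   instance of a function might bracket its work as (sketch, DECL
   hypothetical):

     set_decl_abstract_flags (decl, 1);
     ... write out the abstract instance ...
     set_decl_abstract_flags (decl, 0);

   so DECL_ABSTRACT is set only while the abstract instance is written.  */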
\f
/* Functions to keep track of the values hard regs had at the start of
   the function.  */

rtx
get_hard_reg_initial_reg (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

rtx
has_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
      return ivs->entries[i].pseudo;

  return NULL_RTX;
}

rtx
get_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  rtx rv = has_func_hard_reg_initial_val (fun, reg);

  if (rv)
    return rv;

  if (ivs == 0)
    {
      fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
      ivs = fun->hard_reg_initial_vals;
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = ggc_realloc (ivs->entries,
				  ivs->max_entries
				  * sizeof (initial_value_pair));
    }

  ivs->entries[ivs->num_entries].hard_reg = reg;
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));

  return ivs->entries[ivs->num_entries++].pseudo;
}

rtx
get_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}

rtx
has_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}
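/* For illustration, a backend that needs the entry value of a hard
   register, say an incoming return-address register RA_REGNUM
   (hypothetical), can ask for a pseudo holding it:

     rtx rv = get_hard_reg_initial_val (Pmode, RA_REGNUM);

   The first call records the pair and returns a fresh pseudo;
   emit_initial_value_sets later emits the copies at function entry.  */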

void
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, entry_of_function ());
}

/* If the backend knows where to allocate pseudos for hard
   register initial values, register these allocations now.  */
void
allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
{
#ifdef ALLOCATE_INITIAL_VALUE
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return;

  for (i = 0; i < ivs->num_entries; i++)
    {
      int regno = REGNO (ivs->entries[i].pseudo);
      rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);

      if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
	; /* Do nothing.  */
      else if (MEM_P (x))
	reg_equiv_memory_loc[regno] = x;
      else if (REG_P (x))
	{
	  reg_renumber[regno] = REGNO (x);
	  /* Poke the regno right into regno_reg_rtx
	     so that even fixed regs are accepted.  */
	  REGNO (ivs->entries[i].pseudo) = REGNO (x);
	}
      else abort ();
    }
#endif
}

#include "gt-integrate.h"