CALL_INSN_FUNCTION_USAGE problem
Richard Kenner
kenner@vlsi1.ultra.nyu.edu
Tue May 9 04:09:00 GMT 2000
reload1.c assumes that each entry is a REG, which is not true.
In the process of looking for other such, I found a few places that
could use a bit of a cleanup.
This was tested on Alpha.
Tue May 9 06:30:20 2000 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
* alias.c (nonlocal_reference_p): Minor reformatting.
* reload.c (find_equiv_reg): Simplify logic for
CALL_INSN_FUNCTION_USAGE since can't have SUBREG or pseudos and
do some reformatting.
* reload1.c (reload_combine): Don't assume everything in
CALL_INSN_FUNCTION_USAGE is a REG and clean up code a bit.
*** alias.c 2000/05/04 13:49:47 1.77
--- alias.c 2000/05/09 10:16:42
*************** nonlocal_reference_p (x)
*** 1428,1432 ****
{
x = CALL_INSN_FUNCTION_USAGE (x);
! if (!x) return 0;
}
else
--- 1428,1433 ----
{
x = CALL_INSN_FUNCTION_USAGE (x);
! if (x == 0)
! return 0;
}
else
*** reload.c 2000/04/23 22:25:25 1.105
--- reload.c 2000/05/09 10:16:52
*************** find_equiv_reg (goal, insn, class, other
*** 6301,6328 ****
{
register rtx dest = SET_DEST (pat);
! while (GET_CODE (dest) == SUBREG
! || GET_CODE (dest) == ZERO_EXTRACT
! || GET_CODE (dest) == SIGN_EXTRACT
! || GET_CODE (dest) == STRICT_LOW_PART)
! dest = XEXP (dest, 0);
if (GET_CODE (dest) == REG)
{
register int xregno = REGNO (dest);
! int xnregs;
! if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
! xnregs = HARD_REGNO_NREGS (xregno, GET_MODE (dest));
! else
! xnregs = 1;
if (xregno < regno + nregs
&& xregno + xnregs > regno)
return 0;
! if (xregno < valueno + valuenregs
&& xregno + xnregs > valueno)
return 0;
! if (goal_mem_addr_varies
! && reg_overlap_mentioned_for_reload_p (dest,
goal))
return 0;
}
else if (goal_mem && GET_CODE (dest) == MEM
&& ! push_operand (dest, GET_MODE (dest)))
--- 6301,6323 ----
{
register rtx dest = SET_DEST (pat);
!
if (GET_CODE (dest) == REG)
{
register int xregno = REGNO (dest);
! int xnregs
! = HARD_REGNO_NREGS (xregno, GET_MODE (dest));
!
if (xregno < regno + nregs
&& xregno + xnregs > regno)
return 0;
! else if (xregno < valueno + valuenregs
&& xregno + xnregs > valueno)
return 0;
! else if (goal_mem_addr_varies
! && reg_overlap_mentioned_for_reload_p (dest,
goal))
return 0;
}
+
else if (goal_mem && GET_CODE (dest) == MEM
&& ! push_operand (dest, GET_MODE (dest)))
*** reload1.c 2000/05/04 21:15:05 1.209
--- reload1.c 2000/05/09 10:17:10
*************** reload_combine ()
*** 8378,8381 ****
--- 8378,8382 ----
int first_index_reg = 1, last_index_reg = 0;
int i;
+ unsigned int r;
int last_label_ruid;
int min_labelno, n_labels;
*************** reload_combine ()
*** 8390,8402 ****
/* To avoid wasting too much time later searching for an index register,
determine the minimum and maximum index register numbers. */
! for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
! {
! if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i))
! {
! if (! last_index_reg)
! last_index_reg = i;
! first_index_reg = i;
! }
! }
/* If no index register is available, we can quit now. */
if (first_index_reg > last_index_reg)
--- 8391,8402 ----
/* To avoid wasting too much time later searching for an index register,
determine the minimum and maximum index register numbers. */
! for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
! if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
! {
! if (! last_index_reg)
! last_index_reg = r;
! 	  first_index_reg = r;
! }
!
/* If no index register is available, we can quit now. */
if (first_index_reg > last_index_reg)
*************** reload_combine ()
*** 8411,8414 ****
--- 8411,8415 ----
label_live = (HARD_REG_SET *) xmalloc (n_labels * sizeof (HARD_REG_SET));
CLEAR_HARD_REG_SET (ever_live_at_start);
+
for (i = n_basic_blocks - 1; i >= 0; i--)
{
*************** reload_combine ()
*** 8418,8423 ****
HARD_REG_SET live;
! REG_SET_TO_HARD_REG_SET (live, BASIC_BLOCK (i)->global_live_at_start);
! compute_use_by_pseudos (&live, BASIC_BLOCK (i)->global_live_at_start);
COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
IOR_HARD_REG_SET (ever_live_at_start, live);
--- 8419,8426 ----
HARD_REG_SET live;
! REG_SET_TO_HARD_REG_SET (live,
! BASIC_BLOCK (i)->global_live_at_start);
! compute_use_by_pseudos (&live,
! BASIC_BLOCK (i)->global_live_at_start);
COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
IOR_HARD_REG_SET (ever_live_at_start, live);
*************** reload_combine ()
*** 8427,8437 ****
/* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
last_label_ruid = reload_combine_ruid = 0;
! for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
{
! reg_state[i].store_ruid = reload_combine_ruid;
! if (fixed_regs[i])
! reg_state[i].use_index = -1;
else
! reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
}
--- 8430,8440 ----
/* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
last_label_ruid = reload_combine_ruid = 0;
! for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
{
! reg_state[r].store_ruid = reload_combine_ruid;
! if (fixed_regs[r])
! reg_state[r].use_index = -1;
else
! reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
}
*************** reload_combine ()
*** 8445,8456 ****
if (GET_CODE (insn) == CODE_LABEL)
last_label_ruid = reload_combine_ruid;
! if (GET_CODE (insn) == BARRIER)
! {
! for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
! if (! fixed_regs[i])
! reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
! }
if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
continue;
reload_combine_ruid++;
--- 8448,8459 ----
if (GET_CODE (insn) == CODE_LABEL)
last_label_ruid = reload_combine_ruid;
! else if (GET_CODE (insn) == BARRIER)
! for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
! if (! fixed_regs[r])
! reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
!
if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
continue;
+
reload_combine_ruid++;
*************** reload_combine ()
*** 8482,8486 ****
rtx prev = prev_nonnote_insn (insn);
rtx prev_set = prev ? single_set (prev) : NULL_RTX;
! int regno = REGNO (reg);
rtx const_reg = NULL_RTX;
rtx reg_sum = NULL_RTX;
--- 8485,8489 ----
rtx prev = prev_nonnote_insn (insn);
rtx prev_set = prev ? single_set (prev) : NULL_RTX;
! unsigned int regno = REGNO (reg);
rtx const_reg = NULL_RTX;
rtx reg_sum = NULL_RTX;
*************** reload_combine ()
*** 8509,8513 ****
for (i = first_index_reg; i <= last_index_reg; i++)
{
! if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
&& reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
&& reg_state[i].store_ruid <= reg_state[regno].use_ruid
--- 8512,8517 ----
for (i = first_index_reg; i <= last_index_reg; i++)
{
! if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
! i)
&& reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
&& reg_state[i].store_ruid <= reg_state[regno].use_ruid
*************** reload_combine ()
*** 8515,8518 ****
--- 8519,8523 ----
{
rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
+
const_reg = index_reg;
reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
*************** reload_combine ()
*** 8521,8537 ****
}
}
/* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
(REGY), i.e. BASE, is not clobbered before the last use we'll
create. */
! if (prev_set
&& GET_CODE (SET_SRC (prev_set)) == CONST_INT
&& rtx_equal_p (SET_DEST (prev_set), reg)
&& reg_state[regno].use_index >= 0
! && reg_state[REGNO (base)].store_ruid <= reg_state[regno].use_ruid
! && reg_sum)
{
int i;
! /* Change destination register and - if necessary - the
constant value in PREV, the constant loading instruction. */
validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
--- 8526,8544 ----
}
}
+
/* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
(REGY), i.e. BASE, is not clobbered before the last use we'll
create. */
! if (prev_set != 0
&& GET_CODE (SET_SRC (prev_set)) == CONST_INT
&& rtx_equal_p (SET_DEST (prev_set), reg)
&& reg_state[regno].use_index >= 0
! && (reg_state[REGNO (base)].store_ruid
! <= reg_state[regno].use_ruid)
! && reg_sum != 0)
{
int i;
! /* Change destination register and, if necessary, the
constant value in PREV, the constant loading instruction. */
validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
*************** reload_combine ()
*** 8542,8545 ****
--- 8549,8553 ----
+ INTVAL (reg_state[regno].offset)),
1);
+
/* Now for every use of REG that we have recorded, replace REG
with REG_SUM. */
*************** reload_combine ()
*** 8560,8613 ****
if (reg_state[regno].offset != const0_rtx)
! {
! /* Previous REG_EQUIV / REG_EQUAL notes for PREV
! are now invalid. */
! 		    for (np = &REG_NOTES (prev); *np; )
! {
! if (REG_NOTE_KIND (*np) == REG_EQUAL
! || REG_NOTE_KIND (*np) == REG_EQUIV)
! *np = XEXP (*np, 1);
! else
! np = &XEXP (*np, 1);
! }
! }
reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
! reg_state[REGNO (const_reg)].store_ruid = reload_combine_ruid;
continue;
}
}
}
! note_stores (PATTERN (insn), reload_combine_note_store, NULL);
if (GET_CODE (insn) == CALL_INSN)
{
rtx link;
! for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
! {
! if (call_used_regs[i])
! {
! reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
! reg_state[i].store_ruid = reload_combine_ruid;
! }
! }
for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
link = XEXP (link, 1))
! {
! rtx use = XEXP (link, 0);
! int regno = REGNO (XEXP (use, 0));
! if (GET_CODE (use) == CLOBBER)
! {
! reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
! reg_state[regno].store_ruid = reload_combine_ruid;
! }
! else
! reg_state[regno].use_index = -1;
! }
}
! if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) != RETURN)
{
/* Non-spill registers might be used at the call destination in
some unknown fashion, so we have to mark the unknown use. */
HARD_REG_SET *live;
if ((condjump_p (insn) || condjump_in_parallel_p (insn))
&& JUMP_LABEL (insn))
--- 8568,8626 ----
if (reg_state[regno].offset != const0_rtx)
! /* Previous REG_EQUIV / REG_EQUAL notes for PREV
! are now invalid. */
! 		  for (np = &REG_NOTES (prev); *np; )
! {
! if (REG_NOTE_KIND (*np) == REG_EQUAL
! || REG_NOTE_KIND (*np) == REG_EQUIV)
! *np = XEXP (*np, 1);
! else
! np = &XEXP (*np, 1);
! }
!
reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
! reg_state[REGNO (const_reg)].store_ruid
! = reload_combine_ruid;
continue;
}
}
}
!
! note_stores (PATTERN (insn), reload_combine_note_store, NULL);
!
if (GET_CODE (insn) == CALL_INSN)
{
rtx link;
! for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
! if (call_used_regs[r])
! {
! reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
! reg_state[r].store_ruid = reload_combine_ruid;
! }
!
for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
link = XEXP (link, 1))
! if (GET_CODE (XEXP (XEXP (link, 0), 0)) == REG)
! {
! unsigned int regno = REGNO (XEXP (XEXP (link, 0), 0));
!
! if (GET_CODE (XEXP (link, 0)) == CLOBBER)
! {
! reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
! reg_state[regno].store_ruid = reload_combine_ruid;
! }
! else
! reg_state[regno].use_index = -1;
! }
}
!
! else if (GET_CODE (insn) == JUMP_INSN
! && GET_CODE (PATTERN (insn)) != RETURN)
{
/* Non-spill registers might be used at the call destination in
some unknown fashion, so we have to mark the unknown use. */
HARD_REG_SET *live;
+
if ((condjump_p (insn) || condjump_in_parallel_p (insn))
&& JUMP_LABEL (insn))
*************** reload_combine ()
*** 8615,8624 ****
else
live = &ever_live_at_start;
for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
! {
! if (TEST_HARD_REG_BIT (*live, i))
! reg_state[i].use_index = -1;
! }
}
reload_combine_note_use (&PATTERN (insn), insn);
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
--- 8628,8637 ----
else
live = &ever_live_at_start;
+
for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
! if (TEST_HARD_REG_BIT (*live, i))
! reg_state[i].use_index = -1;
}
+
reload_combine_note_use (&PATTERN (insn), insn);
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
*************** reload_combine ()
*** 8634,8637 ****
--- 8647,8651 ----
}
}
+
free (label_live);
}
*************** reload_combine ()
*** 8640,8643 ****
--- 8654,8658 ----
update reg_state[regno].store_ruid and reg_state[regno].use_index
accordingly. Called via note_stores from reload_combine. */
+
static void
reload_combine_note_store (dst, set, data)
More information about the Gcc-patches
mailing list