[dataflow]: PATCH: prevent duplicate mw_hardreg, and other misc changes
Seongbae Park
seongbae.park@gmail.com
Tue Nov 14 18:41:00 GMT 2006
This patch does:
1) Prevent duplicate mw_hardreg entries,
and change the verification code to deal with that.
2) Handle calls that are COND_EXEC.
3) Specially handle the case where DF_REF_LOC() is null.
1) was causing a bootstrap failure on PPC and IA64.
2 & 3 were causing a bootstrap failure on IA64.
4) Add a new debug counter tail_call to control tail call optimization.
5) Fix a bug in dbg_cnt handling multiple options.
Now, e.g. -fdbg-cnt=tail_call:1,new_dce:10 works fine.
Bootstrapped on x86_64 and PPC.
With this patch, IA64 reached stage3 (and failed during libgfortran build).
The regression tests are still running but as this fixes the bootstrap failure,
I think this needs to go in to allow further progress.
Seongbae
2006-11-14 Seongbae Park <seongbae.park@gmail.com>
* tree-tailcall.c (execute_tail_recursion): Added dbg_cnt().
* df-scan.c (df_mw_hardreg_find_hardreg, df_get_conditional_uses,
df_get_call_refs):
New functions.
(df_refs_add_to_chains): Don't add duplicate mw_hardreg.
(df_insn_refs_collect): Refactored to use df_get_conditional_uses
and df_get_call_refs.
(df_insn_refs_verify): Find the matching mw_hardreg.
* dbgcnt.c (dbg_cnt_process_opt): Fix a bug handling multiple
counters. Add a new debug counter tail_call.
* dbgcnt.h (enum debug_counter): Added a new counter tail_call.
* calls.c (expand_call): Check dbg_cnt(tail_call).
* df-problems.c (df_create_unused_note, df_ri_bb_compute):
Handle NULL LOC case.
* dce.c (init_dce): Add a debug dump.
-------------- next part --------------
Index: gcc/tree-tailcall.c
===================================================================
--- gcc/tree-tailcall.c (revision 118799)
+++ gcc/tree-tailcall.c (working copy)
@@ -35,6 +35,7 @@ Boston, MA 02110-1301, USA. */
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
+#include "dbgcnt.h"
/* The file implements the tail recursion elimination. It is also used to
analyze the tail calls in general, passing the results to the rtl level
@@ -1004,7 +1005,7 @@ execute_tail_recursion (void)
static bool
gate_tail_calls (void)
{
- return flag_optimize_sibling_calls != 0;
+ return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}
static unsigned int
Index: gcc/df-scan.c
===================================================================
--- gcc/df-scan.c (revision 118799)
+++ gcc/df-scan.c (working copy)
@@ -1354,6 +1354,20 @@ df_ref_add_to_chains (struct dataflow *d
}
+static struct df_mw_hardreg *
+df_mw_hardreg_find_hardreg (struct df_mw_hardreg *hardreg, struct df_ref *ref)
+{
+ for (; hardreg; hardreg = hardreg->next)
+ {
+ if (hardreg->type == DF_REF_TYPE (ref)
+ && hardreg->flags == (DF_REF_FLAGS (ref) & ~DF_REF_MW_HARDREG_GROUP)
+ && hardreg->mw_reg == DF_REF_REG (ref))
+ return hardreg;
+ }
+ return NULL;
+}
+
+
/* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info chains
and update other necessary information */
@@ -1376,13 +1390,19 @@ df_refs_add_to_chains (struct dataflow *
/* A beginning of a group of mw hardregs */
struct df_insn_info *insn_info = DF_INSN_GET (df, insn);
- hardreg = pool_alloc (problem_data->mw_reg_pool);
- hardreg->next = insn_info->mw_hardregs;
- insn_info->mw_hardregs = hardreg;
- hardreg->type = DF_REF_TYPE (ref);
- hardreg->flags = DF_REF_FLAGS (ref) & ~DF_REF_MW_HARDREG_GROUP;
- hardreg->mw_reg = DF_REF_REG (ref);
- hardreg->regs = NULL;
+ hardreg = df_mw_hardreg_find_hardreg (insn_info->mw_hardregs,
+ ref);
+ if (hardreg == NULL)
+ {
+ /* Matching hardreg group not found. Create one. */
+ hardreg = pool_alloc (problem_data->mw_reg_pool);
+ hardreg->next = insn_info->mw_hardregs;
+ insn_info->mw_hardregs = hardreg;
+ hardreg->type = DF_REF_TYPE (ref);
+ hardreg->flags = DF_REF_FLAGS (ref) & ~DF_REF_MW_HARDREG_GROUP;
+ hardreg->mw_reg = DF_REF_REG (ref);
+ hardreg->regs = NULL;
+ }
/* MW_HARDREG_GROUP ref is just a placeholder, so free the memory. */
pool_free (problem_data->ref_pool, ref);
@@ -1874,6 +1894,124 @@ df_insn_contains_asm (rtx insn)
}
+
+/* For each DF_REF_CONDITIONAL def, add a corresponding use. */
+
+static struct df_ref *
+df_get_conditional_uses (struct dataflow *dflow,
+ struct df_ref *ref)
+{
+ struct df_ref dummy;
+ struct df_ref *uses = &dummy;
+
+ DF_REF_NEXT_REF (uses) = NULL;
+
+ for (; ref; ref = DF_REF_NEXT_REF (ref))
+ {
+ if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
+ {
+ struct df_ref *use;
+ enum df_ref_type t = DF_REF_TYPE (ref);
+ switch (t)
+ {
+ case DF_REF_REG_DEF:
+ t = DF_REF_REG_USE;
+ break;
+ case DF_REF_REG_MEM_STORE:
+ t = DF_REF_REG_MEM_LOAD;
+ break;
+ default:
+ /* Ignore non-defs. */
+ continue;
+ }
+ use = df_ref_create_structure (dflow,
+ DF_REF_REG (ref),
+ DF_REF_LOC (ref),
+ DF_REF_BB (ref),
+ DF_REF_INSN (ref),
+ t,
+ DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
+ DF_REF_REGNO (use) = DF_REF_REGNO (ref);
+ DF_REF_CHAIN_APPEND (uses, use);
+ }
+ }
+
+ if (DF_REF_NEXT_REF (&dummy) == NULL)
+ return NULL;
+
+
+ DF_REF_NEXT_REF (uses) = DF_REF_NEXT_REF (&dummy);
+ return uses;
+}
+
+
+/* Get call's extra defs and uses. */
+
+static struct df_ref *
+df_get_call_refs (struct dataflow *dflow,
+ struct df_ref *insn_refs,
+ basic_block bb,
+ rtx insn,
+ enum df_ref_flags flags)
+{
+ rtx note;
+ bitmap_iterator bi;
+ unsigned int ui;
+ bool is_sibling_call;
+ unsigned int i;
+
+ /* Record the registers used to pass arguments, and explicitly
+ noted as clobbered. */
+ for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
+ note = XEXP (note, 1))
+ {
+ if (GET_CODE (XEXP (note, 0)) == USE)
+ insn_refs = df_uses_record (dflow, insn_refs,
+ &XEXP (XEXP (note, 0), 0),
+ DF_REF_REG_USE,
+ bb, insn, flags);
+ else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
+ insn_refs = df_defs_record (dflow, insn_refs,
+ XEXP (note, 0), bb, insn, flags);
+ }
+
+ /* The stack ptr is used (honorarily) by a CALL insn. */
+ insn_refs = df_ref_record (dflow, insn_refs,
+ regno_reg_rtx[STACK_POINTER_REGNUM],
+ NULL,
+ bb, insn,
+ DF_REF_REG_USE,
+ DF_REF_CALL_STACK_USAGE | flags);
+
+ /* Calls may also reference any of the global registers,
+ so they are recorded as used. */
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ if (global_regs[i])
+ insn_refs = df_ref_record (dflow, insn_refs,
+ regno_reg_rtx[i],
+ NULL,
+ bb, insn,
+ DF_REF_REG_USE,
+ flags);
+
+ is_sibling_call = SIBLING_CALL_P (insn);
+ EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
+ {
+ if (!is_sibling_call
+ || !bitmap_bit_p (dflow->df->exit_block_uses, ui)
+ || refers_to_regno_p (ui, ui+1,
+ current_function_return_rtx, NULL))
+ insn_refs = df_ref_record (dflow, insn_refs,
+ regno_reg_rtx[ui],
+ NULL,
+ bb, insn,
+ DF_REF_REG_DEF,
+ DF_REF_MAY_CLOBBER | flags);
+ }
+
+ return insn_refs;
+}
+
/* Collect all refs in the INSN.
This function is free of any side-effect -
it will create and return a list of df_ref's (chained through next_ref)
@@ -1884,8 +2022,8 @@ df_insn_refs_collect (struct dataflow *d
{
struct df_ref dummy;
struct df_ref *insn_refs = &dummy;
+ struct df_ref *cond_uses;
rtx note;
- unsigned int i;
DF_REF_NEXT_REF (insn_refs) = NULL;
@@ -1910,80 +2048,25 @@ df_insn_refs_collect (struct dataflow *d
if (CALL_P (insn))
{
- rtx note;
- bitmap_iterator bi;
- unsigned int ui;
- bool is_sibling_call;
-
- /* Record the registers used to pass arguments, and explicitly
- noted as clobbered. */
- for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
- note = XEXP (note, 1))
- {
- if (GET_CODE (XEXP (note, 0)) == USE)
- insn_refs = df_uses_record (dflow, insn_refs,
- &XEXP (XEXP (note, 0), 0),
- DF_REF_REG_USE,
- bb, insn, 0);
- else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
- insn_refs = df_defs_record (dflow, insn_refs,
- XEXP (note, 0), bb, insn, 0);
- }
-
- /* The stack ptr is used (honorarily) by a CALL insn. */
- insn_refs = df_ref_record (dflow, insn_refs,
- regno_reg_rtx[STACK_POINTER_REGNUM], NULL,
- bb, insn, DF_REF_REG_USE,
- DF_REF_CALL_STACK_USAGE);
-
- /* Calls may also reference any of the global registers,
- so they are recorded as used. */
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (global_regs[i])
- insn_refs = df_ref_record (dflow, insn_refs,
- regno_reg_rtx[i], NULL,
- bb, insn, DF_REF_REG_USE, 0);
-
- is_sibling_call = SIBLING_CALL_P (insn);
- EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
- {
- if (!is_sibling_call
- || !bitmap_bit_p (dflow->df->exit_block_uses, ui)
- || refers_to_regno_p (ui, ui+1,
- current_function_return_rtx, NULL))
- insn_refs = df_ref_record (dflow, insn_refs,
- regno_reg_rtx[ui], NULL,
- bb, insn, DF_REF_REG_DEF,
- DF_REF_MAY_CLOBBER);
- }
-#if 0
- if (SIBLING_CALL_P (insn))
- {
- struct df_ref *extra_refs = df_exit_block_uses_collect (dflow);
- struct df_ref *ref;
-
- /* Attach extra_refs at the end of insn_refs. */
- DF_REF_NEXT_REF (insn_refs) = extra_refs;
-
- for (ref = extra_refs; ref; ref = DF_REF_NEXT_REF (ref))
- {
- /* This has implication on regs_ever_live marking,
- because what used to be only artificial uses are
- now non-artificial uses. */
- DF_REF_BB (ref) = bb;
- DF_REF_INSN (ref) = insn;
- DF_REF_FLAGS_SET (ref, DF_REF_ARTIFICIAL);
- insn_refs = ref;
- }
- /* insn_refs now points to the last ref in the ref chain. */
- }
-#endif
+ enum df_ref_flags extra_flags = (GET_CODE (PATTERN (insn)) == COND_EXEC)
+ ? DF_REF_CONDITIONAL : 0;
+ insn_refs = df_get_call_refs (dflow, insn_refs, bb, insn, extra_flags);
}
/* Record the register uses. */
insn_refs = df_uses_record (dflow, insn_refs,
&PATTERN (insn), DF_REF_REG_USE, bb, insn, 0);
+ /* DF_REF_CONDITIONAL needs corresponding USES. */
+ cond_uses = df_get_conditional_uses (dflow, DF_REF_NEXT_REF (&dummy));
+
+ if (cond_uses)
+ {
+ DF_REF_NEXT_REF (insn_refs) = DF_REF_NEXT_REF (cond_uses);
+ DF_REF_NEXT_REF (cond_uses) = NULL;
+ insn_refs = cond_uses;
+ }
+
return DF_REF_NEXT_REF (&dummy);
}
@@ -2898,16 +2981,8 @@ df_insn_refs_verify (struct dataflow *df
{
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_MW_HARDREG_GROUP))
{
- for (hardreg = DF_INSN_GET (df, insn)->mw_hardregs;
- hardreg;
- hardreg = hardreg->next)
- {
- if (hardreg->type == DF_REF_TYPE (ref)
- && hardreg->flags ==
- (DF_REF_FLAGS (ref) & ~DF_REF_MW_HARDREG_GROUP)
- && hardreg->mw_reg == DF_REF_REG (ref))
- break;
- }
+ hardreg = DF_INSN_GET (df, insn)->mw_hardregs;
+ hardreg = df_mw_hardreg_find_hardreg (hardreg, ref);
if (hardreg == NULL)
{
if (abort_if_fail)
Index: gcc/dbgcnt.c
===================================================================
--- gcc/dbgcnt.c (revision 118799)
+++ gcc/dbgcnt.c (working copy)
@@ -13,7 +13,8 @@ struct string2counter_map {
static struct string2counter_map map[debug_counter_number_of_counters] =
{
-COUNTER(new_dce)
+COUNTER (new_dce),
+COUNTER (tail_call)
};
static int count[debug_counter_number_of_counters];
@@ -63,7 +64,9 @@ dbg_cnt_process_opt (const char *arg)
while (comma)
{
colon = strchr (comma + 1, ':');
- dbg_cnt_set_limit_by_name (comma + 1, colon - comma + 1, atoi (colon + 1));
+ if (colon == NULL || !(colon[1] >= '0' && colon[1] <= '9'))
+ return;
+ dbg_cnt_set_limit_by_name (comma + 1, colon - (comma + 1), atoi (colon + 1));
comma = strchr (colon + 1, ',');
}
}
Index: gcc/dbgcnt.h
===================================================================
--- gcc/dbgcnt.h (revision 118799)
+++ gcc/dbgcnt.h (working copy)
@@ -1,5 +1,6 @@
enum debug_counter {
new_dce,
+ tail_call,
debug_counter_number_of_counters
};
Index: gcc/calls.c
===================================================================
--- gcc/calls.c (revision 118799)
+++ gcc/calls.c (working copy)
@@ -41,6 +41,7 @@ Software Foundation, 51 Franklin Street,
#include "target.h"
#include "cgraph.h"
#include "except.h"
+#include "dbgcnt.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
@@ -2149,7 +2150,8 @@ expand_call (tree exp, rtx target, int i
if (currently_expanding_call++ != 0
|| !flag_optimize_sibling_calls
|| args_size.var
- || lookup_stmt_eh_region (exp) >= 0)
+ || lookup_stmt_eh_region (exp) >= 0
+ || dbg_cnt (tail_call) == false)
try_tail_call = 0;
/* Rest of purposes for tail call optimizations to fail. */
Index: gcc/df-problems.c
===================================================================
--- gcc/df-problems.c (revision 118799)
+++ gcc/df-problems.c (working copy)
@@ -3892,7 +3892,8 @@ df_create_unused_note (basic_block bb, r
&& (!bitmap_bit_p (artificial_uses, dregno))
&& (!df_ignore_stack_reg (dregno)))
{
- rtx reg = *DF_REF_REAL_LOC (def);
+ rtx reg = (DF_REF_LOC (def))
+ ? *DF_REF_REAL_LOC (def): DF_REF_REG (def);
rtx note = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
REG_NOTES (insn) = note;
#ifdef REG_DEAD_DEBUGGING
@@ -4099,7 +4100,8 @@ df_ri_bb_compute (struct dataflow *dflow
&& (!(DF_REF_FLAGS (use) & DF_REF_READ_WRITE))
&& (!df_ignore_stack_reg (uregno)))
{
- rtx reg = *DF_REF_REAL_LOC (use);
+ rtx reg = (DF_REF_LOC (use))
+ ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use);
rtx note = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
REG_NOTES (insn) = note;
if (df->permanent_flags & DF_RI_LIFE)
Index: gcc/dce.c
===================================================================
--- gcc/dce.c (revision 118799)
+++ gcc/dce.c (working copy)
@@ -205,6 +205,9 @@ init_dce (bool fast)
}
}
+ if (dump_file)
+ df_dump (dce_df, dump_file);
+
marked = BITMAP_ALLOC (NULL);
marked_libcalls = BITMAP_ALLOC (NULL);
}
More information about the Gcc-patches
mailing list