This is the mail archive of the
gcc-patches@gcc.gnu.org
mailing list for the GCC project.
[PATCH][alias-improvements] Fix up DSE
- From: Richard Guenther <rguenther at suse dot de>
- To: gcc-patches at gcc dot gnu dot org
- Date: Fri, 2 Jan 2009 00:40:05 +0100 (CET)
- Subject: [PATCH][alias-improvements] Fix up DSE
This "fixes" DSE. It removes nearly all the unused/unnecessary crap
and extends it to use the oracle and to also delete unused stores
(stores that are not invalidated by another store - the old implementation
didn't do that. oops.) All the bitmap stuff could be removed as well,
but ...
Now it is still what it seems to be on trunk - a very brute force way
of computing dead stores, no caching, no nothing. Oh well. My plan
is eventually to just remove tree-ssa-dse.c and instead use proper and
precise reaching definitions to do dead store removal from DCE. But
this was easier.
Bootstrapped and tested on x86_64-unknown-linux-gnu; I will apply this
to the branch once the autotester has caught up with the current revision.
Richard.
2009-01-02 Richard Guenther <rguenther@suse.de>
* tree-ssa-alias.c (ref_may_used_by_call_p): New function.
(ref_may_used_by_stmt_p): Likewise.
(call_may_clobber_ref_p): Handle more cases.
(stmt_may_clobber_ref_p): Export.
* tree-ssa-alias.h (ref_may_used_by_stmt_p): Declare.
(stmt_may_clobber_ref_p): Likewise.
* tree-ssa-dse.c (struct address_walk_data): Remove.
(memory_ssa_name_same): Likewise.
(memory_address_same): Likewise.
(get_kill_of_stmt_lhs): Likewise.
(dse_possible_dead_store_p): Simplify, use the oracle. Handle
unused stores.
(dse_optimize_stmt): Simplify. Properly remove stores.
testsuite/
* gcc.dg/noncompile/920507-1.c: Fix invalid dead array access.
Index: alias-improvements/gcc/tree-ssa-alias.c
===================================================================
*** alias-improvements.orig/gcc/tree-ssa-alias.c 2009-01-01 19:19:05.000000000 +0100
--- alias-improvements/gcc/tree-ssa-alias.c 2009-01-01 19:41:22.000000000 +0100
*************** struct gimple_opt_pass pass_build_alias
*** 1045,1050 ****
--- 1045,1113 ----
};
+ /* If the call CALL may use the memory reference REF return true,
+ otherwise return false. */
+
+ static bool
+ ref_may_used_by_call_p (gimple call ATTRIBUTE_UNUSED, tree ref)
+ {
+ tree base = get_base_address (ref);
+ unsigned i;
+
+ /* If the base variable is call-used then it may be used. */
+ if (base
+ && DECL_P (base)
+ && is_call_used (base))
+ return true;
+
+ /* Inspect call arguments for passed-by-value aliases. */
+ for (i = 0; i < gimple_call_num_args (call); ++i)
+ {
+ tree op = gimple_call_arg (call, i);
+
+ if (TREE_CODE (op) == EXC_PTR_EXPR
+ || TREE_CODE (op) == FILTER_EXPR)
+ continue;
+
+ if (TREE_CODE (op) == WITH_SIZE_EXPR)
+ op = TREE_OPERAND (op, 0);
+
+ if (TREE_CODE (op) != SSA_NAME
+ && !is_gimple_min_invariant (op)
+ && refs_may_alias_p (op, ref))
+ return true;
+ }
+
+ return false;
+ }
+
+ /* If the statement STMT may use the memory reference REF return
+ true, otherwise return false. */
+
+ bool
+ ref_may_used_by_stmt_p (gimple stmt, tree ref)
+ {
+ if (is_gimple_assign (stmt))
+ {
+ tree rhs;
+
+ /* All memory assign statements are single. */
+ if (!gimple_assign_single_p (stmt))
+ return false;
+
+ rhs = gimple_assign_rhs1 (stmt);
+ if (is_gimple_reg (rhs)
+ || is_gimple_min_invariant (rhs)
+ || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
+ return false;
+
+ return refs_may_alias_p (rhs, ref);
+ }
+ else if (is_gimple_call (stmt))
+ return ref_may_used_by_call_p (stmt, ref);
+
+ return true;
+ }
/* If the call in statement CALL may clobber the memory reference REF
return true, otherwise return false. */
*************** call_may_clobber_ref_p (gimple call, tre
*** 1060,1066 ****
return false;
base = get_base_address (ref);
! if (SSA_VAR_P (base))
return is_call_clobbered (base);
return true;
--- 1123,1136 ----
return false;
base = get_base_address (ref);
! if (!base)
! return true;
!
! if (TREE_CODE (base) == SSA_NAME
! || CONSTANT_CLASS_P (base))
! return false;
!
! if (DECL_P (base))
return is_call_clobbered (base);
return true;
*************** call_may_clobber_ref_p (gimple call, tre
*** 1069,1075 ****
/* If the statement STMT may clobber the memory reference REF return true,
otherwise return false. */
! static bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
if (is_gimple_call (stmt))
--- 1139,1145 ----
/* If the statement STMT may clobber the memory reference REF return true,
otherwise return false. */
! bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
if (is_gimple_call (stmt))
Index: alias-improvements/gcc/tree-ssa-alias.h
===================================================================
*** alias-improvements.orig/gcc/tree-ssa-alias.h 2009-01-01 19:19:05.000000000 +0100
--- alias-improvements/gcc/tree-ssa-alias.h 2009-01-01 19:41:22.000000000 +0100
***************
*** 25,30 ****
--- 25,32 ----
/* In tree-ssa-alias.c */
bool may_point_to_decl (tree, tree);
+ bool ref_may_used_by_stmt_p (gimple, tree);
+ bool stmt_may_clobber_ref_p (gimple, tree);
void *walk_non_aliased_vuses (tree, tree,
void *(*)(tree, tree, void *), void *);
Index: alias-improvements/gcc/tree-ssa-dse.c
===================================================================
*** alias-improvements.orig/gcc/tree-ssa-dse.c 2009-01-01 19:16:14.000000000 +0100
--- alias-improvements/gcc/tree-ssa-dse.c 2009-01-01 20:07:34.000000000 +0100
*************** struct dse_block_local_data
*** 83,95 ****
bitmap stores;
};
- /* Basic blocks of the potentially dead store and the following
- store, for memory_address_same. */
- struct address_walk_data
- {
- basic_block store1_bb, store2_bb;
- };
-
static bool gate_dse (void);
static unsigned int tree_ssa_dse (void);
static void dse_initialize_block_local_data (struct dom_walk_data *,
--- 83,88 ----
*************** dse_initialize_block_local_data (struct
*** 150,367 ****
}
}
- /* Helper function for memory_address_same via walk_tree. Returns
- non-NULL if it finds an SSA_NAME which is part of the address,
- such that the definition of the SSA_NAME post-dominates the store
- we want to delete but not the store that we believe makes it
- redundant. This indicates that the address may change between
- the two stores. */
-
- static tree
- memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
- void *data)
- {
- struct address_walk_data *walk_data = (struct address_walk_data *) data;
- tree expr = *expr_p;
- gimple def_stmt;
- basic_block def_bb;
-
- if (TREE_CODE (expr) != SSA_NAME)
- return NULL_TREE;
-
- /* If we've found a default definition, then there's no problem. Both
- stores will post-dominate it. And def_bb will be NULL. */
- if (SSA_NAME_IS_DEFAULT_DEF (expr))
- return NULL_TREE;
-
- def_stmt = SSA_NAME_DEF_STMT (expr);
- def_bb = gimple_bb (def_stmt);
-
- /* DEF_STMT must dominate both stores. So if it is in the same
- basic block as one, it does not post-dominate that store. */
- if (walk_data->store1_bb != def_bb
- && dominated_by_p (CDI_POST_DOMINATORS, walk_data->store1_bb, def_bb))
- {
- if (walk_data->store2_bb == def_bb
- || !dominated_by_p (CDI_POST_DOMINATORS, walk_data->store2_bb,
- def_bb))
- /* Return non-NULL to stop the walk. */
- return *expr_p;
- }
-
- return NULL_TREE;
- }
-
- /* Return TRUE if the destination memory address in STORE1 and STORE2
- might be modified after STORE1, before control reaches STORE2. */
-
- static bool
- memory_address_same (gimple store1, gimple store2)
- {
- struct address_walk_data walk_data;
-
- walk_data.store1_bb = gimple_bb (store1);
- walk_data.store2_bb = gimple_bb (store2);
-
- return (walk_tree (gimple_assign_lhs_ptr (store1), memory_ssa_name_same,
- &walk_data, NULL)
- == NULL);
- }
-
- /* Return true if there is a stmt that kills the lhs of STMT and is in the
- virtual def-use chain of STMT without a use in between the kill and STMT.
- Returns false if no such stmt is found.
- *FIRST_USE_P is set to the first use of the single virtual def of
- STMT. *USE_P is set to the vop killed by *USE_STMT. */
-
- static bool
- get_kill_of_stmt_lhs (gimple stmt,
- use_operand_p * first_use_p,
- use_operand_p * use_p, gimple * use_stmt)
- {
- tree lhs;
-
- gcc_assert (is_gimple_assign (stmt));
-
- lhs = gimple_assign_lhs (stmt);
-
- /* We now walk the chain of single uses of the single VDEFs.
- We succeeded finding a kill if the lhs of the use stmt is
- equal to the original lhs. We can keep walking to the next
- use if there are no possible uses of the original lhs in
- the stmt. */
- do
- {
- tree use_lhs;
- def_operand_p def_p;
-
- /* The stmt must have a single VDEF. */
- def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_VDEF);
- if (def_p == NULL_DEF_OPERAND_P)
- return false;
-
- /* Get the single immediate use of the def. */
- if (!single_imm_use (DEF_FROM_PTR (def_p), first_use_p, &stmt))
- return false;
- first_use_p = use_p;
-
- /* If there are possible hidden uses, give up. */
- if (!gimple_assign_single_p (stmt)
- || (TREE_CODE (gimple_assign_rhs1 (stmt)) != SSA_NAME
- && !is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
- return false;
-
- /* If the use stmts lhs matches the original lhs we have
- found the kill, otherwise continue walking. */
- use_lhs = gimple_assign_lhs (stmt);
- if (operand_equal_p (use_lhs, lhs, 0))
- {
- *use_stmt = stmt;
- return true;
- }
- }
- while (1);
- }
-
/* A helper of dse_optimize_stmt.
! Given a GIMPLE_ASSIGN in STMT, check that each VDEF has one
! use, and that one use is another VDEF clobbering the first one.
!
Return TRUE if the above conditions are met, otherwise FALSE. */
static bool
! dse_possible_dead_store_p (gimple stmt,
! use_operand_p *first_use_p,
! use_operand_p *use_p,
! gimple *use_stmt,
! struct dse_global_data *dse_gd,
! struct dse_block_local_data *bd)
! {
! ssa_op_iter op_iter;
! bool fail = false;
! def_operand_p var1;
! vuse_vec_p vv;
! tree defvar = NULL_TREE;
! tree prev_defvar = NULL_TREE;
gimple temp;
- /* We want to verify that each virtual definition in STMT has
- precisely one use and that all the virtual definitions are
- used by the same single statement. When complete, we
- want USE_STMT to refer to the one statement which uses
- all of the virtual definitions from STMT. */
*use_stmt = NULL;
- FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
- {
- defvar = DEF_FROM_PTR (var1);
! /* If this virtual def does not have precisely one use, then
! we will not be able to eliminate STMT. */
! if (!has_single_use (defvar))
! {
! fail = true;
! break;
! }
!
! /* Get the one and only immediate use of DEFVAR. */
! single_imm_use (defvar, use_p, &temp);
! gcc_assert (*use_p != NULL_USE_OPERAND_P);
! *first_use_p = *use_p;
!
! /* ??? If we hit a GIMPLE_PHI we could skip to the PHI_RESULT uses.
! Don't bother to do that for now. */
! if (gimple_code (temp) == GIMPLE_PHI)
! {
! fail = true;
! break;
! }
!
! /* In the case of memory partitions, we may get:
!
! # MPT.764_162 = VDEF <MPT.764_161(D)>
! x = {};
! # MPT.764_167 = VDEF <MPT.764_162>
! y = {};
!
! So we must make sure we're talking about the same LHS.
! */
! if (is_gimple_assign (temp))
{
! tree base1 = get_base_address (gimple_assign_lhs (stmt));
! tree base2 = get_base_address (gimple_assign_lhs (temp));
!
! while (base1 && INDIRECT_REF_P (base1))
! base1 = TREE_OPERAND (base1, 0);
! while (base2 && INDIRECT_REF_P (base2))
! base2 = TREE_OPERAND (base2, 0);
! if (base1 != base2)
{
fail = true;
! break;
}
- }
! /* If the immediate use of DEF_VAR is not the same as the
! previously find immediate uses, then we will not be able
! to eliminate STMT. */
! if (*use_stmt == NULL)
! {
! *use_stmt = temp;
! prev_defvar = defvar;
}
! else if (temp != *use_stmt)
{
! fail = true;
break;
}
}
! if (fail)
! {
! record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
! return false;
! }
return true;
}
--- 143,231 ----
}
}
/* A helper of dse_optimize_stmt.
! Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
! may prove STMT to be dead.
Return TRUE if the above conditions are met, otherwise FALSE. */
static bool
! dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
! {
gimple temp;
*use_stmt = NULL;
! /* Find the first dominated statement that clobbers (part of) the
! memory stmt stores to with no intermediate statement that may use
! part of the memory stmt stores to. That is, find a store that may
! prove stmt to be a dead store. */
! temp = stmt;
! do
! {
! gimple use_stmt;
! imm_use_iterator ui;
! bool fail = false;
! tree defvar;
!
! defvar = gimple_vdef (temp);
! temp = NULL;
! FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
{
! /* In simple cases we could look through PHI nodes, but we
! have to be careful with loops and with memory references
! containing operands that are also operands of PHI nodes.
! See gcc.c-torture/execute/20051110-*.c. */
! if (gimple_code (use_stmt) == GIMPLE_PHI)
! {
! fail = true;
! BREAK_FROM_IMM_USE_STMT (ui);
! }
! /* If the statement is a use the store is not dead. */
! if (ref_may_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt)))
{
fail = true;
! BREAK_FROM_IMM_USE_STMT (ui);
}
! /* If this is a store, remember it or bail out if we have
! multiple ones (they will be in different CFG parts then). */
! if (gimple_vdef (use_stmt))
! {
! if (temp)
! {
! fail = true;
! BREAK_FROM_IMM_USE_STMT (ui);
! }
! temp = use_stmt;
! }
}
!
! if (fail)
! return false;
!
! /* If we didn't find any definition this means the store is dead
! if it isn't a store to globally reachable memory. In this case
! just pretend the stmt makes itself dead. Otherwise fail. */
! if (!temp)
{
! if (is_hidden_global_store (stmt))
! return false;
!
! temp = stmt;
break;
}
}
+ /* We deliberately stop on clobbering statements and not only on
+ killing ones to make walking cheaper. Otherwise we can just
+ continue walking until both stores have equal reference trees.
+ ??? Maybe rather limit the number of stmts we investigate. */
+ while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));
! if (!is_gimple_assign (temp))
! return false;
!
! *use_stmt = temp;
return true;
}
*************** dse_optimize_stmt (struct dom_walk_data
*** 405,455 ****
if (is_gimple_assign (stmt))
{
- use_operand_p first_use_p = NULL_USE_OPERAND_P;
- use_operand_p use_p = NULL;
gimple use_stmt;
! if (!dse_possible_dead_store_p (stmt, &first_use_p, &use_p, &use_stmt,
! dse_gd, bd))
! return;
! /* If we have precisely one immediate use at this point, then we may
! have found redundant store. Make sure that the stores are to
! the same memory location. This includes checking that any
! SSA-form variables in the address will have the same values. */
! if (use_p != NULL_USE_OPERAND_P
! && bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
! && !operand_equal_p (gimple_assign_lhs (stmt),
! gimple_assign_lhs (use_stmt), 0)
! && memory_address_same (stmt, use_stmt))
! {
! /* If we have precisely one immediate use at this point, but
! the stores are not to the same memory location then walk the
! virtual def-use chain to get the stmt which stores to that same
! memory location. */
! if (!get_kill_of_stmt_lhs (stmt, &first_use_p, &use_p, &use_stmt))
! {
! record_voperand_set (dse_gd->stores, &bd->stores,
! gimple_uid (stmt));
! return;
! }
! }
/* If we have precisely one immediate use at this point and the
stores are to the same memory location or there is a chain of
virtual uses from stmt and the stmt which stores to that same
memory location, then we may have found redundant store. */
! if (use_p != NULL_USE_OPERAND_P
! && bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
&& operand_equal_p (gimple_assign_lhs (stmt),
! gimple_assign_lhs (use_stmt), 0)
! && memory_address_same (stmt, use_stmt))
{
- ssa_op_iter op_iter;
- def_operand_p var1;
- vuse_vec_p vv;
- tree stmt_lhs;
-
/* If use_stmt is or might be a nop assignment, e.g. for
struct { ... } S a, b, *p; ...
b = a; b = b;
--- 269,289 ----
if (is_gimple_assign (stmt))
{
gimple use_stmt;
! record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
! if (!dse_possible_dead_store_p (stmt, &use_stmt))
! return;
/* If we have precisely one immediate use at this point and the
stores are to the same memory location or there is a chain of
virtual uses from stmt and the stmt which stores to that same
memory location, then we may have found redundant store. */
! if (bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
&& operand_equal_p (gimple_assign_lhs (stmt),
! gimple_assign_lhs (use_stmt), 0))
{
/* If use_stmt is or might be a nop assignment, e.g. for
struct { ... } S a, b, *p; ...
b = a; b = b;
*************** dse_optimize_stmt (struct dom_walk_data
*** 461,477 ****
*p = *u; *p = *v; where p might be v, then USE_STMT
acts as a use as well as definition, so store in STMT
is not dead. */
! if (!is_gimple_reg (gimple_assign_rhs1 (use_stmt))
&& !is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))
/* ??? Should {} be invariant? */
&& gimple_assign_rhs_code (use_stmt) != CONSTRUCTOR
&& refs_may_alias_p (gimple_assign_lhs (use_stmt),
gimple_assign_rhs1 (use_stmt)))
! {
! record_voperand_set (dse_gd->stores, &bd->stores,
! gimple_uid (stmt));
! return;
! }
if (dump_file && (dump_flags & TDF_DETAILS))
{
--- 295,308 ----
*p = *u; *p = *v; where p might be v, then USE_STMT
acts as a use as well as definition, so store in STMT
is not dead. */
! if (stmt != use_stmt
! && !is_gimple_reg (gimple_assign_rhs1 (use_stmt))
&& !is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))
/* ??? Should {} be invariant? */
&& gimple_assign_rhs_code (use_stmt) != CONSTRUCTOR
&& refs_may_alias_p (gimple_assign_lhs (use_stmt),
gimple_assign_rhs1 (use_stmt)))
! return;
if (dump_file && (dump_flags & TDF_DETAILS))
{
*************** dse_optimize_stmt (struct dom_walk_data
*** 481,500 ****
}
/* Then we need to fix the operand of the consuming stmt. */
! stmt_lhs = USE_FROM_PTR (first_use_p);
! FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
{
! tree usevar;
! gimple temp;
! single_imm_use (DEF_FROM_PTR (var1), &use_p, &temp);
! gcc_assert (VUSE_VECT_NUM_ELEM (*vv) == 1);
! usevar = VUSE_ELEMENT_VAR (*vv, 0);
! SET_USE (use_p, usevar);
!
! /* Make sure we propagate the ABNORMAL bit setting. */
! if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (stmt_lhs))
! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (usevar) = 1;
}
/* Remove the dead store. */
--- 312,331 ----
}
/* Then we need to fix the operand of the consuming stmt. */
! if (gimple_vdef (stmt))
{
! use_operand_p use_p;
! imm_use_iterator iter;
! gimple use_stmt;
!
! FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt))
! {
! FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
! SET_USE (use_p, gimple_vuse (stmt));
! }
! if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)))
! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1;
}
/* Remove the dead store. */
*************** dse_optimize_stmt (struct dom_walk_data
*** 504,511 ****
SSA_NAME manager. */
release_defs (stmt);
}
-
- record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
}
}
--- 335,340 ----
Index: alias-improvements/gcc/testsuite/gcc.dg/noncompile/920507-1.c
===================================================================
*** alias-improvements.orig/gcc/testsuite/gcc.dg/noncompile/920507-1.c 2009-01-01 20:15:36.000000000 +0100
--- alias-improvements/gcc/testsuite/gcc.dg/noncompile/920507-1.c 2009-01-01 20:15:41.000000000 +0100
*************** x(void)
*** 3,7 ****
{
register int *a asm("unknown_register"); /* { dg-error "invalid register" } */
int *v[1] = {a};
! return v[1];
}
--- 3,7 ----
{
register int *a asm("unknown_register"); /* { dg-error "invalid register" } */
int *v[1] = {a};
! return v[0];
}