[PATCH][mem-ref2] Fixup CCP
Richard Guenther
rguenther@suse.de
Wed Mar 24 14:40:00 GMT 2010
This tries to re-enable full CCP. To that end I made
&MEM[&x, CST] satisfy is_gimple_min_invariant.
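As a small illustration (not part of the patch, and assuming
sizeof (int) == 4), this is the kind of folding it enables:

  static const int a[2] = { 1, 2 };

  int
  foo (void)
  {
    const int *p = a;
    p = p + 1;   /* CCP folds the POINTER_PLUS to &MEM[&a, 4], which is
                    now is_gimple_min_invariant and thus propagatable.  */
    return *p;   /* The load becomes MEM[&MEM[&a, 4], 0], fold_binary
                    combines it to MEM[&a, 4], and fold_const_aggregate_ref
                    reads 2 from the initializer.  */
  }
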
gimplify_and_update_call_from_tree may end up inserting GIMPLE_NOPs
after calls, which may require splitting blocks if the function has
non-local labels (calls in such functions can transfer control
abnormally and therefore end their basic block). I wonder why we
didn't run into this on trunk, but inserting GIMPLE_NOPs is easily
avoided.
Bootstrapped and tested on x86_64-unknown-linux-gnu, applied to the
branch.
As usual the separated CLs are fixes that are supposed to land
on trunk early during stage1.
Richard.
2010-03-24 Richard Guenther <rguenther@suse.de>
* tree-ssa-ccp.c (gimplify_and_update_call_from_tree): Avoid
inserting GIMPLE_NOPs into the IL.
* tree-ssa-structalias.c (get_constraint_for_component_ref):
Explicitly strip handled components and indirect references.
* fold-const.c (operand_equal_p): Handle MEM_REF.
(build_fold_addr_expr_with_type_loc): Only fold addresses of
MEM_REF with zero constant offset.
(fold_binary_loc): Do simple MEM_REF combining.
* tree-ssa-ccp.c (ccp_fold): Replace INDIRECT_REF folding with
MEM_REF. Propagate &foo + CST as &MEM[&foo, CST].
(fold_const_aggregate_ref): Handle MEM_REF.
(ccp_fold_stmt): Use VIEW_CONVERT_EXPR on mismatched types.
(maybe_fold_reference): Use fold_binary.
(fold_gimple_assign): Simplify ADDR_EXPR handling.
* tree-ssa-forwprop.c (forward_propagate_addr_expr_1): Replace
INDIRECT_REF folding with MEM_REF.
* gimple.c (is_gimple_invariant_address): &MEM[&foo, CST] is
invariant.
* tree-ssa-sccvn.c (ao_ref_init_from_vn_reference): Handle
MEM_REF.
(vn_reference_lookup_3): Replace INDIRECT_REF handling with
MEM_REF.
* tree-ssa-pre.c (create_component_ref_by_pieces_1): Fold
the built MEM_REF.
* gimplify.c (gimplify_expr): Fold MEM_REFs before gimplifying
them. Gimplify operand zero to a register if it does not
satisfy the invariant-address constraint.
* tree-object-size.c (addr_object_size): Handle MEM_REFs
instead of INDIRECT_REFs.
* tree-predcom.c (ref_at_iteration): Handle MEM_REFs.
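To illustrate the fold-const.c MEM_REF combining listed above (again
not part of the patch; offsets assume sizeof (int) == 4 and no
padding), given

  struct S { int i; int j; } s;   /* offsetof (struct S, j) == 4 */

the new fold_binary case merges constant offsets through nested
MEM_REFs and through component references under an address:

  MEM[&MEM[p_1, 4], 8]   is combined to   MEM[p_1, 12]
  MEM[&s.j, 8]           is combined to   MEM[&s, 12]

so propagation keeps references in the flat MEM[base, offset] form.
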
Index: gcc/fold-const.c
===================================================================
*** gcc/fold-const.c.orig 2010-03-23 16:53:14.000000000 +0100
--- gcc/fold-const.c 2010-03-23 16:56:54.000000000 +0100
*************** operand_equal_p (const_tree arg0, const_
*** 3359,3364 ****
--- 3359,3367 ----
case IMAGPART_EXPR:
return OP_SAME (0);
+ case MEM_REF:
+ return OP_SAME (0) && OP_SAME (1);
+
case ARRAY_REF:
case ARRAY_RANGE_REF:
/* Operands 2 and 3 may be null.
*************** build_fold_addr_expr_with_type_loc (loca
*** 8292,8297 ****
--- 8295,8303 ----
SET_EXPR_LOCATION (t, loc);
}
}
+ else if (TREE_CODE (t) == MEM_REF
+ && integer_zerop (TREE_OPERAND (t, 1)))
+ return TREE_OPERAND (t, 0);
else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
{
t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
*************** build_fold_addr_expr_with_type_loc (loca
*** 8299,8313 ****
if (TREE_TYPE (t) != ptrtype)
t = fold_convert_loc (loc, ptrtype, t);
}
- else if (TREE_CODE (t) == MEM_REF)
- {
- tree tem = fold_convert_loc (loc, ptrtype, TREE_OPERAND (t, 0));
- if (!integer_zerop (TREE_OPERAND (t, 1)))
- t = fold_build2 (POINTER_PLUS_EXPR, ptrtype, tem,
- fold_convert (sizetype, TREE_OPERAND (t, 1)));
- else
- t = tem;
- }
else
{
t = build1 (ADDR_EXPR, ptrtype, t);
--- 8305,8310 ----
*************** fold_binary_loc (location_t loc,
*** 10219,10224 ****
--- 10216,10263 ----
switch (code)
{
+ case MEM_REF:
+ /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
+ {
+ tree iref = TREE_OPERAND (arg0, 0);
+ return fold_build2 (MEM_REF, type,
+ TREE_OPERAND (iref, 0),
+ int_const_binop (PLUS_EXPR, arg1,
+ TREE_OPERAND (iref, 1), 0));
+ }
+
+ /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && handled_component_p (TREE_OPERAND (arg0, 0)))
+ {
+ tree base;
+ HOST_WIDE_INT size, coffset;
+ tree ncoffset;
+ enum machine_mode mode;
+ int dummy;
+
+ base = get_inner_reference (TREE_OPERAND (arg0, 0), &size,
+ &coffset, &ncoffset,
+ &mode, &dummy, &dummy, false);
+ if (coffset % BITS_PER_UNIT != 0
+ || (ncoffset != NULL_TREE
+ && TREE_CODE (ncoffset) != INTEGER_CST))
+ return NULL_TREE;
+ if (!ncoffset)
+ ncoffset = size_int (coffset / BITS_PER_UNIT);
+ else
+ ncoffset = int_const_binop (PLUS_EXPR, ncoffset,
+ size_int (coffset / BITS_PER_UNIT), 0);
+
+ return fold_build2 (MEM_REF, type,
+ build_fold_addr_expr (base),
+ int_const_binop (PLUS_EXPR, arg1, ncoffset, 0));
+ }
+
+ return NULL_TREE;
+
case POINTER_PLUS_EXPR:
/* 0 +p index -> (type)index */
if (integer_zerop (arg0))
Index: gcc/tree-ssa-ccp.c
===================================================================
*** gcc/tree-ssa-ccp.c.orig 2010-03-23 16:53:14.000000000 +0100
--- gcc/tree-ssa-ccp.c 2010-03-24 13:28:50.000000000 +0100
*************** ccp_fold (gimple stmt)
*** 952,971 ****
base = &TREE_OPERAND (rhs, 0);
while (handled_component_p (*base))
base = &TREE_OPERAND (*base, 0);
! if (TREE_CODE (*base) == INDIRECT_REF
&& TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
{
prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
if (val->lattice_val == CONSTANT
! && TREE_CODE (val->value) == ADDR_EXPR
! && may_propagate_address_into_dereference
! (val->value, *base))
{
/* We need to return a new tree, not modify the IL
or share parts of it. So play some tricks to
avoid manually building it. */
! tree ret, save = *base;
! *base = TREE_OPERAND (val->value, 0);
ret = unshare_expr (rhs);
recompute_tree_invariant_for_addr_expr (ret);
*base = save;
--- 952,973 ----
base = &TREE_OPERAND (rhs, 0);
while (handled_component_p (*base))
base = &TREE_OPERAND (*base, 0);
! if (TREE_CODE (*base) == MEM_REF
&& TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
{
prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
if (val->lattice_val == CONSTANT
! && TREE_CODE (val->value) == ADDR_EXPR)
{
+ tree ret, save = *base;
+ tree new_base;
+ new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
+ unshare_expr (val->value),
+ TREE_OPERAND (*base, 1));
/* We need to return a new tree, not modify the IL
or share parts of it. So play some tricks to
avoid manually building it. */
! *base = new_base;
ret = unshare_expr (rhs);
recompute_tree_invariant_for_addr_expr (ret);
*base = save;
*************** ccp_fold (gimple stmt)
*** 1011,1025 ****
TREE_CODE (rhs),
TREE_TYPE (rhs), val->value);
}
! else if (TREE_CODE (rhs) == INDIRECT_REF
&& TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
{
prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
if (val->lattice_val == CONSTANT
! && TREE_CODE (val->value) == ADDR_EXPR
! && useless_type_conversion_p (TREE_TYPE (rhs),
! TREE_TYPE (TREE_TYPE (val->value))))
! rhs = TREE_OPERAND (val->value, 0);
}
return fold_const_aggregate_ref (rhs);
}
--- 1013,1031 ----
TREE_CODE (rhs),
TREE_TYPE (rhs), val->value);
}
! else if (TREE_CODE (rhs) == MEM_REF
&& TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
{
prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
if (val->lattice_val == CONSTANT
! && TREE_CODE (val->value) == ADDR_EXPR)
! {
! tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
! unshare_expr (val->value),
! TREE_OPERAND (rhs, 1));
! if (tem)
! rhs = tem;
! }
}
return fold_const_aggregate_ref (rhs);
}
*************** ccp_fold (gimple stmt)
*** 1097,1115 ****
op1 = val->value;
}
! /* Fold &foo + CST into an invariant reference if possible. */
if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
&& TREE_CODE (op0) == ADDR_EXPR
&& TREE_CODE (op1) == INTEGER_CST)
{
! tree tem = maybe_fold_offset_to_address
! (loc, op0, op1, TREE_TYPE (op0));
! if (tem != NULL_TREE)
! return tem;
}
return fold_binary_loc (loc, subcode,
! gimple_expr_type (stmt), op0, op1);
}
default:
--- 1103,1125 ----
op1 = val->value;
}
! /* Translate &x + CST into an invariant form suitable for
! further propagation. */
if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
&& TREE_CODE (op0) == ADDR_EXPR
&& TREE_CODE (op1) == INTEGER_CST)
{
! tree off = build_int_cst_wide_type (ptr_type_node,
! TREE_INT_CST_LOW (op1),
! TREE_INT_CST_HIGH (op1));
! return build_fold_addr_expr
! (fold_build2 (MEM_REF,
! TREE_TYPE (TREE_TYPE (op0)),
! unshare_expr (op0), off));
}
return fold_binary_loc (loc, subcode,
! gimple_expr_type (stmt), op0, op1);
}
default:
*************** fold_const_aggregate_ref (tree t)
*** 1367,1372 ****
--- 1377,1384 ----
break;
}
+ /* ??? Best do a fold_const_aggregate_ref_off with an extra constant
+ offset argument to avoid creating new trees. */
case INDIRECT_REF:
{
tree base = TREE_OPERAND (t, 0);
*************** fold_const_aggregate_ref (tree t)
*** 1380,1385 ****
--- 1392,1486 ----
break;
}
+ case MEM_REF:
+ /* Get the base object we are accessing. */
+ base = TREE_OPERAND (t, 0);
+ if (TREE_CODE (base) == SSA_NAME
+ && (value = get_value (base))
+ && value->lattice_val == CONSTANT)
+ base = value->value;
+ if (TREE_CODE (base) != ADDR_EXPR)
+ return NULL_TREE;
+ base = TREE_OPERAND (base, 0);
+ switch (TREE_CODE (base))
+ {
+ case VAR_DECL:
+ if (DECL_P (base)
+ && !AGGREGATE_TYPE_P (TREE_TYPE (base))
+ && integer_zerop (TREE_OPERAND (t, 1)))
+ return get_symbol_constant_value (base);
+
+ if (!TREE_READONLY (base)
+ || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
+ || !targetm.binds_local_p (base))
+ return NULL_TREE;
+
+ ctor = DECL_INITIAL (base);
+ break;
+
+ case STRING_CST:
+ case CONSTRUCTOR:
+ ctor = base;
+ break;
+
+ default:
+ return NULL_TREE;
+ }
+
+ if (ctor == NULL_TREE
+ || (TREE_CODE (ctor) != CONSTRUCTOR
+ && TREE_CODE (ctor) != STRING_CST)
+ || !TREE_STATIC (ctor))
+ return NULL_TREE;
+
+ /* Get the byte offset. */
+ idx = TREE_OPERAND (t, 1);
+
+ /* Fold read from constant string. */
+ if (TREE_CODE (ctor) == STRING_CST)
+ {
+ if ((TYPE_MODE (TREE_TYPE (t))
+ == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
+ && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
+ == MODE_INT)
+ && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
+ && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
+ return build_int_cst_type (TREE_TYPE (t),
+ (TREE_STRING_POINTER (ctor)
+ [TREE_INT_CST_LOW (idx)]));
+ return NULL_TREE;
+ }
+
+ /* ??? Implement byte-offset indexing into a non-array CONSTRUCTOR. */
+ if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
+ && (TYPE_MODE (TREE_TYPE (t))
+ == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
+ && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
+ && integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
+ idx, size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
+ {
+ idx = int_const_binop (TRUNC_DIV_EXPR,
+ idx, size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0);
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
+ if (tree_int_cst_equal (cfield, idx))
+ {
+ STRIP_NOPS (cval);
+ if (TREE_CODE (cval) == ADDR_EXPR)
+ {
+ tree base = get_base_address (TREE_OPERAND (cval, 0));
+ if (base && TREE_CODE (base) == VAR_DECL)
+ add_referenced_var (base);
+ }
+ if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
+ return cval;
+ else if (CONSTANT_CLASS_P (cval))
+ return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
+ else
+ return NULL_TREE;
+ }
+ }
+ break;
+
default:
break;
}
*************** ccp_fold_stmt (gimple_stmt_iterator *gsi
*** 1566,1572 ****
{
tree rhs = unshare_expr (val->value);
if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
! rhs = fold_convert (TREE_TYPE (lhs), rhs);
gimple_assign_set_rhs_from_tree (gsi, rhs);
return true;
}
--- 1667,1673 ----
{
tree rhs = unshare_expr (val->value);
if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
! rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
gimple_assign_set_rhs_from_tree (gsi, rhs);
return true;
}
*************** maybe_fold_reference (tree expr, bool is
*** 2450,2475 ****
else if (TREE_CODE (*t) == MEM_REF
&& TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
&& !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
! && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
! && is_gimple_min_invariant (TREE_OPERAND (*t, 0)))
{
! tree base;
! HOST_WIDE_INT offset, size, max_size;
! base = get_ref_base_and_extent (TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
! &offset, &size, &max_size);
! /* We only care for the offset here - and is_gimple_min_invariant
! address should ensure that that is not variable.
! ??? Maybe better use get_inner_reference for this. */
! gcc_assert (offset % BITS_PER_UNIT == 0);
! TREE_OPERAND (*t, 0) = build_fold_addr_expr_loc (EXPR_LOCATION (TREE_OPERAND (*t, 0)),
! base);
! TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
! TREE_OPERAND (*t, 1),
! build_int_cst (TREE_TYPE (TREE_OPERAND (*t, 1)), offset / BITS_PER_UNIT), 0);
! base = maybe_fold_reference (expr, is_lhs);
! if (base)
! return base;
! return expr;
}
else if (!is_lhs
&& DECL_P (*t))
--- 2551,2569 ----
else if (TREE_CODE (*t) == MEM_REF
&& TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
&& !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
! && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
{
! tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
! TREE_OPERAND (*t, 0),
! TREE_OPERAND (*t, 1));
! if (tem)
! {
! *t = tem;
! tem = maybe_fold_reference (expr, is_lhs);
! if (tem)
! return tem;
! return expr;
! }
}
else if (!is_lhs
&& DECL_P (*t))
*************** fold_gimple_assign (gimple_stmt_iterator
*** 2893,2914 ****
tree ref = TREE_OPERAND (rhs, 0);
tree tem = maybe_fold_reference (ref, true);
if (tem
! && TREE_CODE (tem) == MEM_REF)
! {
! ref = tem;
! goto do_mem_ref;
! }
else if (tem)
result = fold_convert (TREE_TYPE (rhs),
build_fold_addr_expr_loc (loc, tem));
! else if (TREE_CODE (ref) == MEM_REF)
! {
! do_mem_ref:
! result = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rhs),
! TREE_OPERAND (ref, 0),
! fold_convert (sizetype,
! TREE_OPERAND (ref, 1)));
! }
}
else if (TREE_CODE (rhs) == CONSTRUCTOR
--- 2987,3001 ----
tree ref = TREE_OPERAND (rhs, 0);
tree tem = maybe_fold_reference (ref, true);
if (tem
! && TREE_CODE (tem) == MEM_REF
! && integer_zerop (TREE_OPERAND (tem, 1)))
! result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
else if (tem)
result = fold_convert (TREE_TYPE (rhs),
build_fold_addr_expr_loc (loc, tem));
! else if (TREE_CODE (ref) == MEM_REF
! && integer_zerop (TREE_OPERAND (ref, 1)))
! result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
}
else if (TREE_CODE (rhs) == CONSTRUCTOR
*************** gimplify_and_update_call_from_tree (gimp
*** 3435,3440 ****
--- 3522,3528 ----
gimple_stmt_iterator i;
gimple_seq stmts = gimple_seq_alloc();
struct gimplify_ctx gctx;
+ gimple last = NULL;
stmt = gsi_stmt (*si_p);
*************** gimplify_and_update_call_from_tree (gimp
*** 3456,3477 ****
/* The replacement can expose previously unreferenced variables. */
for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
! {
! new_stmt = gsi_stmt (i);
! find_new_referenced_vars (new_stmt);
! gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
! mark_symbols_for_renaming (new_stmt);
! gsi_next (si_p);
! }
if (lhs == NULL_TREE)
{
- new_stmt = gimple_build_nop ();
unlink_stmt_vdef (stmt);
release_defs (stmt);
}
else
{
new_stmt = gimple_build_assign (lhs, tmp);
gimple_set_vuse (new_stmt, gimple_vuse (stmt));
gimple_set_vdef (new_stmt, gimple_vdef (stmt));
--- 3544,3574 ----
/* The replacement can expose previously unreferenced variables. */
for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
! {
! if (last)
! {
! gsi_insert_before (si_p, last, GSI_NEW_STMT);
! gsi_next (si_p);
! }
! new_stmt = gsi_stmt (i);
! find_new_referenced_vars (new_stmt);
! mark_symbols_for_renaming (new_stmt);
! last = new_stmt;
! }
if (lhs == NULL_TREE)
{
unlink_stmt_vdef (stmt);
release_defs (stmt);
+ new_stmt = last;
}
else
{
+ if (last)
+ {
+ gsi_insert_before (si_p, last, GSI_NEW_STMT);
+ gsi_next (si_p);
+ }
new_stmt = gimple_build_assign (lhs, tmp);
gimple_set_vuse (new_stmt, gimple_vuse (stmt));
gimple_set_vdef (new_stmt, gimple_vdef (stmt));
Index: gcc/tree-ssa-forwprop.c
===================================================================
*** gcc/tree-ssa-forwprop.c.orig 2010-03-23 16:53:14.000000000 +0100
--- gcc/tree-ssa-forwprop.c 2010-03-24 12:09:11.000000000 +0100
*************** forward_propagate_addr_expr_1 (tree name
*** 774,790 ****
lhsp = &TREE_OPERAND (*lhsp, 0);
lhs = *lhsp;
! /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
propagate the ADDR_EXPR into the use of NAME and fold the result. */
! if (TREE_CODE (lhs) == INDIRECT_REF
&& TREE_OPERAND (lhs, 0) == name)
{
! if (may_propagate_address_into_dereference (def_rhs, lhs)
! && (lhsp != gimple_assign_lhs_ptr (use_stmt)
! || useless_type_conversion_p
! (TREE_TYPE (TREE_OPERAND (def_rhs, 0)), TREE_TYPE (rhs))))
{
! *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
fold_stmt_inplace (use_stmt);
tidy_after_forward_propagate_addr (use_stmt);
--- 774,787 ----
lhsp = &TREE_OPERAND (*lhsp, 0);
lhs = *lhsp;
! /* Now see if the LHS node is a MEM_REF using NAME. If so,
propagate the ADDR_EXPR into the use of NAME and fold the result. */
! if (TREE_CODE (lhs) == MEM_REF
&& TREE_OPERAND (lhs, 0) == name)
{
! if (is_gimple_min_invariant (def_rhs))
{
! TREE_OPERAND (lhs, 0) = unshare_expr (def_rhs);
fold_stmt_inplace (use_stmt);
tidy_after_forward_propagate_addr (use_stmt);
*************** forward_propagate_addr_expr_1 (tree name
*** 807,872 ****
rhsp = &TREE_OPERAND (*rhsp, 0);
rhs = *rhsp;
! /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
propagate the ADDR_EXPR into the use of NAME and fold the result. */
! if (TREE_CODE (rhs) == INDIRECT_REF
&& TREE_OPERAND (rhs, 0) == name
! && may_propagate_address_into_dereference (def_rhs, rhs))
{
! *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
fold_stmt_inplace (use_stmt);
tidy_after_forward_propagate_addr (use_stmt);
return res;
}
- /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
- propagate the ADDR_EXPR into the use of NAME and try to
- create a VCE and fold the result. */
- if (TREE_CODE (rhs) == INDIRECT_REF
- && TREE_OPERAND (rhs, 0) == name
- && TYPE_SIZE (TREE_TYPE (rhs))
- && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
- /* Function decls should not be used for VCE either as it could be a
- function descriptor that we want and not the actual function code. */
- && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
- /* We should not convert volatile loads to non volatile loads. */
- && !TYPE_VOLATILE (TREE_TYPE (rhs))
- && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
- && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
- TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)
- /* Make sure we only do TBAA compatible replacements. */
- && get_alias_set (TREE_OPERAND (def_rhs, 0)) == get_alias_set (rhs))
- {
- tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
- new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
- if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
- {
- /* If we have folded the VIEW_CONVERT_EXPR then the result is only
- valid if we can replace the whole rhs of the use statement. */
- if (rhs != gimple_assign_rhs1 (use_stmt))
- return false;
- new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
- true, GSI_NEW_STMT);
- gimple_assign_set_rhs1 (use_stmt, new_rhs);
- tidy_after_forward_propagate_addr (use_stmt);
- return res;
- }
- /* If the defining rhs comes from an indirect reference, then do not
- convert into a VIEW_CONVERT_EXPR. */
- def_rhs_base = TREE_OPERAND (def_rhs, 0);
- while (handled_component_p (def_rhs_base))
- def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
- if (!INDIRECT_REF_P (def_rhs_base))
- {
- /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
- reference. Place it there and fold the thing. */
- *rhsp = new_rhs;
- fold_stmt_inplace (use_stmt);
- tidy_after_forward_propagate_addr (use_stmt);
- return res;
- }
- }
-
/* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
is nothing to do. */
if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
--- 804,821 ----
rhsp = &TREE_OPERAND (*rhsp, 0);
rhs = *rhsp;
! /* Now see if the RHS node is a MEM_REF using NAME. If so,
propagate the ADDR_EXPR into the use of NAME and fold the result. */
! if (TREE_CODE (rhs) == MEM_REF
&& TREE_OPERAND (rhs, 0) == name
! && is_gimple_min_invariant (def_rhs))
{
! TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);
fold_stmt_inplace (use_stmt);
tidy_after_forward_propagate_addr (use_stmt);
return res;
}
/* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
is nothing to do. */
if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
Index: gcc/gimple.c
===================================================================
*** gcc/gimple.c.orig 2010-03-23 16:53:14.000000000 +0100
--- gcc/gimple.c 2010-03-23 17:36:43.000000000 +0100
*************** is_gimple_invariant_address (const_tree
*** 2587,2594 ****
return false;
op = strip_invariant_refs (TREE_OPERAND (t, 0));
! return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op));
}
/* Return true if T is a gimple invariant address at IPA level
--- 2587,2604 ----
return false;
op = strip_invariant_refs (TREE_OPERAND (t, 0));
+ if (!op)
+ return false;
+
+ if (TREE_CODE (op) == MEM_REF)
+ {
+ const_tree op0 = TREE_OPERAND (op, 0);
+ return (TREE_CODE (op0) == ADDR_EXPR
+ && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
+ || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
+ }
! return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
}
/* Return true if T is a gimple invariant address at IPA level
Index: gcc/tree-ssa-sccvn.c
===================================================================
*** gcc/tree-ssa-sccvn.c.orig 2010-03-23 17:38:04.000000000 +0100
--- gcc/tree-ssa-sccvn.c 2010-03-23 17:38:12.000000000 +0100
*************** ao_ref_init_from_vn_reference (ao_ref *r
*** 681,686 ****
--- 681,692 ----
op0_p = &TREE_OPERAND (*op0_p, 0);
break;
+ case MEM_REF:
+ *op0_p = build2 (MEM_REF, op->type,
+ NULL_TREE, op->op0);
+ op0_p = &TREE_OPERAND (*op0_p, 0);
+ break;
+
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
*************** vn_reference_lookup_3 (ao_ref *ref, tree
*** 1099,1105 ****
the copy kills ref. */
else if (gimple_assign_single_p (def_stmt)
&& (DECL_P (gimple_assign_rhs1 (def_stmt))
! || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
|| handled_component_p (gimple_assign_rhs1 (def_stmt))))
{
tree base2;
--- 1105,1111 ----
the copy kills ref. */
else if (gimple_assign_single_p (def_stmt)
&& (DECL_P (gimple_assign_rhs1 (def_stmt))
! || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
|| handled_component_p (gimple_assign_rhs1 (def_stmt))))
{
tree base2;
Index: gcc/tree-ssa-pre.c
===================================================================
*** gcc/tree-ssa-pre.c.orig 2010-03-23 18:09:44.000000000 +0100
--- gcc/tree-ssa-pre.c 2010-03-23 18:09:51.000000000 +0100
*************** create_component_ref_by_pieces_1 (basic_
*** 2716,2722 ****
off / BITS_PER_UNIT), 0);
baseop = build_fold_addr_expr (base);
}
! return build2 (MEM_REF, currop->type, baseop, offset);
}
break;
case TARGET_MEM_REF:
--- 2716,2722 ----
off / BITS_PER_UNIT), 0);
baseop = build_fold_addr_expr (base);
}
! return fold_build2 (MEM_REF, currop->type, baseop, offset);
}
break;
case TARGET_MEM_REF:
Index: gcc/gimplify.c
===================================================================
*** gcc/gimplify.c.orig 2010-03-22 15:51:59.000000000 +0100
--- gcc/gimplify.c 2010-03-24 12:22:37.000000000 +0100
*************** gimplify_expr (tree *expr_p, gimple_seq
*** 6667,6674 ****
}
case MEM_REF:
! ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
! is_gimple_val, fb_rvalue);
recalculate_side_effects (*expr_p);
break;
--- 6667,6682 ----
}
case MEM_REF:
! tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
! TREE_OPERAND (*expr_p, 0),
! TREE_OPERAND (*expr_p, 1));
! if (tmp)
! *expr_p = tmp;
! if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) != ADDR_EXPR
! || (!CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0))
! && !decl_address_invariant_p (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0))))
! ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
! is_gimple_reg, fb_rvalue);
recalculate_side_effects (*expr_p);
break;
Index: gcc/tree-object-size.c
===================================================================
*** gcc/tree-object-size.c.orig 2010-03-22 14:56:01.000000000 +0100
--- gcc/tree-object-size.c 2010-03-24 13:58:04.000000000 +0100
*************** addr_object_size (struct object_size_inf
*** 165,179 ****
pt_var = get_base_address (pt_var);
if (pt_var
! && TREE_CODE (pt_var) == INDIRECT_REF
&& TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
{
unsigned HOST_WIDE_INT sz;
if (!osi || (object_size_type & 1) != 0)
! sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
! object_size_type & ~1);
else
{
tree var = TREE_OPERAND (pt_var, 0);
--- 165,185 ----
pt_var = get_base_address (pt_var);
if (pt_var
! && TREE_CODE (pt_var) == MEM_REF
&& TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
{
unsigned HOST_WIDE_INT sz;
if (!osi || (object_size_type & 1) != 0)
! {
! sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
! object_size_type & ~1);
! if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
! sz += TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
! else
! sz = offset_limit;
! }
else
{
tree var = TREE_OPERAND (pt_var, 0);
*************** addr_object_size (struct object_size_inf
*** 224,230 ****
&& tree_int_cst_lt (pt_var_size,
TYPE_SIZE_UNIT (TREE_TYPE (var)))))
var = pt_var;
! else if (var != pt_var && TREE_CODE (pt_var) == INDIRECT_REF)
{
tree v = var;
/* For &X->fld, compute object size only if fld isn't the last
--- 230,236 ----
&& tree_int_cst_lt (pt_var_size,
TYPE_SIZE_UNIT (TREE_TYPE (var)))))
var = pt_var;
! else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
{
tree v = var;
/* For &X->fld, compute object size only if fld isn't the last
*************** addr_object_size (struct object_size_inf
*** 327,338 ****
}
if (var != pt_var
&& pt_var_size
! && TREE_CODE (pt_var) == INDIRECT_REF
&& bytes != error_mark_node)
{
tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
if (bytes2 != error_mark_node)
{
if (TREE_CODE (bytes2) == INTEGER_CST
&& tree_int_cst_lt (pt_var_size, bytes2))
bytes2 = size_zero_node;
--- 333,346 ----
}
if (var != pt_var
&& pt_var_size
! && TREE_CODE (pt_var) == MEM_REF
&& bytes != error_mark_node)
{
tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
if (bytes2 != error_mark_node)
{
+ bytes2 = size_binop (PLUS_EXPR, bytes2,
+ TREE_OPERAND (pt_var, 1));
if (TREE_CODE (bytes2) == INTEGER_CST
&& tree_int_cst_lt (pt_var_size, bytes2))
bytes2 = size_zero_node;
Index: gcc/tree-predcom.c
===================================================================
*** gcc/tree-predcom.c.orig 2010-03-22 14:56:01.000000000 +0100
--- gcc/tree-predcom.c 2010-03-24 12:13:17.000000000 +0100
*************** ref_at_iteration (struct loop *loop, tre
*** 1343,1356 ****
if (!op0)
return NULL_TREE;
}
! else if (!INDIRECT_REF_P (ref))
return unshare_expr (ref);
! if (INDIRECT_REF_P (ref))
{
! /* Take care for INDIRECT_REF and MISALIGNED_INDIRECT_REF at
the same time. */
! ret = copy_node (ref);
idx = TREE_OPERAND (ref, 0);
idx_p = &TREE_OPERAND (ret, 0);
}
--- 1343,1358 ----
if (!op0)
return NULL_TREE;
}
! else if (!INDIRECT_REF_P (ref)
! && TREE_CODE (ref) != MEM_REF)
return unshare_expr (ref);
! if (INDIRECT_REF_P (ref)
! || TREE_CODE (ref) == MEM_REF)
{
! /* Take care for MEM_REF and MISALIGNED_INDIRECT_REF at
the same time. */
! ret = unshare_expr (ref);
idx = TREE_OPERAND (ref, 0);
idx_p = &TREE_OPERAND (ret, 0);
}
Index: gcc/tree-ssa-structalias.c
===================================================================
*** gcc/tree-ssa-structalias.c.orig 2010-03-22 15:52:00.000000000 +0100
--- gcc/tree-ssa-structalias.c 2010-03-24 12:28:08.000000000 +0100
*************** get_constraint_for_component_ref (tree t
*** 2950,2956 ****
/* Some people like to do cute things like take the address of
&0->a.b */
forzero = t;
! while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
forzero = TREE_OPERAND (forzero, 0);
if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
--- 2950,2958 ----
/* Some people like to do cute things like take the address of
&0->a.b */
forzero = t;
! while (handled_component_p (forzero)
! || INDIRECT_REF_P (forzero)
! || TREE_CODE (forzero) == MEM_REF)
forzero = TREE_OPERAND (forzero, 0);
if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))