This is the mail archive of the
gcc-patches@gcc.gnu.org
mailing list for the GCC project.
[PATCH][no-undefined-overflow] More address-expression handling
- From: Richard Guenther <rguenther at suse dot de>
- To: gcc-patches at gcc dot gnu dot org
- Date: Mon, 9 Mar 2009 16:55:12 +0100 (CET)
- Subject: [PATCH][no-undefined-overflow] More address-expression handling
This audits address-expression folding. Especially we now always
canonicalize (T1)(X p+ Y) to ((T1)X p+ Y) - the old code looks like
it probably pre-dated POINTER_PLUS_EXPR. In tree_*_nonnegative_p
we can now (well, we could have previously for TYPE_UNDEFINED_OVERFLOW)
treat non-wrapping plus and mult the same as for floating types.
In going over the POINTER_PLUS_EXPR cases I am now somewhat worried
that we might be confused as to how no-overflow for the pointer addition
translates to overflow behavior on the offset parts for, for example,
(ptr p+ o1) p+ o2 which we fold to ptr p+ (o1 + o2) - especially as
a pointer doesn't really have a sign (but we can assume it wraps
around NULL, so it's probably "unsigned") and the offset is of that
strange sizetype, which is unsigned (apart from for Ada) but
sign-extended.
Well, of course one of the original plans was to get rid of the
sizetype specialities. At which point we probably should consistently
use a different type for pointer offsets (which should have been signed).
Another can of worms is/will be the interesting signed overflow
warning handling in VRP (and maybe also in other places). The branch
asks for a major overhaul of at least VRP anyway...
Anyway.
Bootstrapped and tested on x86_64-unknown-linux-gnu, applied to the
branch.
Richard.
2009-03-09 Richard Guenther <rguenther@suse.de>
* fold-const.c (try_move_mult_to_index): Only move idx * size
in &x[i] + idx * size into the index if the multiplication
does not overflow.
(fold_unary): Always fold (T1)(X p+ Y) to ((T1)X p+ Y).
Simplify that folding.
Fold abs(NEGATENV_EXPR) the same as abs(NEGATE_EXPR).
(fold_binary): Audit all pointer-plus folding. Preserve
non-wrapping variants where that is easily correct.
(tree_unary_nonnegative_warnv_p): Remove dead TYPE_OVERFLOW_UNDEFINED
case. Handle CONVERT_EXPR the same as NOP_EXPR.
(tree_binary_nonnegative_warnv_p): POINTER_PLUS*_EXPR are not
handled at all. Handle PLUSNV_EXPR and MULTNV_EXPR specially.
* tree-ssa-ccp.c (likely_value): Handle *NV_EXPR.
(ccp_fold): Also fold POINTER_PLUSNV_EXPR.
(maybe_fold_stmt_indirect): Also fold POINTER_PLUSNV_EXPR.
(fold_stmt_r): Likewise.
* tree-ssa-forwprop.c (forward_propagate_addr_into_variable_arr):
Only fold non-overflowed multiplications.
(forward_propagate_addr_expr_1): Handle POINTER_PLUSNV_EXPR.
* tree-vrp.c (supports_overflow_infinity): The assert that the
type needs overflow infinity is bogus now.
Index: gcc/fold-const.c
===================================================================
*** gcc/fold-const.c (revision 144722)
--- gcc/fold-const.c (working copy)
*************** try_move_mult_to_index (tree addr, tree
*** 7222,7228 ****
/* Canonicalize op1 into a possibly non-constant delta
and an INTEGER_CST s. */
! if (TREE_CODE (op1) == MULT_EXPR)
{
tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
--- 7222,7228 ----
/* Canonicalize op1 into a possibly non-constant delta
and an INTEGER_CST s. */
! if (TREE_CODE (op1) == MULTNV_EXPR)
{
tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
*************** fold_unary (enum tree_code code, tree ty
*** 8314,8333 ****
}
}
! /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
! when one of the new casts will fold away. Conservatively we assume
! that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
if (POINTER_TYPE_P (type)
! && TREE_CODE (arg0) == POINTER_PLUS_EXPR
! && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
! || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
! || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
{
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
! return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
! fold_convert (sizetype, arg01));
}
/* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
--- 8314,8328 ----
}
}
! /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type. */
if (POINTER_TYPE_P (type)
! && POINTER_PLUS_EXPR_P (arg0))
{
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
! return fold_build2 (TREE_CODE (arg0), type,
! fold_convert (type, arg00), arg01);
}
/* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
*************** fold_unary (enum tree_code code, tree ty
*** 8408,8414 ****
case ABS_EXPR:
if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
return fold_abs_const (arg0, type);
! else if (TREE_CODE (arg0) == NEGATE_EXPR)
return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
/* Convert fabs((double)float) into (double)fabsf(float). */
else if (TREE_CODE (arg0) == NOP_EXPR
--- 8403,8409 ----
case ABS_EXPR:
if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
return fold_abs_const (arg0, type);
! else if (NEGATE_EXPR_P (arg0))
return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
/* Convert fabs((double)float) into (double)fabsf(float). */
else if (TREE_CODE (arg0) == NOP_EXPR
*************** fold_binary (enum tree_code code, tree t
*** 9823,9832 ****
if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
- /* ??? Auditing required. */
- if (code == POINTER_PLUSNV_EXPR)
- return NULL_TREE;
-
/* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
&& INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
--- 9818,9823 ----
*************** fold_binary (enum tree_code code, tree t
*** 9837,9856 ****
/* index +p PTR -> PTR +p index */
if (POINTER_TYPE_P (TREE_TYPE (arg1))
&& INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
! return fold_build2 (POINTER_PLUS_EXPR, type,
fold_convert (type, arg1),
fold_convert (sizetype, arg0));
/* (PTR +p B) +p A -> PTR +p (B + A) */
! if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
{
tree inner;
tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
tree arg00 = TREE_OPERAND (arg0, 0);
inner = fold_build2 (PLUS_EXPR, sizetype,
arg01, fold_convert (sizetype, arg1));
return fold_convert (type,
! fold_build2 (POINTER_PLUS_EXPR,
TREE_TYPE (arg00), arg00, inner));
}
--- 9828,9851 ----
/* index +p PTR -> PTR +p index */
if (POINTER_TYPE_P (TREE_TYPE (arg1))
&& INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
! return fold_build2 (code, type,
fold_convert (type, arg1),
fold_convert (sizetype, arg0));
/* (PTR +p B) +p A -> PTR +p (B + A) */
! if (POINTER_PLUS_EXPR_P (arg0))
{
tree inner;
tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
tree arg00 = TREE_OPERAND (arg0, 0);
+ enum tree_code ncode = POINTER_PLUS_EXPR;
+ if (code == POINTER_PLUSNV_EXPR
+ && TREE_CODE (arg0) == POINTER_PLUSNV_EXPR)
+ ncode = POINTER_PLUSNV_EXPR;
inner = fold_build2 (PLUS_EXPR, sizetype,
arg01, fold_convert (sizetype, arg1));
return fold_convert (type,
! fold_build2 (ncode,
TREE_TYPE (arg00), arg00, inner));
}
*************** tree_unary_nonnegative_warnv_p (enum tre
*** 14343,14353 ****
ABS_EXPR<INT_MIN> = INT_MIN. */
if (!INTEGRAL_TYPE_P (type))
return true;
- if (TYPE_OVERFLOW_UNDEFINED (type))
- {
- *strict_overflow_p = true;
- return true;
- }
break;
case NON_LVALUE_EXPR:
--- 14338,14343 ----
*************** tree_unary_nonnegative_warnv_p (enum tre
*** 14356,14362 ****
return tree_expr_nonnegative_warnv_p (op0,
strict_overflow_p);
! case NOP_EXPR:
{
tree inner_type = TREE_TYPE (op0);
tree outer_type = type;
--- 14346,14352 ----
return tree_expr_nonnegative_warnv_p (op0,
strict_overflow_p);
! CASE_CONVERT:
{
tree inner_type = TREE_TYPE (op0);
tree outer_type = type;
*************** tree_binary_nonnegative_warnv_p (enum tr
*** 14408,14414 ****
--- 14398,14417 ----
switch (code)
{
+ case POINTER_PLUSNV_EXPR:
case POINTER_PLUS_EXPR:
+ /* Pointers do not have a "sign". */
+ return false;
+
+ case PLUSNV_EXPR:
+ if (INTEGRAL_TYPE_P (type))
+ {
+ *strict_overflow_p = true;
+ return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
+ && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));
+ }
+
+ /* Fallthru. */
case PLUS_EXPR:
if (FLOAT_TYPE_P (type))
return (tree_expr_nonnegative_warnv_p (op0,
*************** tree_binary_nonnegative_warnv_p (enum tr
*** 14434,14439 ****
--- 14437,14454 ----
}
break;
+ case MULTNV_EXPR:
+ if (INTEGRAL_TYPE_P (type))
+ {
+ *strict_overflow_p = true;
+ /* x * x without overflowing is always non-negative. */
+ if (operand_equal_p (op0, op1, 0))
+ return true;
+ return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
+ && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));
+ }
+
+ /* Fallthru. */
case MULT_EXPR:
if (FLOAT_TYPE_P (type))
{
Index: gcc/tree-ssa-ccp.c
===================================================================
*** gcc/tree-ssa-ccp.c (revision 144676)
--- gcc/tree-ssa-ccp.c (working copy)
*************** likely_value (gimple stmt)
*** 566,573 ****
--- 566,576 ----
{
/* Unary operators are handled with all_undefined_operands. */
case PLUS_EXPR:
+ case PLUSNV_EXPR:
case MINUS_EXPR:
+ case MINUSNV_EXPR:
case POINTER_PLUS_EXPR:
+ case POINTER_PLUSNV_EXPR:
/* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
Not bitwise operators, one VARYING operand may specify the
result completely. Not logical operators for the same reason.
*************** ccp_fold (gimple stmt)
*** 1027,1034 ****
op1 = val->value;
}
! /* Fold &foo + CST into an invariant reference if possible. */
! if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
&& TREE_CODE (op0) == ADDR_EXPR
&& TREE_CODE (op1) == INTEGER_CST)
{
--- 1030,1038 ----
op1 = val->value;
}
! /* Fold &foo + CST into an invariant reference if possible.
! ??? Should this be done only for POINTER_PLUSNV_EXPR? */
! if (POINTER_PLUS_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
&& TREE_CODE (op0) == ADDR_EXPR
&& TREE_CODE (op1) == INTEGER_CST)
{
*************** maybe_fold_stmt_indirect (tree expr, tre
*** 1980,1986 ****
return t;
/* Add in any offset from a POINTER_PLUS_EXPR. */
! if (TREE_CODE (base) == POINTER_PLUS_EXPR)
{
tree offset2;
--- 1984,1990 ----
return t;
/* Add in any offset from a POINTER_PLUS_EXPR. */
! if (POINTER_PLUS_EXPR_P (base))
{
tree offset2;
*************** fold_stmt_r (tree *expr_p, int *walk_sub
*** 2262,2267 ****
--- 2266,2272 ----
break;
case POINTER_PLUS_EXPR:
+ case POINTER_PLUSNV_EXPR:
t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
if (t)
return t;
*************** fold_gimple_assign (gimple_stmt_iterator
*** 2710,2716 ****
case GIMPLE_BINARY_RHS:
/* Try to fold pointer addition. */
! if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
{
tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
--- 2715,2721 ----
case GIMPLE_BINARY_RHS:
/* Try to fold pointer addition. */
! if (POINTER_PLUS_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
{
tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
Index: gcc/tree-ssa-forwprop.c
===================================================================
*** gcc/tree-ssa-forwprop.c (revision 144676)
--- gcc/tree-ssa-forwprop.c (working copy)
*************** forward_propagate_addr_into_variable_arr
*** 624,630 ****
if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
{
if (is_gimple_assign (offset_def)
! && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
return false;
index = offset;
--- 624,630 ----
if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
{
if (is_gimple_assign (offset_def)
! && MULT_EXPR_CODE_P (gimple_assign_rhs_code (offset_def)))
return false;
index = offset;
*************** forward_propagate_addr_into_variable_arr
*** 637,647 ****
return false;
/* The RHS of the statement which defines OFFSET must be a
! multiplication of an object by the size of the array elements.
This implicitly verifies that the size of the array elements
is constant. */
offset = gimple_assign_rhs1 (offset_def);
! if (gimple_assign_rhs_code (offset_def) != MULT_EXPR
|| TREE_CODE (gimple_assign_rhs2 (offset_def)) != INTEGER_CST
|| !simple_cst_equal (gimple_assign_rhs2 (offset_def),
TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
--- 637,648 ----
return false;
/* The RHS of the statement which defines OFFSET must be a
! non-overflowing multiplication of an object by the size of the
! array elements.
This implicitly verifies that the size of the array elements
is constant. */
offset = gimple_assign_rhs1 (offset_def);
! if (gimple_assign_rhs_code (offset_def) != MULTNV_EXPR
|| TREE_CODE (gimple_assign_rhs2 (offset_def)) != INTEGER_CST
|| !simple_cst_equal (gimple_assign_rhs2 (offset_def),
TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
*************** forward_propagate_addr_expr_1 (tree name
*** 809,815 ****
/* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
is nothing to do. */
! if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
|| gimple_assign_rhs1 (use_stmt) != name)
return false;
--- 810,816 ----
/* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
is nothing to do. */
! if (!POINTER_PLUS_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
|| gimple_assign_rhs1 (use_stmt) != name)
return false;
Index: gcc/tree-vrp.c
===================================================================
*** gcc/tree-vrp.c (revision 144722)
--- gcc/tree-vrp.c (working copy)
*************** static inline bool
*** 205,211 ****
supports_overflow_infinity (const_tree type)
{
tree min = vrp_val_min (type), max = vrp_val_max (type);
! #ifdef ENABLE_CHECKING
gcc_assert (needs_overflow_infinity (type));
#endif
return (min != NULL_TREE
--- 205,212 ----
supports_overflow_infinity (const_tree type)
{
tree min = vrp_val_min (type), max = vrp_val_max (type);
! #if 0
! /* ??? This assert is now bogus. */
gcc_assert (needs_overflow_infinity (type));
#endif
return (min != NULL_TREE