This is the mail archive of the
gcc-patches@gcc.gnu.org
mailing list for the GCC project.
wide-int: fix merge.
- From: Kenneth Zadeck <zadeck at naturalbridge dot com>
- To: gcc-patches <gcc-patches at gcc dot gnu dot org>, Richard Sandiford <r dot sandiford at uk dot ibm dot com>, Richard Biener <rguenther at suse dot de>, Mike Stump <mikestump at comcast dot net>
- Date: Mon, 23 Sep 2013 17:13:20 -0400
- Subject: wide-int: fix merge.
- Authentication-results: sourceware.org; auth=none
Mike started a merge from trunk to the wide-int branch because the
previous merge had happened when bootstrap was broken for ppc.
This patch fixes Mike's partial merge and has been bootstrapped and
tested on ppc. I will pull the patch down and test it on x86-64 tonight.
kenny
Index: gcc/postreload.c
===================================================================
--- gcc/postreload.c (revision 202811)
+++ gcc/postreload.c (working copy)
@@ -305,7 +305,7 @@
case ZERO_EXTEND:
result = wide_int (std::make_pair (this_rtx, GET_MODE (src)));
if (GET_MODE_PRECISION (GET_MODE (src)) > GET_MODE_PRECISION (word_mode))
- result = result.zext (GET_MODE_PRECISION (word_mode));
+ result = wi::zext (result, GET_MODE_PRECISION (word_mode));
break;
case SIGN_EXTEND:
result = wide_int (std::make_pair (this_rtx, GET_MODE (src)));
Index: gcc/fold-const.c
===================================================================
--- gcc/fold-const.c (revision 202811)
+++ gcc/fold-const.c (working copy)
@@ -9897,17 +9897,15 @@
/* Mask out the tz least significant bits of X of type TYPE where
tz is the number of trailing zeroes in Y. */
-static double_int
-mask_with_tz (tree type, double_int x, double_int y)
+static wide_int
+mask_with_tz (tree type, wide_int x, wide_int y)
{
- int tz = y.trailing_zeros ();
-
+ int tz = wi::ctz (y);
if (tz > 0)
{
- double_int mask;
+ wide_int mask;
- mask = ~double_int::mask (tz);
- mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
+ mask = wi::mask (tz, true, TYPE_PRECISION (type));
return mask & x;
}
return x;
@@ -11276,7 +11274,7 @@
== INTEGER_CST)
{
tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
- double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
+ wide_int masked = mask_with_tz (type, c3, t);
try_simplify = (masked != c1);
}
@@ -11670,32 +11668,14 @@
&& TREE_CODE (arg0) == MULT_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
-<<<<<<< .working
- int arg1tz = wi::ctz (TREE_OPERAND (arg0, 1));
- if (arg1tz > 0)
- {
- wide_int arg1mask, masked;
- arg1mask = wi::mask (arg1tz, true, TYPE_PRECISION (type));
- masked = arg1mask & arg1;
- if (masked == 0)
- return omit_two_operands_loc (loc, type, build_zero_cst (type),
- arg0, arg1);
- else if (masked != arg1)
- return fold_build2_loc (loc, code, type, op0,
- wide_int_to_tree (type, masked));
- }
-=======
- double_int masked
- = mask_with_tz (type, tree_to_double_int (arg1),
- tree_to_double_int (TREE_OPERAND (arg0, 1)));
+ wide_int masked = mask_with_tz (type, arg1, TREE_OPERAND (arg0, 1));
- if (masked.is_zero ())
+ if (masked == 0)
return omit_two_operands_loc (loc, type, build_zero_cst (type),
arg0, arg1);
- else if (masked != tree_to_double_int (arg1))
+ else if (masked != arg1)
return fold_build2_loc (loc, code, type, op0,
- double_int_to_tree (type, masked));
->>>>>>> .merge-right.r202797
+ wide_int_to_tree (type, masked));
}
/* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
Index: gcc/gimple-ssa-strength-reduction.c
===================================================================
--- gcc/gimple-ssa-strength-reduction.c (revision 202811)
+++ gcc/gimple-ssa-strength-reduction.c (working copy)
@@ -763,7 +763,7 @@
int (i * S).
Otherwise, just return double int zero. */
-static double_int
+static max_wide_int
backtrace_base_for_ref (tree *pbase)
{
tree base_in = *pbase;
@@ -771,19 +771,19 @@
STRIP_NOPS (base_in);
if (TREE_CODE (base_in) != SSA_NAME)
- return tree_to_double_int (integer_zero_node);
+ return 0;
base_cand = base_cand_from_table (base_in);
while (base_cand && base_cand->kind != CAND_PHI)
{
if (base_cand->kind == CAND_ADD
- && base_cand->index.is_one ()
+ && base_cand->index == 1
&& TREE_CODE (base_cand->stride) == INTEGER_CST)
{
/* X = B + (1 * S), S is integer constant. */
*pbase = base_cand->base_expr;
- return tree_to_double_int (base_cand->stride);
+ return base_cand->stride;
}
else if (base_cand->kind == CAND_ADD
&& TREE_CODE (base_cand->stride) == INTEGER_CST
@@ -800,7 +800,7 @@
base_cand = NULL;
}
- return tree_to_double_int (integer_zero_node);
+ return 0;
}
/* Look for the following pattern:
Index: gcc/genpreds.c
===================================================================
--- gcc/genpreds.c (revision 202811)
+++ gcc/genpreds.c (working copy)
@@ -809,11 +809,7 @@
if (is_const_int || is_const_dbl)
{
enum rtx_code appropriate_code
-#if TARGET_SUPPORTS_WIDE_INT
- = is_const_int ? CONST_INT : CONST_WIDE_INT;
-#else
= is_const_int ? CONST_INT : CONST_DOUBLE;
-#endif
/* Consider relaxing this requirement in the future. */
if (regclass
|| GET_CODE (exp) != AND
Index: gcc/ubsan.c
===================================================================
--- gcc/ubsan.c (revision 202811)
+++ gcc/ubsan.c (working copy)
@@ -233,8 +233,8 @@
static unsigned short
get_ubsan_type_info_for_type (tree type)
{
- gcc_assert (TYPE_SIZE (type) && host_integerp (TYPE_SIZE (type), 1));
- int prec = exact_log2 (tree_low_cst (TYPE_SIZE (type), 1));
+ gcc_assert (TYPE_SIZE (type) && tree_fits_uhwi_p (TYPE_SIZE (type)));
+ int prec = exact_log2 (tree_to_uhwi (TYPE_SIZE (type)));
gcc_assert (prec != -1);
return (prec << 1) | !TYPE_UNSIGNED (type);
}