/* If both types don't have the same precision, then it is not safe
to strip NOPs. */
- if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
+ if (element_precision (TREE_TYPE (arg0))
+ != element_precision (TREE_TYPE (arg1)))
return 0;
STRIP_NOPS (arg0);
tree arg0, arg1, tem;
tree t1 = NULL_TREE;
bool strict_overflow_p;
+ unsigned int prec;
gcc_assert (IS_EXPR_CODE_CLASS (kind)
&& TREE_CODE_LENGTH (code) == 2
STRIP_NOPS (tem);
if (operand_equal_p (tem, arg1, 0))
{
- t1 = build_int_cst_type (type, -1);
+ t1 = build_minus_one_cst (type);
return omit_one_operand_loc (loc, type, t1, arg1);
}
}
STRIP_NOPS (tem);
if (operand_equal_p (arg0, tem, 0))
{
- t1 = build_int_cst_type (type, -1);
+ t1 = build_minus_one_cst (type);
return omit_one_operand_loc (loc, type, t1, arg0);
}
}
TYPE_UNSIGNED (rtype))
/* Only create rotates in complete modes. Other cases are not
expanded properly. */
- && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
+ && (element_precision (rtype)
+ == element_precision (TYPE_MODE (rtype))))
{
tree tree01, tree11;
enum tree_code code01, code11;
&& TREE_INT_CST_HIGH (tree01) == 0
&& TREE_INT_CST_HIGH (tree11) == 0
&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
- == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
+ == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
{
tem = build2_loc (loc, LROTATE_EXPR,
TREE_TYPE (TREE_OPERAND (arg0, 0)),
STRIP_NOPS (tree111);
if (TREE_CODE (tree110) == INTEGER_CST
&& 0 == compare_tree_int (tree110,
- TYPE_PRECISION
+ element_precision
(TREE_TYPE (TREE_OPERAND
(arg0, 0))))
&& operand_equal_p (tree01, tree111, 0))
STRIP_NOPS (tree011);
if (TREE_CODE (tree010) == INTEGER_CST
&& 0 == compare_tree_int (tree010,
- TYPE_PRECISION
+ element_precision
(TREE_TYPE (TREE_OPERAND
(arg0, 0))))
&& operand_equal_p (tree11, tree011, 0))
if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
&& TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
{
- unsigned int prec
- = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
+ prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
&& (~TREE_INT_CST_LOW (arg1)
&& TYPE_PRECISION (TREE_TYPE (arg0))
== GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
{
- unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
+ prec = TYPE_PRECISION (TREE_TYPE (arg0));
tree arg00 = TREE_OPERAND (arg0, 0);
/* See if more bits can be proven as zero because of
zero extension. */
newmask = mask | zerobits;
if (newmask != mask && (newmask & (newmask + 1)) == 0)
{
- unsigned int prec;
-
/* Only do the transformation if NEWMASK is some integer
mode's mask. */
for (prec = BITS_PER_UNIT;
if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
return NULL_TREE;
+ prec = element_precision (type);
+
/* Turn (a OP c1) OP c2 into a OP (c1+c2). */
if (TREE_CODE (op0) == code && host_integerp (arg1, false)
- && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
+ && TREE_INT_CST_LOW (arg1) < prec
&& host_integerp (TREE_OPERAND (arg0, 1), false)
- && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
+ && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
{
HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
+ TREE_INT_CST_LOW (arg1));
/* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
being well defined. */
- if (low >= TYPE_PRECISION (type))
+ if (low >= prec)
{
if (code == LROTATE_EXPR || code == RROTATE_EXPR)
- low = low % TYPE_PRECISION (type);
+ low = low % prec;
else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
- return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
+ return omit_one_operand_loc (loc, type, build_zero_cst (type),
TREE_OPERAND (arg0, 0));
else
- low = TYPE_PRECISION (type) - 1;
+ low = prec - 1;
}
return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- build_int_cst (type, low));
+ build_int_cst (TREE_TYPE (arg1), low));
}
/* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
|| (TYPE_UNSIGNED (type)
&& code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
&& host_integerp (arg1, false)
- && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
+ && TREE_INT_CST_LOW (arg1) < prec
&& host_integerp (TREE_OPERAND (arg0, 1), false)
- && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
+ && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
{
HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
{
arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
- lshift = build_int_cst (type, -1);
- lshift = int_const_binop (code, lshift, arg1);
+ lshift = build_minus_one_cst (type);
+ lshift = const_binop (code, lshift, arg1);
return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
}
RROTATE_EXPR by a new constant. */
if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
{
- tree tem = build_int_cst (TREE_TYPE (arg1),
- TYPE_PRECISION (type));
+ tree tem = build_int_cst (TREE_TYPE (arg1), prec);
tem = const_binop (MINUS_EXPR, tem, arg1);
return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
}
&& TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
&& ((TREE_INT_CST_LOW (arg1)
+ TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
- == (unsigned int) TYPE_PRECISION (type)))
+ == prec))
return TREE_OPERAND (arg0, 0);
/* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
&& integer_zerop (arg1))
{
tree itype = TREE_TYPE (arg0);
- unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
+ prec = TYPE_PRECISION (itype);
/* Check for a valid shift count. */
if (TREE_INT_CST_HIGH (arg001) == 0
}
}
+/* Return a constant of arithmetic type TYPE which is the
+   opposite of the multiplicative identity of the set TYPE,
+   i.e. the value -1 represented in TYPE (all-ones for integers,
+   -1.0 for reals, a splat of -1 for vectors, -1 + 0i for complex).  */
+
+tree
+build_minus_one_cst (tree type)
+{
+  switch (TREE_CODE (type))
+    {
+    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
+    case POINTER_TYPE: case REFERENCE_TYPE:
+    case OFFSET_TYPE:
+      /* For all integral and pointer-like types -1 is the all-ones
+	 bit pattern, which build_int_cst produces directly.  */
+      return build_int_cst (type, -1);
+
+    case REAL_TYPE:
+      /* dconstm1 is the cached REAL_VALUE_TYPE representation of -1.0.  */
+      return build_real (type, dconstm1);
+
+    case FIXED_POINT_TYPE:
+      /* We can only generate 1 for accum types.  */
+      /* NOTE(review): the assert admits only scalar ACCUM modes, so -1
+	 for fract fixed-point modes is rejected here even though signed
+	 fract can represent -1 — confirm whether that is intentional.  */
+      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
+      return build_fixed (type, fixed_from_double_int (double_int_minus_one,
+						       TYPE_MODE (type)));
+
+    case VECTOR_TYPE:
+      {
+	/* Build the element type's -1 recursively, then splat it across
+	   all lanes of the vector.  */
+	tree scalar = build_minus_one_cst (TREE_TYPE (type));
+
+	return build_vector_from_val (type, scalar);
+      }
+
+    case COMPLEX_TYPE:
+      /* -1 as a complex value: real part -1, imaginary part 0.  */
+      return build_complex (type,
+			    build_minus_one_cst (TREE_TYPE (type)),
+			    build_zero_cst (TREE_TYPE (type)));
+
+    default:
+      /* Non-arithmetic types have no -1; callers must not reach here.  */
+      gcc_unreachable ();
+    }
+}
+
/* Build 0 constant of type TYPE. This is used by constructor folding
and thus the constant should be represented in memory by
zero(es). */
return true;
}
+/* Return the precision of the type, or for a complex or vector type the
+   precision of the type of its elements.  */
+
+unsigned int
+element_precision (const_tree type)
+{
+  enum tree_code code = TREE_CODE (type);
+  /* Unwrap exactly one level for complex and vector types so that
+     callers comparing bit widths see the scalar element's precision
+     rather than the aggregate's.  Scalar types pass through unchanged.  */
+  if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
+    type = TREE_TYPE (type);
+
+  return TYPE_PRECISION (type);
+}
+
/* Return true if CODE represents an associative tree code. Otherwise
return false. */
bool