[PATCH] bitint: Lower the partial limbs of extended _BitInts with m_limb_type.
Yang Yujie
yangyujie@loongson.cn
Fri Sep 5 06:16:30 GMT 2025
Lower the partial limbs of extended _BitInts like the full limbs for most
operations, so that explicit extensions can be inserted only where they
are really needed.
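As a rough illustration of the idea (assuming a target with 64-bit limbs
and bitint_extended set, i.e. an ABI that keeps the unused bits of the
most significant limb sign- or zero-extended; the 135-bit width below is
just an example):

  /* With 64-bit limbs, an unsigned _BitInt(135) occupies three limbs;
     the most significant limb carries 7 value bits and 57 unused bits
     that the ABI requires to be zero-extended.  */
  unsigned _BitInt(135) x;

Before this change, the 7-bit partial limb was lowered in a 7-bit integer
type; with it, the partial limb is carried around in the full limb type
(m_limb_type), and an explicit extension is emitted only after operations
that can leave the upper 57 bits stale (e.g. plus/minus, left shifts, or
multiplication/division libcalls).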
gcc/ChangeLog:
* gimple-lower-bitint.cc (struct bitint_large_huge): Remove
the abi_load_p parameter of limb_access.
(bitint_large_huge::limb_access): Same.
(bitint_large_huge::limb_access_type): Conditionally express
the extended partial limbs with m_limb_type.
(MAYBE_EXTEND_PARTIAL_LIMB): Define.
(bitint_large_huge::handle_plus_minus): Move the actual work
to handle_plus_minus_1 and extend the partial limb of the
result afterwards.
(bitint_large_huge::handle_plus_minus_1): Define.
(bitint_large_huge::handle_lshift): Extend the partial limbs
after the operation, if needed.
(bitint_large_huge::handle_cast): Same.
(bitint_large_huge::handle_load): Same.
(bitint_large_huge::handle_stmt): Same.
(bitint_large_huge::lower_mergeable_stmt): Use the type of
its actual precision when masking the widened partial limb.
(bitint_large_huge::extend_partial_limb): Define.
(bitint_large_huge::lower_muldiv_stmt): Use.
(bitint_large_huge::lower_float_conv_stmt): Use.
(bitint_large_huge::finish_arith_overflow): Use.
(bitint_large_huge::lower_addsub_overflow): Use.
(bitint_large_huge::lower_stmt): Initialize m_extending.
---
gcc/gimple-lower-bitint.cc | 235 ++++++++++++++++++++++++++++---------
1 file changed, 181 insertions(+), 54 deletions(-)
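For reference, the explicit extension that extend_partial_limb emits for
the most significant (partial) limb amounts to casting the limb value
down to its actual precision and back up to the limb type.  A minimal C
sketch of that operation, assuming 64-bit limbs; the helper name and
parameters are illustrative, not taken from the patch, and the signed
case relies on GCC's implementation-defined conversion and arithmetic
right shift:

  #include <stdint.h>

  /* BITS is the number of value bits in the partial limb
     (precision % limb_prec); IS_SIGNED is the signedness of the
     _BitInt type.  Sketch only.  */
  static uint64_t
  extend_top_limb (uint64_t limb, int bits, int is_signed)
  {
    int shift = 64 - bits;
    if (is_signed)
      /* Sign-extend: move the value bits to the top, then
         arithmetic-shift them back down.  */
      return (uint64_t) (((int64_t) (limb << shift)) >> shift);
    /* Zero-extend: clear the unused high bits.  */
    return (limb << shift) >> shift;
  }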
diff --git a/gcc/gimple-lower-bitint.cc b/gcc/gimple-lower-bitint.cc
index 9b4d49395ae..c4d57ef6ab2 100644
--- a/gcc/gimple-lower-bitint.cc
+++ b/gcc/gimple-lower-bitint.cc
@@ -431,8 +431,8 @@ struct bitint_large_huge
~bitint_large_huge ();
void insert_before (gimple *);
- tree limb_access_type (tree, tree);
- tree limb_access (tree, tree, tree, bool, bool = false);
+ tree limb_access_type (tree, tree, bool = false);
+ tree limb_access (tree, tree, tree, bool);
tree build_bit_field_ref (tree, tree, unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT);
void if_then (gimple *, profile_probability, edge &, edge &);
@@ -444,6 +444,7 @@ struct bitint_large_huge
tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
tree add_cast (tree, tree);
tree handle_plus_minus (tree_code, tree, tree, tree);
+ tree handle_plus_minus_1 (tree_code, tree, tree, tree);
tree handle_lshift (tree, tree, tree);
tree handle_cast (tree, tree, tree);
tree handle_bit_field_ref (tree, tree);
@@ -455,6 +456,7 @@ struct bitint_large_huge
tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
void lower_shift_stmt (tree, gimple *);
void lower_muldiv_stmt (tree, gimple *);
+ void extend_partial_limb (tree, tree);
void lower_float_conv_stmt (tree, gimple *);
tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
unsigned int, bool);
@@ -553,12 +555,21 @@ struct bitint_large_huge
constant index after comparing the runtime one for equality with the
constant). In these cases, m_cast_conditional is set to true and
the bit-field load then communicates its m_data_cnt to handle_cast
- using m_bitfld_load. */
+ using m_bitfld_load.
+
+ For extended _BitInts (where bitint_extended is set), the partial limbs
+ are assumed to be already extended and passed around in m_limb_type.
+ m_extending should be set when this behavior needs to be disabled and
+ the partial limbs expressed in the type of their actual precision, for
+ example, when handling statements with operations that require an
+ explicit extension to re-fill the unused bits (e.g. plus/minus). */
+
bool m_first;
bool m_var_msb;
unsigned m_upwards_2limb;
bool m_upwards;
bool m_cast_conditional;
+ bool m_extending;
unsigned m_bitfld_load;
vec<tree> m_data;
unsigned int m_data_cnt;
@@ -593,10 +604,20 @@ bitint_large_huge::insert_before (gimple *g)
significant limb if any. */
tree
-bitint_large_huge::limb_access_type (tree type, tree idx)
+bitint_large_huge::limb_access_type (tree type, tree idx,
+ bool extending_p)
{
if (type == NULL_TREE)
return m_limb_type;
+
+ /* For extended _BitInts, if either m_extending or extending_p is set,
+ this function should return the non-extended type of the partial
+ (most significant) limb, so that it gets an explicit extension later
+ if needed (e.g. during an explicit cast or after certain operations).
+ Otherwise, it should always return m_limb_type, so that the partial
+ limb is considered already extended to avoid unnecessary extension. */
+ extending_p |= m_extending;
+
unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
unsigned int prec = TYPE_PRECISION (type);
gcc_assert (i * limb_prec < prec);
@@ -605,29 +626,34 @@ bitint_large_huge::limb_access_type (tree type, tree idx)
: (i + 1) * limb_prec <= prec)
return m_limb_type;
else
- return build_nonstandard_integer_type (prec % limb_prec,
- TYPE_UNSIGNED (type));
+ {
+ if (bitint_extended && !extending_p)
+ return m_limb_type;
+ else
+ return build_nonstandard_integer_type (prec % limb_prec,
+ TYPE_UNSIGNED (type));
+ }
}
/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
TYPE. If WRITE_P is true, it will be a store, otherwise a read. */
tree
-bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p,
- bool abi_load_p)
+bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
tree atype = (tree_fits_uhwi_p (idx)
? limb_access_type (type, idx) : m_limb_type);
- tree ltype = (bitint_extended && abi_load_p) ? atype : m_limb_type;
+ tree ltype = m_limb_type;
addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (var));
+ if (as != TYPE_ADDR_SPACE (ltype))
+ ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
+ | ENCODE_QUAL_ADDR_SPACE (as));
+
tree ret;
if (DECL_P (var) && tree_fits_uhwi_p (idx))
{
- if (as != TYPE_ADDR_SPACE (ltype))
- ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
- | ENCODE_QUAL_ADDR_SPACE (as));
tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
ret = build2 (MEM_REF, ltype,
@@ -638,9 +664,6 @@ bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p,
}
else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
{
- if (as != TYPE_ADDR_SPACE (ltype))
- ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
- | ENCODE_QUAL_ADDR_SPACE (as));
ret
= build2 (MEM_REF, ltype, unshare_expr (TREE_OPERAND (var, 0)),
size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
@@ -653,10 +676,6 @@ bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p,
}
else
{
- ltype = m_limb_type;
- if (as != TYPE_ADDR_SPACE (ltype))
- ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
- | ENCODE_QUAL_ADDR_SPACE (as));
var = unshare_expr (var);
if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
|| !useless_type_conversion_p (m_limb_type,
@@ -1184,11 +1203,46 @@ bitint_large_huge::add_cast (tree type, tree val)
return lhs;
}
-/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR. */
+/* For extended _BitInts, the partial (most significant) limb frequently
+ needs to be converted back to m_limb_type so that it can be combined
+ with the other operands. */
+
+#define MAYBE_EXTEND_PARTIAL_LIMB(op) \
+ do { \
+ if (bitint_extended \
+ && !m_extending \
+ && !types_compatible_p (m_limb_type, TREE_TYPE (op))) \
+ op = add_cast (m_limb_type, op); \
+ } while (0)
+
+/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR with
+ possible extension. */
tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
tree idx)
+{
+ bool m_extending_save = m_extending;
+ m_extending = true;
+
+ rhs2 = handle_operand (rhs2, idx);
+ if (rhs1 == NULL_TREE)
+ rhs1 = build_zero_cst (TREE_TYPE (rhs2));
+ else
+ rhs1 = handle_operand (rhs1, idx);
+
+ m_extending = m_extending_save;
+
+ tree ret = handle_plus_minus_1 (code, rhs1, rhs2, idx);
+ MAYBE_EXTEND_PARTIAL_LIMB (ret);
+ return ret;
+}
+
+/* Subroutine of handle_plus_minus that does the actual work. */
+
+tree
+bitint_large_huge::handle_plus_minus_1 (tree_code code, tree rhs1, tree rhs2,
+ tree idx)
{
tree lhs, data_out, ctype;
tree rhs1_type = TREE_TYPE (rhs1);
@@ -1300,6 +1354,11 @@ bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
if (cnt == 0)
return rhs1;
+ bool m_extending_save = m_extending;
+ m_extending = true;
+ rhs1 = handle_operand (rhs1, idx);
+ m_extending = m_extending_save;
+
tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
gimple *g;
tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
@@ -1338,6 +1397,8 @@ bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
}
else
lhs = data_in;
+
+ MAYBE_EXTEND_PARTIAL_LIMB (lhs);
m_data[m_data_cnt] = data_out;
m_data_cnt += 2;
return lhs;
@@ -1395,9 +1456,11 @@ bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
rhs1 = handle_operand (rhs1, ridx);
if (tree_fits_uhwi_p (idx))
{
- tree type = limb_access_type (lhs_type, idx);
+ tree type = limb_access_type (lhs_type, idx, true);
if (!types_compatible_p (type, TREE_TYPE (rhs1)))
rhs1 = add_cast (type, rhs1);
+
+ MAYBE_EXTEND_PARTIAL_LIMB (rhs1);
}
return rhs1;
}
@@ -1713,9 +1776,10 @@ bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
else
t = m_data[save_data_cnt + 1];
}
- tree type = limb_access_type (lhs_type, idx);
+ tree type = limb_access_type (lhs_type, idx, true);
if (!useless_type_conversion_p (type, m_limb_type))
t = add_cast (type, t);
+ MAYBE_EXTEND_PARTIAL_LIMB (t);
m_first = save_first;
return t;
}
@@ -1799,10 +1863,11 @@ bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
t = m_data[m_data_cnt + 1];
else
{
- tree type = limb_access_type (lhs_type, idx);
+ tree type = limb_access_type (lhs_type, idx, true);
t = m_data[m_data_cnt + 2];
if (!useless_type_conversion_p (type, m_limb_type))
t = add_cast (type, t);
+ MAYBE_EXTEND_PARTIAL_LIMB (t);
}
m_data_cnt += 3;
return t;
@@ -1816,7 +1881,7 @@ bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
unsigned lcnt = CEIL ((unsigned) TYPE_PRECISION (lhs_type), limb_prec);
if (tree_fits_uhwi_p (idx))
{
- tree type = limb_access_type (lhs_type, idx);
+ tree type = limb_access_type (lhs_type, idx, true);
if (bitint_big_endian
? tree_to_uhwi (idx) == lcnt - 1 : integer_zerop (idx))
t = m_data[m_data_cnt];
@@ -1829,6 +1894,7 @@ bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
t = m_data[m_data_cnt + 2];
if (!useless_type_conversion_p (type, m_limb_type))
t = add_cast (type, t);
+ MAYBE_EXTEND_PARTIAL_LIMB (t);
m_data_cnt += 3;
return t;
}
@@ -2249,9 +2315,11 @@ bitint_large_huge::handle_load (gimple *stmt, tree idx)
}
if (tree_fits_uhwi_p (idx))
{
- tree atype = limb_access_type (rhs_type, idx);
+ tree atype = limb_access_type (rhs_type, idx, true);
if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
iv = add_cast (atype, iv);
+
+ MAYBE_EXTEND_PARTIAL_LIMB (iv);
}
m_data_cnt += 3;
return iv;
@@ -2261,10 +2329,12 @@ normal_load:
/* Use write_p = true for loads with EH edges to make
sure limb_access doesn't add a cast as separate
statement after it. */
- rhs1 = limb_access (rhs_type, rhs1, idx, eh, !load_bitfield_p);
+ rhs1 = limb_access (rhs_type, rhs1, idx, eh);
tree ret = make_ssa_name (TREE_TYPE (rhs1));
g = gimple_build_assign (ret, rhs1);
insert_before (g);
+ rhs1 = gimple_assign_lhs (g);
+
if (eh)
{
maybe_duplicate_eh_stmt (g, stmt);
@@ -2274,14 +2344,29 @@ normal_load:
m_gsi = gsi_after_labels (e->dest);
add_eh_edge (e->src, eh_edge);
}
- if (tree_fits_uhwi_p (idx))
- {
- tree atype = limb_access_type (rhs_type, idx);
- if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
- ret = add_cast (atype, ret);
- }
}
- return ret;
+
+ /* Handle type casting. */
+ if (tree_fits_uhwi_p (idx))
+ {
+ tree atype = limb_access_type (rhs_type, idx, true);
+ if (bitint_extended && ((eh && m_extending) || load_bitfield_p))
+ {
+ /* If a bit-field is being loaded, extend it to avoid
+ picking up adjacent bits from memory.
+
+ If m_extending is set in the eh case, the return value
+ should be in limb_access_type (rhs_type, idx, true). */
+
+ rhs1 = add_cast (atype, rhs1);
+
+ if (load_bitfield_p && !m_extending)
+ rhs1 = add_cast (m_limb_type, rhs1);
+ }
+ else if (eh && !useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
+ rhs1 = add_cast (atype, rhs1);
+ }
+ return rhs1;
}
/* Return a limb IDX from a mergeable statement STMT. */
@@ -2309,20 +2394,26 @@ bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
rhs1, rhs2);
insert_before (g);
+ /* Extend the result of BIT_NOT_EXPR. */
+ if (rhs2 == NULL_TREE && bitint_extended && tree_fits_uhwi_p (idx))
+ {
+ tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
+ tree atype = limb_access_type (rhs_type, idx, true);
+ lhs = add_cast (atype, lhs);
+ if (!m_extending)
+ lhs = add_cast (m_limb_type, lhs);
+ }
return lhs;
case PLUS_EXPR:
case MINUS_EXPR:
- rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
- rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
return handle_plus_minus (gimple_assign_rhs_code (stmt),
- rhs1, rhs2, idx);
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt), idx);
case NEGATE_EXPR:
- rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
- rhs1 = build_zero_cst (TREE_TYPE (rhs2));
- return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
+ return handle_plus_minus (MINUS_EXPR, NULL_TREE,
+ gimple_assign_rhs1 (stmt), idx);
case LSHIFT_EXPR:
- return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
- idx),
+ return handle_lshift (gimple_assign_rhs1 (stmt),
gimple_assign_rhs2 (stmt), idx);
case SSA_NAME:
case PAREN_EXPR:
@@ -3186,7 +3277,8 @@ bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
&& tree_fits_uhwi_p (idx)
&& !nlhs)
{
- rhs1 = add_cast (limb_access_type (lhs_type, idx), rhs1);
+ rhs1 = add_cast (limb_access_type (lhs_type, idx, true),
+ rhs1);
rhs1 = add_cast (TREE_TYPE (l), rhs1);
}
@@ -3929,20 +4021,39 @@ bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
loop or in the if after it. To simplify the code, just
read it back from memory and extend. */
m_gsi = gsi_after_labels (edge_false->dest);
- idx = bitint_big_endian ? size_zero_node : p;
- tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
- tree type = limb_access_type (TREE_TYPE (lhs), idx);
- tree v = make_ssa_name (m_limb_type);
- g = gimple_build_assign (v, l);
- insert_before (g);
- v = add_cast (type, v);
- l = limb_access (TREE_TYPE (lhs), obj, idx, true);
- g = gimple_build_assign (l, add_cast (m_limb_type, v));
- insert_before (g);
+ extend_partial_limb (obj, TREE_TYPE (lhs));
}
}
}
+/* Extend the partial limb of OBJ of the _BitInt type TYPE.
+ This is needed after libcalls that output non-extended
+ _BitInt values on targets that have the bitint_extended
+ flag set. */
+
+void
+bitint_large_huge::extend_partial_limb (tree obj, tree type)
+{
+ int prec = TYPE_PRECISION (type);
+ if (prec % limb_prec != 0)
+ {
+ tree idx = bitint_big_endian ? size_zero_node
+ : size_int (prec / limb_prec);
+
+ tree hi_limb = limb_access (type, obj, idx, true);
+ gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), hi_limb);
+ insert_before (g);
+
+ tree val = gimple_assign_lhs (g);
+ tree hi_type = limb_access_type (type, idx, true);
+ val = add_cast (m_limb_type, add_cast (hi_type, val));
+
+ hi_limb = limb_access (type, obj, idx, true);
+ g = gimple_build_assign (hi_limb, val);
+ insert_before (g);
+ }
+}
+
/* Lower large/huge _BitInt multiplication or division. */
void
@@ -4007,6 +4118,11 @@ bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
default:
gcc_unreachable ();
}
+
+ /* Extend the result if needed. */
+ if (bitint_extended)
+ extend_partial_limb (obj, type);
+
if (stmt_ends_bb_p (stmt))
{
maybe_duplicate_eh_stmt (g, stmt);
@@ -4038,7 +4154,8 @@ bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
gimple *g;
if (rhs_code == FIX_TRUNC_EXPR)
{
- int prec = TYPE_PRECISION (TREE_TYPE (lhs));
+ tree lhs_type = TREE_TYPE (lhs);
+ int prec = TYPE_PRECISION (lhs_type);
if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
prec = -prec;
if (obj == NULL_TREE)
@@ -4073,6 +4190,10 @@ bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
lhs, build_int_cst (sitype, prec),
rhs1);
insert_before (g);
+
+ /* Extend the result if needed (libgcc won't do this now). */
+ if (bitint_extended)
+ extend_partial_limb (obj, lhs_type);
}
else
{
@@ -4286,6 +4407,11 @@ bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
obj_nelts * m_limb_size));
insert_before (g);
}
+
+ /* Extend the result if needed. */
+ if (obj && bitint_extended)
+ extend_partial_limb (obj, type);
+
if (orig_obj == NULL_TREE && obj)
{
ovf = add_cast (m_limb_type, ovf);
@@ -4678,7 +4804,7 @@ bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
}
}
}
- tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
+ tree rhs = handle_plus_minus_1 (code, rhs1, rhs2, idx);
if (ovf != boolean_false_node)
{
if (tree_fits_uhwi_p (idx))
@@ -5953,6 +6079,7 @@ bitint_large_huge::lower_stmt (gimple *stmt)
m_upwards = false;
m_var_msb = false;
m_cast_conditional = false;
+ m_extending = false;
m_bitfld_load = 0;
m_loc = gimple_location (stmt);
if (is_gimple_call (stmt))
--
2.46.0