+2017-12-21 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * fold-const.h (mem_ref_offset): Return a poly_offset_int rather
+ than an offset_int.
+ * tree.c (mem_ref_offset): Likewise.
+ (build_simple_mem_ref_loc): Treat MEM_REF offsets as poly_ints.
+ * builtins.c (get_object_alignment_2): Likewise.
+ * expr.c (get_inner_reference, expand_expr_real_1): Likewise.
+ * gimple-fold.c (get_base_constructor): Likewise.
+ * gimple-ssa-strength-reduction.c (restructure_reference): Likewise.
+ * gimple-ssa-warn-restrict.c (builtin_memref::builtin_memref):
+ Likewise.
+ * ipa-polymorphic-call.c
+ (ipa_polymorphic_call_context::ipa_polymorphic_call_context): Likewise.
+ * ipa-prop.c (compute_complex_assign_jump_func): Likewise.
+ (get_ancestor_addr_info): Likewise.
+ * tree-sra.c (ipa_get_adjustment_candidate): Likewise.
+ * match.pd: Likewise.
+ * tree-data-ref.c (dr_analyze_innermost): Likewise.
+ * tree-dfa.c (get_addr_base_and_unit_offset_1): Likewise.
+ * tree-eh.c (tree_could_trap_p): Likewise.
+ * tree-object-size.c (addr_object_size): Likewise.
+ * tree-ssa-address.c (copy_ref_info): Likewise.
+ * tree-ssa-alias.c (indirect_ref_may_alias_decl_p): Likewise.
+ (indirect_refs_may_alias_p): Likewise.
+ * tree-ssa-sccvn.c (copy_reference_ops_from_ref): Likewise.
+ * tree-ssa.c (maybe_rewrite_mem_ref_base): Likewise.
+ (non_rewritable_mem_ref_base): Likewise.
+ * tree-vect-data-refs.c (vect_check_gather_scatter): Likewise.
+ * tree-vrp.c (vrp_prop::check_array_ref): Likewise.
+ * varasm.c (decode_addr_const): Likewise.
+
2017-12-21 Richard Sandiford <richard.sandiford@linaro.org>
Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com>
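
For readers of the patch: the point of the change is that a MEM_REF
offset can now be a POLY_INT_CST (for example a multiple of the runtime
vector length on SVE targets), so mem_ref_offset returns a
poly_offset_int instead of an offset_int.  Every hunk below follows one
of two caller idioms.  A minimal sketch of both, using only the
poly-int.h routines that the patch itself relies on (REF stands for an
arbitrary MEM_REF and is not taken from any hunk):

  /* Idiom 1: the caller already works in poly_ints, so the offset is
     simply propagated, narrowed to a poly_int64 where needed.  */
  poly_int64 byte_offset = mem_ref_offset (ref).force_shwi ();

  /* Idiom 2: the caller needs a compile-time constant, so it now asks
     for one explicitly and punts (or degrades gracefully) when the
     offset depends on the runtime vector length.  */
  offset_int coff;
  if (!mem_ref_offset (ref).is_constant (&coff))
    return;
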
bitpos += ptr_bitpos;
if (TREE_CODE (exp) == MEM_REF
|| TREE_CODE (exp) == TARGET_MEM_REF)
- bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
+ bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
}
}
else if (TREE_CODE (exp) == STRING_CST)
tree off = TREE_OPERAND (exp, 1);
if (!integer_zerop (off))
{
- offset_int boff, coff = mem_ref_offset (exp);
- boff = coff << LOG2_BITS_PER_UNIT;
+ poly_offset_int boff = mem_ref_offset (exp);
+ boff <<= LOG2_BITS_PER_UNIT;
bit_offset += boff;
}
exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
might end up in a register. */
if (mem_ref_refers_to_non_mem_p (exp))
{
- HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
+ poly_int64 offset = mem_ref_offset (exp).force_shwi ();
base = TREE_OPERAND (base, 0);
- if (offset == 0
+ if (known_eq (offset, 0)
&& !reverse
&& tree_fits_uhwi_p (TYPE_SIZE (type))
&& (GET_MODE_BITSIZE (DECL_MODE (base))
extern tree build_simple_mem_ref_loc (location_t, tree);
#define build_simple_mem_ref(T)\
build_simple_mem_ref_loc (UNKNOWN_LOCATION, T)
-extern offset_int mem_ref_offset (const_tree);
+extern poly_offset_int mem_ref_offset (const_tree);
extern tree build_invariant_address (tree, tree, poly_int64);
extern tree constant_boolean_node (bool, tree);
extern tree div_if_zero_remainder (const_tree, const_tree);
{
if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
return NULL_TREE;
- *bit_offset += (mem_ref_offset (base).to_short_addr ()
+ *bit_offset += (mem_ref_offset (base).force_shwi ()
* BITS_PER_UNIT);
}
widest_int index = *pindex;
tree mult_op0, t1, t2, type;
widest_int c1, c2, c3, c4, c5;
+ offset_int mem_offset;
if (!base
|| !offset
|| TREE_CODE (base) != MEM_REF
+ || !mem_ref_offset (base).is_constant (&mem_offset)
|| TREE_CODE (offset) != MULT_EXPR
|| TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
|| wi::umod_floor (index, BITS_PER_UNIT) != 0)
return false;
t1 = TREE_OPERAND (base, 0);
- c1 = widest_int::from (mem_ref_offset (base), SIGNED);
+ c1 = widest_int::from (mem_offset, SIGNED);
type = TREE_TYPE (TREE_OPERAND (base, 1));
mult_op0 = TREE_OPERAND (offset, 0);
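
Strength reduction is a clear instance of the second idiom: all the
candidate arithmetic here is done in widest_int (c1 ... c5 above),
which has no polynomial counterpart in this code, so the new early-out
simply rejects MEM_REFs whose offset is not a compile-time constant.
Roughly (a sketch, not a hunk from the patch):

  offset_int mem_offset;
  if (!mem_ref_offset (base).is_constant (&mem_offset))
    return false;  /* Can't fold a runtime-sized offset into C1.  */
  widest_int c1 = widest_int::from (mem_offset, SIGNED);
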
if (TREE_CODE (base) == MEM_REF)
{
- offset_int off = mem_ref_offset (base);
- refoff += off;
- offrange[0] += off;
- offrange[1] += off;
+ offset_int off;
+ if (mem_ref_offset (base).is_constant (&off))
+ {
+ refoff += off;
+ offrange[0] += off;
+ offrange[1] += off;
+ }
+ else
+ size = NULL_TREE;
base = TREE_OPERAND (base, 0);
}
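
builtin_memref shows a softer variant of the same idiom: rather than
giving up on the whole analysis, it drops just the size information
when the offset is not constant.  The shape of the pattern (sketch
only; SIZE is the same variable the hunk assigns):

  offset_int off;
  if (mem_ref_offset (base).is_constant (&off))
    /* ... fold OFF into the tracked offset ranges ...  */;
  else
    size = NULL_TREE;  /* Keep analyzing, minus the size info.  */
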
if (TREE_CODE (base) == MEM_REF)
{
- offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
+ offset += mem_ref_offset (base).force_shwi () * BITS_PER_UNIT;
base = TREE_OPERAND (base, 0);
}
{
/* We found dereference of a pointer. Type of the pointer
and MEM_REF is meaningless, but we can look further. */
- if (TREE_CODE (base) == MEM_REF)
+ offset_int mem_offset;
+ if (TREE_CODE (base) == MEM_REF
+ && mem_ref_offset (base).is_constant (&mem_offset))
{
- offset_int o = mem_ref_offset (base) * BITS_PER_UNIT;
+ offset_int o = mem_offset * BITS_PER_UNIT;
o += offset;
o += offset2;
if (!wi::fits_shwi_p (o))
if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
return;
base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
- if (!base || TREE_CODE (base) != MEM_REF)
+ offset_int mem_offset;
+ if (!base
+ || TREE_CODE (base) != MEM_REF
+ || !mem_ref_offset (base).is_constant (&mem_offset))
return;
- offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
+ offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
ssa = TREE_OPERAND (base, 0);
if (TREE_CODE (ssa) != SSA_NAME
|| !SSA_NAME_IS_DEFAULT_DEF (ssa)
obj = expr;
expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
- if (!expr || TREE_CODE (expr) != MEM_REF)
+ offset_int mem_offset;
+ if (!expr
+ || TREE_CODE (expr) != MEM_REF
+ || !mem_ref_offset (expr).is_constant (&mem_offset))
return NULL_TREE;
parm = TREE_OPERAND (expr, 0);
if (TREE_CODE (parm) != SSA_NAME
|| TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
return NULL_TREE;
- *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
+ *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
*obj_p = obj;
return expr;
}
tree base1 = get_addr_base_and_unit_offset (TREE_OPERAND (@1, 0), &off1);
if (base0 && TREE_CODE (base0) == MEM_REF)
{
- off0 += mem_ref_offset (base0).to_short_addr ();
+ off0 += mem_ref_offset (base0).force_shwi ();
base0 = TREE_OPERAND (base0, 0);
}
if (base1 && TREE_CODE (base1) == MEM_REF)
{
- off1 += mem_ref_offset (base1).to_short_addr ();
+ off1 += mem_ref_offset (base1).force_shwi ();
base1 = TREE_OPERAND (base1, 0);
}
}
}
/* Calculate the alignment and misalignment for the inner reference. */
- unsigned int HOST_WIDE_INT base_misalignment;
- unsigned int base_alignment;
- get_object_alignment_1 (base, &base_alignment, &base_misalignment);
+ unsigned int HOST_WIDE_INT bit_base_misalignment;
+ unsigned int bit_base_alignment;
+ get_object_alignment_1 (base, &bit_base_alignment, &bit_base_misalignment);
/* There are no bitfield references remaining in BASE, so the values
we got back must be whole bytes. */
- gcc_assert (base_alignment % BITS_PER_UNIT == 0
- && base_misalignment % BITS_PER_UNIT == 0);
- base_alignment /= BITS_PER_UNIT;
- base_misalignment /= BITS_PER_UNIT;
+ gcc_assert (bit_base_alignment % BITS_PER_UNIT == 0
+ && bit_base_misalignment % BITS_PER_UNIT == 0);
+ unsigned int base_alignment = bit_base_alignment / BITS_PER_UNIT;
+ poly_int64 base_misalignment = bit_base_misalignment / BITS_PER_UNIT;
if (TREE_CODE (base) == MEM_REF)
{
{
/* Subtract MOFF from the base and add it to POFFSET instead.
Adjust the misalignment to reflect the amount we subtracted. */
- offset_int moff = mem_ref_offset (base);
- base_misalignment -= moff.to_short_addr ();
+ poly_offset_int moff = mem_ref_offset (base);
+ base_misalignment -= moff.force_shwi ();
tree mofft = wide_int_to_tree (sizetype, moff);
if (!poffset)
poffset = mofft;
drb->offset = fold_convert (ssizetype, offset_iv.base);
drb->init = init;
drb->step = step;
- drb->base_alignment = base_alignment;
- drb->base_misalignment = base_misalignment & (base_alignment - 1);
+ if (known_misalignment (base_misalignment, base_alignment,
+ &drb->base_misalignment))
+ drb->base_alignment = base_alignment;
+ else
+ {
+ drb->base_alignment = known_alignment (base_misalignment);
+ drb->base_misalignment = 0;
+ }
drb->offset_alignment = highest_pow2_factor (offset_iv.base);
drb->step_alignment = highest_pow2_factor (step);
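
The data-ref hunk is the subtle one.  base_misalignment is now a
poly_int64, and known_misalignment only succeeds when the misalignment
with respect to base_alignment is the same for every runtime vector
length; otherwise we fall back to known_alignment, the largest power of
two that divides every value the polynomial can take.  A worked example
(invented numbers, written with poly_int's coefficient constructor and
assuming a target with NUM_POLY_INT_COEFFS == 2):

  /* Misalignment of 8 + 16x bytes against an alignment of 32:
     16 is not a multiple of 32, so the remainder varies with x and
     known_misalignment fails.  known_alignment (8 + 16x) is 8, so we
     record base_alignment = 8 and base_misalignment = 0.  */
  poly_int64 misalign (8, 16);
  HOST_WIDE_INT rem;
  if (!known_misalignment (misalign, 32, &rem))
    gcc_checking_assert (known_alignment (misalign) == 8);
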
{
if (!integer_zerop (TREE_OPERAND (exp, 1)))
{
- offset_int off = mem_ref_offset (exp);
- byte_offset += off.to_short_addr ();
+ poly_offset_int off = mem_ref_offset (exp);
+ byte_offset += off.force_shwi ();
}
exp = TREE_OPERAND (base, 0);
}
return NULL_TREE;
if (!integer_zerop (TMR_OFFSET (exp)))
{
- offset_int off = mem_ref_offset (exp);
- byte_offset += off.to_short_addr ();
+ poly_offset_int off = mem_ref_offset (exp);
+ byte_offset += off.force_shwi ();
}
exp = TREE_OPERAND (base, 0);
}
if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
{
tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
- offset_int off = mem_ref_offset (expr);
- if (wi::neg_p (off, SIGNED))
+ poly_offset_int off = mem_ref_offset (expr);
+ if (maybe_lt (off, 0))
return true;
if (TREE_CODE (base) == STRING_CST)
- return wi::leu_p (TREE_STRING_LENGTH (base), off);
- else if (DECL_SIZE_UNIT (base) == NULL_TREE
- || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
- || wi::leu_p (wi::to_offset (DECL_SIZE_UNIT (base)), off))
+ return maybe_le (TREE_STRING_LENGTH (base), off);
+ tree size = DECL_SIZE_UNIT (base);
+ if (size == NULL_TREE
+ || !poly_int_tree_p (size)
+ || maybe_le (wi::to_poly_offset (size), off))
return true;
/* Now we are sure the first byte of the access is inside
the object. */
}
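
Note the polarity of the new comparisons: maybe_lt (off, 0) is true
whenever the offset could be negative for some runtime vector length,
and maybe_le (size, off) whenever the access could start at or past the
end of the object.  That is the conservative direction for a could-trap
predicate: an access that is out of range only for some vector lengths
must still be reported as possibly trapping.  Illustrative coefficients
(again assuming NUM_POLY_INT_COEFFS == 2):

  /* off = 16 - 8x is nonnegative for x <= 2 but negative for larger x,
     so maybe_lt (off, 0) holds and we answer "could trap".  */
  poly_offset_int off (16, -8);
  gcc_checking_assert (maybe_lt (off, 0));
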
if (sz != unknown[object_size_type])
{
- offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
- if (wi::neg_p (dsz))
- sz = 0;
- else if (wi::fits_uhwi_p (dsz))
- sz = dsz.to_uhwi ();
+ offset_int mem_offset;
+ if (mem_ref_offset (pt_var).is_constant (&mem_offset))
+ {
+ offset_int dsz = wi::sub (sz, mem_offset);
+ if (wi::neg_p (dsz))
+ sz = 0;
+ else if (wi::fits_uhwi_p (dsz))
+ sz = dsz.to_uhwi ();
+ else
+ sz = unknown[object_size_type];
+ }
else
sz = unknown[object_size_type];
}
&& (TREE_INT_CST_LOW (TMR_STEP (new_ref))
< align)))))
{
- unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
- - mem_ref_offset (new_ref).to_short_addr ());
+ poly_uint64 inc = (mem_ref_offset (old_ref)
+ - mem_ref_offset (new_ref)).force_uhwi ();
adjust_ptr_info_misalignment (new_pi, inc);
}
else
&& DECL_P (base2));
ptr1 = TREE_OPERAND (base1, 0);
- offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
+ poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
/* If only one reference is based on a variable, they cannot alias if
the pointer access is beyond the extent of the variable access.
&& operand_equal_p (TMR_INDEX2 (base1),
TMR_INDEX2 (base2), 0))))))
{
- offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
- offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
+ poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
+ poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
offset2 + moff2, max_size2);
}
case MEM_REF:
/* The base address gets its own vn_reference_op_s structure. */
temp.op0 = TREE_OPERAND (ref, 1);
- {
- offset_int off = mem_ref_offset (ref);
- if (wi::fits_shwi_p (off))
- temp.off = off.to_shwi ();
- }
+ if (!mem_ref_offset (ref).to_shwi (&temp.off))
+ temp.off = -1;
temp.clique = MR_DEPENDENCE_CLIQUE (ref);
temp.base = MR_DEPENDENCE_BASE (ref);
temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
}
else if (DECL_SIZE (sym)
&& TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
- && mem_ref_offset (*tp) >= 0
- && wi::leu_p (mem_ref_offset (*tp)
- + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
- wi::to_offset (DECL_SIZE_UNIT (sym)))
+ && (known_subrange_p
+ (mem_ref_offset (*tp),
+ wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
+ 0, wi::to_offset (DECL_SIZE_UNIT (sym))))
&& (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
|| (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
== TYPE_PRECISION (TREE_TYPE (*tp))))
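
known_subrange_p folds the old two-part test into a single predicate:
known_subrange_p (pos, size, 0, whole) holds exactly when the access
[pos, pos + size) provably lies within [0, whole) for every runtime
vector length, which also subsumes the old mem_ref_offset >= 0 check.
For constant operands it degenerates to the familiar test (hypothetical
values):

  /* Equivalent to 4 >= 0 && 4 + 8 <= 16 for constants.  */
  offset_int off = 4, sz = 8, whole = 16;
  gcc_checking_assert (known_subrange_p (off, sz, 0, whole));
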
|| TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
&& useless_type_conversion_p (TREE_TYPE (base),
TREE_TYPE (TREE_TYPE (decl)))
- && wi::fits_uhwi_p (mem_ref_offset (base))
- && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
- mem_ref_offset (base))
+ && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
+ mem_ref_offset (base))
&& multiple_of_p (sizetype, TREE_OPERAND (base, 1),
TYPE_SIZE_UNIT (TREE_TYPE (base))))
return NULL_TREE;
return NULL_TREE;
/* For integral typed extracts we can use a BIT_FIELD_REF. */
if (DECL_SIZE (decl)
- && TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
- && mem_ref_offset (base) >= 0
- && wi::leu_p (mem_ref_offset (base)
- + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
- wi::to_offset (DECL_SIZE_UNIT (decl)))
+ && (known_subrange_p
+ (mem_ref_offset (base),
+ wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
+ 0, wi::to_poly_offset (DECL_SIZE_UNIT (decl))))
/* ??? We can't handle bitfield precision extracts without
either using an alternate type for the BIT_FIELD_REF and
then doing a conversion or possibly adjusting the offset
if (!integer_zerop (TREE_OPERAND (base, 1)))
{
if (off == NULL_TREE)
- {
- offset_int moff = mem_ref_offset (base);
- off = wide_int_to_tree (sizetype, moff);
- }
+ off = wide_int_to_tree (sizetype, mem_ref_offset (base));
else
off = size_binop (PLUS_EXPR, off,
fold_convert (sizetype, TREE_OPERAND (base, 1)));
|| TREE_CODE (el_sz) != INTEGER_CST)
return;
- idx = mem_ref_offset (t);
+ if (!mem_ref_offset (t).is_constant (&idx))
+ return;
+
idx = wi::sdiv_trunc (idx, wi::to_offset (el_sz));
if (idx < 0)
{
}
}
-
/* Return true if STMT is interesting for VRP. */
bool
gcc_assert (ptr);
if (TREE_CODE (ptr) == MEM_REF)
{
- offset += mem_ref_offset (ptr).to_short_addr ();
+ offset += mem_ref_offset (ptr).force_shwi ();
ptr = TREE_OPERAND (ptr, 0);
}
else
/* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
-offset_int
+poly_offset_int
mem_ref_offset (const_tree t)
{
- return offset_int::from (wi::to_wide (TREE_OPERAND (t, 1)), SIGNED);
+ return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
+ SIGNED);
}
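
The new mem_ref_offset body is the whole interface change in miniature:
wi::to_poly_wide decomposes operand 1, which may now be a POLY_INT_CST
as well as an INTEGER_CST, and the extension stays SIGNED because
MEM_REF offsets are byte offsets that can be negative.  For ordinary
MEM_REFs nothing changes; a usage sketch (TYPE and PTR stand for any
suitable trees and are not taken from the patch):

  /* A constant offset still comes back as a single-coefficient value,
     so is_constant always succeeds for it.  */
  tree ref = build2 (MEM_REF, type, ptr,
		     build_int_cst (ptr_type_node, -4));
  offset_int off;
  gcc_checking_assert (mem_ref_offset (ref).is_constant (&off)
		       && off == -4);
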
/* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
else if (TREE_CODE (target) == MEM_REF
&& TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
{
- offset += mem_ref_offset (target).to_short_addr ();
+ offset += mem_ref_offset (target).force_shwi ();
target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
}
else if (TREE_CODE (target) == INDIRECT_REF