/* Definitions for the ubiquitous 'tree' type for GNU compilers.
- Copyright (C) 1989-2017 Free Software Foundation, Inc.
+ Copyright (C) 1989-2018 Free Software Foundation, Inc.
This file is part of GCC.
#define STRIP_USELESS_TYPE_CONVERSION(EXP) \
(EXP) = tree_ssa_strip_useless_type_conversions (EXP)
+/* Remove any VIEW_CONVERT_EXPR or NON_LVALUE_EXPR that exists purely
+   to provide a location_t.  */
+
+#define STRIP_ANY_LOCATION_WRAPPER(EXP) \
+ (EXP) = tree_strip_any_location_wrapper (CONST_CAST_TREE (EXP))
+
/* Nonzero if TYPE represents a vector type. */
#define VECTOR_TYPE_P(TYPE) (TREE_CODE (TYPE) == VECTOR_TYPE)
#define FALLTHROUGH_LABEL_P(NODE) \
(LABEL_DECL_CHECK (NODE)->base.private_flag)
+/* Set on the artificial label created for break; stmt from a switch.
+ This is used to implement -Wimplicit-fallthrough. */
+#define SWITCH_BREAK_LABEL_P(NODE) \
+ (LABEL_DECL_CHECK (NODE)->base.protected_flag)
+
/* Nonzero means this expression is volatile in the C sense:
its address should be of type `volatile WHATEVER *'.
In other words, the declared item is volatile qualified.
/* Same as TYPE_UNSIGNED but converted to SIGNOP. */
#define TYPE_SIGN(NODE) ((signop) TYPE_UNSIGNED (NODE))
-/* True if overflow wraps around for the given integral type. That
+/* True if overflow wraps around for the given integral or pointer type. That
is, TYPE_MAX + 1 == TYPE_MIN. */
#define TYPE_OVERFLOW_WRAPS(TYPE) \
- (ANY_INTEGRAL_TYPE_CHECK(TYPE)->base.u.bits.unsigned_flag || flag_wrapv)
+ (POINTER_TYPE_P (TYPE) \
+ ? flag_wrapv_pointer \
+ : (ANY_INTEGRAL_TYPE_CHECK(TYPE)->base.u.bits.unsigned_flag \
+ || flag_wrapv))
-/* True if overflow is undefined for the given integral type. We may
- optimize on the assumption that values in the type never overflow.
+/* True if overflow is undefined for the given integral or pointer type.
+ We may optimize on the assumption that values in the type never overflow.
IMPORTANT NOTE: Any optimization based on TYPE_OVERFLOW_UNDEFINED
   must issue a warning based on warn_strict_overflow.  In some cases
   it will be appropriate to issue the warning immediately, and in
   other cases it will be appropriate to simply set a flag and let the
caller decide whether a warning is appropriate or not. */
#define TYPE_OVERFLOW_UNDEFINED(TYPE) \
- (!ANY_INTEGRAL_TYPE_CHECK(TYPE)->base.u.bits.unsigned_flag \
- && !flag_wrapv && !flag_trapv)
+ (POINTER_TYPE_P (TYPE) \
+ ? !flag_wrapv_pointer \
+ : (!ANY_INTEGRAL_TYPE_CHECK(TYPE)->base.u.bits.unsigned_flag \
+ && !flag_wrapv && !flag_trapv))
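
/* Editorial aside, not part of tree.h or of this patch: a minimal sketch of
   how a caller is expected to use the two macros above.
   can_assume_no_overflow_sketch is a hypothetical helper, shown only to
   make the contract concrete.  */

static inline bool
can_assume_no_overflow_sketch (tree type)
{
  /* For wrapping types (unsigned, -fwrapv, or pointers under
     -fwrapv-pointer) TYPE_MAX + 1 == TYPE_MIN is a valid result, so no
     overflow-based simplification is allowed.  */
  if (TYPE_OVERFLOW_WRAPS (type))
    return false;

  /* Otherwise simplifications such as folding X + CST > X to true are
     valid only when overflow is undefined, and the caller must then
     honour warn_strict_overflow as described above.  */
  return TYPE_OVERFLOW_UNDEFINED (type);
}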
/* True if overflow for the given integral type should issue a
trap. */
(TREE_CHECK2 (NODE, VAR_DECL, \
RESULT_DECL)->decl_common.decl_nonshareable_flag)
+/* In a PARM_DECL, set for Fortran hidden string length arguments that some
+ buggy callers don't pass to the callee. */
+#define DECL_HIDDEN_STRING_LENGTH(NODE) \
+ (TREE_CHECK (NODE, PARM_DECL)->decl_common.decl_nonshareable_flag)
+
/* In a CALL_EXPR, means that the call is the jump from a thunk to the
thunked-to function. */
#define CALL_FROM_THUNK_P(NODE) (CALL_EXPR_CHECK (NODE)->base.protected_flag)
#define TREE_INT_CST_LOW(NODE) \
((unsigned HOST_WIDE_INT) TREE_INT_CST_ELT (NODE, 0))
+/* Return true if NODE is a POLY_INT_CST. This is only ever true on
+ targets with variable-sized modes. */
+#define POLY_INT_CST_P(NODE) \
+ (NUM_POLY_INT_COEFFS > 1 && TREE_CODE (NODE) == POLY_INT_CST)
+
+/* In a POLY_INT_CST node. */
+#define POLY_INT_CST_COEFF(NODE, I) \
+ (POLY_INT_CST_CHECK (NODE)->poly_int_cst.coeffs[I])
+
#define TREE_REAL_CST_PTR(NODE) (REAL_CST_CHECK (NODE)->real_cst.real_cst_ptr)
#define TREE_REAL_CST(NODE) (*TREE_REAL_CST_PTR (NODE))
#define TREE_REALPART(NODE) (COMPLEX_CST_CHECK (NODE)->complex.real)
#define TREE_IMAGPART(NODE) (COMPLEX_CST_CHECK (NODE)->complex.imag)
-/* In a VECTOR_CST node. */
-#define VECTOR_CST_NELTS(NODE) (VECTOR_CST_CHECK (NODE)->base.u.nelts)
-#define VECTOR_CST_ELTS(NODE) (VECTOR_CST_CHECK (NODE)->vector.elts)
-#define VECTOR_CST_ELT(NODE,IDX) (VECTOR_CST_CHECK (NODE)->vector.elts[IDX])
+/* In a VECTOR_CST node. See generic.texi for details. */
+#define VECTOR_CST_NELTS(NODE) (TYPE_VECTOR_SUBPARTS (TREE_TYPE (NODE)))
+#define VECTOR_CST_ELT(NODE,IDX) vector_cst_elt (NODE, IDX)
+
+#define VECTOR_CST_LOG2_NPATTERNS(NODE) \
+ (VECTOR_CST_CHECK (NODE)->base.u.vector_cst.log2_npatterns)
+#define VECTOR_CST_NPATTERNS(NODE) \
+ (1U << VECTOR_CST_LOG2_NPATTERNS (NODE))
+#define VECTOR_CST_NELTS_PER_PATTERN(NODE) \
+ (VECTOR_CST_CHECK (NODE)->base.u.vector_cst.nelts_per_pattern)
+#define VECTOR_CST_DUPLICATE_P(NODE) \
+ (VECTOR_CST_NELTS_PER_PATTERN (NODE) == 1)
+#define VECTOR_CST_STEPPED_P(NODE) \
+ (VECTOR_CST_NELTS_PER_PATTERN (NODE) == 3)
+#define VECTOR_CST_ENCODED_ELTS(NODE) \
+ (VECTOR_CST_CHECK (NODE)->vector.elts)
+#define VECTOR_CST_ENCODED_ELT(NODE, ELT) \
+ (VECTOR_CST_CHECK (NODE)->vector.elts[ELT])
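
/* Editorial aside, not part of tree.h or of this patch: a worked example of
   the encoding accessed above.  A 16-element constant { 0, 1, 2, 3, ... }
   can be represented with VECTOR_CST_NPATTERNS == 1 and
   VECTOR_CST_NELTS_PER_PATTERN == 3, storing only the encoded elements
   { 0, 1, 2 }; the remaining elements continue the arithmetic series.
   A duplicate such as { 7, 7, 7, ... } needs only NELTS_PER_PATTERN == 1
   and the single encoded element { 7 }.  See generic.texi and
   vector_cst_elt for the full rules.  */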
/* Define fields and accessors for some special-purpose tree nodes. */
#define VL_EXP_OPERAND_LENGTH(NODE) \
((int)TREE_INT_CST_LOW (VL_EXP_CHECK (NODE)->exp.operands[0]))
+/* Nonzero if gimple_debug_nonbind_marker_p() may possibly hold. */
+#define MAY_HAVE_DEBUG_MARKER_STMTS debug_nonbind_markers_p
+/* Nonzero if gimple_debug_bind_p() (and thus
+ gimple_debug_source_bind_p()) may possibly hold. */
+#define MAY_HAVE_DEBUG_BIND_STMTS flag_var_tracking_assignments
/* Nonzero if is_gimple_debug() may possibly hold. */
-#define MAY_HAVE_DEBUG_STMTS (flag_var_tracking_assignments)
+#define MAY_HAVE_DEBUG_STMTS \
+ (MAY_HAVE_DEBUG_MARKER_STMTS || MAY_HAVE_DEBUG_BIND_STMTS)
/* In a LOOP_EXPR node. */
#define LOOP_EXPR_BODY(NODE) TREE_OPERAND_CHECK_CODE (NODE, LOOP_EXPR, 0)
extern void protected_set_expr_location (tree, location_t);
+extern tree maybe_wrap_with_location (tree, location_t);
+
/* In a TARGET_EXPR node. */
#define TARGET_EXPR_SLOT(NODE) TREE_OPERAND_CHECK_CODE (NODE, TARGET_EXPR, 0)
#define TARGET_EXPR_INITIAL(NODE) TREE_OPERAND_CHECK_CODE (NODE, TARGET_EXPR, 1)
#define TARGET_EXPR_CLEANUP(NODE) TREE_OPERAND_CHECK_CODE (NODE, TARGET_EXPR, 2)
+/* Don't elide the initialization of TARGET_EXPR_SLOT for this TARGET_EXPR
+   when it appears on the RHS of a MODIFY_EXPR.  */
+#define TARGET_EXPR_NO_ELIDE(NODE) (TARGET_EXPR_CHECK (NODE)->base.private_flag)
/* DECL_EXPR accessor. This gives access to the DECL associated with
the given declaration statement. */
/* SWITCH_EXPR accessors. These give access to the condition and body. */
#define SWITCH_COND(NODE) TREE_OPERAND (SWITCH_EXPR_CHECK (NODE), 0)
#define SWITCH_BODY(NODE) TREE_OPERAND (SWITCH_EXPR_CHECK (NODE), 1)
+/* True if there are case labels for all possible values of SWITCH_COND, either
+ because there is a default: case label or because the case label ranges cover
+ all values. */
+#define SWITCH_ALL_CASES_P(NODE) (SWITCH_EXPR_CHECK (NODE)->base.private_flag)
/* CASE_LABEL_EXPR accessors. These give access to the high and low values
of a case label, respectively. */
/* GOTO_EXPR accessor. This gives access to the label associated with
a goto statement. */
-#define GOTO_DESTINATION(NODE) TREE_OPERAND ((NODE), 0)
+#define GOTO_DESTINATION(NODE) TREE_OPERAND (GOTO_EXPR_CHECK (NODE), 0)
/* ASM_EXPR accessors. ASM_STRING returns a STRING_CST for the
instruction (e.g., "mov x, y"). ASM_OUTPUTS, ASM_INPUTS, and
ASM_OPERAND with no operands. */
#define ASM_INPUT_P(NODE) (ASM_EXPR_CHECK (NODE)->base.static_flag)
#define ASM_VOLATILE_P(NODE) (ASM_EXPR_CHECK (NODE)->base.public_flag)
+/* Nonzero if this asm should be considered to have minimum length and
+   cost for inlining decisions.  */
+#define ASM_INLINE_P(NODE) (ASM_EXPR_CHECK (NODE)->base.protected_flag)
/* COND_EXPR accessors. */
#define COND_EXPR_COND(NODE) (TREE_OPERAND (COND_EXPR_CHECK (NODE), 0))
   If set in an INTEGER_TYPE, indicates a character type.  */
#define TYPE_STRING_FLAG(NODE) (TYPE_CHECK (NODE)->type_common.string_flag)
-/* For a VECTOR_TYPE, this is the number of sub-parts of the vector. */
-#define TYPE_VECTOR_SUBPARTS(VECTOR_TYPE) \
- (HOST_WIDE_INT_1U \
- << VECTOR_TYPE_CHECK (VECTOR_TYPE)->type_common.precision)
-
-/* Set precision to n when we have 2^n sub-parts of the vector. */
-#define SET_TYPE_VECTOR_SUBPARTS(VECTOR_TYPE, X) \
- (VECTOR_TYPE_CHECK (VECTOR_TYPE)->type_common.precision = exact_log2 (X))
-
/* Nonzero in a VECTOR_TYPE if the frontends should not emit warnings
about missing conversions to other vector types of the same size. */
#define TYPE_VECTOR_OPAQUE(NODE) \
#define DECL_CXX_DESTRUCTOR_P(NODE)\
(FUNCTION_DECL_CHECK (NODE)->decl_with_vis.cxx_destructor)
+/* In FUNCTION_DECL, this is set if this function is a lambda function. */
+#define DECL_LAMBDA_FUNCTION(NODE) \
+ (FUNCTION_DECL_CHECK (NODE)->function_decl.lambda_function)
+
/* In a FUNCTION_DECL that represents a virtual method, this is set when
   the method is final.  */
#define DECL_FINAL_P(NODE)\
return !strcmp (str, IDENTIFIER_POINTER (id));
}
+/* Return the number of elements in the VECTOR_TYPE given by NODE. */
+
+inline poly_uint64
+TYPE_VECTOR_SUBPARTS (const_tree node)
+{
+ STATIC_ASSERT (NUM_POLY_INT_COEFFS <= 2);
+ unsigned int precision = VECTOR_TYPE_CHECK (node)->type_common.precision;
+ if (NUM_POLY_INT_COEFFS == 2)
+ {
+      /* See SET_TYPE_VECTOR_SUBPARTS below for a description of how the
+         number of units is encoded in the precision field.  */
+      poly_uint64 res = 0;
+      res.coeffs[0] = HOST_WIDE_INT_1U << (precision & 0xff);
+      if (precision & 0x100)
+        res.coeffs[1] = HOST_WIDE_INT_1U << (precision & 0xff);
+      return res;
+    }
+  else
+    return HOST_WIDE_INT_1U << precision;
+}
+
+/* Set the number of elements in VECTOR_TYPE NODE to SUBPARTS, which must
+ satisfy valid_vector_subparts_p. */
+
+inline void
+SET_TYPE_VECTOR_SUBPARTS (tree node, poly_uint64 subparts)
+{
+ STATIC_ASSERT (NUM_POLY_INT_COEFFS <= 2);
+ unsigned HOST_WIDE_INT coeff0 = subparts.coeffs[0];
+ int index = exact_log2 (coeff0);
+ gcc_assert (index >= 0);
+ if (NUM_POLY_INT_COEFFS == 2)
+    {
+      /* At present coeff1 is always either 0 (a constant number of units)
+         or equal to coeff0 (N + N * X units for a runtime parameter X),
+         so it can be encoded in a single bit: the low byte of the
+         precision field holds log2 (coeff0) and bit 0x100 records whether
+         coeff1 is nonzero.  */
+      unsigned HOST_WIDE_INT coeff1 = subparts.coeffs[1];
+      gcc_assert (coeff1 == 0 || coeff1 == coeff0);
+ VECTOR_TYPE_CHECK (node)->type_common.precision
+ = index + (coeff1 != 0 ? 0x100 : 0);
+ }
+ else
+ VECTOR_TYPE_CHECK (node)->type_common.precision = index;
+}
+
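/* Editorial aside, not part of tree.h or of this patch: a worked example of
   the encoding used by the two functions above.  On a target with
   NUM_POLY_INT_COEFFS == 2, a fixed-length 4-element vector is stored as
   precision == 2 (log2 (4)), while a scalable vector with 2 + 2 * X
   elements (X being the runtime parameter) is stored as
   precision == 1 + 0x100: log2 (2) plus the bit that records a nonzero
   coeffs[1].  TYPE_VECTOR_SUBPARTS above then reconstructs { 4, 0 } and
   { 2, 2 } respectively.  */
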
+/* Return true if we can construct vector types with the given number
+ of subparts. */
+
+static inline bool
+valid_vector_subparts_p (poly_uint64 subparts)
+{
+ unsigned HOST_WIDE_INT coeff0 = subparts.coeffs[0];
+ if (!pow2p_hwi (coeff0))
+ return false;
+ if (NUM_POLY_INT_COEFFS == 2)
+ {
+ unsigned HOST_WIDE_INT coeff1 = subparts.coeffs[1];
+ if (coeff1 != 0 && coeff1 != coeff0)
+ return false;
+ }
+ return true;
+}
+
+/* In NON_LVALUE_EXPR and VIEW_CONVERT_EXPR, set when this node is merely a
+ wrapper added to express a location_t on behalf of the node's child
+ (e.g. by maybe_wrap_with_location). */
+
+#define EXPR_LOCATION_WRAPPER_P(NODE) \
+ (TREE_CHECK2(NODE, NON_LVALUE_EXPR, VIEW_CONVERT_EXPR)->base.public_flag)
+
+/* Test if EXP is merely a wrapper node, added to express a location_t
+ on behalf of the node's child (e.g. by maybe_wrap_with_location). */
+
+inline bool
+location_wrapper_p (const_tree exp)
+{
+ /* A wrapper node has code NON_LVALUE_EXPR or VIEW_CONVERT_EXPR, and
+ the flag EXPR_LOCATION_WRAPPER_P is set.
+ It normally has the same type as its operand, but it can have a
+ different one if the type of the operand has changed (e.g. when
+ merging duplicate decls).
+
+ NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
+ VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST. */
+ if ((TREE_CODE (exp) == NON_LVALUE_EXPR
+ || TREE_CODE (exp) == VIEW_CONVERT_EXPR)
+ && EXPR_LOCATION_WRAPPER_P (exp))
+ return true;
+ return false;
+}
+
+/* Implementation of STRIP_ANY_LOCATION_WRAPPER. */
+
+inline tree
+tree_strip_any_location_wrapper (tree exp)
+{
+ if (location_wrapper_p (exp))
+ return TREE_OPERAND (exp, 0);
+ else
+ return exp;
+}
+
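/* Editorial aside, not part of tree.h or of this patch: a minimal usage
   sketch.  A front end that folds an expression but wants diagnostics to
   keep pointing at the original token can strip the wrapper and re-wrap
   the result.  refold_with_location_sketch is hypothetical.  */

inline tree
refold_with_location_sketch (tree exp)
{
  /* Remember the wrapper's location ...  */
  location_t loc = EXPR_LOCATION (exp);
  /* ... operate on the unwrapped node ...  */
  STRIP_ANY_LOCATION_WRAPPER (exp);
  /* ... and re-wrap so that EXPR_LOCATION is preserved where possible.  */
  return maybe_wrap_with_location (exp, loc);
}
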
#define error_mark_node global_trees[TI_ERROR_MARK]
#define intQI_type_node global_trees[TI_INTQI_TYPE]
((NODE) == error_mark_node \
|| ((NODE) && TREE_TYPE ((NODE)) == error_mark_node))
+/* Return the number of elements encoded directly in a VECTOR_CST. */
+
+inline unsigned int
+vector_cst_encoded_nelts (const_tree t)
+{
+ return VECTOR_CST_NPATTERNS (t) * VECTOR_CST_NELTS_PER_PATTERN (t);
+}
+
extern tree decl_assembler_name (tree);
extern void overwrite_decl_assembler_name (tree decl, tree name);
extern tree decl_comdat_group (const_tree);
extern tree double_int_to_tree (tree, double_int);
-extern tree wide_int_to_tree (tree type, const wide_int_ref &cst);
-extern tree force_fit_type (tree, const wide_int_ref &, int, bool);
+extern tree wide_int_to_tree (tree type, const poly_wide_int_ref &cst);
+extern tree force_fit_type (tree, const poly_wide_int_ref &, int, bool);
/* Create an INT_CST node with a CST value zero extended. */
/* static inline */
-extern tree build_int_cst (tree, HOST_WIDE_INT);
-extern tree build_int_cstu (tree type, unsigned HOST_WIDE_INT cst);
-extern tree build_int_cst_type (tree, HOST_WIDE_INT);
-extern tree make_vector (unsigned CXX_MEM_STAT_INFO);
-extern tree build_vector (tree, vec<tree> CXX_MEM_STAT_INFO);
+extern tree build_int_cst (tree, poly_int64);
+extern tree build_int_cstu (tree type, poly_uint64);
+extern tree build_int_cst_type (tree, poly_int64);
+extern tree make_vector (unsigned, unsigned CXX_MEM_STAT_INFO);
extern tree build_vector_from_ctor (tree, vec<constructor_elt, va_gc> *);
extern tree build_vector_from_val (tree, tree);
+extern tree build_vec_series (tree, tree, tree);
+extern tree build_index_vector (tree, poly_uint64, poly_uint64);
extern void recompute_constructor_flags (tree);
extern void verify_constructor_flags (tree);
extern tree build_constructor (tree, vec<constructor_elt, va_gc> *);
extern tree build_all_ones_cst (tree);
extern tree build_zero_cst (tree);
extern tree build_string (int, const char *);
+extern tree build_poly_int_cst (tree, const poly_wide_int_ref &);
extern tree build_tree_list (tree, tree CXX_MEM_STAT_INFO);
extern tree build_tree_list_vec (const vec<tree, va_gc> * CXX_MEM_STAT_INFO);
extern tree build_decl (location_t, enum tree_code,
extern tree build_reference_type_for_mode (tree, machine_mode, bool);
extern tree build_reference_type (tree);
extern tree build_vector_type_for_mode (tree, machine_mode);
-extern tree build_vector_type (tree innertype, int nunits);
-extern tree build_truth_vector_type (unsigned, unsigned);
+extern tree build_vector_type (tree, poly_int64);
+extern tree build_truth_vector_type (poly_uint64, poly_uint64);
extern tree build_same_sized_truth_vector_type (tree vectype);
-extern tree build_opaque_vector_type (tree innertype, int nunits);
+extern tree build_opaque_vector_type (tree, poly_int64);
extern tree build_index_type (tree);
extern tree build_array_type (tree, tree, bool = false);
extern tree build_nonshared_array_type (tree, tree);
-extern tree build_array_type_nelts (tree, unsigned HOST_WIDE_INT);
+extern tree build_array_type_nelts (tree, poly_uint64);
extern tree build_function_type (tree, tree);
extern tree build_function_type_list (tree, ...);
extern tree build_varargs_function_type_list (tree, ...);
extern int tree_int_cst_equal (const_tree, const_tree);
-extern bool tree_fits_shwi_p (const_tree)
- ATTRIBUTE_PURE;
-extern bool tree_fits_uhwi_p (const_tree)
- ATTRIBUTE_PURE;
+extern bool tree_fits_shwi_p (const_tree) ATTRIBUTE_PURE;
+extern bool tree_fits_poly_int64_p (const_tree) ATTRIBUTE_PURE;
+extern bool tree_fits_uhwi_p (const_tree) ATTRIBUTE_PURE;
+extern bool tree_fits_poly_uint64_p (const_tree) ATTRIBUTE_PURE;
extern HOST_WIDE_INT tree_to_shwi (const_tree);
+extern poly_int64 tree_to_poly_int64 (const_tree);
extern unsigned HOST_WIDE_INT tree_to_uhwi (const_tree);
+extern poly_uint64 tree_to_poly_uint64 (const_tree);
#if !defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 4003)
extern inline __attribute__ ((__gnu_inline__)) HOST_WIDE_INT
tree_to_shwi (const_tree t)
gcc_assert (tree_fits_uhwi_p (t));
return TREE_INT_CST_LOW (t);
}
+#if NUM_POLY_INT_COEFFS == 1
+extern inline __attribute__ ((__gnu_inline__)) poly_int64
+tree_to_poly_int64 (const_tree t)
+{
+ gcc_assert (tree_fits_poly_int64_p (t));
+ return TREE_INT_CST_LOW (t);
+}
+
+extern inline __attribute__ ((__gnu_inline__)) poly_uint64
+tree_to_poly_uint64 (const_tree t)
+{
+ gcc_assert (tree_fits_poly_uint64_p (t));
+ return TREE_INT_CST_LOW (t);
+}
+#endif
#endif
extern int tree_int_cst_sgn (const_tree);
extern int tree_int_cst_sign_bit (const_tree);
extern tree excess_precision_type (tree);
extern bool valid_constant_size_p (const_tree);
+/* Return true if T holds a value that can be represented as a poly_int64
+ without loss of precision. Store the value in *VALUE if so. */
+
+inline bool
+poly_int_tree_p (const_tree t, poly_int64_pod *value)
+{
+ if (tree_fits_poly_int64_p (t))
+ {
+ *value = tree_to_poly_int64 (t);
+ return true;
+ }
+ return false;
+}
+
+/* Return true if T holds a value that can be represented as a poly_uint64
+ without loss of precision. Store the value in *VALUE if so. */
+
+inline bool
+poly_int_tree_p (const_tree t, poly_uint64_pod *value)
+{
+ if (tree_fits_poly_uint64_p (t))
+ {
+ *value = tree_to_poly_uint64 (t);
+ return true;
+ }
+ return false;
+}
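
/* Editorial aside, not part of tree.h or of this patch: a minimal sketch of
   how the overloads above let callers handle constant and runtime-sized
   quantities with one code path.  sizes_known_equal_sketch is
   hypothetical.  */

static inline bool
sizes_known_equal_sketch (const_tree a, const_tree b)
{
  poly_uint64 size_a, size_b;
  /* poly_int_tree_p succeeds for plain INTEGER_CSTs and, on targets with
     variable-length modes, for POLY_INT_CSTs; known_eq then compares the
     polynomial values for all runtime parameter values.  */
  return (poly_int_tree_p (a, &size_a)
	  && poly_int_tree_p (b, &size_b)
	  && known_eq (size_a, size_b));
}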
/* From expmed.c. Since rtl.h is included after tree.h, we can't
put the prototype here. Rtl.h does declare the prototype if
extern bool initializer_zerop (const_tree);
+extern wide_int vector_cst_int_elt (const_tree, unsigned int);
+extern tree vector_cst_elt (const_tree, unsigned int);
+
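/* Editorial aside, not part of tree.h or of this patch: a sketch, for
   exposition only, of the decoding that vector_cst_int_elt performs in
   tree.c for integer elements, using the VECTOR_CST_* encoding accessors
   defined earlier.  vector_cst_int_elt_sketch is hypothetical.  */

inline wide_int
vector_cst_int_elt_sketch (const_tree t, unsigned int i)
{
  unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
  unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (t);
  unsigned int count = i / npatterns;

  /* Elements within the encoded prefix are stored directly.  */
  if (count < nelts_per_pattern)
    return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));

  /* Otherwise find the last encoded element of element I's pattern.  */
  unsigned int pattern = i % npatterns;
  unsigned int final_i = npatterns * (nelts_per_pattern - 1) + pattern;
  if (!VECTOR_CST_STEPPED_P (t))
    return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));

  /* Stepped patterns continue the series set up by the second and third
     encoded elements of the pattern.  */
  wide_int v1 = wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i - npatterns));
  wide_int v2 = wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
  return v2 + (count - 2) * (v2 - v1);
}
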
/* Given a vector VEC, return its first element if all elements are
the same. Otherwise return NULL_TREE. */
&& COMPLETE_TYPE_P (TREE_TYPE (type)));
}
+/* Return true if the value of T could be represented as a poly_widest_int. */
+
+inline bool
+poly_int_tree_p (const_tree t)
+{
+ return (TREE_CODE (t) == INTEGER_CST || POLY_INT_CST_P (t));
+}
+
+/* Return the bit size of BIT_FIELD_REF T, in cases where it is known
+ to be a poly_uint64. (This is always true at the gimple level.) */
+
+inline poly_uint64
+bit_field_size (const_tree t)
+{
+ return tree_to_poly_uint64 (TREE_OPERAND (t, 1));
+}
+
+/* Return the starting bit offset of BIT_FIELD_REF T, in cases where it is
+ known to be a poly_uint64. (This is always true at the gimple level.) */
+
+inline poly_uint64
+bit_field_offset (const_tree t)
+{
+ return tree_to_poly_uint64 (TREE_OPERAND (t, 2));
+}
+
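/* Editorial aside, not part of tree.h or of this patch: with the two
   accessors above, a check that a BIT_FIELD_REF accesses whole bytes can
   be written without assuming the size and offset are compile-time
   constants.  byte_aligned_bit_field_ref_sketch is hypothetical.  */

static inline bool
byte_aligned_bit_field_ref_sketch (const_tree ref)
{
  return (multiple_p (bit_field_size (ref), BITS_PER_UNIT)
	  && multiple_p (bit_field_offset (ref), BITS_PER_UNIT));
}
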
extern tree strip_float_extensions (tree);
extern int really_constant_p (const_tree);
+extern bool ptrdiff_tree_p (const_tree, poly_int64_pod *);
extern bool decl_address_invariant_p (const_tree);
extern bool decl_address_ip_invariant_p (const_tree);
extern bool int_fits_type_p (const_tree, const_tree);
extern location_t *block_nonartificial_location (tree);
extern location_t tree_nonartificial_location (tree);
extern tree block_ultimate_origin (const_tree);
-extern tree get_binfo_at_offset (tree, HOST_WIDE_INT, tree);
+extern tree get_binfo_at_offset (tree, poly_int64, tree);
extern bool virtual_method_call_p (const_tree);
extern tree obj_type_ref_class (const_tree ref);
extern bool types_same_for_odr (const_tree type1, const_tree type2,
/* The tree and const_tree overload templates. */
namespace wi
{
+ class unextended_tree
+ {
+ private:
+ const_tree m_t;
+
+ public:
+ unextended_tree () {}
+ unextended_tree (const_tree t) : m_t (t) {}
+
+ unsigned int get_precision () const;
+ const HOST_WIDE_INT *get_val () const;
+ unsigned int get_len () const;
+ const_tree get_tree () const { return m_t; }
+ };
+
+ template <>
+ struct int_traits <unextended_tree>
+ {
+ static const enum precision_type precision_type = VAR_PRECISION;
+ static const bool host_dependent_precision = false;
+ static const bool is_sign_extended = false;
+ };
+
template <int N>
class extended_tree
{
const_tree m_t;
public:
+ extended_tree () {}
extended_tree (const_tree);
unsigned int get_precision () const;
const HOST_WIDE_INT *get_val () const;
unsigned int get_len () const;
+ const_tree get_tree () const { return m_t; }
};
template <int N>
static const unsigned int precision = N;
};
- typedef const generic_wide_int <extended_tree <WIDE_INT_MAX_PRECISION> >
- tree_to_widest_ref;
- typedef const generic_wide_int <extended_tree <ADDR_MAX_PRECISION> >
- tree_to_offset_ref;
+ typedef extended_tree <WIDE_INT_MAX_PRECISION> widest_extended_tree;
+ typedef extended_tree <ADDR_MAX_PRECISION> offset_extended_tree;
+
+ typedef const generic_wide_int <widest_extended_tree> tree_to_widest_ref;
+ typedef const generic_wide_int <offset_extended_tree> tree_to_offset_ref;
typedef const generic_wide_int<wide_int_ref_storage<false, false> >
tree_to_wide_ref;
tree_to_offset_ref to_offset (const_tree);
tree_to_wide_ref to_wide (const_tree);
wide_int to_wide (const_tree, unsigned int);
+
+ typedef const poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <widest_extended_tree> >
+ tree_to_poly_widest_ref;
+ typedef const poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <offset_extended_tree> >
+ tree_to_poly_offset_ref;
+ typedef const poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <unextended_tree> >
+ tree_to_poly_wide_ref;
+
+ tree_to_poly_widest_ref to_poly_widest (const_tree);
+ tree_to_poly_offset_ref to_poly_offset (const_tree);
+ tree_to_poly_wide_ref to_poly_wide (const_tree);
+
+ template <int N>
+ struct ints_for <generic_wide_int <extended_tree <N> >, CONST_PRECISION>
+ {
+ typedef generic_wide_int <extended_tree <N> > extended;
+ static extended zero (const extended &);
+ };
+
+ template <>
+ struct ints_for <generic_wide_int <unextended_tree>, VAR_PRECISION>
+ {
+ typedef generic_wide_int <unextended_tree> unextended;
+ static unextended zero (const unextended &);
+ };
}
/* Refer to INTEGER_CST T as though it were a widest_int.
gcc_unreachable ();
}
+inline unsigned int
+wi::unextended_tree::get_precision () const
+{
+ return TYPE_PRECISION (TREE_TYPE (m_t));
+}
+
+inline const HOST_WIDE_INT *
+wi::unextended_tree::get_val () const
+{
+ return &TREE_INT_CST_ELT (m_t, 0);
+}
+
+inline unsigned int
+wi::unextended_tree::get_len () const
+{
+ return TREE_INT_CST_NUNITS (m_t);
+}
+
+/* Return the value of a POLY_INT_CST in its native precision. */
+
+inline wi::tree_to_poly_wide_ref
+poly_int_cst_value (const_tree x)
+{
+ poly_int <NUM_POLY_INT_COEFFS, generic_wide_int <wi::unextended_tree> > res;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ res.coeffs[i] = POLY_INT_CST_COEFF (x, i);
+ return res;
+}
+
+/* Access INTEGER_CST or POLY_INT_CST tree T as if it were a
+ poly_widest_int. See wi::to_widest for more details. */
+
+inline wi::tree_to_poly_widest_ref
+wi::to_poly_widest (const_tree t)
+{
+ if (POLY_INT_CST_P (t))
+ {
+ poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <widest_extended_tree> > res;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ res.coeffs[i] = POLY_INT_CST_COEFF (t, i);
+ return res;
+ }
+ return t;
+}
+
+/* Access INTEGER_CST or POLY_INT_CST tree T as if it were a
+ poly_offset_int. See wi::to_offset for more details. */
+
+inline wi::tree_to_poly_offset_ref
+wi::to_poly_offset (const_tree t)
+{
+ if (POLY_INT_CST_P (t))
+ {
+ poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <offset_extended_tree> > res;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ res.coeffs[i] = POLY_INT_CST_COEFF (t, i);
+ return res;
+ }
+ return t;
+}
+
+/* Access INTEGER_CST or POLY_INT_CST tree T as if it were a
+ poly_wide_int. See wi::to_wide for more details. */
+
+inline wi::tree_to_poly_wide_ref
+wi::to_poly_wide (const_tree t)
+{
+ if (POLY_INT_CST_P (t))
+ return poly_int_cst_value (t);
+ return t;
+}
+
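/* Editorial aside, not part of tree.h or of this patch: a minimal sketch of
   offset arithmetic that handles INTEGER_CSTs and POLY_INT_CSTs alike via
   the accessors above.  combined_offset_sketch is hypothetical.  */

inline poly_offset_int
combined_offset_sketch (const_tree base_offset, const_tree extra_offset)
{
  /* Both conversions are exact; the result is again a compile-time
     polynomial in the target's runtime length parameter.  */
  return wi::to_poly_offset (base_offset) + wi::to_poly_offset (extra_offset);
}
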
+template <int N>
+inline generic_wide_int <wi::extended_tree <N> >
+wi::ints_for <generic_wide_int <wi::extended_tree <N> >,
+ wi::CONST_PRECISION>::zero (const extended &x)
+{
+ return build_zero_cst (TREE_TYPE (x.get_tree ()));
+}
+
+inline generic_wide_int <wi::unextended_tree>
+wi::ints_for <generic_wide_int <wi::unextended_tree>,
+ wi::VAR_PRECISION>::zero (const unextended &x)
+{
+ return build_zero_cst (TREE_TYPE (x.get_tree ()));
+}
+
namespace wi
{
template <typename T>
bool
wi::fits_to_boolean_p (const T &x, const_tree type)
{
- return eq_p (x, 0) || eq_p (x, TYPE_UNSIGNED (type) ? 1 : -1);
+ typedef typename poly_int_traits<T>::int_type int_type;
+ return (known_eq (x, int_type (0))
+ || known_eq (x, int_type (TYPE_UNSIGNED (type) ? 1 : -1)));
}
template <typename T>
return fits_to_boolean_p (x, type);
if (TYPE_UNSIGNED (type))
- return eq_p (x, zext (x, TYPE_PRECISION (type)));
+ return known_eq (x, zext (x, TYPE_PRECISION (type)));
else
- return eq_p (x, sext (x, TYPE_PRECISION (type)));
+ return known_eq (x, sext (x, TYPE_PRECISION (type)));
}
/* Produce the smallest number that is represented in TYPE. The precision
/* Given an expression EXP that is a handled_component_p,
   look for the ultimate containing object, which is returned, and specify
the access position and size. */
-extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
+extern tree get_inner_reference (tree, poly_int64_pod *, poly_int64_pod *,
tree *, machine_mode *, int *, int *, int *);
extern tree build_personality_function (const char *);
inline bool
type_has_mode_precision_p (const_tree t)
{
- return TYPE_PRECISION (t) == GET_MODE_PRECISION (TYPE_MODE (t));
+ return known_eq (TYPE_PRECISION (t), GET_MODE_PRECISION (TYPE_MODE (t)));
}
#endif /* GCC_TREE_H */