+2016-07-11 Bernd Edlinger <bernd.edlinger@hotmail.de>
+
+ Convert TYPE_ALIGN_OK to a TYPE_LANG_FLAG.
+ * tree-core.h (tree_base::nothrow_flag): Adjust comment.
+ (tree_type_common::lang_flag_7): New.
+ (tree_type_common::spare): Reduce size.
+ * tree.h (TYPE_ALIGN_OK): Remove.
+ (TYPE_LANG_FLAG_7): New.
+ (get_inner_reference): Adjust prototype.
+ * print-tree.c (print_node): Adjust.
+ * expr.c (get_inner_reference): Remove parameter keep_aligning.
+ (get_bit_range, expand_assignment, expand_expr_addr_expr_1): Adjust
+ calls to get_inner_reference.
+ (expand_expr_real_1): Adjust call to get_inner_reference. Remove
+ handling of TYPE_ALIGN_OK.
+ * builtins.c (get_object_alignment_2): Adjust call to
+ get_inner_reference. Remove handling of VIEW_CONVERT_EXPR.
+ * emit-rtl.c (set_mem_attributes_minus_bitpos): Remove handling of
+ TYPE_ALIGN_OK.
+ * asan.c (instrument_derefs): Adjust calls to get_inner_reference.
+ * cfgexpand.c (expand_debug_expr): Likewise.
+ * dbxout.c (dbxout_expand_expr): Likewise.
+ * dwarf2out.c (loc_list_for_address_of_addr_expr_of_indirect_ref,
+ loc_list_from_tree, fortran_common): Likewise.
+ * fold-const.c (optimize_bit_field_compare,
+ decode_field_reference, fold_unary_loc, fold_comparison,
+ split_address_to_core_and_offset): Likewise.
+ * gimple-laddress.c (execute): Likewise.
+ * gimple-ssa-strength-reduction.c (slsr_process_ref): Likewise.
+ * gimplify.c (gimplify_scan_omp_clauses): Likewise.
+ * hsa-gen.c (gen_hsa_addr): Likewise.
+ * simplify-rtx.c (delegitimize_mem_from_attrs): Likewise.
+ * tsan.c (instrument_expr): Likewise.
+ * ubsan.c (instrument_bool_enum_load, instrument_object_size): Likewise.
+ * tree.c (verify_type_variant): Remove handling of TYPE_ALIGN_OK.
+ * tree-affine.c (tree_to_aff_combination,
+ get_inner_reference_aff): Adjust calls to get_inner_reference.
+ * tree-data-ref.c (split_constant_offset_1,
+ dr_analyze_innermost): Likewise.
+ * tree-scalar-evolution.c (interpret_rhs_expr): Likewise.
+ * tree-sra.c (ipa_sra_check_caller): Likewise.
+ * tree-ssa-loop-ivopts.c (split_address_cost): Likewise.
+ * tree-ssa-math-opts.c (find_bswap_or_nop_load,
+ bswap_replace): Likewise.
+ * tree-vect-data-refs.c (vect_check_gather_scatter,
+ vect_analyze_data_refs): Likewise.
+ * config/mips/mips.c (r10k_safe_mem_expr_p): Likewise.
+ * config/pa/pa.c (pa_emit_move_sequence): Remove handling of
+ TYPE_ALIGN_OK.
+
2016-07-11 David Malcolm <dmalcolm@redhat.com>
* Makefile.in (selftest-valgrind): New phony target.
* lra-constraints.c (process_alt_operands): Check combination of
reg class and mode.
-2016-06-25 Jason Merrill <jason@redhat.com>
+2016-07-08 Jason Merrill <jason@redhat.com>
Richard Biener <rguenther@suse.de>
P0145: Refining Expression Order for C++.
+2016-07-11 Bernd Edlinger <bernd.edlinger@hotmail.de>
+
+ Convert TYPE_ALIGN_OK to a TYPE_LANG_FLAG.
+ * gcc-interface/ada-tree.h (TYPE_ALIGN_OK): Define.
+ * gcc-interface/trans.c (Attribute_to_gnu): Adjust call to
+ get_inner_reference.
+ * gcc-interface/utils2.c (build_unary_op): Likewise.
+
2016-07-11 Eric Botcazou <ebotcazou@adacore.com>
* gcc-interface/trans.c (add_decl_expr): Minor tweak.
alignment value the type ought to have. */
#define TYPE_MAX_ALIGN(NODE) (TYPE_PRECISION (RECORD_OR_UNION_CHECK (NODE)))
+/* True if objects of tagged types are guaranteed to be properly aligned. */
+#define TYPE_ALIGN_OK(NODE) TYPE_LANG_FLAG_7 (NODE)
+
/* For an UNCONSTRAINED_ARRAY_TYPE, this is the record containing both the
template and the object.
&& TREE_CODE (gnu_prefix) == FIELD_DECL));
get_inner_reference (gnu_prefix, &bitsize, &bitpos, &gnu_offset,
- &mode, &unsignedp, &reversep, &volatilep, false);
+ &mode, &unsignedp, &reversep, &volatilep);
if (TREE_CODE (gnu_prefix) == COMPONENT_REF)
{
inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
&mode, &unsignedp, &reversep,
- &volatilep, false);
+ &volatilep);
/* If INNER is a padding type whose field has a self-referential
size, convert to that inner type. We know the offset is zero
machine_mode mode;
int unsignedp, reversep, volatilep = 0;
tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
if (TREE_CODE (t) == COMPONENT_REF
&& DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
/* Get the innermost object and the constant (bitpos) and possibly
variable (offset) offset of the access. */
exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, true);
+ &unsignedp, &reversep, &volatilep);
/* Extract alignment information from the innermost object and
possibly adjust bitpos and offset. */
align = DECL_ALIGN (exp);
known_alignment = true;
}
- else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
- {
- align = TYPE_ALIGN (TREE_TYPE (exp));
- }
else if (TREE_CODE (exp) == INDIRECT_REF
|| TREE_CODE (exp) == MEM_REF
|| TREE_CODE (exp) == TARGET_MEM_REF)
int reversep, volatilep = 0;
tree tem
= get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
rtx orig_op0;
if (bitsize == 0)
int unsigned_p, reverse_p, volatile_p;
inner = get_inner_reference (expr, &bitsize, &bitoffset, &var_offset, &mode,
- &unsigned_p, &reverse_p, &volatile_p, false);
+ &unsigned_p, &reverse_p, &volatile_p);
if (!DECL_P (inner) || !DECL_SIZE_UNIT (inner) || var_offset)
return false;
type = strip_array_types (type);
if (POINTER_TYPE_P (type))
- {
- int align;
-
- type = TREE_TYPE (type);
- /* Using TYPE_ALIGN_OK is rather conservative as
- only the ada frontend actually sets it. */
- align = (TYPE_ALIGN_OK (type) ? TYPE_ALIGN (type)
- : BITS_PER_UNIT);
- mark_reg_pointer (operand0, align);
- }
+ mark_reg_pointer (operand0, BITS_PER_UNIT);
}
}
rtx x;
tem = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, true);
+ &unsignedp, &reversep, &volatilep);
x = dbxout_expand_expr (tem);
if (x == NULL || !MEM_P (x))
obj = get_inner_reference (TREE_OPERAND (loc, 0),
&bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
STRIP_NOPS (obj);
if (bitpos % BITS_PER_UNIT)
{
int unsignedp, reversep, volatilep = 0;
obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
gcc_assert (obj != loc);
return NULL_TREE;
cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, true);
+ &unsignedp, &reversep, &volatilep);
if (cvar == NULL_TREE
|| TREE_CODE (cvar) != VAR_DECL
able to simply always use TYPE_ALIGN? */
}
- /* We can set the alignment from the type if we are making an object,
- this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
- if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
+ /* We can set the alignment from the type if we are making an object or if
+ this is an INDIRECT_REF. */
+ if (objectp || TREE_CODE (t) == INDIRECT_REF)
attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
/* If the size is known, we can set that. */
int unsignedp, reversep, volatilep = 0;
get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
&roffset, &rmode, &unsignedp, &reversep,
- &volatilep, false);
+ &volatilep);
if ((rbitpos % BITS_PER_UNIT) != 0)
{
*bitstart = *bitend = 0;
push_temp_slots ();
tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
- &unsignedp, &reversep, &volatilep, true);
+ &unsignedp, &reversep, &volatilep);
/* Make sure bitpos is not negative, it can wreak havoc later. */
if (bitpos < 0)
If the field describes a variable-sized object, *PMODE is set to
BLKmode and *PBITSIZE is set to -1. An access cannot be made in
- this case, but the address of the object can be found.
-
- If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
- look through nodes that serve as markers of a greater alignment than
- the one that can be deduced from the expression. These nodes make it
- possible for front-ends to prevent temporaries from being created by
- the middle-end on alignment considerations. For that purpose, the
- normal operating mode at high-level is to always pass FALSE so that
- the ultimate containing object is really returned; moreover, the
- associated predicate handled_component_p will always return TRUE
- on these nodes, thus indicating that they are essentially handled
- by get_inner_reference. TRUE should only be passed when the caller
- is scanning the expression in order to build another representation
- and specifically knows how to handle these nodes; as such, this is
- the normal operating mode in the RTL expanders. */
+ this case, but the address of the object can be found. */
tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
HOST_WIDE_INT *pbitpos, tree *poffset,
machine_mode *pmode, int *punsignedp,
- int *preversep, int *pvolatilep, bool keep_aligning)
+ int *preversep, int *pvolatilep)
{
tree size_tree = 0;
machine_mode mode = VOIDmode;
break;
case VIEW_CONVERT_EXPR:
- if (keep_aligning && STRICT_ALIGNMENT
- && (TYPE_ALIGN (TREE_TYPE (exp))
- > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
- && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
- < BIGGEST_ALIGNMENT)
- && (TYPE_ALIGN_OK (TREE_TYPE (exp))
- || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- goto done;
break;
case MEM_REF:
they won't change the final object whose address will be returned
(they actually exist only for that purpose). */
inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
break;
}
int reversep, volatilep = 0, must_force_mem;
tree tem
= get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
- &unsignedp, &reversep, &volatilep, true);
+ &unsignedp, &reversep, &volatilep);
rtx orig_op0, memloc;
bool clear_mem_expr = false;
int unsignedp, reversep, volatilep = 0;
tree tem
= get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
- &unsignedp, &reversep, &volatilep, true);
+ &unsignedp, &reversep, &volatilep);
rtx orig_op0;
/* ??? We should work harder and deal with non-zero offsets. */
{
enum insn_code icode;
- if (TYPE_ALIGN_OK (type))
- {
- /* ??? Copying the MEM without substantially changing it might
- run afoul of the code handling volatile memory references in
- store_expr, which assumes that TARGET is returned unmodified
- if it has been used. */
- op0 = copy_rtx (op0);
- set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
- }
- else if (modifier != EXPAND_WRITE
- && modifier != EXPAND_MEMORY
- && !inner_reference_p
- && mode != BLKmode
- && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
+ if (modifier != EXPAND_WRITE
+ && modifier != EXPAND_MEMORY
+ && !inner_reference_p
+ && mode != BLKmode
+ && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
{
/* If the target does have special handling for unaligned
loads of mode then use them. */
do anything if the inner expression is a PLACEHOLDER_EXPR since we
then will no longer be able to replace it. */
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
- &lunsignedp, &lreversep, &lvolatilep, false);
+ &lunsignedp, &lreversep, &lvolatilep);
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
|| offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
return 0;
sizes, signedness and storage order are the same. */
rinner
= get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
- &runsignedp, &rreversep, &rvolatilep, false);
+ &runsignedp, &rreversep, &rvolatilep);
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
|| lunsignedp != runsignedp || lreversep != rreversep || offset != 0
}
inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
- punsignedp, preversep, pvolatilep, false);
+ punsignedp, preversep, pvolatilep);
if ((inner == exp && and_mask == 0)
|| *pbitsize < 0 || offset != 0
|| TREE_CODE (inner) == PLACEHOLDER_EXPR)
tree base
= get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
&offset, &mode, &unsignedp, &reversep,
- &volatilep, false);
+ &volatilep);
/* If the reference was to a (constant) zero offset, we can use
the address of the base if it has the same base type
as the result type and the pointer type is unqualified. */
base0
= get_inner_reference (TREE_OPERAND (arg0, 0),
&bitsize, &bitpos0, &offset0, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
if (TREE_CODE (base0) == INDIRECT_REF)
base0 = TREE_OPERAND (base0, 0);
else
base0
= get_inner_reference (TREE_OPERAND (base0, 0),
&bitsize, &bitpos0, &offset0, &mode,
- &unsignedp, &reversep, &volatilep,
- false);
+ &unsignedp, &reversep, &volatilep);
if (TREE_CODE (base0) == INDIRECT_REF)
base0 = TREE_OPERAND (base0, 0);
else
base1
= get_inner_reference (TREE_OPERAND (arg1, 0),
&bitsize, &bitpos1, &offset1, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
if (TREE_CODE (base1) == INDIRECT_REF)
base1 = TREE_OPERAND (base1, 0);
else
base1
= get_inner_reference (TREE_OPERAND (base1, 0),
&bitsize, &bitpos1, &offset1, &mode,
- &unsignedp, &reversep, &volatilep,
- false);
+ &unsignedp, &reversep, &volatilep);
if (TREE_CODE (base1) == INDIRECT_REF)
base1 = TREE_OPERAND (base1, 0);
else
{
core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
poffset, &mode, &unsignedp, &reversep,
- &volatilep, false);
+ &volatilep);
core = build_fold_addr_expr_loc (loc, core);
}
else
int volatilep = 0, reversep, unsignedp = 0;
base = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize,
&bitpos, &offset, &mode, &unsignedp,
- &reversep, &volatilep, false);
+ &reversep, &volatilep);
gcc_assert (base != NULL_TREE && (bitpos % BITS_PER_UNIT) == 0);
if (offset != NULL_TREE)
{
return;
base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
if (reversep)
return;
widest_int index = bitpos;
base = TREE_OPERAND (base, 0);
base = get_inner_reference (base, &bitsize, &bitpos, &offset,
&mode, &unsignedp, &reversep,
- &volatilep, false);
+ &volatilep);
tree orig_base = base;
if ((TREE_CODE (base) == INDIRECT_REF
|| (TREE_CODE (base) == MEM_REF
base = get_inner_reference (base, &bitsize2,
&bitpos2, &offset2,
&mode, &unsignedp,
- &reversep, &volatilep,
- false);
+ &reversep, &volatilep);
if ((TREE_CODE (base) == INDIRECT_REF
|| (TREE_CODE (base) == MEM_REF
&& integer_zerop (TREE_OPERAND (base,
int unsignedp, volatilep, preversep;
ref = get_inner_reference (ref, &bitsize, &bitpos, &varoffset, &mode,
- &unsignedp, &preversep, &volatilep, false);
+ &unsignedp, &preversep, &volatilep);
offset = bitpos;
offset = wi::rshift (offset, LOG2_BITS_PER_UNIT, SIGNED);
if (TREE_USED (node))
fputs (" used", file);
if (TREE_NOTHROW (node))
- fputs (TYPE_P (node) ? " align-ok" : " nothrow", file);
+ fputs (" nothrow", file);
if (TREE_PUBLIC (node))
fputs (" public", file);
if (TREE_PRIVATE (node))
fputs (" type_5", file);
if (TYPE_LANG_FLAG_6 (node))
fputs (" type_6", file);
+ if (TYPE_LANG_FLAG_7 (node))
+ fputs (" type_7", file);
mode = TYPE_MODE (node);
fprintf (file, " %s", GET_MODE_NAME (mode));
decl
= get_inner_reference (decl, &bitsize, &bitpos, &toffset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
if (bitsize != GET_MODE_BITSIZE (mode)
|| (bitpos % BITS_PER_UNIT)
|| (toffset && !tree_fits_shwi_p (toffset)))
}
core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
&toffset, &mode, &unsignedp, &reversep,
- &volatilep, false);
+ &volatilep);
if (bitpos % BITS_PER_UNIT != 0)
break;
aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
int uns, rev, vol;
aff_tree tmp;
tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
- &uns, &rev, &vol, false);
+ &uns, &rev, &vol);
tree base_addr = build_fold_addr_expr (base);
/* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT. */
CALL_EXPR
FUNCTION_DECL
- TYPE_ALIGN_OK in
- all types
-
TREE_THIS_NOTRAP in
INDIRECT_REF, MEM_REF, TARGET_MEM_REF, ARRAY_REF, ARRAY_RANGE_REF
unsigned lang_flag_4 : 1;
unsigned lang_flag_5 : 1;
unsigned lang_flag_6 : 1;
+ unsigned lang_flag_7 : 1;
/* TYPE_ALIGN in log2; this has to be large enough to hold values
of the maximum of BIGGEST_ALIGNMENT and MAX_OFILE_ALIGNMENT,
so we need to store the value 32 (not 31, as we need the zero
as well), hence six bits. */
unsigned align : 6;
- unsigned spare : 26;
+ unsigned spare : 25;
alias_set_type alias_set;
tree pointer_to;
tree reference_to;
op0 = TREE_OPERAND (op0, 0);
base
= get_inner_reference (op0, &pbitsize, &pbitpos, &poffset, &pmode,
- &punsignedp, &preversep, &pvolatilep, false);
+ &punsignedp, &preversep, &pvolatilep);
if (pbitpos % BITS_PER_UNIT != 0)
return false;
fprintf (dump_file, "analyze_innermost: ");
base = get_inner_reference (ref, &pbitsize, &pbitpos, &poffset, &pmode,
- &punsignedp, &preversep, &pvolatilep, false);
+ &punsignedp, &preversep, &pvolatilep);
gcc_assert (base != NULL_TREE);
if (pbitpos % BITS_PER_UNIT != 0)
base = get_inner_reference (TREE_OPERAND (rhs1, 0),
&bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep,
- false);
+ &unsignedp, &reversep, &volatilep);
if (TREE_CODE (base) == MEM_REF)
{
machine_mode mode;
int unsignedp, reversep, volatilep = 0;
get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
if (bitpos % BITS_PER_UNIT)
{
iscc->bad_arg_alignment = true;
int unsignedp, reversep, volatilep;
core = get_inner_reference (addr, &bitsize, &bitpos, &toffset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
if (toffset != 0
|| bitpos % BITS_PER_UNIT != 0
return false;
base_addr = get_inner_reference (ref, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
if (TREE_CODE (base_addr) == MEM_REF)
{
tree offset;
get_inner_reference (src, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
if (n->range < (unsigned HOST_WIDE_INT) bitsize)
{
load_offset = (bitsize - n->range) / BITS_PER_UNIT;
SSA_NAME OFF and put the loop invariants into a tree BASE
that can be gimplified before the loop. */
base = get_inner_reference (base, &pbitsize, &pbitpos, &off, &pmode,
- &punsignedp, &reversep, &pvolatilep, false);
+ &punsignedp, &reversep, &pvolatilep);
gcc_assert (base && (pbitpos % BITS_PER_UNIT) == 0 && !reversep);
if (TREE_CODE (base) == MEM_REF)
outer_base = get_inner_reference (inner_base, &pbitsize, &pbitpos,
&poffset, &pmode, &punsignedp,
- &preversep, &pvolatilep, false);
+ &preversep, &pvolatilep);
gcc_assert (outer_base != NULL_TREE);
if (pbitpos % BITS_PER_UNIT != 0)
verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada build. */
verify_variant_match (TYPE_UNSIGNED);
- verify_variant_match (TYPE_ALIGN_OK);
verify_variant_match (TYPE_PACKED);
if (TREE_CODE (t) == REFERENCE_TYPE)
verify_variant_match (TYPE_REF_IS_RVALUE);
/* In a CALL_EXPR, means call was instrumented by Pointer Bounds Checker. */
#define CALL_WITH_BOUNDS_P(NODE) (CALL_EXPR_CHECK (NODE)->base.deprecated_flag)
-/* In a type, nonzero means that all objects of the type are guaranteed by the
- language or front-end to be properly aligned, so we can indicate that a MEM
- of this type is aligned at least to the alignment of the type, even if it
- doesn't appear that it is. We see this, for example, in object-oriented
- languages where a tag field may show this is an object of a more-aligned
- variant of the more generic type. */
-#define TYPE_ALIGN_OK(NODE) (TYPE_CHECK (NODE)->base.nothrow_flag)
-
/* Used in classes in C++. */
#define TREE_PRIVATE(NODE) ((NODE)->base.private_flag)
/* Used in classes in C++. */
#define TYPE_LANG_FLAG_4(NODE) (TYPE_CHECK (NODE)->type_common.lang_flag_4)
#define TYPE_LANG_FLAG_5(NODE) (TYPE_CHECK (NODE)->type_common.lang_flag_5)
#define TYPE_LANG_FLAG_6(NODE) (TYPE_CHECK (NODE)->type_common.lang_flag_6)
+#define TYPE_LANG_FLAG_7(NODE) (TYPE_CHECK (NODE)->type_common.lang_flag_7)
/* Used to keep track of visited nodes in tree traversals. This is set to
0 by copy_node and make_node. */
look for the ultimate containing object, which is returned and specify
the access position and size. */
extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
- tree *, machine_mode *, int *, int *,
- int *, bool);
+ tree *, machine_mode *, int *, int *, int *);
extern tree build_personality_function (const char *);
machine_mode mode;
int unsignedp, reversep, volatilep = 0;
base = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
 /* No need to instrument accesses to decls that don't escape;
they can't escape to other threads then. */
machine_mode mode;
int volatilep = 0, reversep, unsignedp = 0;
tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
tree utype = build_nonstandard_integer_type (modebitsize, 1);
if ((TREE_CODE (base) == VAR_DECL && DECL_HARD_REGISTER (base))
machine_mode mode;
int volatilep = 0, reversep, unsignedp = 0;
tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
if (bitpos % BITS_PER_UNIT != 0
|| bitsize != size_in_bytes * BITS_PER_UNIT)