* *
* C Implementation File *
* *
- * Copyright (C) 1992-2014, Free Software Foundation, Inc. *
+ * Copyright (C) 1992-2019, Free Software Foundation, Inc. *
* *
* GNAT is free software; you can redistribute it and/or modify it under *
* terms of the GNU General Public License as published by the Free Soft- *
#include "config.h"
#include "system.h"
#include "coretypes.h"
+#include "memmodel.h"
#include "tm.h"
+#include "vec.h"
+#include "alias.h"
#include "tree.h"
+#include "inchash.h"
+#include "builtins.h"
+#include "fold-const.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "varasm.h"
return type;
}
\f
-/* EXP is a GCC tree representing an address. See if we can find how
- strictly the object at that address is aligned. Return that alignment
- in bits. If we don't know anything about the alignment, return 0. */
+/* EXP is a GCC tree representing an address. See if we can find how strictly
+ the object at this address is aligned and, if so, return the alignment of
+ the object in bits. Otherwise return 0. */
unsigned int
known_alignment (tree exp)
break;
case COMPOUND_EXPR:
- /* The value of a COMPOUND_EXPR is that of it's second operand. */
+ /* The value of a COMPOUND_EXPR is that of its second operand. */
this_alignment = known_alignment (TREE_OPERAND (exp, 1));
break;
case PLUS_EXPR:
case MINUS_EXPR:
- /* If two address are added, the alignment of the result is the
+ /* If two addresses are added, the alignment of the result is the
minimum of the two alignments. */
lhs = known_alignment (TREE_OPERAND (exp, 0));
rhs = known_alignment (TREE_OPERAND (exp, 1));
break;
case POINTER_PLUS_EXPR:
- lhs = known_alignment (TREE_OPERAND (exp, 0));
- rhs = known_alignment (TREE_OPERAND (exp, 1));
+ /* If this is the pattern built for aligning types, decode it. */
+ if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
+ && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
+ {
+ tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
+ return
+ known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
+ }
+
/* If we don't know the alignment of the offset, we assume that
of the base. */
+ lhs = known_alignment (TREE_OPERAND (exp, 0));
+ rhs = known_alignment (TREE_OPERAND (exp, 1));
+
if (rhs == 0)
this_alignment = lhs;
else
break;
case COND_EXPR:
- /* If there is a choice between two values, use the smallest one. */
+ /* If there is a choice between two values, use the smaller one. */
lhs = known_alignment (TREE_OPERAND (exp, 1));
rhs = known_alignment (TREE_OPERAND (exp, 2));
this_alignment = MIN (lhs, rhs);
unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
/* The first part of this represents the lowest bit in the constant,
but it is originally in bytes, not bits. */
- this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
+ this_alignment = (c & -c) * BITS_PER_UNIT;
}
break;
break;
case ADDR_EXPR:
- this_alignment = expr_align (TREE_OPERAND (exp, 0));
+ if (DECL_P (TREE_OPERAND (exp, 0)))
+ this_alignment = DECL_ALIGN (TREE_OPERAND (exp, 0));
+ else
+ this_alignment = get_object_alignment (TREE_OPERAND (exp, 0));
break;
case CALL_EXPR:
{
+ tree fndecl = get_callee_fndecl (exp);
+ if (fndecl == malloc_decl || fndecl == realloc_decl)
+ return get_target_system_allocator_alignment () * BITS_PER_UNIT;
+
tree t = maybe_inline_call_in_expr (exp);
if (t)
return known_alignment (t);
}
- /* Fall through... */
+ /* ... fall through ... */
default:
/* For other pointer expressions, we assume that the pointed-to object
have a dummy type here (e.g. a Taft Amendment type), for which the
alignment is meaningless and should be ignored. */
if (POINTER_TYPE_P (TREE_TYPE (exp))
- && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
+ && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp)))
+ && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (exp))))
this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
else
this_alignment = 0;
calling into build_binary_op), some others are really expected and we
have to be careful. */
+ const bool variable_record_on_lhs
+ = (TREE_CODE (t1) == RECORD_TYPE
+ && TREE_CODE (t2) == RECORD_TYPE
+ && get_variant_part (t1)
+ && !get_variant_part (t2));
+
+ const bool variable_array_on_lhs
+ = (TREE_CODE (t1) == ARRAY_TYPE
+ && TREE_CODE (t2) == ARRAY_TYPE
+ && !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)))
+ && TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t2))));
+
/* We must avoid writing more than what the target can hold if this is for
an assignment and the case of tagged types is handled in build_binary_op
so we use the lhs type if it is known to be smaller or of constant size
and the rhs type is not, whatever the modes. We also force t1 in case of
constant size equality to minimize occurrences of view conversions on the
- lhs of an assignment, except for the case of record types with a variant
- part on the lhs but not on the rhs to make the conversion simpler. */
+ lhs of an assignment, except for the case of types with a variable part
+ on the lhs but not on the rhs to make the conversion simpler. */
if (TREE_CONSTANT (TYPE_SIZE (t1))
&& (!TREE_CONSTANT (TYPE_SIZE (t2))
|| tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
|| (TYPE_SIZE (t1) == TYPE_SIZE (t2)
- && !(TREE_CODE (t1) == RECORD_TYPE
- && TREE_CODE (t2) == RECORD_TYPE
- && get_variant_part (t1) != NULL_TREE
- && get_variant_part (t2) == NULL_TREE))))
+ && !variable_record_on_lhs
+ && !variable_array_on_lhs)))
return t1;
- /* Otherwise, if the lhs type is non-BLKmode, use it. Note that we know
- that we will not have any alignment problems since, if we did, the
- non-BLKmode type could not have been used. */
- if (TYPE_MODE (t1) != BLKmode)
+ /* Otherwise, if the lhs type is non-BLKmode, use it, except for the case of
+ a non-BLKmode rhs and array types with a variable part on the lhs but not
+ on the rhs to make sure the conversion is preserved during gimplification.
+ Note that we know that we will not have any alignment problems since, if
+ we did, the non-BLKmode type could not have been used. */
+ if (TYPE_MODE (t1) != BLKmode
+ && (TYPE_MODE (t2) == BLKmode || !variable_array_on_lhs))
return t1;
/* If the rhs type is of constant size, use it whatever the modes. At
in order to suppress the comparison of the data at the end. */
while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
{
- tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
- tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
- tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
- tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
- tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
+ tree dom1 = TYPE_DOMAIN (t1);
+ tree dom2 = TYPE_DOMAIN (t2);
+ tree length1 = size_binop (PLUS_EXPR,
+ size_binop (MINUS_EXPR,
+ TYPE_MAX_VALUE (dom1),
+ TYPE_MIN_VALUE (dom1)),
size_one_node);
- tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
+ tree length2 = size_binop (PLUS_EXPR,
+ size_binop (MINUS_EXPR,
+ TYPE_MAX_VALUE (dom2),
+ TYPE_MIN_VALUE (dom2)),
size_one_node);
+ tree ind1 = TYPE_INDEX_TYPE (dom1);
+ tree ind2 = TYPE_INDEX_TYPE (dom2);
+ tree base_type = maybe_character_type (get_base_type (ind1));
+ tree lb1 = convert (base_type, TYPE_MIN_VALUE (ind1));
+ tree ub1 = convert (base_type, TYPE_MAX_VALUE (ind1));
+ tree lb2 = convert (base_type, TYPE_MIN_VALUE (ind2));
+ tree ub2 = convert (base_type, TYPE_MAX_VALUE (ind2));
tree comparison, this_a1_is_null, this_a2_is_null;
- /* If the length of the first array is a constant, swap our operands
- unless the length of the second array is the constant zero. */
- if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
+ /* If the length of the first array is a constant and that of the second
+ array is not, swap our operands to have the constant second. */
+ if (TREE_CODE (length1) == INTEGER_CST
+ && TREE_CODE (length2) != INTEGER_CST)
{
tree tem;
bool btem;
last < first holds. */
if (integer_zerop (length2))
{
- tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
-
length_zero_p = true;
- ub1
- = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
- lb1
- = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
+ lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
+ ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);
comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
- comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
if (EXPR_P (comparison))
SET_EXPR_LOCATION (comparison, loc);
just use its length computed from the actual stored bounds. */
else if (TREE_CODE (length2) == INTEGER_CST)
{
- tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
-
- ub1
- = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
- lb1
- = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
- /* Note that we know that UB2 and LB2 are constant and hence
+ /* Note that we know that LB2 and UB2 are constant and hence
cannot contain a PLACEHOLDER_EXPR. */
- ub2
- = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
- lb2
- = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
+ lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
+ ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);
comparison
= fold_build2_loc (loc, EQ_EXPR, result_type,
- build_binary_op (MINUS_EXPR, b, ub1, lb1),
- build_binary_op (MINUS_EXPR, b, ub2, lb2));
- comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
+ build_binary_op (MINUS_EXPR, base_type,
+ ub1, lb1),
+ build_binary_op (MINUS_EXPR, base_type,
+ ub2, lb2));
if (EXPR_P (comparison))
SET_EXPR_LOCATION (comparison, loc);
comparison
= fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
+ if (EXPR_P (comparison))
+ SET_EXPR_LOCATION (comparison, loc);
- /* If the length expression is of the form (cond ? val : 0), assume
- that cond is equivalent to (length != 0). That's guaranteed by
- construction of the array types in gnat_to_gnu_entity. */
- if (TREE_CODE (length1) == COND_EXPR
- && integer_zerop (TREE_OPERAND (length1, 2)))
- this_a1_is_null
- = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
- else
- this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
- length1, size_zero_node);
-
- /* Likewise for the second array. */
- if (TREE_CODE (length2) == COND_EXPR
- && integer_zerop (TREE_OPERAND (length2, 2)))
- this_a2_is_null
- = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
- else
- this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
- length2, size_zero_node);
+ lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
+ ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);
+
+ this_a1_is_null
+ = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
+
+ lb2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb2, a2);
+ ub2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub2, a2);
+
+ this_a2_is_null
+ = fold_build2_loc (loc, LT_EXPR, result_type, ub2, lb2);
}
/* Append expressions for this dimension to the final expressions. */
/* The constant folder doesn't fold fat pointer types so we do it here. */
if (TREE_CODE (p1) == CONSTRUCTOR)
- p1_array = (*CONSTRUCTOR_ELTS (p1))[0].value;
+ p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
else
- p1_array = build_component_ref (p1, NULL_TREE,
- TYPE_FIELDS (TREE_TYPE (p1)), true);
+ p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);
p1_array_is_null
= fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
null_pointer_node));
if (TREE_CODE (p2) == CONSTRUCTOR)
- p2_array = (*CONSTRUCTOR_ELTS (p2))[0].value;
+ p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
else
- p2_array = build_component_ref (p2, NULL_TREE,
- TYPE_FIELDS (TREE_TYPE (p2)), true);
+ p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);
p2_array_is_null
= fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
= fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
if (TREE_CODE (p1) == CONSTRUCTOR)
- p1_bounds = (*CONSTRUCTOR_ELTS (p1))[1].value;
+ p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
else
p1_bounds
- = build_component_ref (p1, NULL_TREE,
- DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);
+ = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
+ true);
if (TREE_CODE (p2) == CONSTRUCTOR)
- p2_bounds = (*CONSTRUCTOR_ELTS (p2))[1].value;
+ p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
else
p2_bounds
- = build_component_ref (p2, NULL_TREE,
- DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);
+ = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
+ true);
same_bounds
= fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
if (TYPE_PRECISION (op_type) < precision
|| TYPE_UNSIGNED (op_type) != unsignedp)
{
- /* Copy the node so we ensure it can be modified to make it modular. */
- op_type = copy_node (gnat_type_for_size (precision, unsignedp));
+ /* Copy the type so we ensure it can be modified to make it modular. */
+ op_type = copy_type (gnat_type_for_size (precision, unsignedp));
modulus = convert (op_type, modulus);
SET_TYPE_MODULUS (op_type, modulus);
TYPE_MODULAR_P (op_type) = 1;
possible size. */
if (op_code == MULT_EXPR)
{
- tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
+ /* Copy the type so we ensure it can be modified to make it modular. */
+ tree div_type = copy_type (gnat_type_for_size (needed_precision, 1));
modulus = convert (div_type, modulus);
SET_TYPE_MODULUS (div_type, modulus);
TYPE_MODULAR_P (div_type) = 1;
result = gnat_protect_expr (result);
result = fold_build3 (COND_EXPR, op_type,
fold_build2 (LT_EXPR, boolean_type_node, result,
- convert (op_type, integer_zero_node)),
+ build_int_cst (op_type, 0)),
fold_build2 (PLUS_EXPR, op_type, result, modulus),
result);
}
return 0;
}
-/* Build an atomic load for the underlying atomic object in SRC. */
+/* Build an atomic load for the underlying atomic object in SRC. SYNC is
+ true if the load requires synchronization. */
tree
-build_atomic_load (tree src)
+build_atomic_load (tree src, bool sync)
{
tree ptr_type
= build_pointer_type
- (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
- tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
+ (build_qualified_type (void_type_node,
+ TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
+ tree mem_model
+ = build_int_cst (integer_type_node,
+ sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
tree orig_src = src;
tree t, addr, val;
unsigned int size;
return convert (TREE_TYPE (orig_src), t);
}
-/* Build an atomic store from SRC to the underlying atomic object in DEST. */
+/* Build an atomic store from SRC to the underlying atomic object in DEST.
+ SYNC is true if the store requires synchronization. */
tree
-build_atomic_store (tree dest, tree src)
+build_atomic_store (tree dest, tree src, bool sync)
{
tree ptr_type
= build_pointer_type
- (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
- tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
+ (build_qualified_type (void_type_node,
+ TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
+ tree mem_model
+ = build_int_cst (integer_type_node,
+ sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
tree orig_dest = dest;
tree t, int_type, addr;
unsigned int size;
return build_call_expr (t, 3, addr, src, mem_model);
}
+
+/* Build a load-modify-store sequence from SRC to DEST. GNAT_NODE is used for
+ the location of the sequence. Note that, even though the load and the store
+ are both atomic, the sequence itself is not atomic. */
+
+tree
+build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
+{
+ /* We will be modifying DEST below so we build a copy. */
+ dest = copy_node (dest);
+ tree ref = dest;
+
+ while (handled_component_p (ref))
+ {
+ /* The load should already have been generated during the translation
+ of the GNAT destination tree; find it in the GNU tree. */
+ if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
+ {
+ tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
+ if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
+ {
+ tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
+ tree t = CALL_EXPR_ARG (op, 0);
+ tree obj, temp, stmt;
+
+ /* Find out the loaded object. */
+ if (TREE_CODE (t) == NOP_EXPR)
+ t = TREE_OPERAND (t, 0);
+ if (TREE_CODE (t) == ADDR_EXPR)
+ obj = TREE_OPERAND (t, 0);
+ else
+ obj = build1 (INDIRECT_REF, type, t);
+
+ /* Drop atomic and volatile qualifiers for the temporary. */
+ type = TYPE_MAIN_VARIANT (type);
+
+ /* And drop BLKmode, if need be, to put it into a register. */
+ if (TYPE_MODE (type) == BLKmode)
+ {
+ unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
+ type = copy_type (type);
+ machine_mode mode = int_mode_for_size (size, 0).else_blk ();
+ SET_TYPE_MODE (type, mode);
+ }
+
+ /* Create the temporary by inserting a SAVE_EXPR. */
+ temp = build1 (SAVE_EXPR, type,
+ build1 (VIEW_CONVERT_EXPR, type, op));
+ TREE_OPERAND (ref, 0) = temp;
+
+ start_stmt_group ();
+
+ /* Build the modify of the temporary. */
+ stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
+ add_stmt_with_node (stmt, gnat_node);
+
+ /* Build the store to the object. NOTE(review): sync is false, i.e.
+ a relaxed atomic store — presumably because the enclosing
+ sequence is not atomic anyway; confirm this is intended. */
+ stmt = build_atomic_store (obj, temp, false);
+ add_stmt_with_node (stmt, gnat_node);
+
+ return end_stmt_group ();
+ }
+ }
+
+ /* Keep walking down the component chain, unsharing each node so the
+ SAVE_EXPR insertion above only ever modifies our private copy. */
+ TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
+ ref = TREE_OPERAND (ref, 0);
+ }
+
+ /* Something went wrong earlier if we have not found the atomic load. */
+ gcc_unreachable ();
+}
\f
/* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
desired for the result. Usually the operation is to be performed
in that type. For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
NULL_TREE. For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
case the type to be used will be derived from the operands.
+ Don't fold the result if NO_FOLD is true.
This function is very much unlike the ones for C and C++ since we
have already done any type conversion and matching required. All we
tree
build_binary_op (enum tree_code op_code, tree result_type,
- tree left_operand, tree right_operand)
+ tree left_operand, tree right_operand,
+ bool no_fold)
{
- tree left_type = TREE_TYPE (left_operand);
+ tree left_type = TREE_TYPE (left_operand);
tree right_type = TREE_TYPE (right_operand);
tree left_base_type = get_base_type (left_type);
tree right_base_type = get_base_type (right_type);
&& TYPE_JUSTIFIED_MODULAR_P (operation_type))
operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
- if (operation_type
- && TREE_CODE (operation_type) == INTEGER_TYPE
- && TYPE_EXTRA_SUBTYPE_P (operation_type))
+ if (operation_type && TYPE_IS_EXTRA_SUBTYPE_P (operation_type))
operation_type = get_base_type (operation_type);
modulus = (operation_type
{
case INIT_EXPR:
case MODIFY_EXPR:
-#ifdef ENABLE_CHECKING
- gcc_assert (result_type == NULL_TREE);
-#endif
+ gcc_checking_assert (!result_type);
+
/* If there were integral or pointer conversions on the LHS, remove
them; we'll be putting them back below if needed. Likewise for
conversions between array and record types, except for justified
operation_type = left_type;
}
- /* If we have a call to a function that returns an unconstrained type
- with default discriminant on the RHS, use the RHS type (which is
- padded) as we cannot compute the size of the actual assignment. */
+ /* If we have a call to a function that returns with variable size, use
+ the RHS type in case we want to use the return slot optimization. */
else if (TREE_CODE (right_operand) == CALL_EXPR
- && TYPE_IS_PADDING_P (right_type)
- && CONTAINS_PLACEHOLDER_P
- (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
+ && return_type_with_variable_size_p (right_type))
operation_type = right_type;
/* Find the best type to use for copying between aggregate types. */
strip anything that get_inner_reference can handle. Then remove any
conversions between types having the same code and mode. And mark
VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
- either an INDIRECT_REF, a NULL_EXPR or a DECL node. */
+ either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node. */
result = left_operand;
while (true)
{
gcc_assert (TREE_CODE (result) == INDIRECT_REF
|| TREE_CODE (result) == NULL_EXPR
+ || TREE_CODE (result) == SAVE_EXPR
|| DECL_P (result));
/* Convert the right operand to the operation type unless it is
/* For a range, make sure the element type is consistent. */
if (op_code == ARRAY_RANGE_REF
&& TREE_TYPE (operation_type) != TREE_TYPE (left_type))
- operation_type = build_array_type (TREE_TYPE (left_type),
- TYPE_DOMAIN (operation_type));
+ {
+ operation_type
+ = build_nonshared_array_type (TREE_TYPE (left_type),
+ TYPE_DOMAIN (operation_type));
+ /* Declare it now since it will never be declared otherwise. This
+ is necessary to ensure that its subtrees are properly marked. */
+ create_type_decl (TYPE_NAME (operation_type), operation_type, true,
+ false, Empty);
+ }
/* Then convert the right operand to its base type. This will prevent
unneeded sign conversions when sizetype is wider than integer. */
case TRUTH_AND_EXPR:
case TRUTH_OR_EXPR:
case TRUTH_XOR_EXPR:
-#ifdef ENABLE_CHECKING
- gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
-#endif
+ gcc_checking_assert
+ (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
operation_type = left_base_type;
left_operand = convert (operation_type, left_operand);
right_operand = convert (operation_type, right_operand);
case LT_EXPR:
case EQ_EXPR:
case NE_EXPR:
-#ifdef ENABLE_CHECKING
- gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
-#endif
+ gcc_checking_assert
+ (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
/* If either operand is a NULL_EXPR, just return a new one. */
if (TREE_CODE (left_operand) == NULL_EXPR)
return build2 (op_code, result_type,
gcc_unreachable ();
}
+ else if (POINTER_TYPE_P (left_base_type)
+ && POINTER_TYPE_P (right_base_type))
+ {
+ gcc_assert (TREE_TYPE (left_base_type)
+ == TREE_TYPE (right_base_type));
+ best_type = left_base_type;
+ }
else
gcc_unreachable ();
else if (TREE_CODE (right_operand) == NULL_EXPR)
return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
- result = fold (build4 (op_code, operation_type, left_operand,
- right_operand, NULL_TREE, NULL_TREE));
+ {
+ result = build4 (op_code, operation_type, left_operand, right_operand,
+ NULL_TREE, NULL_TREE);
+ if (!no_fold)
+ result = fold (result);
+ }
else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
result = build2 (op_code, void_type_node, left_operand, right_operand);
+ else if (no_fold)
+ result = build2 (op_code, operation_type, left_operand, right_operand);
else
result
= fold_build2 (op_code, operation_type, left_operand, right_operand);
/* If we are working with modular types, perform the MOD operation
if something above hasn't eliminated the need for it. */
if (modulus)
- result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
- convert (operation_type, modulus));
+ {
+ modulus = convert (operation_type, modulus);
+ if (no_fold)
+ result = build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
+ else
+ result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
+ }
if (result_type && result_type != operation_type)
result = convert (result_type, result);
break;
case TRUTH_NOT_EXPR:
-#ifdef ENABLE_CHECKING
- gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
-#endif
+ gcc_checking_assert
+ (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
/* When not optimizing, fold the result as invert_truthvalue_loc
doesn't fold the result of comparisons. This is intended to undo
since the middle-end cannot handle it. But we don't it in the
general case because it may introduce aliasing issues if the
first operand is an indirect assignment and the second operand
- the corresponding address, e.g. for an allocator. */
- if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
+ the corresponding address, e.g. for an allocator. However do
+ it for a return value to expose it for later recognition. */
+ if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
+ || (TREE_CODE (TREE_OPERAND (operand, 1)) == VAR_DECL
+ && DECL_RETURN_VALUE_P (TREE_OPERAND (operand, 1))))
{
result = build_unary_op (ADDR_EXPR, result_type,
TREE_OPERAND (operand, 1));
the offset to the field. Otherwise, do this the normal way. */
if (op_code == ATTR_ADDR_EXPR)
{
- HOST_WIDE_INT bitsize;
- HOST_WIDE_INT bitpos;
+ poly_int64 bitsize;
+ poly_int64 bitpos;
tree offset, inner;
machine_mode mode;
- int unsignedp, volatilep;
+ int unsignedp, reversep, volatilep;
inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep,
- false);
+ &mode, &unsignedp, &reversep,
+ &volatilep);
/* If INNER is a padding type whose field has a self-referential
size, convert to that inner type. We know the offset is zero
and we need to have that type visible. */
- if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
- && CONTAINS_PLACEHOLDER_P
- (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
- (TREE_TYPE (inner))))))
+ if (type_is_padding_self_referential (TREE_TYPE (inner)))
inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
inner);
if (!offset)
offset = size_zero_node;
- offset = size_binop (PLUS_EXPR, offset,
- size_int (bitpos / BITS_PER_UNIT));
+ offset
+ = size_binop (PLUS_EXPR, offset,
+ size_int (bits_to_bytes_round_down (bitpos)));
- /* Take the address of INNER, convert the offset to void *, and
- add then. It will later be converted to the desired result
- type, if any. */
- inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
- inner = convert (ptr_void_type_node, inner);
- result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
+ /* Take the address of INNER, convert it to a pointer to our type
+ and add the offset. */
+ inner = build_unary_op (ADDR_EXPR,
+ build_pointer_type (TREE_TYPE (operand)),
+ inner);
+ result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
inner, offset);
- result = convert (build_pointer_type (TREE_TYPE (operand)),
- result);
break;
}
goto common;
a pointer to our type. */
if (TYPE_IS_PADDING_P (type))
{
- result = (*CONSTRUCTOR_ELTS (operand))[0].value;
- result = convert (build_pointer_type (TREE_TYPE (operand)),
- build_unary_op (ADDR_EXPR, NULL_TREE, result));
+ result
+ = build_unary_op (ADDR_EXPR,
+ build_pointer_type (TREE_TYPE (operand)),
+ CONSTRUCTOR_ELT (operand, 0)->value);
break;
}
-
goto common;
case NOP_EXPR:
{
if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
modulus,
- convert (operation_type,
- integer_one_node))))
+ build_int_cst (operation_type,
+ 1))))
result = fold_build2 (BIT_XOR_EXPR, operation_type,
operand, modulus);
else
fold_build2 (NE_EXPR,
boolean_type_node,
operand,
- convert
- (operation_type,
- integer_zero_node)),
+ build_int_cst
+ (operation_type, 0)),
result, operand);
}
else
that constant for nonbinary modulus. */
tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
- convert (operation_type,
- integer_one_node));
+ build_int_cst (operation_type, 1));
if (mod_pow2)
result = fold_build2 (BIT_XOR_EXPR, operation_type,
return fn;
}
\f
-/* Call a function that raises an exception and pass the line number and file
- name, if requested. MSG says which exception function to call.
+/* Build a goto to LABEL for a raise, with an optional call to Local_Raise.
+ MSG gives the exception's identity for the call to Local_Raise, if any. */
- GNAT_NODE is the gnat node conveying the source location for which the
- error should be signaled, or Empty in which case the error is signaled on
- the current ref_file_name/input_line.
-
- KIND says which kind of exception this is for
- (N_Raise_{Constraint,Storage,Program}_Error). */
-
-tree
-build_call_raise (int msg, Node_Id gnat_node, char kind)
+static tree
+build_goto_raise (Entity_Id gnat_label, int msg)
{
- tree fndecl = gnat_raise_decls[msg];
- tree label = get_exception_label (kind);
- tree filename;
- int line_number;
- const char *str;
- int len;
+ tree gnu_label = gnat_to_gnu_entity (gnat_label, NULL_TREE, false);
+ tree gnu_result = build1 (GOTO_EXPR, void_type_node, gnu_label);
Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
- /* If this is to be done as a goto, handle that case. */
- if (label)
+ /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
+ if (Present (local_raise))
{
- Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
- tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
-
- /* If Local_Raise is present, generate
- Local_Raise (exception'Identity); */
- if (Present (local_raise))
- {
- tree gnu_local_raise
- = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
- tree gnu_exception_entity
- = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
- tree gnu_call
- = build_call_n_expr (gnu_local_raise, 1,
- build_unary_op (ADDR_EXPR, NULL_TREE,
- gnu_exception_entity));
-
- gnu_result = build2 (COMPOUND_EXPR, void_type_node,
- gnu_call, gnu_result);}
-
- return gnu_result;
+ tree gnu_local_raise
+ = gnat_to_gnu_entity (local_raise, NULL_TREE, false);
+ tree gnu_exception_entity
+ = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, false);
+ tree gnu_call
+ = build_call_n_expr (gnu_local_raise, 1,
+ build_unary_op (ADDR_EXPR, NULL_TREE,
+ gnu_exception_entity));
+ gnu_result
+ = build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
}
- str
- = (Debug_Flag_NN || Exception_Locations_Suppressed)
- ? ""
- : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
- ? IDENTIFIER_POINTER
- (get_identifier (Get_Name_String
- (Debug_Source_Name
- (Get_Source_File_Index (Sloc (gnat_node))))))
- : ref_filename;
-
- len = strlen (str);
- filename = build_string (len, str);
- line_number
- = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
- ? Get_Logical_Line_Number (Sloc(gnat_node))
- : LOCATION_LINE (input_location);
-
- TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
- build_index_type (size_int (len)));
-
- return
- build_call_n_expr (fndecl, 2,
- build1 (ADDR_EXPR,
- build_pointer_type (unsigned_char_type_node),
- filename),
- build_int_cst (NULL_TREE, line_number));
+ /* Mark the label as used — NOTE(review): presumably to keep it from being
+ flagged or removed as unreferenced; confirm against the middle-end. */
+ TREE_USED (gnu_label) = 1;
+ return gnu_result;
}
-/* Similar to build_call_raise, for an index or range check exception as
- determined by MSG, with extra information generated of the form
- "INDEX out of range FIRST..LAST". */
+/* Expand the SLOC of GNAT_NODE, if present, into tree location information
+ pointed to by FILENAME, LINE and COL. Fall back to the current location
+ if GNAT_NODE is absent or has no SLOC. */
-tree
-build_call_raise_range (int msg, Node_Id gnat_node,
- tree index, tree first, tree last)
+static void
+expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
{
- tree fndecl = gnat_raise_decls_ext[msg];
- tree filename;
- int line_number, column_number;
const char *str;
- int len;
-
- str
- = (Debug_Flag_NN || Exception_Locations_Suppressed)
- ? ""
- : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
- ? IDENTIFIER_POINTER
- (get_identifier (Get_Name_String
- (Debug_Source_Name
- (Get_Source_File_Index (Sloc (gnat_node))))))
- : ref_filename;
-
- len = strlen (str);
- filename = build_string (len, str);
- if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
+ int line_number, column_number;
+
+ /* Exception locations may be suppressed entirely, in which case an empty
+ file name and zero line/column are emitted. */
+ if (Debug_Flag_NN || Exception_Locations_Suppressed)
{
+ str = "";
+ line_number = 0;
+ column_number = 0;
+ }
+ else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
+ {
+ str = Get_Name_String
+ (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
line_number = Get_Logical_Line_Number (Sloc (gnat_node));
column_number = Get_Column_Number (Sloc (gnat_node));
}
else
{
+ str = lbasename (LOCATION_FILE (input_location));
line_number = LOCATION_LINE (input_location);
- column_number = 0;
+ column_number = LOCATION_COLUMN (input_location);
}
- TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
- build_index_type (size_int (len)));
+ const int len = strlen (str);
+ *filename = build_string (len, str);
+ TREE_TYPE (*filename) = build_array_type (char_type_node,
+ build_index_type (size_int (len)));
+ *line = build_int_cst (NULL_TREE, line_number);
+ /* COL may be null when the caller does not need the column number. */
+ if (col)
+ *col = build_int_cst (NULL_TREE, column_number);
+}
+
+/* Build a call to a function that raises an exception and passes file name
+   and line number, if requested.  MSG says which exception function to call.
+   GNAT_NODE is the node conveying the source location for which the error
+   should be signaled, or Empty in which case the error is signaled for the
+   current location.  KIND says which kind of exception node this is for,
+   among N_Raise_{Constraint,Storage,Program}_Error.  */
+
+tree
+build_call_raise (int msg, Node_Id gnat_node, char kind)
+{
+  Entity_Id gnat_label = get_exception_label (kind);
+  tree fndecl = gnat_raise_decls[msg];
+  tree filename, line;
+
+  /* If this is to be done as a goto, handle that case.  */
+  if (Present (gnat_label))
+    return build_goto_raise (gnat_label, msg);
+
+  /* Otherwise, expand the source location and build the actual call to the
+     raise function with the file name and line number.  */
+  expand_sloc (gnat_node, &filename, &line, NULL);
   return
-    build_call_n_expr (fndecl, 6,
+    build_call_n_expr (fndecl, 2,
                        build1 (ADDR_EXPR,
-                               build_pointer_type (unsigned_char_type_node),
+                               build_pointer_type (char_type_node),
                                filename),
-                       build_int_cst (NULL_TREE, line_number),
-                       build_int_cst (NULL_TREE, column_number),
-                       convert (integer_type_node, index),
-                       convert (integer_type_node, first),
-                       convert (integer_type_node, last));
+                       line);
 }
 /* Similar to build_call_raise, with extra information about the column
    where the check failed.  */
 tree
-build_call_raise_column (int msg, Node_Id gnat_node)
+build_call_raise_column (int msg, Node_Id gnat_node, char kind)
 {
+  Entity_Id gnat_label = get_exception_label (kind);
   tree fndecl = gnat_raise_decls_ext[msg];
-  tree filename;
-  int line_number, column_number;
-  const char *str;
-  int len;
-
-  str
-    = (Debug_Flag_NN || Exception_Locations_Suppressed)
-      ? ""
-      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
-        ? IDENTIFIER_POINTER
-          (get_identifier (Get_Name_String
-                           (Debug_Source_Name
-                            (Get_Source_File_Index (Sloc (gnat_node))))))
-        : ref_filename;
-
-  len = strlen (str);
-  filename = build_string (len, str);
-  if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
-    {
-      line_number = Get_Logical_Line_Number (Sloc (gnat_node));
-      column_number = Get_Column_Number (Sloc (gnat_node));
-    }
-  else
-    {
-      line_number = LOCATION_LINE (input_location);
-      column_number = 0;
-    }
+  tree filename, line, col;
+
+  /* If this is to be done as a goto, handle that case.  */
+  if (Present (gnat_label))
+    return build_goto_raise (gnat_label, msg);
-  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
-                                           build_index_type (size_int (len)));
+  /* Otherwise, pass the file name plus both line and column numbers.  */
+  expand_sloc (gnat_node, &filename, &line, &col);
   return
     build_call_n_expr (fndecl, 3,
                        build1 (ADDR_EXPR,
-                               build_pointer_type (unsigned_char_type_node),
+                               build_pointer_type (char_type_node),
                                filename),
-                       build_int_cst (NULL_TREE, line_number),
-                       build_int_cst (NULL_TREE, column_number));
+                       line, col);
+}
+
+/* Similar to build_call_raise_column, for an index or range check exception,
+   with extra information of the form "INDEX out of range FIRST..LAST".  */
+
+tree
+build_call_raise_range (int msg, Node_Id gnat_node, char kind,
+                        tree index, tree first, tree last)
+{
+  Entity_Id gnat_label = get_exception_label (kind);
+  tree fndecl = gnat_raise_decls_ext[msg];
+  tree filename, line, col;
+
+  /* If this is to be done as a goto, handle that case.  */
+  if (Present (gnat_label))
+    return build_goto_raise (gnat_label, msg);
+
+  expand_sloc (gnat_node, &filename, &line, &col);
+
+  /* The offending index and the bounds are passed as plain integers.  */
+  return
+    build_call_n_expr (fndecl, 6,
+                       build1 (ADDR_EXPR,
+                               build_pointer_type (char_type_node),
+                               filename),
+                       line, col,
+                       convert (integer_type_node, index),
+                       convert (integer_type_node, first),
+                       convert (integer_type_node, last));
 }
\f
/* qsort comparer for the bit positions of two constructor elements
static int
compare_elmt_bitpos (const PTR rt1, const PTR rt2)
{
- const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
- const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
+ const constructor_elt * const elmt1 = (const constructor_elt *) rt1;
+ const constructor_elt * const elmt2 = (const constructor_elt *) rt2;
const_tree const field1 = elmt1->index;
const_tree const field2 = elmt2->index;
const int ret
|| (TREE_CODE (type) == RECORD_TYPE
&& CONSTRUCTOR_BITFIELD_P (obj)
&& !initializer_constant_valid_for_bitfield_p (val))
- || !initializer_constant_valid_p (val, TREE_TYPE (val)))
+ || !initializer_constant_valid_p (val,
+ TREE_TYPE (val),
+ TYPE_REVERSE_STORAGE_ORDER (type)))
allconstant = false;
if (!TREE_READONLY (val))
return result;
}
\f
-/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
- an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
- for the field. Don't fold the result if NO_FOLD_P is true.
-
- We also handle the fact that we might have been passed a pointer to the
- actual record and know how to look for fields in variant parts. */
+/* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
+ is not found in the record. Don't fold the result if NO_FOLD is true. */
static tree
-build_simple_component_ref (tree record_variable, tree component, tree field,
- bool no_fold_p)
+build_simple_component_ref (tree record, tree field, bool no_fold)
{
- tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
- tree base, ref;
-
- gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
- && COMPLETE_TYPE_P (record_type)
- && (component == NULL_TREE) != (field == NULL_TREE));
+ tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
+ tree ref;
- /* If no field was specified, look for a field with the specified name in
- the current record only. */
- if (!field)
- for (field = TYPE_FIELDS (record_type);
- field;
- field = DECL_CHAIN (field))
- if (DECL_NAME (field) == component)
- break;
+ gcc_assert (RECORD_OR_UNION_TYPE_P (type) && COMPLETE_TYPE_P (type));
- if (!field)
- return NULL_TREE;
+ /* Try to fold a conversion from another record or union type unless the type
+ contains a placeholder as it might be needed for a later substitution. */
+ if (TREE_CODE (record) == VIEW_CONVERT_EXPR
+ && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
+ && !type_contains_placeholder_p (type))
+ {
+ tree op = TREE_OPERAND (record, 0);
+
+ /* If this is an unpadding operation, convert the underlying object to
+ the unpadded type directly. */
+ if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
+ return convert (TREE_TYPE (field), op);
+
+ /* Otherwise try to access FIELD directly in the underlying type, but
+ make sure that the form of the reference doesn't change too much;
+ this can happen for an unconstrained bit-packed array type whose
+ constrained form can be an integer type. */
+ ref = build_simple_component_ref (op, field, no_fold);
+ if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
+ return ref;
+ }
/* If this field is not in the specified record, see if we can find a field
in the specified record whose original field is the same as this one. */
- if (DECL_CONTEXT (field) != record_type)
+ if (DECL_CONTEXT (field) != type)
{
tree new_field;
/* First loop through normal components. */
- for (new_field = TYPE_FIELDS (record_type);
+ for (new_field = TYPE_FIELDS (type);
new_field;
new_field = DECL_CHAIN (new_field))
if (SAME_FIELD_P (field, new_field))
break;
- /* Next, see if we're looking for an inherited component in an extension.
- If so, look through the extension directly, unless the type contains
- a placeholder, as it might be needed for a later substitution. */
- if (!new_field
- && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
- && TYPE_ALIGN_OK (record_type)
- && !type_contains_placeholder_p (record_type)
- && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
- == RECORD_TYPE
- && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
- {
- ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
- NULL_TREE, field, no_fold_p);
- if (ref)
- return ref;
- }
-
/* Next, loop through DECL_INTERNAL_P components if we haven't found the
component in the first search. Doing this search in two steps is
required to avoid hidden homonymous fields in the _Parent field. */
if (!new_field)
- for (new_field = TYPE_FIELDS (record_type);
+ for (new_field = TYPE_FIELDS (type);
new_field;
new_field = DECL_CHAIN (new_field))
- if (DECL_INTERNAL_P (new_field))
+ if (DECL_INTERNAL_P (new_field)
+ && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
{
tree field_ref
- = build_simple_component_ref (record_variable,
- NULL_TREE, new_field, no_fold_p);
- ref = build_simple_component_ref (field_ref, NULL_TREE, field,
- no_fold_p);
+ = build_simple_component_ref (record, new_field, no_fold);
+ ref = build_simple_component_ref (field_ref, field, no_fold);
if (ref)
return ref;
}
&& TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
return NULL_TREE;
- /* We have found a suitable field. Before building the COMPONENT_REF, get
- the base object of the record variable if possible. */
- base = record_variable;
-
- if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR)
- {
- tree inner_variable = TREE_OPERAND (record_variable, 0);
- tree inner_type = TYPE_MAIN_VARIANT (TREE_TYPE (inner_variable));
-
- /* Look through a conversion between type variants. This is transparent
- as far as the field is concerned. */
- if (inner_type == record_type)
- base = inner_variable;
-
- /* Look through a conversion between original and packable version, but
- the field needs to be adjusted in this case. */
- else if (RECORD_OR_UNION_TYPE_P (inner_type)
- && TYPE_NAME (inner_type) == TYPE_NAME (record_type))
- {
- tree new_field;
+ ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
- for (new_field = TYPE_FIELDS (inner_type);
- new_field;
- new_field = DECL_CHAIN (new_field))
- if (SAME_FIELD_P (field, new_field))
- break;
- if (new_field)
- {
- field = new_field;
- base = inner_variable;
- }
- }
- }
-
- ref = build3 (COMPONENT_REF, TREE_TYPE (field), base, field, NULL_TREE);
-
- if (TREE_READONLY (record_variable)
+ if (TREE_READONLY (record)
|| TREE_READONLY (field)
- || TYPE_READONLY (record_type))
+ || TYPE_READONLY (type))
TREE_READONLY (ref) = 1;
- if (TREE_THIS_VOLATILE (record_variable)
+ if (TREE_THIS_VOLATILE (record)
|| TREE_THIS_VOLATILE (field)
- || TYPE_VOLATILE (record_type))
+ || TYPE_VOLATILE (type))
TREE_THIS_VOLATILE (ref) = 1;
- if (no_fold_p)
+ if (no_fold)
return ref;
/* The generic folder may punt in this case because the inner array type
can be self-referential, but folding is in fact not problematic. */
- if (TREE_CODE (base) == CONSTRUCTOR
- && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (base)))
+ if (TREE_CODE (record) == CONSTRUCTOR
+ && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
{
- vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (base);
+ vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
unsigned HOST_WIDE_INT idx;
tree index, value;
FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
- if (index == field)
- return value;
+ if (index == field)
+ return value;
return ref;
}
return fold (ref);
}
-\f
-/* Likewise, but generate a Constraint_Error if the reference could not be
- found. */
+
+/* Likewise, but return NULL_EXPR and generate a Constraint_Error if the
+ field is not found in the record. */
tree
-build_component_ref (tree record_variable, tree component, tree field,
- bool no_fold_p)
+build_component_ref (tree record, tree field, bool no_fold)
{
- tree ref = build_simple_component_ref (record_variable, component, field,
- no_fold_p);
+ tree ref = build_simple_component_ref (record, field, no_fold);
if (ref)
return ref;
- /* If FIELD was specified, assume this is an invalid user field so raise
- Constraint_Error. Otherwise, we have no type to return so abort. */
- gcc_assert (field);
+ /* Assume this is an invalid user field so raise Constraint_Error. */
return build1 (NULL_EXPR, TREE_TYPE (field),
build_call_raise (CE_Discriminant_Check_Failed, Empty,
N_Raise_Constraint_Error));
= build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
tree aligning_field
- = build_component_ref (aligning_record, NULL_TREE,
- TYPE_FIELDS (aligning_type), false);
+ = build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
+ false);
tree aligning_field_addr
= build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
/* Then arrange to store the allocator's return value ahead
and return. */
tree storage_ptr_slot_addr
- = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
- convert (ptr_void_type_node, aligning_field_addr),
+ = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
+ convert (ptr_type_node, aligning_field_addr),
size_int (-(HOST_WIDE_INT) POINTER_SIZE
/ BITS_PER_UNIT));
tree storage_ptr_slot
= build_unary_op (INDIRECT_REF, NULL_TREE,
- convert (build_pointer_type (ptr_void_type_node),
+ convert (build_pointer_type (ptr_type_node),
storage_ptr_slot_addr));
return
= (void *)DATA_PTR - (void *)sizeof (void *)) */
tree data_front_ptr
= build_binary_op
- (POINTER_PLUS_EXPR, ptr_void_type_node,
- convert (ptr_void_type_node, data_ptr),
+ (POINTER_PLUS_EXPR, ptr_type_node,
+ convert (ptr_type_node, data_ptr),
size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
/* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
free_ptr
= build_unary_op
(INDIRECT_REF, NULL_TREE,
- convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
+ convert (build_pointer_type (ptr_type_node), data_front_ptr));
}
else
free_ptr = data_ptr;
Entity_Id gnat_proc, Entity_Id gnat_pool,
Node_Id gnat_node)
{
- gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
-
/* Explicit proc to call ? This one is assumed to deal with the type
alignment constraints. */
if (Present (gnat_proc))
/* Check that we aren't violating the associated restriction. */
if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
- Check_No_Implicit_Heap_Alloc (gnat_node);
-
+ {
+ Check_No_Implicit_Heap_Alloc (gnat_node);
+ if (Has_Task (Etype (gnat_node)))
+ Check_No_Implicit_Task_Alloc (gnat_node);
+ if (Has_Protected (Etype (gnat_node)))
+ Check_No_Implicit_Protected_Alloc (gnat_node);
+ }
return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
}
}
if (init && TREE_CODE (init) == NULL_EXPR)
return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
+ /* If we are just annotating types, also return a NULL_EXPR. */
+ else if (type_annotate_only)
+ return build1 (NULL_EXPR, result_type,
+ build_call_raise (CE_Range_Check_Failed, gnat_node,
+ N_Raise_Constraint_Error));
+
/* If the initializer, if present, is a COND_EXPR, deal with each branch. */
else if (init && TREE_CODE (init) == COND_EXPR)
return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
else
storage_init
= build_binary_op (INIT_EXPR, NULL_TREE,
- build_component_ref (storage_deref, NULL_TREE,
+ build_component_ref (storage_deref,
TYPE_FIELDS (storage_type),
false),
build_template (template_type, type, NULL_TREE));
size = TYPE_SIZE_UNIT (TREE_TYPE (init));
/* If the size is still self-referential, reference the initializing
- expression, if it is present. If not, this must have been a
- call to allocate a library-level object, in which case we use
- the maximum size. */
- if (CONTAINS_PLACEHOLDER_P (size))
- {
- if (!ignore_init_type && init)
- size = substitute_placeholder_in_expr (size, init);
- else
- size = max_size (size, true);
- }
+ expression, if it is present. If not, this must have been a call
+ to allocate a library-level object, in which case we just use the
+ maximum size. */
+ if (!ignore_init_type && init)
+ size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, init);
+ else if (CONTAINS_PLACEHOLDER_P (size))
+ size = max_size (size, true);
/* If the size overflows, pass -1 so Storage_Error will be raised. */
if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
}
\f
/* Indicate that we need to take the address of T and that it therefore
- should not be allocated in a register. Returns true if successful. */
+ should not be allocated in a register. Return true if successful. */
bool
gnat_mark_addressable (tree t)
}
}
\f
+/* Return true if EXP is a stable expression for the purpose of the functions
+   below and, therefore, can be returned unmodified by them.  We accept things
+   that are actual constants or that have already been handled.  */
+
+static bool
+gnat_stable_expr_p (tree exp)
+{
+  enum tree_code code = TREE_CODE (exp);
+  /* NULL_EXPR and SAVE_EXPR nodes are considered already handled.  */
+  return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
+}
+
/* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
but we know how to handle our own nodes. */
tree type = TREE_TYPE (exp);
enum tree_code code = TREE_CODE (exp);
- if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
+ if (gnat_stable_expr_p (exp))
return exp;
if (code == UNCONSTRAINED_ARRAY_REF)
if (code == COMPONENT_REF
&& TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
- TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
+ TREE_OPERAND (exp, 1), NULL_TREE);
return save_expr (exp);
}
tree type = TREE_TYPE (exp);
enum tree_code code = TREE_CODE (exp);
- if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
+ if (gnat_stable_expr_p (exp))
return exp;
/* If EXP has no side effects, we theoretically don't need to do anything.
return t;
}
+ /* Likewise if we're indirectly referencing part of something. */
+ if (code == COMPONENT_REF
+ && TREE_CODE (TREE_OPERAND (exp, 0)) == INDIRECT_REF)
+ return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
+ TREE_OPERAND (exp, 1), NULL_TREE);
+
/* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
This may be more efficient, but will also allow us to more easily find
the match for the PLACEHOLDER_EXPR. */
if (code == COMPONENT_REF
&& TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
- TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
+ TREE_OPERAND (exp, 1), NULL_TREE);
- /* If this is a fat pointer or something that can be placed in a register,
- just make a SAVE_EXPR. Likewise for a CALL_EXPR as large objects are
- returned via invisible reference in most ABIs so the temporary will
- directly be filled by the callee. */
+ /* If this is a fat pointer or a scalar, just make a SAVE_EXPR. Likewise
+ for a CALL_EXPR as large objects are returned via invisible reference
+ in most ABIs so the temporary will directly be filled by the callee. */
if (TYPE_IS_FAT_POINTER_P (type)
- || TYPE_MODE (type) != BLKmode
+ || !AGGREGATE_TYPE_P (type)
|| code == CALL_EXPR)
return save_expr (exp);
/* Otherwise reference, protect the address and dereference. */
return
build_unary_op (INDIRECT_REF, type,
- save_expr (build_unary_op (ADDR_EXPR,
- build_reference_type (type),
- exp)));
+ save_expr (build_unary_op (ADDR_EXPR, NULL_TREE, exp)));
}
/* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
argument to force evaluation of everything. */
static tree
-gnat_stabilize_reference_1 (tree e, bool force)
+gnat_stabilize_reference_1 (tree e, void *data)
{
+ const bool force = *(bool *)data;
enum tree_code code = TREE_CODE (e);
tree type = TREE_TYPE (e);
tree result;
- /* We cannot ignore const expressions because it might be a reference
- to a const array but whose index contains side-effects. But we can
- ignore things that are actual constant or that already have been
- handled by this function. */
- if (TREE_CONSTANT (e) || code == SAVE_EXPR)
+ if (gnat_stable_expr_p (e))
return e;
switch (TREE_CODE_CLASS (code))
&& TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
result
= build3 (code, type,
- gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
- TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
+ gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
+ TREE_OPERAND (e, 1), NULL_TREE);
/* If the expression has side-effects, then encase it in a SAVE_EXPR
so that it will only be evaluated once. */
/* The tcc_reference and tcc_comparison classes could be handled as
/* Recursively stabilize each operand. */
result
= build2 (code, type,
- gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
- gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
+ gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
+ gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
break;
case tcc_unary:
/* Recursively stabilize each operand. */
result
= build1 (code, type,
- gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
+ gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
break;
default:
gcc_unreachable ();
}
- /* See similar handling in gnat_stabilize_reference. */
TREE_READONLY (result) = TREE_READONLY (e);
TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
- if (code == INDIRECT_REF
- || code == UNCONSTRAINED_ARRAY_REF
- || code == ARRAY_REF
- || code == ARRAY_RANGE_REF)
- TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);
-
return result;
}
 /* This is equivalent to stabilize_reference in tree.c but we know how to
    handle our own nodes and we take extra arguments.  FORCE says whether to
-   force evaluation of everything.  We set SUCCESS to true unless we walk
-   through something we don't know how to stabilize.  */
+   force evaluation of everything in REF.  INIT is set to the first arm of
+   a COMPOUND_EXPR present in REF, if any.  */
 tree
-gnat_stabilize_reference (tree ref, bool force, bool *success)
+gnat_stabilize_reference (tree ref, bool force, tree *init)
+{
+  /* FORCE is forwarded to the gnat_stabilize_reference_1 callback through
+     the opaque DATA pointer of the rewriting machinery.  */
+  return
+    gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
+}
+
+/* Rewrite reference REF and call FUNC on each expression within REF in the
+ process. DATA is passed unmodified to FUNC. INIT is set to the first
+ arm of a COMPOUND_EXPR present in REF, if any. */
+
+tree
+gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
{
tree type = TREE_TYPE (ref);
enum tree_code code = TREE_CODE (ref);
tree result;
- /* Assume we'll success unless proven otherwise. */
- if (success)
- *success = true;
-
switch (code)
{
case CONST_DECL:
/* No action is needed in this case. */
return ref;
- case ADDR_EXPR:
CASE_CONVERT:
case FLOAT_EXPR:
case FIX_TRUNC_EXPR:
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
case VIEW_CONVERT_EXPR:
result
= build1 (code, type,
- gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
- success));
+ gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
+ init));
break;
case INDIRECT_REF:
case UNCONSTRAINED_ARRAY_REF:
- result = build1 (code, type,
- gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
- force));
+ result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
break;
case COMPONENT_REF:
- result = build3 (COMPONENT_REF, type,
- gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
- success),
- TREE_OPERAND (ref, 1), NULL_TREE);
+ result = build3 (COMPONENT_REF, type,
+ gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
+ data, init),
+ TREE_OPERAND (ref, 1), NULL_TREE);
break;
case BIT_FIELD_REF:
result = build3 (BIT_FIELD_REF, type,
- gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
- success),
+ gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
+ data, init),
TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
+ REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
break;
case ARRAY_REF:
case ARRAY_RANGE_REF:
- result = build4 (code, type,
- gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
- success),
- gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
- force),
- NULL_TREE, NULL_TREE);
- break;
-
- case CALL_EXPR:
- result = gnat_stabilize_reference_1 (ref, force);
+ result
+ = build4 (code, type,
+ gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
+ init),
+ func (TREE_OPERAND (ref, 1), data),
+ TREE_OPERAND (ref, 2), NULL_TREE);
break;
case COMPOUND_EXPR:
- result = build2 (COMPOUND_EXPR, type,
- gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
- success),
- gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
- success));
- break;
+ gcc_assert (!*init);
+ *init = TREE_OPERAND (ref, 0);
+ /* We expect only the pattern built in Call_to_gnu. */
+ gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
+ || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
+ && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
+ return TREE_OPERAND (ref, 1);
- case CONSTRUCTOR:
- /* Constructors with 1 element are used extensively to formally
- convert objects to special wrapping types. */
- if (TREE_CODE (type) == RECORD_TYPE
- && vec_safe_length (CONSTRUCTOR_ELTS (ref)) == 1)
- {
- tree index = (*CONSTRUCTOR_ELTS (ref))[0].index;
- tree value = (*CONSTRUCTOR_ELTS (ref))[0].value;
- result
- = build_constructor_single (type, index,
- gnat_stabilize_reference_1 (value,
- force));
- }
- else
- {
- if (success)
- *success = false;
- return ref;
- }
+ case CALL_EXPR:
+ {
+ /* This can only be an atomic load. */
+ gcc_assert (call_is_atomic_load (ref));
+
+ /* An atomic load is an INDIRECT_REF of its first argument. */
+ tree t = CALL_EXPR_ARG (ref, 0);
+ if (TREE_CODE (t) == NOP_EXPR)
+ t = TREE_OPERAND (t, 0);
+ if (TREE_CODE (t) == ADDR_EXPR)
+ t = build1 (ADDR_EXPR, TREE_TYPE (t),
+ gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
+ init));
+ else
+ t = func (t, data);
+ t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
+
+ result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
+ t, CALL_EXPR_ARG (ref, 1));
+ }
break;
case ERROR_MARK:
- ref = error_mark_node;
-
- /* ... fall through to failure ... */
+ case NULL_EXPR:
+ return ref;
- /* If arg isn't a kind of lvalue we recognize, make no change.
- Caller should recognize the error for an invalid lvalue. */
default:
- if (success)
- *success = false;
- return ref;
+ gcc_unreachable ();
}
/* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
return result;
}
+/* This is equivalent to get_inner_reference in expr.c but it returns the
+   ultimate containing object only if the reference (lvalue) is constant,
+   i.e. if it doesn't depend on the context in which it is evaluated.  */
+
+tree
+get_inner_constant_reference (tree exp)
+{
+  /* Strip reference operations as long as their position is constant.  */
+  while (true)
+    {
+      switch (TREE_CODE (exp))
+        {
+        case BIT_FIELD_REF:
+          break;
+
+        case COMPONENT_REF:
+          /* The field must be at a compile-time constant offset.  */
+          if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
+            return NULL_TREE;
+          break;
+
+        case ARRAY_REF:
+        case ARRAY_RANGE_REF:
+          {
+            /* Punt on references that carry an explicit operand 2.  */
+            if (TREE_OPERAND (exp, 2))
+              return NULL_TREE;
+
+            /* The index, the lower bound of the domain and the element size
+               must all be compile-time constants.  */
+            tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+            if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
+                || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
+                || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
+              return NULL_TREE;
+          }
+          break;
+
+        case REALPART_EXPR:
+        case IMAGPART_EXPR:
+        case VIEW_CONVERT_EXPR:
+          break;
+
+        default:
+          goto done;
+        }
+
+      exp = TREE_OPERAND (exp, 0);
+    }
+
+done:
+  return exp;
+}
+
+/* Return true if EXPR is the addition or the subtraction of a constant and,
+   if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
+   if this is a subtraction.  */
+
+bool
+is_simple_additive_expression (tree expr, tree *add, tree *cst, bool *minus_p)
+{
+  /* Skip overflow checks.  */
+  if (TREE_CODE (expr) == COND_EXPR
+      && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
+      && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
+      && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
+         == gnat_raise_decls[CE_Overflow_Check_Failed])
+    expr = COND_EXPR_ELSE (expr);
+
+  if (TREE_CODE (expr) == PLUS_EXPR)
+    {
+      /* Addition is commutative so the constant may be either operand.  */
+      if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
+        {
+          *add = TREE_OPERAND (expr, 1);
+          *cst = TREE_OPERAND (expr, 0);
+          *minus_p = false;
+          return true;
+        }
+      else if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
+        {
+          *add = TREE_OPERAND (expr, 0);
+          *cst = TREE_OPERAND (expr, 1);
+          *minus_p = false;
+          return true;
+        }
+    }
+  else if (TREE_CODE (expr) == MINUS_EXPR)
+    {
+      /* Only the X - CST form is recognized for subtraction.  */
+      if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
+        {
+          *add = TREE_OPERAND (expr, 0);
+          *cst = TREE_OPERAND (expr, 1);
+          *minus_p = true;
+          return true;
+        }
+    }
+
+  return false;
+}
+
/* If EXPR is an expression that is invariant in the current function, in the
sense that it can be evaluated anywhere in the function and any number of
times, return EXPR or an equivalent expression. Otherwise return NULL. */
tree
gnat_invariant_expr (tree expr)
{
- tree type = TREE_TYPE (expr), t;
+ tree type = TREE_TYPE (expr);
+ tree add, cst;
+ bool minus_p;
expr = remove_conversions (expr, false);
+ /* Look through temporaries created to capture values. */
while ((TREE_CODE (expr) == CONST_DECL
|| (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
&& decl_function_context (expr) == current_function_decl
{
expr = DECL_INITIAL (expr);
/* Look into CONSTRUCTORs built to initialize padded types. */
- if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
- expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
+ expr = maybe_padded_object (expr);
expr = remove_conversions (expr, false);
}
+ /* We are only interested in scalar types at the moment and, even if we may
+ have gone through padding types in the above loop, we must be back to a
+ scalar value at this point. */
+ if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
+ return NULL_TREE;
+
if (TREE_CONSTANT (expr))
return fold_convert (type, expr);
- t = expr;
+ /* Deal with addition or subtraction of constants. */
+ if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
+ {
+ add = gnat_invariant_expr (add);
+ if (add)
+ return
+ fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, type,
+ fold_convert (type, add), fold_convert (type, cst));
+ else
+ return NULL_TREE;
+ }
+
+ bool invariant_p = false;
+ tree t = expr;
while (true)
{
switch (TREE_CODE (t))
{
case COMPONENT_REF:
- if (TREE_OPERAND (t, 2) != NULL_TREE)
- return NULL_TREE;
+ invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
break;
case ARRAY_REF:
case ARRAY_RANGE_REF:
- if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
- || TREE_OPERAND (t, 2) != NULL_TREE
- || TREE_OPERAND (t, 3) != NULL_TREE)
+ if (!TREE_CONSTANT (TREE_OPERAND (t, 1)) || TREE_OPERAND (t, 2))
return NULL_TREE;
break;
case BIT_FIELD_REF:
- case VIEW_CONVERT_EXPR:
case REALPART_EXPR:
case IMAGPART_EXPR:
+ case VIEW_CONVERT_EXPR:
+ CASE_CONVERT:
break;
case INDIRECT_REF:
- if (!TREE_READONLY (t)
- || TREE_SIDE_EFFECTS (t)
- || !TREE_THIS_NOTRAP (t))
+ if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
return NULL_TREE;
+ invariant_p = false;
break;
default:
|| decl_function_context (t) != current_function_decl))
return fold_convert (type, expr);
- if (!TREE_READONLY (t))
+ if (!invariant_p && !TREE_READONLY (t))
return NULL_TREE;
if (TREE_CODE (t) == PARM_DECL)