return exp;
return fold (build4 (code, TREE_TYPE (exp), op0, TREE_OPERAND (exp, 1),
- TREE_OPERAND (exp, 2), NULL_TREE));
+ TREE_OPERAND (exp, 2), TREE_OPERAND (exp, 3)));
case REALPART_EXPR:
case IMAGPART_EXPR:
tree gnu_type = TREE_TYPE (gnu_prefix);
tree gnu_expr, gnu_result_type, gnu_result = error_mark_node;
bool prefix_unused = false;
+ Entity_Id gnat_smo;
/* If the input is a NULL_EXPR, make a new one. */
if (TREE_CODE (gnu_prefix) == NULL_EXPR)
return build1 (NULL_EXPR, gnu_result_type, TREE_OPERAND (gnu_prefix, 0));
}
+ /* If the input is a LOAD_EXPR of an unconstrained array type, the second
+ operand contains the storage model object. */
+ if (TREE_CODE (gnu_prefix) == LOAD_EXPR
+ && TREE_CODE (gnu_type) == UNCONSTRAINED_ARRAY_TYPE)
+ gnat_smo = tree_to_shwi (TREE_OPERAND (gnu_prefix, 1));
+ else
+ gnat_smo = Empty;
+
switch (attribute)
{
case Attr_Pred:
/* Deal with a self-referential size by qualifying the size with the
object or returning the maximum size for a type. */
if (TREE_CODE (gnu_prefix) != TYPE_DECL)
- gnu_result = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_result, gnu_prefix);
+ {
+ gnu_result = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_result, gnu_prefix);
+ if (Present (gnat_smo))
+ gnu_result = INSTANTIATE_LOAD_IN_EXPR (gnu_result, gnat_smo);
+ }
else if (CONTAINS_PLACEHOLDER_P (gnu_result))
gnu_result = max_size (gnu_result, true);
handling. Note that these attributes could not have been used on
an unconstrained array type. */
gnu_result = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_result, gnu_prefix);
+ if (Present (gnat_smo))
+ gnu_result = INSTANTIATE_LOAD_IN_EXPR (gnu_result, gnat_smo);
/* Cache the expression we have just computed. Since we want to do it
at run time, we force the use of a SAVE_EXPR and let the gimplifier
/* If this has a PLACEHOLDER_EXPR, qualify it by the object we are
handling. */
gnu_result = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_result, gnu_prefix);
+ if (Present (gnat_smo))
+ gnu_result = INSTANTIATE_LOAD_IN_EXPR (gnu_result, gnat_smo);
break;
}
return type == SIMPLE_ATOMIC;
}
+/* Return the storage model specified by GNAT_NODE, or else Empty.
+
+   Only an explicit dereference can carry a storage model here: the model is
+   the Storage_Model_Object of the Designated_Storage_Model aspect attached
+   to the access type of the prefix.  */
+
+static Entity_Id
+get_storage_model (Node_Id gnat_node)
+{
+  if (Nkind (gnat_node) == N_Explicit_Dereference
+      && Has_Designated_Storage_Model_Aspect (Etype (Prefix (gnat_node))))
+    return Storage_Model_Object (Etype (Prefix (gnat_node)));
+  else
+    return Empty;
+}
+
+/* Compute whether GNAT_NODE requires storage model access and set GNAT_SMO to
+   the storage model object to be used for it if it does, or else Empty.  */
+
+static void
+get_storage_model_access (Node_Id gnat_node, Entity_Id *gnat_smo)
+{
+  const Node_Id gnat_parent = Parent (gnat_node);
+
+  /* If we are the prefix of the parent, then the access is above us, i.e.
+     the enclosing component reference will deal with the storage model, so
+     do not report one for this node.  */
+  if (node_is_component (gnat_parent) && Prefix (gnat_parent) == gnat_node)
+    {
+      *gnat_smo = Empty;
+      return;
+    }
+
+  /* Strip component references to reach the object ultimately accessed,
+     typically an explicit dereference carrying the storage model.  */
+  while (node_is_component (gnat_node))
+    gnat_node = Prefix (gnat_node);
+
+  *gnat_smo = get_storage_model (gnat_node);
+}
+
+/* Return true if GNAT_NODE requires storage model access and, if so, set
+   GNAT_SMO to the storage model object to be used for it.  Convenience
+   predicate wrapper around get_storage_model_access.  */
+
+static bool
+storage_model_access_required_p (Node_Id gnat_node, Entity_Id *gnat_smo)
+{
+  get_storage_model_access (gnat_node, gnat_smo);
+  return Present (*gnat_smo);
+}
+
/* Create a temporary variable with PREFIX and TYPE, and return it. */
static tree
N_Assignment_Statement and the result is to be placed into that object.
ATOMIC_ACCESS is the type of atomic access to be used for the assignment
to GNU_TARGET. If, in addition, ATOMIC_SYNC is true, then the assignment
- to GNU_TARGET requires atomic synchronization. */
+ to GNU_TARGET requires atomic synchronization. GNAT_STORAGE_MODEL is the
+ storage model object to be used for the assignment to GNU_TARGET or Empty
+ if there is none. */
static tree
Call_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, tree gnu_target,
- atomic_acces_t atomic_access, bool atomic_sync)
+ atomic_acces_t atomic_access, bool atomic_sync,
+ Entity_Id gnat_storage_model)
{
const bool function_call = (Nkind (gnat_node) == N_Function_Call);
const bool returning_value = (function_call && !gnu_target);
Node_Id gnat_actual;
atomic_acces_t aa_type;
bool aa_sync;
+ Entity_Id gnat_smo;
/* The only way we can make a call via an access type is if GNAT_NAME is an
explicit dereference. In that case, get the list of formal args from the
unconstrained record type with default discriminant, because the
return may copy more data than the bit-field can contain.
- 5. There is no target and we have misaligned In Out or Out parameters
+ 5. There is a target which needs to be accessed with a storage model.
+
+ 6. There is no target and we have misaligned In Out or Out parameters
passed by reference, because we need to preserve the return value
before copying back the parameters. However, in this case, we'll
defer creating the temporary, see below.
&& DECL_BIT_FIELD (TREE_OPERAND (gnu_target, 1))
&& DECL_SIZE (TREE_OPERAND (gnu_target, 1))
!= TYPE_SIZE (TREE_TYPE (gnu_target))
- && type_is_padding_self_referential (gnu_result_type))))
+ && type_is_padding_self_referential (gnu_result_type))
+ || (gnu_target
+ && Present (gnat_storage_model)
+ && Present (Storage_Model_Copy_To (gnat_storage_model)))))
{
gnu_retval = create_temporary ("R", gnu_result_type);
DECL_RETURN_VALUE_P (gnu_retval) = 1;
= build_compound_expr (TREE_TYPE (gnu_name), init, gnu_name);
}
- /* If we are passing a non-addressable parameter by reference, pass the
- address of a copy. In the In Out or Out case, set up to copy back
- out after the call. */
+ get_storage_model_access (gnat_actual, &gnat_smo);
+
+ /* If we are passing a non-addressable actual parameter by reference,
+ pass the address of a copy. Likewise if it needs to be accessed with
+ a storage model. In the In Out or Out case, set up to copy back out
+ after the call. */
if (is_by_ref_formal_parm
&& (gnu_name_type = gnat_to_gnu_type (Etype (gnat_name)))
- && !addressable_p (gnu_name, gnu_name_type))
+ && (!addressable_p (gnu_name, gnu_name_type)
+ || (Present (gnat_smo)
+ && (Present (Storage_Model_Copy_From (gnat_smo))
+ || (!in_param
+ && Present (Storage_Model_Copy_To (gnat_smo)))))))
{
tree gnu_orig = gnu_name, gnu_temp, gnu_stmt;
}
/* Create an explicit temporary holding the copy. */
+ tree gnu_temp_type;
+ if (Nkind (gnat_actual) == N_Explicit_Dereference
+ && Present (Actual_Designated_Subtype (gnat_actual)))
+ gnu_temp_type
+ = gnat_to_gnu_type (Actual_Designated_Subtype (gnat_actual));
+ else
+ gnu_temp_type = TREE_TYPE (gnu_name);
+
/* Do not initialize it for the _Init parameter of an initialization
procedure since no data is meant to be passed in. */
if (Ekind (gnat_formal) == E_Out_Parameter
&& Is_Entity_Name (gnat_subprog)
&& Is_Init_Proc (Entity (gnat_subprog)))
- gnu_name = gnu_temp = create_temporary ("A", TREE_TYPE (gnu_name));
+ gnu_name = gnu_temp = create_temporary ("A", gnu_temp_type);
/* Initialize it on the fly like for an implicit temporary in the
other cases, as we don't necessarily have a statement list. */
else
{
- gnu_temp = create_init_temporary ("A", gnu_name, &gnu_stmt,
- gnat_actual);
- gnu_name = build_compound_expr (TREE_TYPE (gnu_name), gnu_stmt,
+ if (Present (gnat_smo)
+ && Present (Storage_Model_Copy_From (gnat_smo)))
+ {
+ gnu_temp = create_temporary ("A", gnu_temp_type);
+ gnu_stmt
+ = build_storage_model_load (gnat_smo, gnu_temp,
+ gnu_name,
+ TYPE_SIZE_UNIT (gnu_temp_type));
+ set_expr_location_from_node (gnu_stmt, gnat_actual);
+ }
+ else
+ gnu_temp = create_init_temporary ("A", gnu_name, &gnu_stmt,
+ gnat_actual);
+
+ gnu_name = build_compound_expr (gnu_temp_type, gnu_stmt,
gnu_temp);
}
(TREE_OPERAND (TREE_OPERAND (gnu_orig, 1), 1)))
gnu_orig = TREE_OPERAND (gnu_orig, 2);
- gnu_stmt
- = build_binary_op (MODIFY_EXPR, NULL_TREE, gnu_orig, gnu_temp);
+ if (Present (gnat_smo)
+ && Present (Storage_Model_Copy_To (gnat_smo)))
+ gnu_stmt
+ = build_storage_model_store (gnat_smo, gnu_orig,
+ gnu_temp,
+ TYPE_SIZE_UNIT (gnu_temp_type));
+ else
+ gnu_stmt
+ = build_binary_op (MODIFY_EXPR, NULL_TREE, gnu_orig,
+ gnu_temp);
set_expr_location_from_node (gnu_stmt, gnat_node);
append_to_statement_list (gnu_stmt, &gnu_after_list);
tree gnu_actual = gnu_name;
/* If atomic access is required for an In or In Out actual parameter,
- build the atomic load. */
+ build the atomic load. Or else, if storage model access is required,
+ build the special load. */
if (is_true_formal_parm
&& !is_by_ref_formal_parm
- && Ekind (gnat_formal) != E_Out_Parameter
- && simple_atomic_access_required_p (gnat_actual, &aa_sync))
- gnu_actual = build_atomic_load (gnu_actual, aa_sync);
+ && Ekind (gnat_formal) != E_Out_Parameter)
+ {
+ if (simple_atomic_access_required_p (gnat_actual, &aa_sync))
+ gnu_actual = build_atomic_load (gnu_actual, aa_sync);
+
+ else if (Present (gnat_smo)
+ && Present (Storage_Model_Copy_From (gnat_smo)))
+ gnu_actual = build_storage_model_load (gnat_smo, gnu_actual);
+ }
/* If this was a procedure call, we may not have removed any padding.
So do it here for the part we will use as an input, if any. */
}
get_atomic_access (gnat_actual, &aa_type, &aa_sync);
+ get_storage_model_access (gnat_actual, &gnat_smo);
/* If an outer atomic access is required for an actual parameter,
build the load-modify-store sequence. */
gnu_result
= build_atomic_store (gnu_actual, gnu_result, aa_sync);
+ /* Or else, if a storage model access is required, build the special
+ store. */
+ else if (Present (gnat_smo)
+ && Present (Storage_Model_Copy_To (gnat_smo)))
+ gnu_result
+ = build_storage_model_store (gnat_smo, gnu_actual, gnu_result);
+
/* Otherwise build a regular assignment. */
else
gnu_result = build_binary_op (MODIFY_EXPR, NULL_TREE,
= build_load_modify_store (gnu_target, gnu_call, gnat_node);
else if (atomic_access == SIMPLE_ATOMIC)
gnu_call = build_atomic_store (gnu_target, gnu_call, atomic_sync);
+ else if (Present (gnat_storage_model)
+ && Present (Storage_Model_Copy_To (gnat_storage_model)))
+ gnu_call
+ = build_storage_model_store (gnat_storage_model, gnu_target,
+ gnu_call);
else
gnu_call
= build_binary_op (op_code, NULL_TREE, gnu_target, gnu_call);
atomic_acces_t aa_type;
bool went_into_elab_proc;
bool aa_sync;
+ Entity_Id gnat_smo;
/* Save node number for error message and set location information. */
if (Sloc (gnat_node) > No_Location)
gnu_result
= Call_to_gnu (Prefix (Expression (gnat_node)),
&gnu_result_type, gnu_result,
- NOT_ATOMIC, false);
+ NOT_ATOMIC, false, Empty);
break;
}
if (simple_atomic_access_required_p (gnat_node, &aa_sync)
&& !present_in_lhs_or_actual_p (gnat_node))
gnu_result = build_atomic_load (gnu_result, aa_sync);
+
+ /* If storage model access is required on the RHS, build the load. */
+ else if (storage_model_access_required_p (gnat_node, &gnat_smo)
+ && Present (Storage_Model_Copy_From (gnat_smo))
+ && !present_in_lhs_or_actual_p (gnat_node))
+ gnu_result = build_storage_model_load (gnat_smo, gnu_result);
break;
case N_Indexed_Component:
{
- tree gnu_array_object = gnat_to_gnu ((Prefix (gnat_node)));
+ const Entity_Id gnat_array_object = Prefix (gnat_node);
+ tree gnu_array_object = gnat_to_gnu (gnat_array_object);
tree gnu_type;
int ndim, i;
Node_Id *gnat_expr_array;
+ /* Get the storage model of the array. */
+ gnat_smo = get_storage_model (gnat_array_object);
+
gnu_array_object = maybe_padded_object (gnu_array_object);
gnu_array_object = maybe_unconstrained_array (gnu_array_object);
gnu_result
= build_binary_op (ARRAY_REF, NULL_TREE, gnu_result, gnu_expr);
+
+ if (Present (gnat_smo))
+ instantiate_load_in_array_ref (gnu_result, gnat_smo);
}
gnu_result_type = get_unpadded_type (Etype (gnat_node));
if (simple_atomic_access_required_p (gnat_node, &aa_sync)
&& !present_in_lhs_or_actual_p (gnat_node))
gnu_result = build_atomic_load (gnu_result, aa_sync);
+
+ /* If storage model access is required on the RHS, build the load. */
+ else if (storage_model_access_required_p (gnat_node, &gnat_smo)
+ && Present (Storage_Model_Copy_From (gnat_smo))
+ && !present_in_lhs_or_actual_p (gnat_node))
+ gnu_result = build_storage_model_load (gnat_smo, gnu_result);
}
break;
case N_Slice:
{
- tree gnu_array_object = gnat_to_gnu (Prefix (gnat_node));
+ const Entity_Id gnat_array_object = Prefix (gnat_node);
+ tree gnu_array_object = gnat_to_gnu (gnat_array_object);
- gnu_result_type = get_unpadded_type (Etype (gnat_node));
+ /* Get the storage model of the array. */
+ gnat_smo = get_storage_model (gnat_array_object);
gnu_array_object = maybe_padded_object (gnu_array_object);
gnu_array_object = maybe_unconstrained_array (gnu_array_object);
+ gnu_result_type = get_unpadded_type (Etype (gnat_node));
+
gnu_expr = TYPE_MIN_VALUE (TYPE_DOMAIN (gnu_result_type));
gnu_expr = maybe_character_value (gnu_expr);
gnu_result = build_binary_op (ARRAY_RANGE_REF, gnu_result_type,
gnu_array_object, gnu_expr);
+
+ if (Present (gnat_smo))
+ instantiate_load_in_array_ref (gnu_result, gnat_smo);
+
+ /* If storage model access is required on the RHS, build the load. */
+ if (storage_model_access_required_p (gnat_node, &gnat_smo)
+ && Present (Storage_Model_Copy_From (gnat_smo))
+ && !present_in_lhs_or_actual_p (gnat_node))
+ gnu_result = build_storage_model_load (gnat_smo, gnu_result);
}
break;
if (simple_atomic_access_required_p (gnat_node, &aa_sync)
&& !present_in_lhs_or_actual_p (gnat_node))
gnu_result = build_atomic_load (gnu_result, aa_sync);
+
+ /* If storage model access is required on the RHS, build the load. */
+ else if (storage_model_access_required_p (gnat_node, &gnat_smo)
+ && Present (Storage_Model_Copy_From (gnat_smo))
+ && !present_in_lhs_or_actual_p (gnat_node))
+ gnu_result = build_storage_model_load (gnat_smo, gnu_result);
}
break;
else if (Nkind (Expression (gnat_node)) == N_Function_Call)
{
get_atomic_access (Name (gnat_node), &aa_type, &aa_sync);
+ get_storage_model_access (Name (gnat_node), &gnat_smo);
gnu_result
= Call_to_gnu (Expression (gnat_node), &gnu_result_type, gnu_lhs,
- aa_type, aa_sync);
+ aa_type, aa_sync, gnat_smo);
}
/* Otherwise we need to build the assignment statement manually. */
gigi_checking_assert (!Do_Range_Check (gnat_expr));
get_atomic_access (Name (gnat_node), &aa_type, &aa_sync);
+ get_storage_model_access (Name (gnat_node), &gnat_smo);
/* If an outer atomic access is required on the LHS, build the load-
modify-store sequence. */
else if (aa_type == SIMPLE_ATOMIC)
gnu_result = build_atomic_store (gnu_lhs, gnu_rhs, aa_sync);
+ /* Or else, if a storage model access is required, build the special
+ store. */
+ else if (Present (gnat_smo)
+ && Present (Storage_Model_Copy_To (gnat_smo)))
+ {
+ tree t = remove_conversions (gnu_rhs, false);
+
+ /* If a storage model load is present on the RHS then instantiate
+ the temporary associated with it now, lest it be of variable
+ size and thus could not be instantiated by gimplification. */
+ if (TREE_CODE (t) == LOAD_EXPR)
+ {
+ t = TREE_OPERAND (t, 1);
+ gcc_assert (TREE_CODE (t) == CALL_EXPR);
+
+ tree elem
+ = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
+ tree size = fold_convert (sizetype, CALL_EXPR_ARG (t, 3));
+ tree index = build_index_type (size);
+ tree temp
+ = create_temporary ("L", build_array_type (elem, index));
+ tree arg = CALL_EXPR_ARG (t, 1);
+ CALL_EXPR_ARG (t, 1)
+ = build_unary_op (ADDR_EXPR, TREE_TYPE (arg), temp);
+
+ start_stmt_group ();
+ add_stmt (t);
+ t = build_storage_model_store (gnat_smo, gnu_lhs, temp);
+ add_stmt (t);
+ gnu_result = end_stmt_group ();
+ }
+
+ else
+ gnu_result
+ = build_storage_model_store (gnat_smo, gnu_lhs, gnu_rhs);
+ }
+
/* Or else, use memset when the conditions are met. This has already
been validated by Aggr_Assignment_OK_For_Backend in the front-end
and the RHS is thus guaranteed to be of the appropriate form. */
gnat_node);
}
- /* Otherwise build a regular assignment. */
else
- gnu_result
- = build_binary_op (MODIFY_EXPR, NULL_TREE, gnu_lhs, gnu_rhs);
+ {
+ tree t = remove_conversions (gnu_rhs, false);
+
+ /* If a storage model load is present on the RHS, then elide the
+ temporary associated with it. */
+ if (TREE_CODE (t) == LOAD_EXPR)
+ {
+ gnu_result = TREE_OPERAND (t, 1);
+ gcc_assert (TREE_CODE (gnu_result) == CALL_EXPR);
+
+ tree arg = CALL_EXPR_ARG (gnu_result, 1);
+ CALL_EXPR_ARG (gnu_result, 1)
+ = build_unary_op (ADDR_EXPR, TREE_TYPE (arg), gnu_lhs);
+ }
+
+ /* Otherwise build a regular assignment. */
+ else
+ gnu_result
+ = build_binary_op (MODIFY_EXPR, NULL_TREE, gnu_lhs, gnu_rhs);
+ }
/* If the assignment type is a regular array and the two sides are
not completely disjoint, play safe and use memmove. But don't do
case N_Function_Call:
case N_Procedure_Call_Statement:
gnu_result = Call_to_gnu (gnat_node, &gnu_result_type, NULL_TREE,
- NOT_ATOMIC, false);
+ NOT_ATOMIC, false, Empty);
break;
/************************/
if (!type_annotate_only)
{
- tree gnu_ptr, gnu_ptr_type, gnu_obj_type, gnu_actual_obj_type;
-
const Entity_Id gnat_desig_type
= Designated_Type (Underlying_Type (Etype (gnat_temp)));
+ const Entity_Id gnat_pool = Storage_Pool (gnat_node);
+ const bool pool_is_storage_model
+ = Present (gnat_pool)
+ && Has_Storage_Model_Type_Aspect (Etype (gnat_pool))
+ && Present (Storage_Model_Copy_From (gnat_pool));
+ tree gnu_ptr, gnu_ptr_type, gnu_obj_type, gnu_actual_obj_type;
/* Make sure the designated type is complete before dereferencing,
in case it is a Taft Amendment type. */
tree gnu_size = TYPE_SIZE_UNIT (gnu_actual_obj_type);
gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_ptr);
+ if (pool_is_storage_model)
+ gnu_size = INSTANTIATE_LOAD_IN_EXPR (gnu_size, gnat_pool);
gnu_result
= build_call_alloc_dealloc (gnu_ptr, gnu_size, gnu_obj_type,
Procedure_To_Call (gnat_node),
- Storage_Pool (gnat_node),
- gnat_node);
+ gnat_pool, gnat_node);
}
break;
&& return_type_with_variable_size_p (TREE_TYPE (gnu_result)))
;
- else if (TREE_CODE (gnu_result) == UNCONSTRAINED_ARRAY_REF
+ else if (TREE_CODE (TREE_TYPE (gnu_result)) == UNCONSTRAINED_ARRAY_TYPE
&& Present (Parent (gnat_node))
&& Nkind (Parent (gnat_node)) == N_Attribute_Reference
&& lvalue_required_for_attribute_p (Parent (gnat_node)))
avoid blocking concatenation in the caller when it is inlined. */
for (int i = 0; i < call_expr_nargs (expr); i++)
{
- tree arg = *(CALL_EXPR_ARGP (expr) + i);
+ tree arg = CALL_EXPR_ARG (expr, i);
if (TREE_CODE (arg) == CONSTRUCTOR
&& TREE_CONSTANT (arg)
if (TREE_CODE (t) == ADDR_EXPR)
t = TREE_OPERAND (t, 0);
if (TREE_CODE (t) != STRING_CST)
- *(CALL_EXPR_ARGP (expr) + i) = tree_output_constant_def (arg);
+ CALL_EXPR_ARG (expr, i) = tree_output_constant_def (arg);
}
}
break;
TREE_NO_WARNING (expr) = TREE_NO_WARNING (op);
break;
- case UNCONSTRAINED_ARRAY_REF:
- /* We should only do this if we are just elaborating for side effects,
- but we can't know that yet. */
- *expr_p = TREE_OPERAND (*expr_p, 0);
- return GS_OK;
+ case LOAD_EXPR:
+ {
+ tree new_var = create_tmp_var (type, "L");
+ TREE_ADDRESSABLE (new_var) = 1;
+
+ tree init = TREE_OPERAND (expr, 1);
+ gcc_assert (TREE_CODE (init) == CALL_EXPR);
+ tree arg = CALL_EXPR_ARG (init, 1);
+ CALL_EXPR_ARG (init, 1)
+ = build_unary_op (ADDR_EXPR, TREE_TYPE (arg), new_var);
+ gimplify_and_add (init, pre_p);
+
+ *expr_p = new_var;
+ return GS_OK;
+ }
case VIEW_CONVERT_EXPR:
op = TREE_OPERAND (expr, 0);
&& AGGREGATE_TYPE_P (TREE_TYPE (op))
&& !AGGREGATE_TYPE_P (type))
{
- tree mod, new_var = create_tmp_var_raw (TREE_TYPE (op), "C");
+ tree new_var = create_tmp_var_raw (TREE_TYPE (op), "C");
gimple_add_tmp_var (new_var);
- mod = build2 (INIT_EXPR, TREE_TYPE (new_var), new_var, op);
+ tree mod = build2 (INIT_EXPR, TREE_TYPE (new_var), new_var, op);
gimplify_and_add (mod, pre_p);
TREE_OPERAND (expr, 0) = new_var;
}
break;
+ case UNCONSTRAINED_ARRAY_REF:
+ /* We should only do this if we are just elaborating for side effects,
+ but we can't know that yet. */
+ *expr_p = TREE_OPERAND (expr, 0);
+ return GS_OK;
+
default:
break;
}
build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
{
+ const bool pool_is_storage_model
+ = Present (gnat_pool)
+ && Has_Storage_Model_Type_Aspect (Etype (gnat_pool))
+ && Present (Storage_Model_Copy_To (gnat_pool));
tree size, storage, storage_deref, storage_init;
/* If the initializer, if present, is a NULL_EXPR, just return a new one. */
get_identifier ("ALLOC"), false);
tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
tree storage_ptr_type = build_pointer_type (storage_type);
+ tree lhs, rhs;
size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
init);
build_template (template_type, type, init));
CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
init);
- storage_init
- = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
- gnat_build_constructor (storage_type, v));
+
+ lhs = storage_deref;
+ rhs = gnat_build_constructor (storage_type, v);
}
else
- storage_init
- = build_binary_op (INIT_EXPR, NULL_TREE,
- build_component_ref (storage_deref,
- TYPE_FIELDS (storage_type),
- false),
- build_template (template_type, type, NULL_TREE));
+ {
+ lhs = build_component_ref (storage_deref, TYPE_FIELDS (storage_type),
+ false);
+ rhs = build_template (template_type, type, NULL_TREE);
+ }
+
+ if (pool_is_storage_model)
+ storage_init = build_storage_model_store (gnat_pool, lhs, rhs);
+ else
+ storage_init = build_binary_op (INIT_EXPR, NULL_TREE, lhs, rhs);
return build2 (COMPOUND_EXPR, result_type,
storage_init, convert (result_type, storage));
storage = gnat_protect_expr (storage);
storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
TREE_THIS_NOTRAP (storage_deref) = 1;
- storage_init
- = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
+ if (pool_is_storage_model)
+ storage_init
+ = build_storage_model_store (gnat_pool, storage_deref, init);
+ else
+ storage_init
+ = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
}
return storage;
}
+/* Build a call to a copy procedure of a storage model given by an object.
+   DEST, SRC and SIZE are as for a call to memcpy.  GNAT_SMO is the entity
+   for the storage model object and COPY_TO says which procedure to use.  */
+
+static tree
+build_storage_model_copy (Entity_Id gnat_smo, tree dest, tree src, tree size,
+                          bool copy_to)
+{
+  /* Select either the Copy_To or the Copy_From procedure of the model.  */
+  const Entity_Id gnat_copy_proc
+    = copy_to
+      ? Storage_Model_Copy_To (gnat_smo)
+      : Storage_Model_Copy_From (gnat_smo);
+  tree gnu_copy_proc = gnat_to_gnu (gnat_copy_proc);
+  /* The copy procedure takes 4 parameters: the storage model object, the
+     destination, the source and the size.  Fetch the types of its formals
+     so that the actuals can be converted to them below.  */
+  tree gnu_param_type_list = TYPE_ARG_TYPES (TREE_TYPE (gnu_copy_proc));
+  tree t1 = TREE_VALUE (gnu_param_type_list);
+  tree t2 = TREE_VALUE (TREE_CHAIN (gnu_param_type_list));
+  tree t3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (gnu_param_type_list)));
+  tree t4
+    = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (gnu_param_type_list))));
+
+  /* The first 3 actuals are passed by reference; only the size is a value.  */
+  return
+    build_call_n_expr (gnu_copy_proc,
+                       4,
+                       build_unary_op (ADDR_EXPR, t1, gnat_to_gnu (gnat_smo)),
+                       build_unary_op (ADDR_EXPR, t2, dest),
+                       build_unary_op (ADDR_EXPR, t3, src),
+                       convert (t4, size));
+}
+
+/* Build a load of SRC using the storage model of GNAT_SMO.  The result is a
+   LOAD_EXPR of the same type as SRC.  */
+
+tree
+build_storage_model_load (Entity_Id gnat_smo, tree src)
+{
+  /* The second operand of the LOAD_EXPR describes how to perform the load
+     and is filled in below.  */
+  tree ret = build2 (LOAD_EXPR, TREE_TYPE (src), src, NULL_TREE);
+
+  /* Unconstrained array references have no size so we need to store the
+     storage model object for future processing by the machinery.  */
+  if (TREE_CODE (src) == UNCONSTRAINED_ARRAY_REF)
+    TREE_OPERAND (ret, 1) = build_int_cst (integer_type_node, gnat_smo);
+  else
+    /* Otherwise the second operand is the copy call; its destination is SRC
+       for now and is rewritten later on, e.g. during gimplification, to be
+       the address of the actual destination of the load.  */
+    TREE_OPERAND (ret, 1) = build_storage_model_load (gnat_smo, src, src);
+
+  return ret;
+}
+
+/* Build a load of SRC into DEST using the storage model of GNAT_SMO.
+   If SIZE is specified, use it, otherwise use the size of SRC.  */
+
+tree
+build_storage_model_load (Entity_Id gnat_smo, tree dest, tree src, tree size)
+{
+  /* SRC must be a regular reference: a LOAD_EXPR would mean that a load
+     has already been built for it.  */
+  gcc_assert (TREE_CODE (src) != LOAD_EXPR);
+
+  if (!size)
+    {
+      size = TYPE_SIZE_UNIT (TREE_TYPE (src));
+      /* The size may be self-referential or itself need a load from the
+         storage model, so resolve both before using it.  */
+      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, src);
+      size = INSTANTIATE_LOAD_IN_EXPR (size, gnat_smo);
+    }
+
+  return build_storage_model_copy (gnat_smo, dest, src, size, false);
+}
+
+/* Build a store of SRC into DEST using the storage model of GNAT_SMO.
+   If SIZE is specified, use it, otherwise use the size of DEST.  */
+
+tree
+build_storage_model_store (Entity_Id gnat_smo, tree dest, tree src, tree size)
+{
+  /* SRC must be a regular reference: a LOAD_EXPR would mean that a load
+     has already been built for it.  */
+  gcc_assert (TREE_CODE (src) != LOAD_EXPR);
+
+  if (!size)
+    {
+      size = TYPE_SIZE_UNIT (TREE_TYPE (dest));
+      /* The size may be self-referential or itself need a load from the
+         storage model, so resolve both before using it.  */
+      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, dest);
+      size = INSTANTIATE_LOAD_IN_EXPR (size, gnat_smo);
+    }
+
+  return build_storage_model_copy (gnat_smo, dest, src, size, true);
+}
+
+/* Given a tree EXP, instantiate occurrences of LOAD_EXPR in it and associate
+   them with the storage model of GNAT_SMO.
+
+   The walk mirrors the shape of SUBSTITUTE_PLACEHOLDER_IN_EXPR: subtrees
+   are rebuilt only when one of their operands actually changes, so shared
+   unchanged trees are returned as-is.  */
+
+tree
+instantiate_load_in_expr (tree exp, Entity_Id gnat_smo)
+{
+  const enum tree_code code = TREE_CODE (exp);
+  tree type = TREE_TYPE (exp);
+  tree op0, op1, op2, op3;
+  tree new_tree;
+
+  /* We handle TREE_LIST and COMPONENT_REF separately.  */
+  if (code == TREE_LIST)
+    {
+      op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_CHAIN (exp), gnat_smo);
+      op1 = INSTANTIATE_LOAD_IN_EXPR (TREE_VALUE (exp), gnat_smo);
+      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
+	return exp;
+
+      return tree_cons (TREE_PURPOSE (exp), op1, op0);
+    }
+  else if (code == COMPONENT_REF)
+    {
+      /* The field.  */
+      op1 = TREE_OPERAND (exp, 1);
+
+      /* If it is a discriminant or equivalent, a LOAD_EXPR is needed to
+	 fetch its value through the storage model.  */
+      if (DECL_DISCRIMINANT_NUMBER (op1))
+	return build_storage_model_load (gnat_smo, exp);
+
+      /* Otherwise only recurse into the object containing the field.  */
+      op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
+      if (op0 == TREE_OPERAND (exp, 0))
+	return exp;
+
+      new_tree = fold_build3 (COMPONENT_REF, type, op0, op1, NULL_TREE);
+    }
+  else
+    switch (TREE_CODE_CLASS (code))
+      {
+      case tcc_constant:
+      case tcc_declaration:
+	return exp;
+
+      case tcc_expression:
+	/* Do not recurse into a load that has already been built.  */
+	if (code == LOAD_EXPR)
+	  return exp;
+
+	/* Fall through.  */
+
+      case tcc_exceptional:
+      case tcc_unary:
+      case tcc_binary:
+      case tcc_comparison:
+      case tcc_reference:
+	/* Recurse into each operand and rebuild the node only if one of
+	   them changed.  */
+	switch (TREE_CODE_LENGTH (code))
+	  {
+	  case 0:
+	    return exp;
+
+	  case 1:
+	    op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
+	    if (op0 == TREE_OPERAND (exp, 0))
+	      return exp;
+
+	    new_tree = fold_build1 (code, type, op0);
+	    break;
+
+	  case 2:
+	    op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
+	    op1 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 1), gnat_smo);
+
+	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
+	      return exp;
+
+	    new_tree = fold_build2 (code, type, op0, op1);
+	    break;
+
+	  case 3:
+	    op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
+	    op1 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 1), gnat_smo);
+	    op2 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 2), gnat_smo);
+
+	    if (op0 == TREE_OPERAND (exp, 0)
+		&& op1 == TREE_OPERAND (exp, 1)
+		&& op2 == TREE_OPERAND (exp, 2))
+	      return exp;
+
+	    new_tree = fold_build3 (code, type, op0, op1, op2);
+	    break;
+
+	  case 4:
+	    op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
+	    op1 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 1), gnat_smo);
+	    op2 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 2), gnat_smo);
+	    op3 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 3), gnat_smo);
+
+	    if (op0 == TREE_OPERAND (exp, 0)
+		&& op1 == TREE_OPERAND (exp, 1)
+		&& op2 == TREE_OPERAND (exp, 2)
+		&& op3 == TREE_OPERAND (exp, 3))
+	      return exp;
+
+	    new_tree = fold (build4 (code, type, op0, op1, op2, op3));
+	    break;
+
+	  default:
+	    gcc_unreachable ();
+	  }
+	break;
+
+      case tcc_vl_exp:
+	{
+	  gcc_assert (code == CALL_EXPR);
+
+	  /* Rebuild the call only if at least one argument changed.  */
+	  const int n = call_expr_nargs (exp);
+	  gcc_assert (n > 0);
+	  tree *argarray = XALLOCAVEC (tree, n);
+	  for (int i = 0; i < n; i++)
+	    argarray[i]
+	      = INSTANTIATE_LOAD_IN_EXPR (CALL_EXPR_ARG (exp, i), gnat_smo);
+
+	  for (int i = 0; i < n; i++)
+	    if (argarray[i] != CALL_EXPR_ARG (exp, i))
+	      return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
+
+	  return exp;
+	}
+
+      default:
+	gcc_unreachable ();
+      }
+
+  /* Preserve the flags of the original node on the rebuilt one.  */
+  TREE_READONLY (new_tree) |= TREE_READONLY (exp);
+
+  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
+    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
+
+  return new_tree;
+}
+
+/* Given an array or slice reference REF, instantiate occurrences of LOAD_EXPR
+   in it and associate them with the storage model of GNAT_SMO.
+
+   This makes operands 2 and 3 of the ARRAY_REF/ARRAY_RANGE_REF explicit so
+   that the loads they may contain can be instantiated.  */
+
+void
+instantiate_load_in_array_ref (tree ref, Entity_Id gnat_smo)
+{
+  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
+  tree elem_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
+
+  /* Operand 2 is the lower bound of the index domain; it may contain a
+     placeholder referring to the array object itself.  */
+  TREE_OPERAND (ref, 2)
+    = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), ref);
+  TREE_OPERAND (ref, 2)
+    = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (ref, 2), gnat_smo);
+
+  /* Operand 3 is the element size divided by the alignment, following the
+     GENERIC convention for this operand.  */
+  TREE_OPERAND (ref, 3)
+    = size_binop (EXACT_DIV_EXPR,
+                  SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elem_type),
+                                                  ref),
+                  size_int (TYPE_ALIGN_UNIT (elem_type)));
+  TREE_OPERAND (ref, 3)
+    = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (ref, 3), gnat_smo);
+}
+
/* Indicate that we need to take the address of T and that it therefore
should not be allocated in a register. Return true if successful. */
gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
init),
func (TREE_OPERAND (ref, 1), data),
- TREE_OPERAND (ref, 2), NULL_TREE);
+ TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
break;
case COMPOUND_EXPR:
case ARRAY_REF:
case ARRAY_RANGE_REF:
{
- if (TREE_OPERAND (exp, 2))
- return NULL_TREE;
-
tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
|| !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
case ARRAY_REF:
case ARRAY_RANGE_REF:
- if (!TREE_CONSTANT (TREE_OPERAND (t, 1)) || TREE_OPERAND (t, 2))
- return NULL_TREE;
+ {
+ tree array_type = TREE_TYPE (TREE_OPERAND (t, 0));
+ if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
+ || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
+ || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
+ return NULL_TREE;
+ }
break;
case BIT_FIELD_REF: