* alias.c (alias_sets_conflict_p): New function.
(mems_in_disjoint_alias_sets_p): Use it.
(readonly_fields_p): Moved from expr.c; check for record type.
(objects_must_conflict_p): New function.
* calls.c (expand_call): Use assign_temp as much as possible, use
readonly variant if assigned once, and don't set memory attributes.
(emit_library_call_value_1, store_one_arg): Likewise.
* integrate.c (expand_inline_function): Likewise.
* stmt.c (expand_asm_operands, expand_return): Likewise.
* expr.c (copy_blkmode_from_reg, store_constructor): Likewise.
(store_field, save_noncopied_parts, expand_expr): Likewise.
(expand_expr_unaligned): Likewise.
(readonly_fields_p): Moved to alias.c.
(safe_from_p): Rework handling of SAVE_EXPR.
MEMs only conflict if alias sets conflict; likewise for INDIRECT_REF.
* function.c (struct temp_slot): Delete field ALIAS_SET; add TYPE.
(assign_stack_for_temp): Use objects_must_conflict_p.
Set all memory attributes from type, if specified.
(mark_temp_slot): Mark TYPE field.
* tree.h (alias_sets_conflict_p, readonly_fields_p): New decls.
(objects_must_conflict_p): Likewise.
* stmt.c (expand_decl): Don't use assign_stack_temp in error case.
(add_case_node): No need to copy nodes anymore.
From-SVN: r38559
+2000-12-30 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
+
+ * alias.c (alias_sets_conflict_p): New function.
+ (mems_in_disjoint_alias_sets_p): Use it.
+ (readonly_fields_p): Moved from expr.c; check for record type.
+ (objects_must_conflict_p): New function.
+ * calls.c (expand_call): Use assign_temp as much as possible, use
+ readonly variant if assigned once, and don't set memory attributes.
+ (emit_library_call_value_1, store_one_arg): Likewise.
+ * integrate.c (expand_inline_function): Likewise.
+ * stmt.c (expand_asm_operands, expand_return): Likewise.
+ * expr.c (copy_blkmode_from_reg, store_constructor): Likewise.
+ (store_field, save_noncopied_parts, expand_expr): Likewise.
+ (expand_expr_unaligned): Likewise.
+ (readonly_fields_p): Moved to alias.c.
+ (safe_from_p): Rework handling of SAVE_EXPR.
+	MEMs only conflict if alias sets conflict; likewise for INDIRECT_REF.
+ * function.c (struct temp_slot): Delete field ALIAS_SET; add TYPE.
+	(assign_stack_for_temp): Use objects_must_conflict_p.
+ Set all memory attributes from type, if specified.
+ (mark_temp_slot): Mark TYPE field.
+ * tree.h (alias_sets_conflict_p, readonly_fields_p): New decls.
+ (objects_must_conflict_p): Likewise.
+
+ * stmt.c (expand_decl): Don't use assign_stack_temp in error case.
+ (add_case_node): No need to copy nodes anymore.
+
2000-12-30 Alexandre Oliva <aoliva@redhat.com>
* config/sh/sh.c (split_branches): Don't dereference re-computed
`beyond' before checking it's non-NULL.
-
2000-12-29 Robert Lipe <robertl@sco.com>
Remove COFF support from i?86-pc-sco3.2v5.
rtx mem1;
rtx mem2;
{
- alias_set_entry ase;
-
#ifdef ENABLE_CHECKING
/* Perform a basic sanity check. Namely, that there are no alias sets
if we're not using strict aliasing. This helps to catch bugs
abort ();
#endif
- /* If have no alias set information for one of the MEMs, we have to assume
- it can alias anything. */
- if (MEM_ALIAS_SET (mem1) == 0 || MEM_ALIAS_SET (mem2) == 0)
- return 0;
+ return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
+}
- /* If the two alias sets are the same, they may alias. */
- if (MEM_ALIAS_SET (mem1) == MEM_ALIAS_SET (mem2))
- return 0;
+/* Insert the NODE into the splay tree given by DATA. Used by
+ record_alias_subset via splay_tree_foreach. */
+
+static int
+insert_subset_children (node, data)
+ splay_tree_node node;
+ void *data;
+{
+ splay_tree_insert ((splay_tree) data, node->key, node->value);
+
+ return 0;
+}
+
+/* Return 1 if the two specified alias sets may conflict. */
+
+int
+alias_sets_conflict_p (set1, set2)
+ HOST_WIDE_INT set1, set2;
+{
+ alias_set_entry ase;
+
+  /* If we have no alias set information for one of the operands, we have
+     to assume it can alias anything.  */
+ if (set1 == 0 || set2 == 0
+ /* If the two alias sets are the same, they may alias. */
+ || set1 == set2)
+ return 1;
/* See if the first alias set is a subset of the second. */
- ase = get_alias_set_entry (MEM_ALIAS_SET (mem1));
+ ase = get_alias_set_entry (set1);
if (ase != 0
&& (ase->has_zero_child
|| splay_tree_lookup (ase->children,
- (splay_tree_key) MEM_ALIAS_SET (mem2))))
- return 0;
+ (splay_tree_key) set2)))
+ return 1;
/* Now do the same, but with the alias sets reversed. */
- ase = get_alias_set_entry (MEM_ALIAS_SET (mem2));
+ ase = get_alias_set_entry (set2);
if (ase != 0
&& (ase->has_zero_child
|| splay_tree_lookup (ase->children,
- (splay_tree_key) MEM_ALIAS_SET (mem1))))
- return 0;
+ (splay_tree_key) set1)))
+ return 1;
- /* The two MEMs are in distinct alias sets, and neither one is the
+ /* The two alias sets are distinct and neither one is the
child of the other. Therefore, they cannot alias. */
- return 1;
+ return 0;
}
+\f
+/* Return 1 if TYPE is a RECORD_TYPE, UNION_TYPE, or QUAL_UNION_TYPE and
+   has any readonly fields.  If any of the fields have types that
+   contain readonly fields, return true as well.  */
-/* Insert the NODE into the splay tree given by DATA. Used by
- record_alias_subset via splay_tree_foreach. */
-
-static int
-insert_subset_children (node, data)
- splay_tree_node node;
- void *data;
+int
+readonly_fields_p (type)
+ tree type;
{
- splay_tree_insert ((splay_tree) data, node->key, node->value);
+ tree field;
+
+ if (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
+ && TREE_CODE (type) != QUAL_UNION_TYPE)
+ return 0;
+
+ for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
+ if (TREE_CODE (field) == FIELD_DECL
+ && (TREE_READONLY (field)
+ || readonly_fields_p (TREE_TYPE (field))))
+ return 1;
return 0;
}
\f
+/* Return 1 if any MEM object of type T1 will always conflict (using the
+ dependency routines in this file) with any MEM object of type T2.
+ This is used when allocating temporary storage. If T1 and/or T2 are
+ NULL_TREE, it means we know nothing about the storage. */
+
+int
+objects_must_conflict_p (t1, t2)
+ tree t1, t2;
+{
+ /* If they are the same type, they must conflict. */
+ if (t1 == t2
+ /* Likewise if both are volatile. */
+ || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
+ return 1;
+
+ /* We now know they are different types. If one or both has readonly fields
+ or if one is readonly and the other not, they may not conflict.
+ Likewise if one is aggregate and the other is scalar. */
+ if ((t1 != 0 && readonly_fields_p (t1))
+ || (t2 != 0 && readonly_fields_p (t2))
+ || ((t1 != 0 && TYPE_READONLY (t1))
+ != (t2 != 0 && TYPE_READONLY (t2)))
+ || ((t1 != 0 && AGGREGATE_TYPE_P (t1))
+ != (t2 != 0 && AGGREGATE_TYPE_P (t2))))
+ return 0;
+
+ /* Otherwise they conflict only if the alias sets conflict. */
+ return alias_sets_conflict_p (t1 ? get_alias_set (t1) : 0,
+ t2 ? get_alias_set (t2) : 0);
+}
+\f
/* T is an expression with pointer type. Find the DECL on which this
expression is based. (For example, in `a[i]' this would be `a'.)
If there is no such DECL, or a unique decl cannot be determined,
structure_value_addr = XEXP (target, 0);
else
{
- rtx d;
-
/* For variable-sized objects, we must be called with a target
specified. If we were to allocate space on the stack here,
we would have no way of knowing when to free it. */
+ rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
- if (struct_value_size < 0)
- abort ();
-
- d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
mark_temp_addr_taken (d);
structure_value_addr = XEXP (d, 0);
target = 0;
The Irix 6 ABI has examples of this. */
else if (GET_CODE (valreg) == PARALLEL)
{
- int bytes = int_size_in_bytes (TREE_TYPE (exp));
-
if (target == 0)
{
- target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)),
- bytes, 0);
- MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
+ /* This will only be assigned once, so it can be readonly. */
+ tree nt = build_qualified_type (TREE_TYPE (exp),
+ (TYPE_QUALS (TREE_TYPE (exp))
+ | TYPE_QUAL_CONST));
+
+ target = assign_temp (nt, 0, 1, 1);
preserve_temp_slots (target);
}
if (! rtx_equal_p (target, valreg))
- emit_group_store (target, valreg, bytes,
+ emit_group_store (target, valreg,
+ int_size_in_bytes (TREE_TYPE (exp)),
TYPE_ALIGN (TREE_TYPE (exp)));
/* We can not support sibling calls for this case. */
if (value != 0 && GET_CODE (value) == MEM)
mem_value = value;
else
- mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
+ mem_value = assign_temp (type_for_mode (outmode, 0), 0, 1, 1);
#endif
/* This call returns a big structure. */
{
/* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
be viewed as just an efficiency improvement. */
- rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
+ rtx slot = assign_temp (type_for_mode (mode, 0), 0, 1, 1);
+
call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_USE (VOIDmode, slot),
call_fusage);
if (save_mode == BLKmode)
{
- arg->save_area = assign_stack_temp (BLKmode,
- arg->size.constant, 0);
- MEM_SET_IN_STRUCT_P (arg->save_area,
- AGGREGATE_TYPE_P (TREE_TYPE
- (arg->tree_value)));
+ tree ot = TREE_TYPE (arg->tree_value);
+ tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
+ | TYPE_QUAL_CONST));
+
+ arg->save_area = assign_temp (nt, 0, 1, 1);
preserve_temp_slots (arg->save_area);
emit_block_move (validize_mem (arg->save_area), stack_area,
- GEN_INT (arg->size.constant),
- PARM_BOUNDARY);
+ expr_size (arg->tree_value),
+ MIN (PARM_BOUNDARY, TYPE_ALIGN (nt)));
}
else
{
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p PARAMS ((tree));
static rtx var_rtx PARAMS ((tree));
-static int readonly_fields_p PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
if (tgtblk == 0)
{
- tgtblk = assign_stack_temp (BLKmode, bytes, 0);
- MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
+ tgtblk = assign_temp (build_qualified_type (type,
+ (TYPE_QUALS (type)
+ | TYPE_QUAL_CONST)),
+ 0, 1, 1);
preserve_temp_slots (tgtblk);
}
enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
? MODE_FLOAT : MODE_INT);
- enum machine_mode reg_mode =
- mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
+ enum machine_mode reg_mode
+ = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
if (reg_mode != BLKmode)
{
rtx mem = assign_stack_temp (reg_mode,
GET_MODE_SIZE (mode), 0);
-
rtx cmem = change_address (mem, mode, NULL_RTX);
- cfun->cannot_inline = N_("function using short complex types cannot be inline");
+ cfun->cannot_inline
+ = N_("function using short complex types cannot be inline");
if (packed_dest_p)
{
if (REG_P (target))
{
- targetx = assign_stack_temp (GET_MODE (target),
- GET_MODE_SIZE (GET_MODE (target)),
- 0);
+ targetx
+ = assign_temp
+ ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
+ TYPE_QUAL_CONST)),
+ 0, 1, 1);
emit_move_insn (targetx, target);
}
if (mode == BLKmode
&& (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
{
- rtx object = assign_stack_temp (GET_MODE (target),
- GET_MODE_SIZE (GET_MODE (target)), 0);
+ rtx object
+ = assign_temp
+ (build_qualified_type (type_for_mode (GET_MODE (target), 0),
+ TYPE_QUAL_CONST),
+ 0, 1, 1);
rtx blk_object = copy_rtx (object);
- MEM_SET_IN_STRUCT_P (object, 1);
- MEM_SET_IN_STRUCT_P (blk_object, 1);
PUT_MODE (blk_object, BLKmode);
if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
tree part = TREE_VALUE (tail);
tree part_type = TREE_TYPE (part);
tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
- rtx target = assign_temp (part_type, 0, 1, 1);
+ rtx target
+ = assign_temp (build_qualified_type (part_type,
+ (TYPE_QUALS (part_type)
+ | TYPE_QUAL_CONST)),
+ 0, 1, 1);
+
if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
parts = tree_cons (to_be_saved,
{
rtx exp_rtl = 0;
int i, nops;
- static int save_expr_count;
- static int save_expr_size = 0;
- static tree *save_expr_rewritten;
- static tree save_expr_trees[256];
+ static tree save_expr_list;
if (x == 0
/* If EXP has varying size, we MUST use a target since we currently
|| TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
|| TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
!= INTEGER_CST)
- && GET_MODE (x) == BLKmode))
+ && GET_MODE (x) == BLKmode)
+ /* If X is in the outgoing argument area, it is always safe. */
+ || (GET_CODE (x) == MEM
+ && (XEXP (x, 0) == virtual_outgoing_args_rtx
+ || (GET_CODE (XEXP (x, 0)) == PLUS
+ && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
return 1;
- if (top_p && save_expr_size == 0)
- {
- int rtn;
-
- save_expr_count = 0;
- save_expr_size = ARRAY_SIZE (save_expr_trees);
- save_expr_rewritten = &save_expr_trees[0];
-
- rtn = safe_from_p (x, exp, 1);
-
- for (i = 0; i < save_expr_count; ++i)
- {
- if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
- abort ();
- TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
- }
-
- save_expr_size = 0;
-
- return rtn;
- }
-
/* If this is a subreg of a hard register, declare it unsafe, otherwise,
find the underlying pseudo. */
if (GET_CODE (x) == SUBREG)
return 0;
}
- /* If X is a location in the outgoing argument area, it is always safe. */
- if (GET_CODE (x) == MEM
- && (XEXP (x, 0) == virtual_outgoing_args_rtx
- || (GET_CODE (XEXP (x, 0)) == PLUS
- && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
- return 1;
+ /* A SAVE_EXPR might appear many times in the expression passed to the
+ top-level safe_from_p call, and if it has a complex subexpression,
+ examining it multiple times could result in a combinatorial explosion.
+ E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
+ with optimization took about 28 minutes to compile -- even though it was
+ only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
+ and turn that off when we are done. We keep a list of the SAVE_EXPRs
+ we have processed. Note that the only test of top_p was above. */
+
+ if (top_p)
+ {
+ int rtn;
+ tree t;
+
+ save_expr_list = 0;
+
+ rtn = safe_from_p (x, exp, 0);
+
+ for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
+ TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
+
+ return rtn;
+ }
+ /* Now look at our tree code and possibly recurse. */
switch (TREE_CODE_CLASS (TREE_CODE (exp)))
{
case 'd':
{
case ADDR_EXPR:
return (staticp (TREE_OPERAND (exp, 0))
- || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
- || TREE_STATIC (exp));
+ || TREE_STATIC (exp)
+ || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
case INDIRECT_REF:
- if (GET_CODE (x) == MEM)
+ if (GET_CODE (x) == MEM
+ && alias_sets_conflict_p (MEM_ALIAS_SET (x),
+ get_alias_set (exp)))
return 0;
break;
if (exp_rtl)
break;
- /* This SAVE_EXPR might appear many times in the top-level
- safe_from_p() expression, and if it has a complex
- subexpression, examining it multiple times could result
- in a combinatorial explosion. E.g. on an Alpha
- running at least 200MHz, a Fortran test case compiled with
- optimization took about 28 minutes to compile -- even though
- it was only a few lines long, and the complicated line causing
- so much time to be spent in the earlier version of safe_from_p()
- had only 293 or so unique nodes.
-
- So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
- where it is so we can turn it back in the top-level safe_from_p()
- when we're done. */
-
- /* For now, don't bother re-sizing the array. */
- if (save_expr_count >= save_expr_size)
- return 0;
- save_expr_rewritten[save_expr_count++] = exp;
+ /* If we've already scanned this, don't do it again. Otherwise,
+ show we've scanned it and record for clearing the flag if we're
+ going on. */
+ if (TREE_PRIVATE (exp))
+ return 1;
- nops = TREE_CODE_LENGTH (SAVE_EXPR);
- for (i = 0; i < nops; i++)
+ TREE_PRIVATE (exp) = 1;
+ if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
{
- tree operand = TREE_OPERAND (exp, i);
- if (operand == NULL_TREE)
- continue;
- TREE_SET_CODE (exp, ERROR_MARK);
- if (!safe_from_p (x, operand, 0))
- return 0;
- TREE_SET_CODE (exp, SAVE_EXPR);
+ TREE_PRIVATE (exp) = 0;
+ return 0;
}
- TREE_SET_CODE (exp, ERROR_MARK);
+
+ save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
return 1;
case BIND_EXPR:
}
/* If the rtl is X, then it is not safe. Otherwise, it is unless both
- are memory and EXP is not readonly. */
+ are memory and they conflict. */
return ! (rtx_equal_p (x, exp_rtl)
|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
- && ! TREE_READONLY (exp)));
+ && true_dependence (exp_rtl, GET_MODE (x), x,
+ rtx_addr_varies_p)));
}
/* If we reach here, it is safe. */
}
#endif
\f
-/* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
- has any readonly fields. If any of the fields have types that
- contain readonly fields, return true as well. */
-
-static int
-readonly_fields_p (type)
- tree type;
-{
- tree field;
-
- for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
- if (TREE_CODE (field) == FIELD_DECL
- && (TREE_READONLY (field)
- || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
- && readonly_fields_p (TREE_TYPE (field)))))
- return 1;
-
- return 0;
-}
-\f
/* expand_expr: generate code for computing expression EXP.
An rtx for the computed value is returned. The value is never null.
In the case of a void EXP, const0_rtx is returned.
if (mode == VOIDmode)
temp = const0_rtx;
else
- {
- temp = assign_temp (type, 3, 0, 0);
- if (GET_CODE (temp) == MEM)
- RTX_UNCHANGING_P (temp) = 1;
- }
+ temp = assign_temp (build_qualified_type (type,
+ (TYPE_QUALS (type)
+ | TYPE_QUAL_CONST)),
+ 3, 0, 0);
SAVE_EXPR_RTL (exp) = temp;
if (!optimize && GET_CODE (temp) == REG)
XEXP (constructor, 0));
return constructor;
}
-
else
{
/* Handle calls that pass values in multiple non-contiguous
locations. The Irix 6 ABI has examples of this. */
if (target == 0 || ! safe_from_p (target, exp, 1)
|| GET_CODE (target) == PARALLEL)
- {
- if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
- target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
- else
- target = assign_temp (type, 0, 1, 1);
- }
-
- if (TREE_READONLY (exp))
- {
- if (GET_CODE (target) == MEM)
- target = copy_rtx (target);
-
- RTX_UNCHANGING_P (target) = 1;
- }
+ target
+ = assign_temp (build_qualified_type (type,
+ (TYPE_QUALS (type)
+ | (TREE_READONLY (exp)
+ * TYPE_QUAL_CONST))),
+ TREE_ADDRESSABLE (exp), 1, 1);
store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
int_size_in_bytes (TREE_TYPE (exp)));
/* If we are writing to this object and its type is a record with
readonly fields, we must mark it as readonly so it will
conflict with readonly references to those fields. */
- if (modifier == EXPAND_MEMORY_USE_WO
- && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
+ if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
RTX_UNCHANGING_P (temp) = 1;
return temp;
if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
{
- rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
+ tree nt = build_qualified_type (TREE_TYPE (tem),
+ (TYPE_QUALS (TREE_TYPE (tem))
+ | TYPE_QUAL_CONST));
+ rtx memloc = assign_temp (nt, 1, 1, 1);
mark_temp_addr_taken (memloc);
emit_move_insn (memloc, op0);
if (mode == BLKmode)
{
- rtx new = assign_stack_temp (ext_mode,
- bitsize / BITS_PER_UNIT, 0);
+ tree nt = build_qualified_type (type_for_size (ext_mode, 0),
+ TYPE_QUAL_CONST);
+ rtx new = assign_temp (nt, 0, 1, 1);
emit_move_insn (new, op0);
op0 = copy_rtx (new);
PUT_MODE (op0, BLKmode);
- MEM_SET_IN_STRUCT_P (op0, 1);
}
return op0;
modifier);
if (target == 0)
- {
- if (mode != BLKmode)
- target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
- else
- target = assign_temp (type, 0, 1, 1);
- }
+ target = assign_temp (type, 0, 1, 1);
if (GET_CODE (target) == MEM)
/* Store data into beginning of memory target. */
/* If this object is in a register, it must be not
be BLKmode. */
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
- rtx memloc = assign_temp (inner_type, 1, 1, 1);
+ tree nt = build_qualified_type (inner_type,
+ (TYPE_QUALS (inner_type)
+ | TYPE_QUAL_CONST));
+ rtx memloc = assign_temp (nt, 1, 1, 1);
mark_temp_addr_taken (memloc);
if (GET_CODE (op0) == PARALLEL)
if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
{
- rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
+ tree nt = build_qualified_type (TREE_TYPE (tem),
+ (TYPE_QUALS (TREE_TYPE (tem))
+ | TYPE_QUAL_CONST));
+ rtx memloc = assign_temp (nt, 1, 1, 1);
mark_temp_addr_taken (memloc);
emit_move_insn (memloc, op0);
}
else
{
- rtx new = assign_stack_temp (ext_mode,
- bitsize / BITS_PER_UNIT, 0);
+ tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
+ TYPE_QUAL_CONST);
+ rtx new = assign_temp (nt, 0, 1, 1);
op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
unsignedp, NULL_RTX, ext_mode,
int align;
/* The size, in units, of the slot. */
HOST_WIDE_INT size;
- /* The alias set for the slot. If the alias set is zero, we don't
- know anything about the alias set of the slot. We must only
- reuse a slot if it is assigned an object of the same alias set.
- Otherwise, the rest of the compiler may assume that the new use
- of the slot cannot alias the old use of the slot, which is
- false. If the slot has alias set zero, then we can't reuse the
- slot at all, since we have no idea what alias set may have been
- imposed on the memory. For example, if the stack slot is the
- call frame for an inline functioned, we have no idea what alias
- sets will be assigned to various pieces of the call frame. */
- HOST_WIDE_INT alias_set;
+ /* The type of the object in the slot, or zero if it doesn't correspond
+ to a type. We use this to determine whether a slot can be reused.
+ It can be reused if objects of the type of the new slot will always
+ conflict with objects of the type of the old slot. */
+ tree type;
/* The value of `sequence_rtl_expr' when this temporary is allocated. */
tree rtl_expr;
/* Non-zero if this temporary is currently in use. */
tree type;
{
int align;
- HOST_WIDE_INT alias_set;
struct temp_slot *p, *best_p = 0;
/* If SIZE is -1 it means that somebody tried to allocate a temporary
if (size == -1)
abort ();
- /* If we know the alias set for the memory that will be used, use
- it. If there's no TYPE, then we don't know anything about the
- alias set for the memory. */
- if (type)
- alias_set = get_alias_set (type);
- else
- alias_set = 0;
-
if (mode == BLKmode)
align = BIGGEST_ALIGNMENT;
else
for (p = temp_slots; p; p = p->next)
if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
&& ! p->in_use
- && (! flag_strict_aliasing
- || (alias_set && p->alias_set == alias_set))
+ && objects_must_conflict_p (p->type, type)
&& (best_p == 0 || best_p->size > p->size
|| (best_p->size == p->size && best_p->align > p->align)))
{
p->align = best_p->align;
p->address = 0;
p->rtl_expr = 0;
- p->alias_set = best_p->alias_set;
+ p->type = best_p->type;
p->next = temp_slots;
temp_slots = p;
align);
p->align = align;
- p->alias_set = alias_set;
/* The following slot size computation is necessary because we don't
know the actual size of the temporary slot until assign_stack_local
p->in_use = 1;
p->addr_taken = 0;
p->rtl_expr = seq_rtl_expr;
+ p->type = type;
if (keep == 2)
{
RTX_UNCHANGING_P (p->slot) = 0;
MEM_IN_STRUCT_P (p->slot) = 0;
MEM_SCALAR_P (p->slot) = 0;
- MEM_ALIAS_SET (p->slot) = alias_set;
+ MEM_VOLATILE_P (p->slot) = 0;
+ /* If we know the alias set for the memory that will be used, use
+ it. If there's no TYPE, then we don't know anything about the
+ alias set for the memory. */
+ if (type)
+ MEM_ALIAS_SET (p->slot) = get_alias_set (type);
+ else
+ MEM_ALIAS_SET (p->slot) = 0;
+
+ /* If a type is specified, set the relevant flags. */
if (type != 0)
- MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
+ {
+ RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
+ MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
+ MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
+ }
return p->slot;
}
/* Make sure that all refs to the variable, previously made
when it was a register, are fixed up to be valid again.
See function above for meaning of arguments. */
+
static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
struct function *function;
ggc_mark_rtx (t->slot);
ggc_mark_rtx (t->address);
ggc_mark_tree (t->rtl_expr);
+ ggc_mark_tree (t->type);
t = t->next;
}
if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
{
- rtx stack_slot
- = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
- int_size_in_bytes (TREE_TYPE (arg)), 1);
- MEM_SET_IN_STRUCT_P (stack_slot,
- AGGREGATE_TYPE_P (TREE_TYPE (arg)));
+ rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);
store_expr (arg, stack_slot, 0);
-
arg_vals[i] = XEXP (stack_slot, 0);
invisiref = 1;
}
|| GET_CODE (op) == CONCAT)
{
tree type = TREE_TYPE (TREE_VALUE (tail));
- rtx memloc = assign_temp (type, 1, 1, 1);
+ tree qual_type = build_qualified_type (type,
+ (TYPE_QUALS (type)
+ | TYPE_QUAL_CONST));
+ rtx memloc = assign_temp (qual_type, 1, 1, 1);
emit_move_insn (memloc, op);
op = memloc;
{
/* Calculate the return value into a temporary (usually a pseudo
reg). */
- val = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)),
- 0, 0, 1);
+ tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
+ tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
+
+ val = assign_temp (nt, 0, 0, 1);
val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
val = force_not_mem (val);
emit_queue ();
if (type == error_mark_node)
DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
+
else if (DECL_SIZE (decl) == 0)
/* Variable with incomplete type. */
{
if (DECL_INITIAL (decl) == 0)
/* Error message was already done; now avoid a crash. */
- DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
+ DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
else
/* An initializer is going to decide the size of this array.
Until we know the size, represent its address with a reg. */
}
}
- /* Add this label to the chain, and succeed.
- Copy LOW, HIGH so they are on temporary rather than momentary
- obstack and will thus survive till the end of the case statement. */
+ /* Add this label to the chain, and succeed. */
r = (struct case_node *) xmalloc (sizeof (struct case_node));
- r->low = copy_node (low);
+ r->low = low;
/* If the bounds are equal, turn this into the one-value case. */
if (tree_int_cst_equal (low, high))
r->high = r->low;
else
- r->high = copy_node (high);
+ r->high = high;
r->code_label = label;
expand_label (label);
/* In alias.c */
extern void record_component_aliases PARAMS ((tree));
extern HOST_WIDE_INT get_alias_set PARAMS ((tree));
+extern int alias_sets_conflict_p PARAMS ((HOST_WIDE_INT,
+ HOST_WIDE_INT));
+extern int readonly_fields_p PARAMS ((tree));
+extern int objects_must_conflict_p PARAMS ((tree, tree));
/* In c-common.c */
extern HOST_WIDE_INT lang_get_alias_set PARAMS ((tree));