This is the mail archive of the
gcc-patches@gcc.gnu.org
mailing list for the GCC project.
[patch] Fix PR tree-optimization/50569
- From: Eric Botcazou <ebotcazou at adacore dot com>
- To: gcc-patches at gcc dot gnu dot org
- Date: Sat, 10 Dec 2011 22:31:23 +0100
- Subject: [patch] Fix PR tree-optimization/50569
Hi,
this is a regression present on mainline and 4.6 branch at -O for the SPARC.
The compiler again generates an unaligned access for the memcpy calls in:
struct event {
struct {
unsigned int sec;
} sent __attribute__((packed));
};
void __attribute__((noinline,noclone)) frob_entry(char *buf)
{
struct event event;
__builtin_memcpy(&event, buf, sizeof(event));
if (event.sent.sec < 64) {
event.sent.sec = -1U;
__builtin_memcpy(buf, &event, sizeof(event));
}
}
Unsurprisingly enough, the trick used in build_ref_for_model (in case this is a
reference to a component, the function will replicate the last COMPONENT_REF
of model's expr to access it) isn't sufficient anymore with MEM_REFs around,
since MEM_REFs can encapsulate an arbitrary number of inner references.
Fixed by extending the trick to a chain of COMPONENT_REFs. Tested on x86/Linux
and SPARC/Solaris, OK for mainline and 4.6 branch?
2011-12-10 Eric Botcazou <ebotcazou@adacore.com>
PR tree-optimization/50569
* tree-sra.c (build_ref_for_model): Replicate a chain of COMPONENT_REFs
in the expression of MODEL instead of just the last one.
2011-12-10 Eric Botcazou <ebotcazou@adacore.com>
* gcc.c-torture/execute/20111210-1.c: New test.
--
Eric Botcazou
/* PR tree-optimization/50569 */
/* Reported by Paul Koning <pkoning@gcc.gnu.org> */
/* Reduced testcase by Mikael Pettersson <mikpe@it.uu.se> */
/* The nested struct is marked packed, so 'sent' (and hence 'sec') has
   byte alignment: the compiler must not assume word-aligned access.  */
struct event {
struct {
unsigned int sec;
} sent __attribute__((packed));
};
/* Copy the possibly misaligned bytes at BUF into a local struct,
   conditionally modify it, and copy it back.  Per the PR report above,
   SRA replicated only the last COMPONENT_REF of the model expression,
   which made the compiler emit an unaligned word access for these
   memcpy calls on strict-alignment targets (SPARC) at -O.
   noinline/noclone keep the misaligned argument from being optimized
   away by inlining into main.  */
void __attribute__((noinline,noclone)) frob_entry(char *buf)
{
struct event event;
__builtin_memcpy(&event, buf, sizeof(event));
if (event.sent.sec < 64) {
event.sent.sec = -1U;
__builtin_memcpy(buf, &event, sizeof(event));
}
}
/* Driver: construct a buffer whose element 1 is guaranteed to be
   misaligned for an int, and hand that misaligned pointer to
   frob_entry.  The union with 'int align' forces u itself to int
   alignment, so &u.buf[1] is off by exactly one byte.  The buffer is
   zeroed, so frob_entry's "< 64" branch is taken and both memcpy
   calls (read and write) are exercised.  */
int main(void)
{
union {
char buf[1 + sizeof(struct event)];
int align;
} u;
__builtin_memset(&u, 0, sizeof u);
frob_entry(&u.buf[1]);
return 0;
}
Index: tree-sra.c
===================================================================
--- tree-sra.c (revision 182102)
+++ tree-sra.c (working copy)
@@ -1493,32 +1493,61 @@ build_ref_for_offset (location_t loc, tr
}
/* Construct a memory reference to a part of an aggregate BASE at the given
- OFFSET and of the same type as MODEL. In case this is a reference to a
- component, the function will replicate the last COMPONENT_REF of model's
- expr to access it. GSI and INSERT_AFTER have the same meaning as in
- build_ref_for_offset. */
+ OFFSET and of the type of MODEL. In case this is a chain of references
+ to component, the function will replicate the chain of COMPONENT_REFs of
+ the expression of MODEL to access it. GSI and INSERT_AFTER have the same
+ meaning as in build_ref_for_offset. */
static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
struct access *model, gimple_stmt_iterator *gsi,
bool insert_after)
{
+ tree type = model->type, t;
+ VEC(tree,heap) *stack = NULL;
+
if (TREE_CODE (model->expr) == COMPONENT_REF)
{
- tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
- tree cr_offset = component_ref_field_offset (model->expr);
+ tree expr = model->expr;
+
+ /* Create a stack of the COMPONENT_REFs so later we can walk them in
+ order from inner to outer. */
+ stack = VEC_alloc (tree, heap, 6);
+
+ do {
+ tree field = TREE_OPERAND (expr, 1);
+ tree cr_offset = component_ref_field_offset (expr);
+ gcc_assert (cr_offset && host_integerp (cr_offset, 1));
+
+ offset -= TREE_INT_CST_LOW (cr_offset) * BITS_PER_UNIT;
+ offset -= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
- gcc_assert (cr_offset && host_integerp (cr_offset, 1));
- offset -= TREE_INT_CST_LOW (cr_offset) * BITS_PER_UNIT;
- offset -= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fld));
- exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
- t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
- return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
- TREE_OPERAND (model->expr, 2));
+ VEC_safe_push (tree, heap, stack, expr);
+
+ expr = TREE_OPERAND (expr, 0);
+ type = TREE_TYPE (expr);
+ } while (TREE_CODE (expr) == COMPONENT_REF);
}
- else
- return build_ref_for_offset (loc, base, offset, model->type,
- gsi, insert_after);
+
+ t = build_ref_for_offset (loc, base, offset, type, gsi, insert_after);
+
+ if (TREE_CODE (model->expr) == COMPONENT_REF)
+ {
+ unsigned i;
+ tree expr;
+
+ /* Now replicate the chain of COMPONENT_REFs from inner to outer. */
+ FOR_EACH_VEC_ELT_REVERSE (tree, stack, i, expr)
+ {
+ tree field = TREE_OPERAND (expr, 1);
+ t = fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (field), t, field,
+ TREE_OPERAND (expr, 2));
+ }
+
+ VEC_free (tree, heap, stack);
+ }
+
+ return t;
}
/* Construct a memory reference consisting of component_refs and array_refs to