This is the mail archive of the
gcc-patches@gcc.gnu.org
mailing list for the GCC project.
[PATCH, pretty-ipa] Do not disqualify aggregates if they have variable accesses
- From: Martin Jambor <mjambor at suse dot cz>
- To: GCC Patches <gcc-patches at gcc dot gnu dot org>
- Date: Thu, 26 Mar 2009 13:20:07 +0100
- Subject: [PATCH, pretty-ipa] Do not disqualify aggregates if they have variable accesses
Hi,
as requested and approved by Honza (on IRC), the following patch does
not disqualify the whole aggregate for intra-SRA whenever size and
max_size returned by get_ref_base_and_extent are not equal to each
other, but instead introduces unscalarizable region accesses (of
max_size size) that do exactly what their name says: they prevent
creation of scalar replacements for such a region.
Bootstrapped and tested on x86_64-linux, committed together with the
previous patch that dumps reasons for candidate disqualification.
Thanks,
Martin
2009-03-26 Martin Jambor <mjambor@suse.cz>
* ipa-sra.c (struct access): New flag grp_unscalarizable_region.
(dump_access): Dump also grp_unscalarizable_region flag.
(create_access): If max_size != size in intra-SRA, do not
disqualify the candidate but create a grp_unscalarizable_region
access instead.
(sort_and_splice_var_accesses): Propagate
grp_unscalarizable_region flag to the group representative.
(build_access_tree_1): Disallow replacements in a subtree if
grp_unscalarizable_region is set.
Index: isra/gcc/ipa-sra.c
===================================================================
--- isra.orig/gcc/ipa-sra.c
+++ isra/gcc/ipa-sra.c
@@ -216,6 +216,9 @@ struct access
/* Is the subtree rooted in this access fully covered by scalar
replacements? */
unsigned grp_covered : 1;
+ /* If set to true, this access and all below it in an access tree must not be
+ scalarized. */
+ unsigned grp_unscalarizable_region : 1;
/* Whether data have been written to parts of the aggregate covered by this
access which is not to be scalarized. This flag is propagated up in the
access tree. */
@@ -322,12 +325,13 @@ dump_access (struct access *access, bool
print_generic_expr (dump_file, access->type, 0);
if (grp)
fprintf (dump_file, ", grp_write = %d, grp_read = %d, grp_covered = %d, "
- "grp_unscalarized_data = %d, grp_maybe_modified = %d, "
- "to_be_replaced = %d, "
+ "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
+ "grp_maybe_modified = %d, to_be_replaced = %d, "
"stmt_no = %d, always_safe = %d'\n",
access->grp_write, access->grp_read, access->grp_covered,
- access->grp_unscalarized_data, access->grp_maybe_modified,
- access->to_be_replaced, access->stmt_no, access->always_safe);
+ access->grp_unscalarizable_region, access->grp_unscalarized_data,
+ access->grp_maybe_modified, access->to_be_replaced,
+ access->stmt_no, access->always_safe);
else
fprintf (dump_file, ", write = %d, stmt_no = %d'\n", access->write,
access->stmt_no);
@@ -996,7 +1000,7 @@ create_access (tree expr, bool write)
VEC (access_p,heap) *vec;
HOST_WIDE_INT offset, size, max_size;
tree base = expr;
- bool ptr = false;
+ bool ptr = false, unscalarizable_region = false;
if (handled_component_p (expr))
{
@@ -1032,19 +1036,40 @@ create_access (tree expr, bool write)
if (sra_mode == SRA_MODE_EARLY_IPA)
base = get_ssa_base_param (base);
- if (!base || !DECL_P (base) || (ptr && TREE_CODE (base) != PARM_DECL))
+ if (!base || !DECL_P (base)
+ || (ptr && TREE_CODE (base) != PARM_DECL)
+ || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
return NULL;
- if (size < 0 || size != max_size
- || (sra_mode == SRA_MODE_EARLY_IPA
- && ((offset % BITS_PER_UNIT) != 0
- || (size % BITS_PER_UNIT) != 0)))
+ if (sra_mode == SRA_MODE_EARLY_IPA)
{
- disqualify_candidate (base, "there is an acces not aligned to a byte.");
- return NULL;
+ if (size < 0 || size != max_size)
+ {
+ disqualify_candidate (base, "Encountered a variable sized access.");
+ return NULL;
+ }
+ else if ((offset % BITS_PER_UNIT) != 0 || (size % BITS_PER_UNIT) != 0)
+ {
+ disqualify_candidate (base,
+ "Encountered an access not aligned to a byte.");
+ return NULL;
+ }
+ }
+ else
+ {
+ if (size != max_size)
+ {
+ size = max_size;
+ unscalarizable_region = true;
+ }
+
+ if (size < 0)
+ {
+ disqualify_candidate (base, "Encountered an ultra variable sized "
+ "access.");
+ return NULL;
+ }
}
- if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
- return NULL;
access = (struct access *) pool_alloc (access_pool);
memset (access, 0, sizeof (struct access));
@@ -1057,6 +1082,7 @@ create_access (tree expr, bool write)
access->write = write;
access->stmt_no = stmt_no;
access->bb = current_bb;
+ access->grp_unscalarizable_region = unscalarizable_region;
slot = pointer_map_contains (base_access_vec, base);
if (slot)
@@ -2805,6 +2831,7 @@ sort_and_splice_var_accesses (tree var)
bool grp_read = !access->write;
bool grp_bfr_lhs = access->grp_bfr_lhs;
bool first_scalar = is_sra_scalar_type (access->type);
+ bool unscalarizable_region = access->grp_unscalarizable_region;
if (first || access->offset >= high)
{
@@ -2834,6 +2861,7 @@ sort_and_splice_var_accesses (tree var)
modification |= ac2->write;
grp_read |= !ac2->write;
grp_bfr_lhs |= ac2->grp_bfr_lhs;
+ unscalarizable_region |= ac2->grp_unscalarizable_region;
/* If one of the equivalent accesses is scalar, use it as a
representative (this happens when there is for example on a
@@ -2859,6 +2887,7 @@ sort_and_splice_var_accesses (tree var)
access->grp_read = grp_read;
access->grp_maybe_modified = modification;
access->grp_bfr_lhs = grp_bfr_lhs;
+ access->grp_unscalarizable_region = unscalarizable_region;
*prev_acc_ptr = access;
prev_acc_ptr = &access->next_grp;
}
@@ -2968,6 +2997,9 @@ build_access_tree_1 (struct access **acc
else if (root->grp_write)
mark_write = true;
+ if (root->grp_unscalarizable_region)
+ allow_replacements = false;
+
*access = (*access)->next_grp;
while (*access && (*access)->offset + (*access)->size <= limit)
{