Make nonoverlapping_component_refs_since_match_p work with non-trivial MEM_REFs and TMRs


Hi,
this patch makes nonoverlapping_component_refs_since_match_p accept
paths with non-trivial MEM_REFs and TMRs, assuming that they have the
same semantics.
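
Here "non-trivial" means a MEM_REF whose offset operand is nonzero (or a
TARGET_MEM_REF that also carries step/index operands).  A minimal sketch
of the situation, distilled from the new testcase below (the GIMPLE forms
in the comments are approximate):

struct a { int x, y; };
struct c { int pad; struct a a; } *cptr;

int
f (void)
{
  struct a *p = &cptr->a;	/* Folds to cptr + 4.  */
  p->x = 123;			/* Roughly MEM[(struct a *)cptr_1 + 4B].x.  */
  p->y = 124;			/* Roughly MEM[(struct a *)cptr_1 + 4B].y.  */
  return p->x;			/* Both paths now start with the same
				   nonzero-offset MEM_REF; instead of
				   punting on the nonzero offset, the
				   oracle keeps comparing the component
				   paths and FRE folds this to 123.  */
}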

Bootstrapped/regtested x86_64-linux, OK?

Honza

	* tree-ssa-alias.c (same_tmr_indexing_p): Break out from ...
	(indirect_refs_may_alias_p): ... here.
	(nonoverlapping_component_refs_since_match_p): Also support
	non-trivial MEM_REFs in the access paths.
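
For TARGET_MEM_REFs a matching offset alone is not enough: the symbolic
STEP, INDEX and INDEX2 operands have to agree as well, which the new
same_tmr_indexing_p helper checks.  Such refs are typically produced by
ivopts; a hypothetical sketch of the kind of access pair this covers:

struct a { int x, y; } arr[10];

int
g (void)
{
  int s = 0;
  for (int i = 0; i < 10; i++)
    {
      arr[i].x = 1;	/* After ivopts both refs may become
			   TARGET_MEM_REFs with the same base, step
			   and index...  */
      arr[i].y = 2;	/* ...so only the trailing component paths
			   (.x vs .y) need to be compared.  */
      s += arr[i].x;
    }
  return s;
}
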
Index: testsuite/gcc.dg/tree-ssa/alias-access-path-9.c
===================================================================
--- testsuite/gcc.dg/tree-ssa/alias-access-path-9.c	(nonexistent)
+++ testsuite/gcc.dg/tree-ssa/alias-access-path-9.c	(working copy)
@@ -0,0 +1,44 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-fre1" } */
+
+/* This testcase tests nonoverlapping_component_refs_since_match_p in the
+   presence of non-trivial MEM_REFs.  */
+struct a {int a,b;};
+struct b {struct a a[10];};
+struct c {int c; struct b b;} c, *cptr;
+
+void
+set_a(struct a *a, int p)
+{
+  a->a=p;
+}
+void
+set_b(struct a *a, int p)
+{
+  a->b=p;
+}
+int
+get_a(struct a *a)
+{
+  return a->a;
+}
+
+int
+test(int i, int j)
+{
+  struct b *bptr = &c.b;
+  set_a (&bptr->a[i], 123);
+  set_b (&bptr->a[j], 124);
+  return get_a (&bptr->a[i]);
+}
+
+int
+test2(int i, int j)
+{
+  struct b *bptr = &cptr->b;
+  set_a (&bptr->a[i], 125);
+  set_b (&bptr->a[j], 126);
+  return get_a (&bptr->a[i]);
+}
+/* { dg-final { scan-tree-dump-times "return 123" 1 "fre1"} } */
+/* { dg-final { scan-tree-dump-times "return 125" 1 "fre1"} } */
Index: tree-ssa-alias.c
===================================================================
--- tree-ssa-alias.c	(revision 273322)
+++ tree-ssa-alias.c	(working copy)
@@ -1216,6 +1216,25 @@ nonoverlapping_component_refs_p_1 (const
   return -1;
 }
 
+/* Return true if TARGET_MEM_REFs BASE1 and BASE2 have the same indexing.  */
+
+static bool
+same_tmr_indexing_p (tree base1, tree base2)
+{
+  return ((TMR_STEP (base1) == TMR_STEP (base2)
+	  || (TMR_STEP (base1) && TMR_STEP (base2)
+	      && operand_equal_p (TMR_STEP (base1),
+				  TMR_STEP (base2), 0)))
+	  && (TMR_INDEX (base1) == TMR_INDEX (base2)
+	      || (TMR_INDEX (base1) && TMR_INDEX (base2)
+		  && operand_equal_p (TMR_INDEX (base1),
+				      TMR_INDEX (base2), 0)))
+	  && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
+	      || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
+		  && operand_equal_p (TMR_INDEX2 (base1),
+				      TMR_INDEX2 (base2), 0))));
+}
+
 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
    MATCH2 either point to the same address or are disjoint.
    MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
@@ -1265,20 +1284,6 @@ nonoverlapping_component_refs_since_matc
         component_refs1.safe_push (ref1);
       ref1 = TREE_OPERAND (ref1, 0);
     }
-  if (TREE_CODE (ref1) == MEM_REF && ref1 != match1)
-    {
-      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
-	{
-	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
-	  return -1;
-	}
-    }
-  /* TODO: Handle TARGET_MEM_REF later.  */
-  if (TREE_CODE (ref1) == TARGET_MEM_REF && ref1 != match1)
-    {
-      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
-      return -1;
-    }
 
   /* Create the stack of handled components for REF2.  */
   while (handled_component_p (ref2) && ref2 != match2)
@@ -1290,15 +1295,39 @@ nonoverlapping_component_refs_since_matc
         component_refs2.safe_push (ref2);
       ref2 = TREE_OPERAND (ref2, 0);
     }
-  if (TREE_CODE (ref2) == MEM_REF && ref2 != match2)
+
+  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
+  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
+
+  /* If only one of the access paths starts with a MEM_REF, check that its
+     offset is 0 so the address stays the same after stripping it.
+     TODO: In this case we may walk the other access path until we get the
+     same offset.
+
+     If both start with a MEM_REF, the offsets have to be the same.  */
+  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
+      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
+      || (mem_ref1 && mem_ref2
+	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
+				  TREE_OPERAND (ref2, 1))))
     {
-      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
-	{
-	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
-	  return -1;
-	}
+      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
+      return -1;
     }
-  if (TREE_CODE (ref2) == TARGET_MEM_REF && ref2 != match2)
+
+  bool target_mem_ref1 = TREE_CODE (ref1) == TARGET_MEM_REF && ref1 != match1;
+  bool target_mem_ref2 = TREE_CODE (ref2) == TARGET_MEM_REF && ref2 != match2;
+
+  if ((target_mem_ref1 && !target_mem_ref2
+       && (TMR_INDEX (ref1) || TMR_INDEX2 (ref1)
+	   || !integer_zerop (TREE_OPERAND (ref1, 1))))
+      || (target_mem_ref2 && !target_mem_ref1
+          && (TMR_INDEX (ref2) || TMR_INDEX2 (ref2)
+	      || !integer_zerop (TREE_OPERAND (ref2, 1))))
+      || (target_mem_ref1 && target_mem_ref2
+	  && (!same_tmr_indexing_p (ref1, ref2)
+	      || !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
+				      TREE_OPERAND (ref2, 1)))))
     {
       ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
       return -1;
@@ -1776,18 +1805,7 @@ indirect_refs_may_alias_p (tree ref1 ATT
 	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
 	  || (TREE_CODE (base1) == TARGET_MEM_REF
 	      && TREE_CODE (base2) == TARGET_MEM_REF
-	      && (TMR_STEP (base1) == TMR_STEP (base2)
-		  || (TMR_STEP (base1) && TMR_STEP (base2)
-		      && operand_equal_p (TMR_STEP (base1),
-					  TMR_STEP (base2), 0)))
-	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
-		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
-		      && operand_equal_p (TMR_INDEX (base1),
-					  TMR_INDEX (base2), 0)))
-	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
-		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
-		      && operand_equal_p (TMR_INDEX2 (base1),
-					  TMR_INDEX2 (base2), 0))))))
+	      && same_tmr_indexing_p (base1, base2))))
     {
       poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
       poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;

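
For reference, the per-operand test in same_tmr_indexing_p is the usual
"both absent, or both present and structurally equal" pattern; a
hypothetical helper showing the idiom the patch open-codes for TMR_STEP,
TMR_INDEX and TMR_INDEX2:

static bool
tmr_operand_eq_p (tree op1, tree op2)
{
  /* Both operands NULL (or pointer-identical), or both present and
     compared equal by operand_equal_p.  */
  return (op1 == op2
	  || (op1 && op2 && operand_equal_p (op1, op2, 0)));
}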
