PR 58958: wrong aliasing info

Marc Glisse marc.glisse@inria.fr
Fri Nov 1 22:39:00 GMT 2013


Hello,

the issue was described in the PR and the message linked from there. 
ao_ref_init_from_ptr_and_size calls get_ref_base_and_extent, which may 
detect an array_ref of variable index, but ao_ref_init_from_ptr_and_size 
never learns of it and uses the offset+size as if they were meaningful.

Bootstrap+testsuite on x86_64-unknown-linux-gnu.

2013-11-04  Marc Glisse  <marc.glisse@inria.fr>

 	PR tree-optimization/58958
gcc/
 	* tree-dfa.h (get_addr_base_and_unit_offset_1): Add error reporting.
 	* tree-ssa-alias.c (ao_ref_init_from_ptr_and_size): Use
 	get_addr_base_and_unit_offset_1 instead of get_ref_base_and_extent.

gcc/testsuite/
 	* gcc.dg/tree-ssa/pr58958.c: New file.

-- 
Marc Glisse
-------------- next part --------------
Index: gcc/testsuite/gcc.dg/tree-ssa/pr58958.c
===================================================================
--- gcc/testsuite/gcc.dg/tree-ssa/pr58958.c	(revision 0)
+++ gcc/testsuite/gcc.dg/tree-ssa/pr58958.c	(working copy)
@@ -0,0 +1,12 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized" } */
+
+double a[10];
+int f(int n){
+  a[3]=9;
+  __builtin_memset(&a[n],3,sizeof(double));
+  return a[3]==9;
+}
+
+/* { dg-final { scan-tree-dump " == 9" "optimized" } } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */

Property changes on: gcc/testsuite/gcc.dg/tree-ssa/pr58958.c
___________________________________________________________________
Added: svn:keywords
## -0,0 +1 ##
+Author Date Id Revision URL
\ No newline at end of property
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Index: gcc/tree-dfa.h
===================================================================
--- gcc/tree-dfa.h	(revision 204302)
+++ gcc/tree-dfa.h	(working copy)
@@ -30,65 +30,70 @@ extern tree ssa_default_def (struct func
 extern void set_ssa_default_def (struct function *, tree, tree);
 extern tree get_or_create_ssa_default_def (struct function *, tree);
 extern tree get_ref_base_and_extent (tree, HOST_WIDE_INT *,
 				     HOST_WIDE_INT *, HOST_WIDE_INT *);
 extern tree get_addr_base_and_unit_offset (tree, HOST_WIDE_INT *);
 extern bool stmt_references_abnormal_ssa_name (gimple);
 extern void dump_enumerated_decls (FILE *, int);
 
 /* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
    denotes the starting address of the memory access EXP.
-   Returns NULL_TREE if the offset is not constant or any component
-   is not BITS_PER_UNIT-aligned.
+   If the offset is not constant or any component is not BITS_PER_UNIT-aligned,
+   sets *SAFE to false or returns NULL_TREE if SAFE is NULL.
    VALUEIZE if non-NULL is used to valueize SSA names.  It should return
    its argument or a constant if the argument is known to be constant.  */
 /* ??? This is a static inline here to avoid the overhead of the indirect calls
    to VALUEIZE.  But is this overhead really that significant?  And should we
    perhaps just rely on WHOPR to specialize the function?  */
 
 static inline tree
 get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
-				 tree (*valueize) (tree))
+				 tree (*valueize) (tree), bool *safe = NULL)
 {
   HOST_WIDE_INT byte_offset = 0;
+  bool issafe = true;
 
   /* Compute cumulative byte-offset for nested component-refs and array-refs,
      and find the ultimate containing object.  */
   while (1)
     {
       switch (TREE_CODE (exp))
 	{
 	case BIT_FIELD_REF:
 	  {
 	    HOST_WIDE_INT this_off = TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
 	    if (this_off % BITS_PER_UNIT)
-	      return NULL_TREE;
-	    byte_offset += this_off / BITS_PER_UNIT;
+	      issafe = false;
+	    else
+	      byte_offset += this_off / BITS_PER_UNIT;
 	  }
 	  break;
 
 	case COMPONENT_REF:
 	  {
 	    tree field = TREE_OPERAND (exp, 1);
 	    tree this_offset = component_ref_field_offset (exp);
 	    HOST_WIDE_INT hthis_offset;
 
 	    if (!this_offset
 		|| TREE_CODE (this_offset) != INTEGER_CST
 		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
 		    % BITS_PER_UNIT))
-	      return NULL_TREE;
-
-	    hthis_offset = TREE_INT_CST_LOW (this_offset);
-	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
-			     / BITS_PER_UNIT);
-	    byte_offset += hthis_offset;
+	      issafe = false;
+	    else
+	      {
+		hthis_offset = TREE_INT_CST_LOW (this_offset);
+		hthis_offset +=
+		  (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
+		   / BITS_PER_UNIT);
+		byte_offset += hthis_offset;
+	      }
 	  }
 	  break;
 
 	case ARRAY_REF:
 	case ARRAY_RANGE_REF:
 	  {
 	    tree index = TREE_OPERAND (exp, 1);
 	    tree low_bound, unit_size;
 
 	    if (valueize
@@ -102,21 +107,21 @@ get_addr_base_and_unit_offset_1 (tree ex
 		&& (unit_size = array_ref_element_size (exp),
 		    TREE_CODE (unit_size) == INTEGER_CST))
 	      {
 		HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);
 
 		hindex -= TREE_INT_CST_LOW (low_bound);
 		hindex *= TREE_INT_CST_LOW (unit_size);
 		byte_offset += hindex;
 	      }
 	    else
-	      return NULL_TREE;
+	      issafe = false;
 	  }
 	  break;
 
 	case REALPART_EXPR:
 	  break;
 
 	case IMAGPART_EXPR:
 	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
 	  break;
 
@@ -148,37 +153,48 @@ get_addr_base_and_unit_offset_1 (tree ex
 	  {
 	    tree base = TREE_OPERAND (exp, 0);
 	    if (valueize
 		&& TREE_CODE (base) == SSA_NAME)
 	      base = (*valueize) (base);
 
 	    /* Hand back the decl for MEM[&decl, off].  */
 	    if (TREE_CODE (base) == ADDR_EXPR)
 	      {
 		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
-		  return NULL_TREE;
-		if (!integer_zerop (TMR_OFFSET (exp)))
+		  issafe = false;
+		else if (!integer_zerop (TMR_OFFSET (exp)))
 		  {
 		    double_int off = mem_ref_offset (exp);
 		    gcc_assert (off.high == -1 || off.high == 0);
 		    byte_offset += off.to_shwi ();
 		  }
 		exp = TREE_OPERAND (base, 0);
 	      }
 	    goto done;
 	  }
 
 	default:
 	  goto done;
 	}
 
       exp = TREE_OPERAND (exp, 0);
     }
 done:
 
-  *poffset = byte_offset;
-  return exp;
+  if (issafe)
+    {
+      *poffset = byte_offset;
+      return exp;
+    }
+  else if (safe)
+    {
+      *safe = false;
+      *poffset = 0;
+      return exp;
+    }
+  else
+    return NULL_TREE;
 }
 
 
 
 #endif /* GCC_TREE_DFA_H */
Index: gcc/tree-ssa-alias.c
===================================================================
--- gcc/tree-ssa-alias.c	(revision 204302)
+++ gcc/tree-ssa-alias.c	(working copy)
@@ -559,49 +559,54 @@ ao_ref_alias_set (ao_ref *ref)
 }
 
 /* Init an alias-oracle reference representation from a gimple pointer
    PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE the the
    size is assumed to be unknown.  The access is assumed to be only
    to or after of the pointer target, not before it.  */
 
 void
 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
 {
-  HOST_WIDE_INT t1, t2, extra_offset = 0;
+  HOST_WIDE_INT t, extra_offset = 0;
+  bool safe = true;
   ref->ref = NULL_TREE;
   if (TREE_CODE (ptr) == SSA_NAME)
     {
       gimple stmt = SSA_NAME_DEF_STMT (ptr);
       if (gimple_assign_single_p (stmt)
 	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
 	ptr = gimple_assign_rhs1 (stmt);
       else if (is_gimple_assign (stmt)
 	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
 	       && host_integerp (gimple_assign_rhs2 (stmt), 0)
-	       && (t1 = int_cst_value (gimple_assign_rhs2 (stmt))) >= 0)
+	       && (t = int_cst_value (gimple_assign_rhs2 (stmt))) >= 0)
 	{
 	  ptr = gimple_assign_rhs1 (stmt);
-	  extra_offset = BITS_PER_UNIT * t1;
+	  extra_offset = BITS_PER_UNIT * t;
 	}
     }
 
   if (TREE_CODE (ptr) == ADDR_EXPR)
-    ref->base = get_ref_base_and_extent (TREE_OPERAND (ptr, 0),
-					 &ref->offset, &t1, &t2);
+    {
+      ref->base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (ptr, 0),
+						   &t, 0, &safe);
+      ref->offset = BITS_PER_UNIT * t;
+    }
   else
     {
       ref->base = build2 (MEM_REF, char_type_node,
 			  ptr, null_pointer_node);
       ref->offset = 0;
     }
   ref->offset += extra_offset;
-  if (size
+  if (safe
+      && size
       && host_integerp (size, 0)
       && TREE_INT_CST_LOW (size) * BITS_PER_UNIT / BITS_PER_UNIT
 	 == TREE_INT_CST_LOW (size))
     ref->max_size = ref->size = TREE_INT_CST_LOW (size) * BITS_PER_UNIT;
   else
     ref->max_size = ref->size = -1;
   ref->ref_alias_set = 0;
   ref->base_alias_set = 0;
   ref->volatile_p = false;
 }


More information about the Gcc-patches mailing list