
[PATCH] Fix PR39362


This fixes PR39362.  As with other operations, we have to avoid
value-numbering anything to SSA_NAMEs that occur in abnormal PHIs,
and that includes stores.  Otherwise we might end up creating
overlapping life ranges during elimination.
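
To illustrate the shape of the problem (this is only a minimal sketch
with made-up names, not the PR's testcase): a value live across an
abnormal edge, e.g. one created by setjmp/longjmp, occurs in an
abnormal PHI, and a copy from it must not be value-numbered away,
since substituting the copy during elimination could extend the
SSA_NAME's life range past the abnormal edge:

  /* Rough sketch only; assumes the abnormal edge comes from setjmp.  */
  #include <setjmp.h>

  jmp_buf env;
  int global;

  void
  foo (int x)
  {
    int t = x + 1;        /* t is live across the abnormal edge ...  */
    if (setjmp (env))     /* ... because control can return here,
                             so t occurs in an abnormal PHI.  */
      global = t;
    else
      {
        int u = t;        /* Copy from an SSA_NAME live over the abnormal
                             edge: value-numbering u to t and substituting
                             it elsewhere could create overlapping life
                             ranges, so visit_use must mark it varying.  */
        global = u;
        longjmp (env, 1);
      }
  }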

Bootstrapped and tested on x86_64-unknown-linux-gnu, applied to trunk.

Richard.

2009-03-04  Richard Guenther  <rguenther@suse.de>

	PR tree-optimization/39362
	* tree-ssa-sccvn.c (visit_use): Stores and copies from SSA_NAMEs
	that occur in abnormal PHIs should be varying.

	* g++.dg/torture/pr39362.C: New testcase.

Index: gcc/tree-ssa-sccvn.c
===================================================================
*** gcc/tree-ssa-sccvn.c	(revision 144601)
--- gcc/tree-ssa-sccvn.c	(working copy)
*************** visit_use (tree use)
*** 2367,2380 ****
  	      VN_INFO (lhs)->expr = NULL_TREE;
  	    }
  
! 	  if (TREE_CODE (lhs) == SSA_NAME
! 	      /* We can substitute SSA_NAMEs that are live over
! 		 abnormal edges with their constant value.  */
! 	      && !(gimple_assign_copy_p (stmt)
! 		   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
! 	      && !(simplified
! 		   && is_gimple_min_invariant (simplified))
! 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
  	    changed = defs_to_varying (stmt);
  	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
  	    {
--- 2367,2385 ----
  	      VN_INFO (lhs)->expr = NULL_TREE;
  	    }
  
! 	  if ((TREE_CODE (lhs) == SSA_NAME
! 	       /* We can substitute SSA_NAMEs that are live over
! 		  abnormal edges with their constant value.  */
! 	       && !(gimple_assign_copy_p (stmt)
! 		    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
! 	       && !(simplified
! 		    && is_gimple_min_invariant (simplified))
! 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
! 	      /* Stores or copies from SSA_NAMEs that are live over
! 		 abnormal edges are a problem.  */
! 	      || (gimple_assign_single_p (stmt)
! 		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
! 		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
  	    changed = defs_to_varying (stmt);
  	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
  	    {
Index: gcc/testsuite/g++.dg/torture/pr39362.C
===================================================================
*** gcc/testsuite/g++.dg/torture/pr39362.C	(revision 0)
--- gcc/testsuite/g++.dg/torture/pr39362.C	(revision 0)
***************
*** 0 ****
--- 1,105 ----
+ /* { dg-do compile } */
+ 
+ void *fastMalloc (int n);
+ void fastFree (void *p);
+ template <class T> struct C
+ {
+   void deref () { delete static_cast <T *>(this); }
+ };
+ template <typename T>
+ struct D
+ {
+   D (T *ptr) : m_ptr (ptr) { }
+   ~D () { if (T * ptr = m_ptr) ptr->deref (); }
+   T *operator-> () const;
+   T *m_ptr;
+   typedef T *UnspecifiedBoolType;
+   operator UnspecifiedBoolType () const;
+ };
+ template <typename T> struct E
+ {
+   static void destruct (T * begin, T * end)
+     {
+       for (T * cur = begin; cur != end; ++cur)
+ 	cur->~T ();
+     }
+ };
+ template <typename T> class F;
+ template <typename T> struct G
+ {
+   static void destruct (T * begin, T * end)
+     {
+       E <T>::destruct (begin, end);
+     }
+   static void uninitializedFill (T * dst, T * dstEnd, const T & val)
+     {
+       F<T>::uninitializedFill (dst, dstEnd, val);
+     }
+ };
+ template <typename T> struct H
+ {
+   void allocateBuffer (int newCapacity)
+     {
+       m_buffer = static_cast <T *>(fastMalloc (newCapacity * sizeof (T)));
+     }
+   void deallocateBuffer (T * bufferToDeallocate)
+     {
+       if (m_buffer == bufferToDeallocate)
+ 	fastFree (bufferToDeallocate);
+     }
+   T *buffer () { }
+   int capacity () const { }
+   T *m_buffer;
+ };
+ template <typename T, int cap> class I;
+ template <typename T> struct I <T, 0> : H <T>
+ {
+   I (int capacity) { allocateBuffer (capacity); }
+   ~I () { deallocateBuffer (buffer ()); }
+   using H <T>::allocateBuffer;
+   H <T>::buffer;
+ };
+ template <typename T, int cap = 0> struct J
+ {
+   typedef T *iterator;
+   ~J () { if (m_size) shrink (0); }
+   J (const J &);
+   int capacity () const { m_buffer.capacity (); }
+   T & operator[](int i) { }
+   iterator begin () { }
+   iterator end () { return begin () + m_size; }
+   void shrink (int size);
+   template <typename U> void append (const U &);
+   int m_size;
+   I <T, cap> m_buffer;
+ };
+ template <typename T, int cap>
+ J <T, cap>::J (const J & other) : m_buffer (other.capacity ())
+ {
+ }
+ template <typename T, int cap>
+ void J <T, cap>::shrink (int size)
+ {
+   G <T>::destruct (begin () + size, end ());
+   m_size = size;
+ }
+ struct A : public C <A>
+ {
+   virtual ~A ();
+   typedef J <D <A> > B;
+   virtual A *firstChild () const;
+   virtual A *nextSibling () const;
+   virtual const B & children (int length);
+   B m_children;
+ };
+ const A::B &
+ A::children (int length)
+ {
+   for (D <A> obj = firstChild (); obj; obj = obj->nextSibling ())
+     {
+       B children = obj->children (2);
+       for (unsigned i = 0; i <length; ++i)
+ 	m_children.append (children[i]);
+     }
+ }
+ 

