This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

[PATCH] Fix PR tree-opt/28937


The problem here is that during cleanup CFG, we could propagate a
constant into an ARRAY_REF (of a component reference of an indirect
reference) which causes us to ICE because SMT usage was not updated.
This patch fixes the problem by doing two things:
1) Removing all the direct calls to cleanup_tree_cfg_loop in the loop
passes which can be expressed by returning a TODO.
2) Set up so we set updating_used_alone before calling cleanup CFG and
call recalculate_used_alone after the cleanup.

The one problem I have currently with 2 is that we do the call
unconditionally but I don't see any way around that with the current
infrastructure of cleanup CFG.

OK? Bootstrapped and tested on i686-linux-gnu with no regressions.

Thanks,
Andrew Pinski

ChangeLog:
	* tree-flow.h (tree_ssa_unswitch_loops): Return unsigned int.
	(canonicalize_induction_variables): Likewise.
	(tree_unroll_loops_completely): Likewise.
	(tree_ssa_prefetch_arrays): Likewise.
	(remove_empty_loops): Likewise.
	* tree-ssa-loop-unswitch.c (tree_ssa_unswitch_loops): Return
	TODO_cleanup_cfg instead of directly calling
	cleanup_tree_cfg_loop.
	* tree-ssa-loop-ivcanon.c (canonicalize_induction_variables):
	Likewise.
	(tree_unroll_loops_completely): Likewise.
	(remove_empty_loops): Likewise.
	* tree-ssa-loop-prefetch.c (tree_ssa_prefetch_arrays): Likewise.
	* tree-ssa-loop.c (tree_ssa_loop_unswitch): Use the return value
	of tree_ssa_unswitch_loops.
	(tree_ssa_loop_ivcanon): Use the return value of
	canonicalize_induction_variables.
	(tree_ssa_empty_loop): Use the return value of
	remove_empty_loops.
	(tree_complete_unroll): Use the return value of
	tree_unroll_loops_completely.
	(tree_ssa_loop_prefetch): Use the return value of
	tree_ssa_prefetch_arrays.
	* passes.c (execute_todo): Before cleanup CFG, set
	updating_used_alone; after cleanup CFG, call
	recalculate_used_alone.

testsuite/ChangeLog:

	* g++.dg/opt/unroll2.C: New test.
	



Index: tree-ssa-loop-unswitch.c
===================================================================
--- tree-ssa-loop-unswitch.c	(revision 116677)
+++ tree-ssa-loop-unswitch.c	(working copy)
@@ -80,7 +80,7 @@ static tree tree_may_unswitch_on (basic_
 
 /* Main entry point.  Perform loop unswitching on all suitable LOOPS.  */
 
-void
+unsigned int
 tree_ssa_unswitch_loops (struct loops *loops)
 {
   int i, num;
@@ -104,7 +104,8 @@ tree_ssa_unswitch_loops (struct loops *l
     }
 
   if (changed)
-    cleanup_tree_cfg_loop ();
+    return TODO_cleanup_cfg;
+  return 0;
 }
 
 /* Checks whether we can unswitch LOOP on condition at end of BB -- one of its
Index: testsuite/g++.dg/opt/unroll2.C
===================================================================
--- testsuite/g++.dg/opt/unroll2.C	(revision 0)
+++ testsuite/g++.dg/opt/unroll2.C	(revision 0)
@@ -0,0 +1,27 @@
+// PR tree-opt/28937
+// Complete unroll forgot to update the statement usage
+// which meant we ICEd in add_virtual_operand.
+
+// { dg-do compile }
+// { dg-options "-O2" }
+
+
+class SHA256
+{
+  unsigned m_digest;
+  unsigned long long m_count;
+  unsigned char _buffer[64];
+  static void Transform (unsigned * data);
+  void WriteByteBlock (unsigned t);
+};
+void SHA256::WriteByteBlock (unsigned t)
+{
+  unsigned data32[16];
+  Transform (data32);
+  unsigned long long lenInBits = m_count;
+  if (t != (64 - 8))
+    return;
+  for (int i = 0; i < 2; i++)
+          _buffer[t++] = (unsigned char)lenInBits;
+}
+
Index: tree-ssa-loop-ivcanon.c
===================================================================
--- tree-ssa-loop-ivcanon.c	(revision 116677)
+++ tree-ssa-loop-ivcanon.c	(working copy)
@@ -330,7 +330,7 @@ canonicalize_loop_induction_variables (s
 /* The main entry point of the pass.  Adds canonical induction variables
    to the suitable LOOPS.  */
 
-void
+unsigned int
 canonicalize_induction_variables (struct loops *loops)
 {
   unsigned i;
@@ -352,14 +352,15 @@ canonicalize_induction_variables (struct
   scev_reset ();
 
   if (changed)
-    cleanup_tree_cfg_loop ();
+    return TODO_cleanup_cfg;
+  return 0;
 }
 
 /* Unroll LOOPS completely if they iterate just few times.  Unless
    MAY_INCREASE_SIZE is true, perform the unrolling only if the
    size of the code does not increase.  */
 
-void
+unsigned int
 tree_unroll_loops_completely (struct loops *loops, bool may_increase_size)
 {
   unsigned i;
@@ -388,7 +389,8 @@ tree_unroll_loops_completely (struct loo
   scev_reset ();
 
   if (changed)
-    cleanup_tree_cfg_loop ();
+    return TODO_cleanup_cfg;
+  return 0;
 }
 
 /* Checks whether LOOP is empty.  */
@@ -562,7 +564,7 @@ try_remove_empty_loop (struct loop *loop
 
 /* Remove the empty LOOPS.  */
 
-void
+unsigned int
 remove_empty_loops (struct loops *loops)
 {
   bool changed = false;
@@ -574,6 +576,7 @@ remove_empty_loops (struct loops *loops)
   if (changed)
     {
       scev_reset ();
-      cleanup_tree_cfg_loop ();
+      return TODO_cleanup_cfg;
     }
+  return 0;
 }
Index: tree-ssa-loop.c
===================================================================
--- tree-ssa-loop.c	(revision 116677)
+++ tree-ssa-loop.c	(working copy)
@@ -158,8 +158,7 @@ tree_ssa_loop_unswitch (void)
   if (!current_loops)
     return 0;
 
-  tree_ssa_unswitch_loops (current_loops);
-  return 0;
+  return tree_ssa_unswitch_loops (current_loops);
 }
 
 static bool
@@ -260,8 +259,7 @@ tree_ssa_loop_ivcanon (void)
   if (!current_loops)
     return 0;
 
-  canonicalize_induction_variables (current_loops);
-  return 0;
+  return canonicalize_induction_variables (current_loops);
 }
 
 static bool
@@ -322,8 +320,7 @@ tree_ssa_empty_loop (void)
   if (!current_loops)
     return 0;
 
-  remove_empty_loops (current_loops);
-  return 0;
+  return remove_empty_loops (current_loops);
 }
 
 struct tree_opt_pass pass_empty_loop =
@@ -381,11 +378,10 @@ tree_complete_unroll (void)
   if (!current_loops)
     return 0;
 
-  tree_unroll_loops_completely (current_loops,
-				flag_unroll_loops
-				|| flag_peel_loops
-				|| optimize >= 3);
-  return 0;
+  return tree_unroll_loops_completely (current_loops,
+				       flag_unroll_loops
+					|| flag_peel_loops
+					|| optimize >= 3);
 }
 
 static bool
@@ -419,8 +415,7 @@ tree_ssa_loop_prefetch (void)
   if (!current_loops)
     return 0;
 
-  tree_ssa_prefetch_arrays (current_loops);
-  return 0;
+  return tree_ssa_prefetch_arrays (current_loops);
 }
 
 static bool
Index: tree-ssa-loop-prefetch.c
===================================================================
--- tree-ssa-loop-prefetch.c	(revision 116677)
+++ tree-ssa-loop-prefetch.c	(working copy)
@@ -1006,12 +1006,13 @@ fail:
 
 /* Issue prefetch instructions for array references in LOOPS.  */
 
-void
+unsigned int
 tree_ssa_prefetch_arrays (struct loops *loops)
 {
   unsigned i;
   struct loop *loop;
   bool unrolled = false;
+  int todo_flags = 0;
 
   if (!HAVE_prefetch
       /* It is possible to ask compiler for say -mtune=i486 -march=pentium4.
@@ -1019,7 +1020,7 @@ tree_ssa_prefetch_arrays (struct loops *
 	 of processor costs and i486 does not have prefetch, but
 	 -march=pentium4 causes HAVE_prefetch to be true.  Ugh.  */
       || PREFETCH_BLOCK == 0)
-    return;
+    return 0;
 
   initialize_original_copy_tables ();
 
@@ -1057,8 +1058,9 @@ tree_ssa_prefetch_arrays (struct loops *
   if (unrolled)
     {
       scev_reset ();
-      cleanup_tree_cfg_loop ();
+      todo_flags |= TODO_cleanup_cfg;
     }
 
   free_original_copy_tables ();
+  return todo_flags;
 }
Index: tree-flow.h
===================================================================
--- tree-flow.h	(revision 116677)
+++ tree-flow.h	(working copy)
@@ -798,11 +798,11 @@ bool empty_block_p (basic_block);
 /* In tree-ssa-loop*.c  */
 
 void tree_ssa_lim (struct loops *);
-void tree_ssa_unswitch_loops (struct loops *);
-void canonicalize_induction_variables (struct loops *);
-void tree_unroll_loops_completely (struct loops *, bool);
-void tree_ssa_prefetch_arrays (struct loops *);
-void remove_empty_loops (struct loops *);
+unsigned int tree_ssa_unswitch_loops (struct loops *);
+unsigned int canonicalize_induction_variables (struct loops *);
+unsigned int tree_unroll_loops_completely (struct loops *, bool);
+unsigned int tree_ssa_prefetch_arrays (struct loops *);
+unsigned int remove_empty_loops (struct loops *);
 void tree_ssa_iv_optimize (struct loops *);
 
 bool number_of_iterations_exit (struct loop *, edge,
Index: passes.c
===================================================================
--- passes.c	(revision 116677)
+++ passes.c	(working copy)
@@ -727,11 +727,17 @@ execute_todo (unsigned int flags)
   /* Always cleanup the CFG before trying to update SSA .  */
   if (flags & TODO_cleanup_cfg)
     {
+      /* CFG Cleanup can cause a constant to prop into an ARRAY_REF.  */
+      updating_used_alone = true;
+
       if (current_loops)
 	cleanup_tree_cfg_loop ();
       else
 	cleanup_tree_cfg ();
 
+      /* Update the used alone after cleanup cfg.  */
+      recalculate_used_alone ();
+
       /* When cleanup_tree_cfg merges consecutive blocks, it may
 	 perform some simplistic propagation when removing single
 	 valued PHI nodes.  This propagation may, in turn, cause the

Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]