[PATCH] Sink clobbers if EH block contains just clobbers (PR tree-optimization/51117)


Hi!

Even with the optimize_clobbers changes, for e.g.
struct A { char buf[64]; };
void bar (A *);

void
foo ()
{
  A c;
  bar (&c);
  try
  {
    {
      A a;
      bar (&a);
      if (a.buf[13])
	throw 1;
      else if (a.buf[52])
	throw 3;
    }
    {
      A b;
      bar (&b);
      if (b.buf[13])
	throw 2;
    }
  }
  catch (...)
  {
    throw;
  }
}

we still generate much worse code since the introduction of
clobbers.  This patch is an attempt to improve that: if we catch
an exception just to do a bunch of clobbers and then rethrow it
internally, the extra EH regions/rethrow can't be optimized away.
For that case this patch attempts to sink all the clobbers into the
single successor (through the EH edge); if that edge didn't leave
the original scope, the clobbers wouldn't be there in the first place.
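
For illustration, the landing pads for the testcase above look
roughly like this in the IL (a hand-written sketch, not an actual
dump; block and region numbers are made up):

  <bb 3>:			;; landing pad for the scope of a
    a ={v} {CLOBBER};
    resx 1;			;; throws internally, single EH succ <bb 5>

  <bb 4>:			;; landing pad for the scope of b
    b ={v} {CLOBBER};
    resx 2;			;; likewise, single EH succ <bb 5>

sink_clobbers moves both clobbers to the start of <bb 5>, so the
source blocks end up containing just the RESX and the existing EH
cleanups can deal with them.
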
I had to tweak the expander's conflict checking a little bit, because
if we have a BB with two incoming EH edges and clobber stmts from both
sunk into its beginning, it would consider both variables (a and b
above) to be live at the same time, even though there is no insn at
which they can actually be live simultaneously: the PHIs don't mention
either of them (and after all, PHIs aren't memory loads), and
immediately after the PHIs come the clobbers.
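
As a sketch of that situation (again hand-written, not a real dump),
the merged block after sinking looks something like:

  <bb 5>:
    # .MEM_7 = PHI <.MEM_3(3), .MEM_5(4)>	;; only a virtual PHI
    a ={v} {CLOBBER};
    b ={v} {CLOBBER};
    resx 3;

Scanning the stmts naively would make both a and b live on entry of
<bb 5>, although on the path through <bb 3> only a is still in scope
and through <bb 4> only b, which would needlessly force them into
separate stack slots.  The tweaked add_scope_conflicts_1 therefore
drops vars clobbered at the start of a BB from the live-on-entry set,
unless one of the PHIs mentions them.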

Bootstrapped/regtested on x86_64-linux and i686-linux, ok for trunk?

2011-12-09  Jakub Jelinek  <jakub@redhat.com>

	PR tree-optimization/51117
	* tree-eh.c (sink_clobbers): New function.
	(execute_lower_eh_dispatch): Call it for BBs ending with
	internally throwing RESX.
	* cfgexpand.c (add_scope_conflicts_1): Add SCRATCH argument.
	If BB starts with clobbers of vars that aren't mentioned
	in any of the PHIs, don't consider the clobbered vars live
	on entry of the BB.
	(add_scope_conflicts): Allocate and free SCRATCH bitmap
	and pass it to add_scope_conflicts_1.

--- gcc/tree-eh.c.jj	2011-12-09 15:13:51.849968215 +0100
+++ gcc/tree-eh.c	2011-12-09 15:07:46.608148450 +0100
@@ -3194,6 +3194,76 @@ optimize_clobbers (basic_block bb)
     }
 }
 
+/* Try to sink var = {v} {CLOBBER} stmts followed just by
+   internal throw to successor BB.  */
+
+static int
+sink_clobbers (basic_block bb)
+{
+  edge e;
+  edge_iterator ei;
+  gimple_stmt_iterator gsi, dgsi;
+  basic_block succbb;
+  bool any_clobbers = false;
+
+  /* Only optimize if BB has a single EH successor and
+     all predecessor edges are EH too.  */
+  if (!single_succ_p (bb)
+      || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
+    return 0;
+
+  FOR_EACH_EDGE (e, ei, bb->preds)
+    {
+      if ((e->flags & EDGE_EH) == 0)
+	return 0;
+    }
+
+  /* And BB contains only CLOBBER stmts before the final
+     RESX.  */
+  gsi = gsi_last_bb (bb);
+  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
+    {
+      gimple stmt = gsi_stmt (gsi);
+      if (is_gimple_debug (stmt))
+	continue;
+      if (gimple_code (stmt) == GIMPLE_LABEL)
+	break;
+      if (!gimple_clobber_p (stmt)
+	  || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
+	return 0;
+      any_clobbers = true;
+    }
+  if (!any_clobbers)
+    return 0;
+
+  succbb = single_succ (bb);
+  dgsi = gsi_after_labels (succbb);
+  gsi = gsi_last_bb (bb);
+  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
+    {
+      gimple stmt = gsi_stmt (gsi);
+      tree vdef;
+      if (is_gimple_debug (stmt))
+	continue;
+      if (gimple_code (stmt) == GIMPLE_LABEL)
+	break;
+      unlink_stmt_vdef (stmt);
+      gsi_remove (&gsi, false);
+      vdef = gimple_vdef (stmt);
+      if (vdef && TREE_CODE (vdef) == SSA_NAME)
+	{
+	  vdef = SSA_NAME_VAR (vdef);
+	  mark_sym_for_renaming (vdef);
+	  gimple_set_vdef (stmt, vdef);
+	  gimple_set_vuse (stmt, vdef);
+	}
+      release_defs (stmt);
+      gsi_insert_before (&dgsi, stmt, GSI_SAME_STMT);
+    }
+
+  return TODO_update_ssa_only_virtuals;
+}
+
 /* At the end of inlining, we can lower EH_DISPATCH.  Return true when 
    we have found some duplicate labels and removed some edges.  */
 
@@ -3349,7 +3419,7 @@ static unsigned
 execute_lower_eh_dispatch (void)
 {
   basic_block bb;
-  bool any_rewritten = false;
+  int flags = 0;
   bool redirected = false;
 
   assign_filter_values ();
@@ -3362,16 +3432,20 @@ execute_lower_eh_dispatch (void)
       if (gimple_code (last) == GIMPLE_EH_DISPATCH)
 	{
 	  redirected |= lower_eh_dispatch (bb, last);
-	  any_rewritten = true;
+	  flags |= TODO_update_ssa_only_virtuals;
+	}
+      else if (gimple_code (last) == GIMPLE_RESX)
+	{
+	  if (stmt_can_throw_external (last))
+	    optimize_clobbers (bb);
+	  else
+	    flags |= sink_clobbers (bb);
 	}
-      else if (gimple_code (last) == GIMPLE_RESX
-	       && stmt_can_throw_external (last))
-	optimize_clobbers (bb);
     }
 
   if (redirected)
     delete_unreachable_blocks ();
-  return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
+  return flags;
 }
 
 static bool
--- gcc/cfgexpand.c.jj	2011-12-08 16:36:52.000000000 +0100
+++ gcc/cfgexpand.c	2011-12-09 16:56:23.743900593 +0100
@@ -445,17 +445,56 @@ visit_conflict (gimple stmt ATTRIBUTE_UN
    liveness.  */
 
 static void
-add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
+add_scope_conflicts_1 (basic_block bb, bitmap work, bitmap scratch,
+		       bool for_conflict)
 {
   edge e;
   edge_iterator ei;
-  gimple_stmt_iterator gsi;
+  gimple_stmt_iterator gsi, pgsi;
   bool (*visit)(gimple, tree, void *);
+  bool phis_checked = false;
 
   bitmap_clear (work);
   FOR_EACH_EDGE (e, ei, bb->preds)
     bitmap_ior_into (work, (bitmap)e->src->aux);
 
+  /* As an exception, if the basic block starts with clobber stmts,
+     don't consider the clobbered vars as live on entry, unless they are
+     referenced by any of the PHIs.  This is for clobbers that were sunk
+     across EH edges.  */
+  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+    {
+      gimple stmt = gsi_stmt (gsi);
+
+      if (gimple_clobber_p (stmt))
+	{
+	  tree lhs = gimple_assign_lhs (stmt);
+	  size_t *v;
+
+	  if (!phis_checked)
+	    {
+	      bitmap_clear (scratch);
+	      for (pgsi = gsi_start_phis (bb); !gsi_end_p (pgsi);
+		   gsi_next (&pgsi))
+		{
+		  gimple phi = gsi_stmt (pgsi);
+		  walk_stmt_load_store_addr_ops (phi, scratch, NULL, NULL,
+						 visit_op);
+		}
+	      phis_checked = true;
+	    }
+
+	  if (TREE_CODE (lhs) == VAR_DECL
+	      && DECL_RTL_IF_SET (lhs) == pc_rtx
+	      && (v = (size_t *)
+		  pointer_map_contains (decl_to_stack_part, lhs))
+	      && !bitmap_bit_p (scratch, *v))
+	    bitmap_clear_bit (work, *v);
+	}
+      else if (!is_gimple_debug (stmt))
+	break;
+    }
+
   if (for_conflict)
     {
       /* We need to add conflicts for everything life at the start of
@@ -477,11 +516,10 @@ add_scope_conflicts_1 (basic_block bb, b
   else
     visit = visit_op;
 
-  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+  for (pgsi = gsi_start_phis (bb); !gsi_end_p (pgsi); gsi_next (&pgsi))
     {
-      gimple stmt = gsi_stmt (gsi);
-      if (!is_gimple_debug (stmt))
-	walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
+      gimple phi = gsi_stmt (pgsi);
+      walk_stmt_load_store_addr_ops (phi, work, NULL, NULL, visit);
     }
   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
@@ -514,6 +552,7 @@ add_scope_conflicts (void)
   basic_block bb;
   bool changed;
   bitmap work = BITMAP_ALLOC (NULL);
+  bitmap scratch = BITMAP_ALLOC (NULL);
 
   /* We approximate the life range of a stack variable by taking the first
      mention of its name as starting point(s), and by the end-of-scope
@@ -535,15 +574,16 @@ add_scope_conflicts (void)
       FOR_EACH_BB (bb)
 	{
 	  bitmap active = (bitmap)bb->aux;
-	  add_scope_conflicts_1 (bb, work, false);
+	  add_scope_conflicts_1 (bb, work, scratch, false);
 	  if (bitmap_ior_into (active, work))
 	    changed = true;
 	}
     }
 
   FOR_EACH_BB (bb)
-    add_scope_conflicts_1 (bb, work, true);
+    add_scope_conflicts_1 (bb, work, scratch, true);
 
+  BITMAP_FREE (scratch);
   BITMAP_FREE (work);
   FOR_ALL_BB (bb)
     BITMAP_FREE (bb->aux);

	Jakub

