[tree-ssa] [PATCH] SSAPRE fixes

Daniel Berlin dberlin@dberlin.org
Wed Oct 29 20:19:00 GMT 2003


On Oct 29, 2003, at 1:55 PM, Andreas Jaeger wrote:

> Daniel Berlin <dberlin@dberlin.org> writes:
>
>>>
>>> Ok, I finally understood that - but it didn't help (I replied some
>>> hours ago to the list but I just wanted to clarify),
>>>
>>> Andreas
>>>
>>
>> that's very odd, since that's the only thing i've ever seen cause the
>> failure in the backtrace you listed.
>>
>> Like I said, i'm trying to reproduce on the platforms i have .
>
> Ok, let's see.
>
> I'll just run a cvs update and try again...
>

I reproduced the problem on ppc64, and verified my patch fixes it.
Patch attached.

Will commit as soon as x86 bootstrap finishes.
2003-10-29  Daniel Berlin <dberlin@dberlin.org>

	* tree-ssa-pre.c (n_phi_preds): New variable.
	(generate_vops_as_of_bb): New function.
	(generate_expr_as_of_bb): Remove unused first argument. Update all
	callers.
	(subst_phis): Add boundary check for the phi_pred_cache array.
	(same_e_version_phi_result): Once modified, no point in continuing
	the loop.
	(finalize_2): ESSA Minimization can crash if we ended up with
	new BB's.
Index: tree-ssa-pre.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/Attic/tree-ssa-pre.c,v
retrieving revision 1.1.4.94
diff -u -3 -p -r1.1.4.94 tree-ssa-pre.c
--- tree-ssa-pre.c	24 Oct 2003 07:41:31 -0000	1.1.4.94
+++ tree-ssa-pre.c	29 Oct 2003 19:43:51 -0000
@@ -132,8 +132,8 @@ static tree create_ephi_node (basic_bloc
  static inline int opnum_of_phi (tree, int);
  static inline int opnum_of_ephi (const tree, const edge);
  static tree subst_phis (struct expr_info *, tree, basic_block, basic_block);
-static void generate_expr_as_of_bb (struct expr_info *, tree, int,
-				    basic_block);
+static void generate_expr_as_of_bb (tree, int, basic_block);
+static void generate_vops_as_of_bb (tree, int, basic_block);
  void rename_1 (struct expr_info *);
  static void process_delayed_rename (struct expr_info *, tree, tree);
  static void assign_new_class (tree, varray_type *, varray_type *);
@@ -323,7 +323,8 @@ struct expr_info

  /* Cache of expressions generated for given phi operand, to avoid
     recomputation and wasting memory.  */
-tree *phi_pred_cache;
+static tree *phi_pred_cache;
+static int n_phi_preds;

  /* Trying to lookup ephi pred operand indexes takes forever on graphs
     that have high connectivity because it's an O(n) linked list
@@ -785,6 +786,7 @@ expr_phi_insertion (bitmap * dfs, struct
  	    continue;
  	  set_var_phis (ei, SSA_NAME_DEF_STMT (use));
  	}
+#if 1
        if (ei->loadpre_cand && TREE_CODE (ei->expr) == INDIRECT_REF)
  	{
  	  uses = vuse_ops (stmt_ann (occurp));
@@ -799,7 +801,7 @@ expr_phi_insertion (bitmap * dfs, struct
  	      set_var_phis (ei, SSA_NAME_DEF_STMT (use));
  	    }
  	}
-
+#endif
      }
    /* Union the results of the dfphis and the varphis to get the
       answer to everywhere we need EPHIS. */
@@ -1153,8 +1155,7 @@ opnum_of_phi (tree phi, int j)
     block BB). */

  static void
-generate_expr_as_of_bb (struct expr_info *ei ATTRIBUTE_UNUSED, tree expr,
-			int j, basic_block bb)
+generate_expr_as_of_bb (tree expr, int j, basic_block bb)
  {
    varray_type uses = use_ops (stmt_ann (expr));
    bool replaced_constants = false;
@@ -1165,7 +1166,7 @@ generate_expr_as_of_bb (struct expr_info

    for (k = 0; k < VARRAY_ACTIVE_SIZE (uses); k++)
      {
-      tree *vp = VARRAY_GENERIC_PTR (uses, k);
+      tree *vp = VARRAY_TREE_PTR (uses, k);
        tree v = *vp;
        tree phi;

@@ -1178,7 +1179,7 @@ generate_expr_as_of_bb (struct expr_info
  	      *vp = p;
  	      if (!phi_ssa_name_p (p))
  		replaced_constants = true;
-        break;
+	      break;
  	    }
  	}
      }
@@ -1190,24 +1191,88 @@ generate_expr_as_of_bb (struct expr_info

  }

+/* Generate VUSE ops as they would look in basic block J (using the phi in
+   block BB). */
+
+static void
+generate_vops_as_of_bb (tree expr, int j, basic_block bb)
+{
+  varray_type uses = vuse_ops (stmt_ann (expr));
+  size_t i;
+
+  if (!uses)
+    return;
+
+  for (i = 0; i < VARRAY_ACTIVE_SIZE (uses); i++)
+    {
+      tree v = VARRAY_TREE (uses, i);
+      tree phi;
+
+      for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
+	{
+	  if (names_match_p (PHI_RESULT (phi), v))
+	    {
+	      int opnum = opnum_of_phi (phi, j);
+	      tree p = PHI_ARG_DEF (phi, opnum);
+	      VARRAY_TREE (uses, i) = p;
+	      break;
+	    }
+	}
+    }
+}
+
  /* Make a copy of Z as it would look in BB j, using the PHIs in BB.  */

  static tree
  subst_phis (struct expr_info *ei, tree Z, basic_block j, basic_block bb)
  {
    tree stmt_copy;
-  if (phi_pred_cache[j->index] != NULL_TREE)
+  varray_type vuses_copy;
+  size_t i;
+
+  /* Return the cached version, if we have one. */
+  if (j->index < n_phi_preds
+      && phi_pred_cache[j->index] != NULL_TREE)
      return phi_pred_cache[j->index];
+
+  /* Otherwise, generate a new expression.  */
    pre_stats.exprs_generated++;
    stmt_copy = unshare_expr (Z);
    create_stmt_ann (stmt_copy);
    modify_stmt (stmt_copy);
    get_stmt_operands (stmt_copy);
-  generate_expr_as_of_bb (ei, stmt_copy, j->index, bb);
+  generate_expr_as_of_bb (stmt_copy, j->index, bb);
    set_bb_for_stmt (stmt_copy, bb);
    modify_stmt (stmt_copy);
    get_stmt_operands (stmt_copy);
-  phi_pred_cache[j->index] = stmt_copy;
+
+  /* If we have vuses on the original statement, and we still have
+     use_ops on the generated expr, we need to copy the vuses.  */
+  if (ei->loadpre_cand
+      && vuse_ops (stmt_ann (Z))
+      && use_ops (stmt_ann (stmt_copy)))
+    {
+      varray_type vuses = vuse_ops (stmt_ann (Z));
+      stmt_ann_t ann = stmt_ann (stmt_copy);
+      VARRAY_TREE_INIT (vuses_copy, 1, "Use ops array generated by subst_phis");
+      for (i = 0; i < VARRAY_ACTIVE_SIZE (vuses); i++)
+	VARRAY_PUSH_TREE (vuses_copy, VARRAY_TREE (vuses, i));
+
+      if (ann->vops == NULL)
+	{
+	  ann->vops = ggc_alloc (sizeof (struct voperands_d));
+	  memset ((void *) ann->vops, 0, sizeof (*(ann->vops)));
+	}
+      ann->vops->vuse_ops = vuses_copy;
+      generate_vops_as_of_bb (stmt_copy, j->index, bb);
+    }
+  else
+    {
+      stmt_ann (stmt_copy)->vops = NULL;
+    }
+
+  if (j->index < n_phi_preds)
+    phi_pred_cache[j->index] = stmt_copy;
    return stmt_copy;
  }

@@ -1296,7 +1361,8 @@ bool same_e_version_phi_result (struct e
    varray_type cruses = use_ops (stmt_ann (cr));
    if (!cruses)
      return false;
-  for (i = 0; i < VARRAY_ACTIVE_SIZE (cruses); i++)
+
+  for (i = 0; i < VARRAY_ACTIVE_SIZE (cruses) && not_mod; i++)
      {
        tree *use1p = VARRAY_TREE_PTR (cruses, i);
        tree use1;
@@ -1306,8 +1372,21 @@ bool same_e_version_phi_result (struct e
        if (load_modified_phi_result (bb_for_stmt (def), use1))
  	not_mod = false;
      }
+
+  if (not_mod && ei->loadpre_cand)
+    {
+      cruses = vuse_ops (stmt_ann (cr));
+
+      for (i = 0; cruses && i < VARRAY_ACTIVE_SIZE (cruses) && not_mod; i++)
+	{
+	  tree use1 = VARRAY_TREE (cruses, i);
+	  if (load_modified_phi_result (bb_for_stmt (def), use1))
+	    not_mod = false;
+	}
+    }
+
    if (not_mod)
-    return true;
+    return true;
    else if (ei->strred_cand)
      {
        if (injured_phi_result_real_occ (ei, def, cr, bb_for_stmt (use)))
@@ -2411,37 +2490,42 @@ finalize_2 (struct expr_info *ei)
  	  set_save (ei, EUSE_DEF (ref));
  	}
      }
-
-  /* ESSA Minimization.  */
-  for (i = 0; i < VARRAY_ACTIVE_SIZE (ei->euses_dt_order); i++)
-    {
-      tree ephi = VARRAY_TREE (ei->euses_dt_order, i);
-      if (TREE_CODE (ephi) != EPHI_NODE)
-	continue;
-      EPHI_IDENTITY (ephi) = true;
-      EPHI_IDENTICAL_TO (ephi) = NULL;
-    }
-
-  for (i = 0; i < VARRAY_ACTIVE_SIZE (ei->euses_dt_order); i++)
+
+  /* We can't do ESSA minimization if dominators need to be redone.  */
+  if (!redo_dominators)
      {
-      tree ephi = VARRAY_TREE (ei->euses_dt_order, i);
-      if (!ephi || TREE_CODE (ephi) != EPHI_NODE)
-	continue;
-      if (ephi_will_be_avail (ephi))
+
+      /* ESSA Minimization.  */
+      for (i = 0; i < VARRAY_ACTIVE_SIZE (ei->euses_dt_order); i++)
+	{
+	  tree ephi = VARRAY_TREE (ei->euses_dt_order, i);
+	  if (TREE_CODE (ephi) != EPHI_NODE)
+	    continue;
+	  EPHI_IDENTITY (ephi) = true;
+	  EPHI_IDENTICAL_TO (ephi) = NULL;
+	}
+
+      for (i = 0; i < VARRAY_ACTIVE_SIZE (ei->euses_dt_order); i++)
  	{
-	  int k;
-	  for (k = 0; k < EPHI_NUM_ARGS (ephi); k++)
+	  tree ephi = VARRAY_TREE (ei->euses_dt_order, i);
+	  if (!ephi || TREE_CODE (ephi) != EPHI_NODE)
+	    continue;
+	  if (ephi_will_be_avail (ephi))
  	    {
-	      if (EPHI_ARG_INJURED (ephi, k))
-		require_phi (ei, EPHI_ARG_EDGE (ephi, k)->src);
-	      else if (EPHI_ARG_DEF (ephi, k)
-		       && TREE_CODE (EPHI_ARG_DEF (ephi, k)) == EUSE_NODE
-		       && really_available_def (EPHI_ARG_DEF (ephi, k)))
-		require_phi (ei, bb_for_stmt (EPHI_ARG_DEF (ephi, k)));
+	      int k;
+	      for (k = 0; k < EPHI_NUM_ARGS (ephi); k++)
+		{
+		  if (EPHI_ARG_INJURED (ephi, k))
+		    require_phi (ei, EPHI_ARG_EDGE (ephi, k)->src);
+		  else if (EPHI_ARG_DEF (ephi, k)
+			   && TREE_CODE (EPHI_ARG_DEF (ephi, k)) == EUSE_NODE
+			   && really_available_def (EPHI_ARG_DEF (ephi, k)))
+		    require_phi (ei, bb_for_stmt (EPHI_ARG_DEF (ephi, k)));
+		}
  	    }
  	}
+      do_ephi_df_search (ei, replacing_search);
      }
-  do_ephi_df_search (ei, replacing_search);
  }

  /* Perform a DFS on EPHI using the functions in SEARCH. */
@@ -3045,6 +3129,7 @@ pre_expression (struct expr_info *slot,
    bitmap_clear (created_phi_preds);
    ephi_pindex_htab = htab_create (500, ephi_pindex_hash, ephi_pindex_eq, free);
    phi_pred_cache = xcalloc (last_basic_block, sizeof (tree));
+  n_phi_preds = last_basic_block;

    if (!expr_phi_insertion ((bitmap *)data, ei))
      goto cleanup;
@@ -3165,8 +3250,8 @@ collect_expressions (basic_block block,
        if ((TREE_CODE_CLASS (TREE_CODE (expr)) == '2'
  	   || TREE_CODE_CLASS (TREE_CODE (expr)) == '<'
  	   /*|| TREE_CODE_CLASS (TREE_CODE (expr)) == '1'*/
-	   /*|| TREE_CODE (expr) == SSA_NAME
-	   || TREE_CODE (expr) == INDIRECT_REF*/)
+	   /* || TREE_CODE (expr) == SSA_NAME
+	      || TREE_CODE (expr) == INDIRECT_REF */)
  	  && !ann->makes_aliased_stores
  	  && !ann->has_volatile_ops)
  	{



More information about the Gcc-patches mailing list