This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

[dataflow]:PATCH COMMITTED to speed up compilation of functions with many small loops.


from stevenb


> >
> >Timings of compilation time of fma1.f90 at -O2:
> >Dataflow branch without patch: ~72.5s
> >Dataflow branch after applying patch: ~49.5s
> >
> >That's a 32% compile time improvement!
> >
> >For reference, trunk as of r123653: ~43.5
> >
> >So the dataflow branch is still slower, but not nearly as much as
> >before: 67% before, 14% after.
>   

This removes an issue where the entire function was being examined to
build the table of defs for a subset (an inner loop) of the program.

I bootstrapped and regression tested this on x86-32, x86-64, ia-64 and ppc.

kenny




2007-04-15  Kenneth Zadeck <zadeck@naturalbridge.com>

    * df-scan.c (df_ref_create, df_reg_chain_unlink,
    df_install_refs): Use df->analyze_subset.
    (df_reorganize_refs_by_reg_by_reg, df_reorganize_refs_by_reg_by_insn):
    New functions. 
    (df_reorganize_refs_by_reg): Split into
    df_reorganize_refs_by_reg_by_reg,
    df_reorganize_refs_by_reg_by_insn.
    (df_add_refs_to_table): Do not add hardware_regs if not asked for.
    (df_reorganize_refs_by_insn): Move call to count_refs.
    (df_maybe_reorganize_def_refs, df_maybe_reorganize_use_refs):
    Remove code to modify df->total_size.
        * df-core.c (df_set_blocks): Set df->analyze_subset.
    (df_finish_pass): Clear df->analyze_subset.
    (df_analyze): Use df->analyze_subset.
    (df_dump_start): Remove total_size from dump. 
    * df.h (DF_RI_NO_UPDATE, DF_DEFS_TOTAL_SIZE, DF_USES_TOTAL_SIZE):
    Removed.
    (df.analyze_subset): New field.
    * df-problems.c (df_ri_alloc, df_ri_compute): Removed DF_RI_NO_UPDATE.
   



Index: df-scan.c
===================================================================
--- df-scan.c	(revision 123790)
+++ df-scan.c	(working copy)
@@ -653,7 +653,7 @@ df_ref_create (rtx reg, rtx *loc, rtx in
     }
 
   /* Do not add if ref is not in the right blocks.  */
-  if (add_to_table && df->blocks_to_analyze)
+  if (add_to_table && df->analyze_subset)
     add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
 
   df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
@@ -761,7 +761,7 @@ df_reg_chain_unlink (struct df_ref *ref)
 
   if (refs)
     {
-      if (df->blocks_to_analyze)
+      if (df->analyze_subset)
 	{
 	  if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (ref)->index))
 	    refs[id] = NULL;
@@ -1276,112 +1276,271 @@ df_process_deferred_rescans (void)
 }
 
 
+/* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
+   the uses if INCLUDE_USES. Include the eq_uses if
+   INCLUDE_EQ_USES.  */
+
+static unsigned int
+df_count_refs (bool include_defs, bool include_uses, 
+	       bool include_eq_uses)
+{
+  unsigned int regno;
+  int size = 0;
+  unsigned int m = df->regs_inited;
+  
+  for (regno = 0; regno < m; regno++)
+    {
+      if (include_defs)
+	size += DF_REG_DEF_COUNT (regno);
+      if (include_uses)
+	size += DF_REG_USE_COUNT (regno);
+      if (include_eq_uses)
+	size += DF_REG_EQ_USE_COUNT (regno);
+    }
+  return size;
+}
+
+
 /* Take build ref table for either the uses or defs from the reg-use
-   or reg-def chains.  */
+   or reg-def chains.  This version processes the refs in reg order
+   which is likely to be best if processing the whole function.  */
 
 static void 
-df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
-			   struct df_reg_info **reg1_info,
-			   struct df_reg_info **reg2_info)
+df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
+				  bool include_defs, 
+				  bool include_uses, 
+				  bool include_eq_uses)
 {
   unsigned int m = df->regs_inited;
   unsigned int regno;
   unsigned int offset = 0;
-  unsigned int size = 0;
+  unsigned int start;
+
+  if (df->changeable_flags & DF_NO_HARD_REGS)
+    {
+      start = FIRST_PSEUDO_REGISTER;
+      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
+      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
+    }
+  else
+    start = 0;
+
+  ref_info->total_size 
+    = df_count_refs (include_defs, include_uses, include_eq_uses);
 
   df_check_and_grow_ref_info (ref_info, 1);
 
-  for (regno = 0; regno < m; regno++)
+  for (regno = start; regno < m; regno++)
     {
-      struct df_reg_info *reg_info = reg1_info[regno];
-      struct df_ref *ref = reg_info->reg_chain;
       int count = 0;
       ref_info->begin[regno] = offset;
-      while (ref) 
+      if (include_defs)
 	{
-	  if ((!df->blocks_to_analyze)
-	      || bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB(ref)->index))
+	  struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
+	  while (ref) 
 	    {
 	      ref_info->refs[offset] = ref;
 	      DF_REF_ID (ref) = offset++;
 	      count++;
+	      ref = DF_REF_NEXT_REG (ref);
+	      gcc_assert (offset < ref_info->refs_size);
 	    }
-	  else
-	    DF_REF_ID (ref) = -1;
-
-	  size++;
-	  ref = DF_REF_NEXT_REG (ref);
-	  gcc_assert (offset < ref_info->refs_size);
 	}
-      if (reg2_info)
+      if (include_uses)
 	{
-	  reg_info = reg2_info[regno];
-	  gcc_assert (reg_info);
-	  ref = reg_info->reg_chain;
+	  struct df_ref *ref = DF_REG_USE_CHAIN (regno);
 	  while (ref) 
 	    {
-	      if ((!df->blocks_to_analyze)
-		  || bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB(ref)->index))
-		{
-		  ref_info->refs[offset] = ref;
-		  DF_REF_ID (ref) = offset++;
-		  count++;
-		}
-	      else
-		DF_REF_ID (ref) = -1;
-
-	      size++;
+	      ref_info->refs[offset] = ref;
+	      DF_REF_ID (ref) = offset++;
+	      count++;
+	      ref = DF_REF_NEXT_REG (ref);
+	      gcc_assert (offset < ref_info->refs_size);
+	    }
+	}
+      if (include_eq_uses)
+	{
+	  struct df_ref *ref = DF_REG_EQ_USE_CHAIN (regno);
+	  while (ref) 
+	    {
+	      ref_info->refs[offset] = ref;
+	      DF_REF_ID (ref) = offset++;
+	      count++;
 	      ref = DF_REF_NEXT_REG (ref);
 	      gcc_assert (offset < ref_info->refs_size);
 	    }
 	}
       ref_info->count[regno] = count;
     }
-
+  
   /* The bitmap size is not decremented when refs are deleted.  So
      reset it now that we have squished out all of the empty
      slots.  */
-
   ref_info->table_size = offset;
-  ref_info->total_size = size;
 }
 
 
-/* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
-   the uses if INCLUDE_USES. Include the eq_uses if
-   INCLUDE_EQ_USES.  */
+/* Take build ref table for either the uses or defs from the reg-use
+   or reg-def chains.  This version processes the refs in insn order
+   which is likely to be best if processing some segment of the
+   function.  */
 
-static unsigned int
-df_count_refs (bool include_defs, bool include_uses, 
-	       bool include_eq_uses)
+static void 
+df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
+				   bool include_defs, 
+				   bool include_uses, 
+				   bool include_eq_uses)
 {
-  unsigned int regno;
-  int size = 0;
+  bitmap_iterator bi;
+  unsigned int bb_index;
   unsigned int m = df->regs_inited;
+  unsigned int offset = 0;
+  unsigned int r;
+  unsigned int start 
+    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
+
+  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
+  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
+
+  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
+  df_check_and_grow_ref_info (ref_info, 1);
+
+  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
+    {
+      basic_block bb = BASIC_BLOCK (bb_index);
+      rtx insn;
+
+      FOR_BB_INSNS (bb, insn)
+	{
+	  if (INSN_P (insn))
+	    {
+	      unsigned int uid = INSN_UID (insn);
+	      struct df_ref **ref_rec;
+	      
+	      if (include_defs)
+		for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
+		  {
+		    unsigned int regno = DF_REF_REGNO (*ref_rec);
+		    ref_info->count[regno]++;
+		  }
+	      if (include_uses)
+		for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
+		  {
+		    unsigned int regno = DF_REF_REGNO (*ref_rec);
+		    ref_info->count[regno]++;
+		  }
+	      if (include_eq_uses)
+		for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
+		  {
+		    unsigned int regno = DF_REF_REGNO (*ref_rec);
+		    ref_info->count[regno]++;
+		  }
+	    }
+	}
+    }
+
+  for (r = start; r < m; r++)
+    {
+      ref_info->begin[r] = offset;
+      offset += ref_info->count[r];
+      ref_info->count[r] = 0;
+    }
   
-  /* The eq_uses do not go into the table but we must account for
-     them and reset their ids.  */
-  for (regno = 0; regno < m; regno++)
+  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
     {
-      if (include_defs)
-	size += DF_REG_DEF_COUNT (regno);
-      if (include_uses)
-	size += DF_REG_USE_COUNT (regno);
-      if (include_eq_uses)
-	size += DF_REG_EQ_USE_COUNT (regno);
+      basic_block bb = BASIC_BLOCK (bb_index);
+      rtx insn;
+
+      FOR_BB_INSNS (bb, insn)
+	{
+	  if (INSN_P (insn))
+	    {
+	      unsigned int uid = INSN_UID (insn);
+	      struct df_ref **ref_rec;
+	      
+	      if (include_defs)
+		for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
+		  {
+		    struct df_ref *ref = *ref_rec;
+		    unsigned int regno = DF_REF_REGNO (ref);
+		    if (regno >= start)
+		      {
+			unsigned int id
+			  = ref_info->begin[regno] + ref_info->count[regno]++;
+			DF_REF_ID (ref) = id;
+			ref_info->refs[id] = ref;
+		      }
+		  }
+	      if (include_uses)
+		for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
+		  {
+		    struct df_ref *ref = *ref_rec;
+		    unsigned int regno = DF_REF_REGNO (ref);
+		    if (regno >= start)
+		      {
+			unsigned int id
+			  = ref_info->begin[regno] + ref_info->count[regno]++;
+			DF_REF_ID (ref) = id;
+			ref_info->refs[id] = ref;
+		      }
+		  }
+	      if (include_eq_uses)
+		for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
+		  {
+		    struct df_ref *ref = *ref_rec;
+		    unsigned int regno = DF_REF_REGNO (ref);
+		    if (regno >= start)
+		      {
+			unsigned int id
+			  = ref_info->begin[regno] + ref_info->count[regno]++;
+			DF_REF_ID (ref) = id;
+			ref_info->refs[id] = ref;
+		      }
+		  }
+	    }
+	}
     }
-  return size;
+
+  /* The bitmap size is not decremented when refs are deleted.  So
+     reset it now that we have squished out all of the empty
+     slots.  */
+
+  ref_info->table_size = offset;
+}
+
+/* Take build ref table for either the uses or defs from the reg-use
+   or reg-def chains.  */
+
+static void 
+df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
+			   bool include_defs, 
+			   bool include_uses, 
+			   bool include_eq_uses)
+{
+  if (df->analyze_subset)
+    df_reorganize_refs_by_reg_by_insn (ref_info, include_defs, 
+				       include_uses, include_eq_uses);
+  else
+    df_reorganize_refs_by_reg_by_reg (ref_info, include_defs, 
+				       include_uses, include_eq_uses);
 }
 
 
 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET.  */
 static unsigned int 
-df_add_refs_to_table (unsigned int offset, struct df_ref_info *ref_info, struct df_ref **ref_vec)
+df_add_refs_to_table (unsigned int offset, 
+		      struct df_ref_info *ref_info, 
+		      struct df_ref **ref_vec)
 {
   while (*ref_vec)
     {
-      ref_info->refs[offset] = *ref_vec;
-      DF_REF_ID (*ref_vec) = offset++;
+      struct df_ref *ref = *ref_vec;
+      if ((!(df->changeable_flags & DF_NO_HARD_REGS))
+	  || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
+	{
+	  ref_info->refs[offset] = ref;
+	  DF_REF_ID (*ref_vec) = offset++;
+	}
       ref_vec++;
     }
   return offset;
@@ -1401,20 +1560,25 @@ df_reorganize_refs_by_insn_bb (basic_blo
   rtx insn;
 
   if (include_defs)
-    offset = df_add_refs_to_table (offset, ref_info, df_get_artificial_defs (bb->index));
+    offset = df_add_refs_to_table (offset, ref_info, 
+				   df_get_artificial_defs (bb->index));
   if (include_uses)
-    offset = df_add_refs_to_table (offset, ref_info, df_get_artificial_uses (bb->index));
+    offset = df_add_refs_to_table (offset, ref_info, 
+				   df_get_artificial_uses (bb->index));
 
   FOR_BB_INSNS (bb, insn)
     if (INSN_P (insn))
       {
 	unsigned int uid = INSN_UID (insn);
 	if (include_defs)
-	  offset = df_add_refs_to_table (offset, ref_info, DF_INSN_UID_DEFS (uid));
+	  offset = df_add_refs_to_table (offset, ref_info, 
+					 DF_INSN_UID_DEFS (uid));
 	if (include_uses)
-	  offset = df_add_refs_to_table (offset, ref_info, DF_INSN_UID_USES (uid));
+	  offset = df_add_refs_to_table (offset, ref_info, 
+					 DF_INSN_UID_USES (uid));
 	if (include_eq_uses)
-	  offset = df_add_refs_to_table (offset, ref_info, DF_INSN_UID_EQ_USES (uid));
+	  offset = df_add_refs_to_table (offset, ref_info, 
+					 DF_INSN_UID_EQ_USES (uid));
       }
   return offset;
 }
@@ -1433,6 +1597,7 @@ df_reorganize_refs_by_insn (struct df_re
   basic_block bb;
   unsigned int offset = 0;
 
+  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
   df_check_and_grow_ref_info (ref_info, 1);
   if (df->blocks_to_analyze)
     {
@@ -1447,8 +1612,6 @@ df_reorganize_refs_by_insn (struct df_re
 	}
 
       ref_info->table_size = offset;
-      ref_info->total_size = df_count_refs (include_defs, include_uses, 
-					    include_eq_uses);
     }
   else
     {
@@ -1457,10 +1620,88 @@ df_reorganize_refs_by_insn (struct df_re
 						include_defs, include_uses, 
 						include_eq_uses);
       ref_info->table_size = offset;
-      ref_info->total_size = offset;
     }
 }
 
+
+/* If the use refs in DF are not organized, reorganize them.  */
+
+void 
+df_maybe_reorganize_use_refs (enum df_ref_order order)
+{
+  if (order == df->use_info.ref_order)
+    return;
+
+  switch (order)
+    {
+    case DF_REF_ORDER_BY_REG:
+      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
+      break;
+
+    case DF_REF_ORDER_BY_REG_WITH_NOTES:
+      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
+      break;
+
+    case DF_REF_ORDER_BY_INSN:
+      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
+      break;
+
+    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
+      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
+      break;
+
+    case DF_REF_ORDER_NO_TABLE:
+      free (df->use_info.refs);
+      df->use_info.refs = NULL;
+      df->use_info.refs_size = 0;
+      break;
+
+    case DF_REF_ORDER_UNORDERED:
+    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
+      gcc_unreachable ();
+      break;
+    }
+      
+  df->use_info.ref_order = order;
+}
+
+
+/* If the def refs in DF are not organized, reorganize them.  */
+
+void 
+df_maybe_reorganize_def_refs (enum df_ref_order order)
+{
+  if (order == df->def_info.ref_order)
+    return;
+
+  switch (order)
+    {
+    case DF_REF_ORDER_BY_REG:
+      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
+      break;
+
+    case DF_REF_ORDER_BY_INSN:
+      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
+      break;
+
+    case DF_REF_ORDER_NO_TABLE:
+      free (df->def_info.refs);
+      df->def_info.refs = NULL;
+      df->def_info.refs_size = 0;
+      break;
+
+    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
+    case DF_REF_ORDER_BY_REG_WITH_NOTES:
+    case DF_REF_ORDER_UNORDERED:
+    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
+      gcc_unreachable ();
+      break;
+    }
+      
+  df->def_info.ref_order = order;
+}
+
+
 /* Change the BB of all refs in the ref chain to NEW_BB.
    Assumes that all refs in the chain have the same BB.
    If changed, return the original bb the chain belonged to
@@ -1548,87 +1789,6 @@ df_insn_change_bb (rtx insn)
 }
 
 
-/* If the use refs in DF are not organized, reorganize them.  */
-
-void 
-df_maybe_reorganize_use_refs (enum df_ref_order order)
-{
-  if (order == df->use_info.ref_order)
-    return;
-
-  switch (order)
-    {
-    case DF_REF_ORDER_BY_REG:
-      df_reorganize_refs_by_reg (&df->use_info, df->use_regs, NULL);
-      df->use_info.total_size += df_count_refs (false, false, true);
-      break;
-
-    case DF_REF_ORDER_BY_REG_WITH_NOTES:
-      df_reorganize_refs_by_reg (&df->use_info, 
-				 df->use_regs, df->eq_use_regs);
-      break;
-
-    case DF_REF_ORDER_BY_INSN:
-      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
-      df->use_info.total_size += df_count_refs (false, false, true);
-      break;
-
-    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
-      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
-      break;
-
-    case DF_REF_ORDER_NO_TABLE:
-      free (df->use_info.refs);
-      df->use_info.refs = NULL;
-      df->use_info.refs_size = 0;
-      break;
-
-    case DF_REF_ORDER_UNORDERED:
-    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
-      gcc_unreachable ();
-      break;
-    }
-      
-  df->use_info.ref_order = order;
-}
-
-
-/* If the def refs in DF are not organized, reorganize them.  */
-
-void 
-df_maybe_reorganize_def_refs (enum df_ref_order order)
-{
-  if (order == df->def_info.ref_order)
-    return;
-
-  switch (order)
-    {
-    case DF_REF_ORDER_BY_REG:
-      df_reorganize_refs_by_reg (&df->def_info, df->def_regs, NULL);
-      break;
-
-    case DF_REF_ORDER_BY_INSN:
-      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
-      break;
-
-    case DF_REF_ORDER_NO_TABLE:
-      free (df->def_info.refs);
-      df->def_info.refs = NULL;
-      df->def_info.refs_size = 0;
-      break;
-
-    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
-    case DF_REF_ORDER_BY_REG_WITH_NOTES:
-    case DF_REF_ORDER_UNORDERED:
-    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
-      gcc_unreachable ();
-      break;
-    }
-      
-  df->def_info.ref_order = order;
-}
-
-
 /* Helper function for df_ref_change_reg_with_loc.  */
 
 static void
@@ -1707,7 +1867,6 @@ df_ref_change_reg_with_loc_1 (struct df_
 }
 
 
-
 /* Change the regno of all refs that contained LOC from OLD_REGNO to
    NEW_REGNO.  Refs that do not match LOC are not changed.  This call
    is to support the SET_REGNO macro. */
@@ -2231,7 +2390,7 @@ df_install_refs (basic_block bb,
 	}
 
       /* Do not add if ref is not in the right blocks.  */
-      if (add_to_table && df->blocks_to_analyze)
+      if (add_to_table && df->analyze_subset)
 	add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
 
       for (i = 0; i < count; i++)
Index: df-core.c
===================================================================
--- df-core.c	(revision 123790)
+++ df-core.c	(working copy)
@@ -561,6 +561,7 @@ df_set_blocks (bitmap blocks)
 	  df->blocks_to_analyze = BITMAP_ALLOC (NULL);
 	}
       bitmap_copy (df->blocks_to_analyze, blocks);
+      df->analyze_subset = true;
     }
   else
     {
@@ -571,6 +572,7 @@ df_set_blocks (bitmap blocks)
 	  BITMAP_FREE (df->blocks_to_analyze);
 	  df->blocks_to_analyze = NULL;
 	}
+      df->analyze_subset = false;
     }
 
   /* Setting the blocks causes the refs to be unorganized since only
@@ -664,6 +666,7 @@ df_finish_pass (void)
       BITMAP_FREE (df->blocks_to_analyze);
       df->blocks_to_analyze = NULL;
       df_mark_solutions_dirty ();
+      df->analyze_subset = false;
     }
 
 #ifdef ENABLE_CHECKING
@@ -1265,8 +1268,8 @@ void
 df_analyze (void)
 {
   bitmap current_all_blocks = BITMAP_ALLOC (NULL);
-  int i;
   bool everything;
+  int i;
   
   if (df->postorder)
     free (df->postorder);
@@ -1295,7 +1298,7 @@ df_analyze (void)
 
   /* Make sure that we have pruned any unreachable blocks from these
      sets.  */
-  if (df->blocks_to_analyze)
+  if (df->analyze_subset)
     {
       everything = false;
       bitmap_and_into (df->blocks_to_analyze, current_all_blocks);
@@ -2018,8 +2021,6 @@ df_dump_start (FILE *file)
   if (df->blocks_to_analyze)
     fprintf (file, "def_info->table_size = %d, use_info->table_size = %d\n",
 	     DF_DEFS_TABLE_SIZE (), DF_USES_TABLE_SIZE ());
-  fprintf (file, "def_info->total_size = %d, use_info->total_size = %d\n",
-	   DF_DEFS_TOTAL_SIZE (), DF_USES_TOTAL_SIZE ());
 
   for (i = 0; i < df->num_problems_defined; i++)
     {
Index: df.h
===================================================================
--- df.h	(revision 123790)
+++ df.h	(working copy)
@@ -389,18 +389,17 @@ enum df_changeable_flags 
   DF_LR_RUN_DCE           =  1, /* Run DCE.  */
   DF_NO_HARD_REGS         =  2, /* Skip hard registers in RD and CHAIN Building.  */
   DF_EQ_NOTES             =  4, /* Build chains with uses present in EQUIV/EQUAL notes. */
-  DF_RI_NO_UPDATE         =  8, /* Do not update the register info when df_analyze is run.  */
-  DF_NO_REGS_EVER_LIVE    = 16, /* Do not compute the regs_ever_live.  */
+  DF_NO_REGS_EVER_LIVE    =  8, /* Do not compute the regs_ever_live.  */
 
   /* Cause df_insn_rescan df_notes_rescan and df_insn_delete, to
   return immediately.  This is used by passes that know how to update
   the scanning them selves.  */
-  DF_NO_INSN_RESCAN       = 32,
+  DF_NO_INSN_RESCAN       = 16,
 
   /* Cause df_insn_rescan df_notes_rescan and df_insn_delete, to
   return after marking the insn for later processing.  This allows all
   rescans to be batched.  */
-  DF_DEFER_INSN_RESCAN    = 64
+  DF_DEFER_INSN_RESCAN    = 32
 };
 
 /* Two of these structures are inline in df, one for the uses and one
@@ -457,10 +456,16 @@ struct df
   struct dataflow *problems_by_index [DF_LAST_PROBLEM_PLUS1]; 
   int num_problems_defined;
 
-  /* If not NULL, the subset of blocks of the program to be considered
-     for analysis.  */ 
+  /* If not NULL, this subset of blocks of the program to be
+     considered for analysis.  At certain times, this will contain all
+     the blocks in the function so it cannot be used as an indicator
+     of if we are analyzing a subset.  See analyze_subset.  */ 
   bitmap blocks_to_analyze;
 
+  /* If this is true, then only a subset of the blocks of the program
+     is considered to compute the solutions of dataflow problems.  */
+  bool analyze_subset;
+
   /* True if someone added or deleted something from regs_ever_live so
      that the entry and exit blocks need be reprocessed.  */
   bool redo_entry_and_exit;
@@ -612,13 +617,11 @@ struct df
    ARRAYS ARE A CACHE LOCALITY KILLER.  */
 
 #define DF_DEFS_TABLE_SIZE() (df->def_info.table_size)
-#define DF_DEFS_TOTAL_SIZE() (df->def_info.total_size)
 #define DF_DEFS_GET(ID) (df->def_info.refs[(ID)])
 #define DF_DEFS_SET(ID,VAL) (df->def_info.refs[(ID)]=(VAL))
 #define DF_DEFS_COUNT(ID) (df->def_info.count[(ID)])
 #define DF_DEFS_BEGIN(ID) (df->def_info.begin[(ID)])
 #define DF_USES_TABLE_SIZE() (df->use_info.table_size)
-#define DF_USES_TOTAL_SIZE() (df->use_info.total_size)
 #define DF_USES_GET(ID) (df->use_info.refs[(ID)])
 #define DF_USES_SET(ID,VAL) (df->use_info.refs[(ID)]=(VAL))
 #define DF_USES_COUNT(ID) (df->use_info.count[(ID)])
Index: df-problems.c
===================================================================
--- df-problems.c	(revision 123790)
+++ df-problems.c	(working copy)
@@ -3818,9 +3818,6 @@ df_ri_alloc (bitmap all_blocks ATTRIBUTE
   struct df_ri_problem_data *problem_data =
     (struct df_ri_problem_data *) df_ri->problem_data;
 
-  if (df->changeable_flags & DF_RI_NO_UPDATE)
-    return;
-
   df_grow_reg_info ();
 
   if (!df_ri->problem_data)
@@ -4423,9 +4420,6 @@ df_ri_compute (bitmap all_blocks)
   struct df_ri_problem_data *problem_data =
     (struct df_ri_problem_data *) df_ri->problem_data;
 
-  if (df->changeable_flags & DF_RI_NO_UPDATE)
-    return;
-
   if (df_ri_problem_p (DF_RI_LIFE))
     {
       local_live = BITMAP_ALLOC (NULL);

Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]