This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

patch committed to dataflow branch


This patch removes all uses of flow during register allocation.

This code has been bootstrapped and regression tested on

x86_64-unknown-linux-gnu
powerpc64-unknown-linux-gnu
i686-pc-linux-gnu

Kenny

2006-04-16  Kenneth Zadeck <zadeck@naturalbridge.com>
	* df.h (shared_df): Removed.
	(ra_df): New.
	* df-core.c (shared_df): Removed.
	(ra_df): New.
	* postreload.c (reload_combine): Changed to use ra_df.
	(rest_of_handle_postreload): Call df_finish on ra_df.
	* reload.c (push_reload, find_dummy_reload): Ditto.
	* global.c (global_alloc, global_conflicts, mark_elimination, 
	build_insn_chain, rest_of_handle_global_alloc): Ditto.
	* reload1.c (compute_use_by_pseudos, reload): Ditto.
	* local-alloc.c (local_alloc, block_alloc): Created local instance
	of df.
	* passes.c (init_optimization_passes): Moved clear_df to before
	register allocators.
	* tree-pass.h (pass_partition_blocks): Removed redundant 
	copy.

Eric, I am getting closer!!!

Index: df.h
===================================================================
--- df.h	(revision 112855)
+++ df.h	(working copy)
@@ -576,8 +576,9 @@ extern void debug_df_defno (unsigned int
 extern void debug_df_useno (unsigned int);
 extern void debug_df_ref (struct df_ref *);
 extern void debug_df_chain (struct df_link *);
-/* An instance of df that can be shared between passes.  */
-extern struct df *shared_df; 
+/* An instance of df that can be shared between global_alloc and the
+   reload passes.  */
+extern struct df *ra_df; 
 
 
 /* Functions defined in df-problems.c. */
Index: df-core.c
===================================================================
--- df-core.c	(revision 112985)
+++ df-core.c	(working copy)
@@ -294,7 +294,7 @@ are write-only operations.  
 #include "tree-pass.h"
 
 static struct df *ddf = NULL;
-struct df *shared_df = NULL;
+struct df *ra_df = NULL;
 
 static void * df_get_bb_info (struct dataflow *, unsigned int);
 static void df_set_bb_info (struct dataflow *, unsigned int, void *);
Index: postreload.c
===================================================================
--- postreload.c	(revision 112855)
+++ postreload.c	(working copy)
@@ -742,8 +742,8 @@ reload_combine (void)
 	{
 	  HARD_REG_SET live;
 
-	  REG_SET_TO_HARD_REG_SET (live, DF_RA_LIVE_IN (rtl_df, bb));
-	  compute_use_by_pseudos (&live, DF_RA_LIVE_IN (rtl_df, bb));
+	  REG_SET_TO_HARD_REG_SET (live, DF_RA_LIVE_IN (ra_df, bb));
+	  compute_use_by_pseudos (&live, DF_RA_LIVE_IN (ra_df, bb));
 	  COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
 	  IOR_HARD_REG_SET (ever_live_at_start, live);
 	}
@@ -1580,6 +1580,8 @@ rest_of_handle_postreload (void)
      Remove any EH edges associated with them.  */
   if (flag_non_call_exceptions)
     purge_all_dead_edges ();
+
+  df_finish (ra_df);
   return 0;
 }
 
Index: reload.c
===================================================================
--- reload.c	(revision 112855)
+++ reload.c	(working copy)
@@ -1495,7 +1495,7 @@ push_reload (rtx in, rtx out, rtx *inloc
 	    /* Check that we don't use a hardreg for an uninitialized
 	       pseudo.  See also find_dummy_reload().  */
 	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
-		|| ! bitmap_bit_p (DF_RA_LIVE_OUT (rtl_df, ENTRY_BLOCK_PTR),
+		|| ! bitmap_bit_p (DF_RA_LIVE_OUT (ra_df, ENTRY_BLOCK_PTR),
 				   ORIGINAL_REGNO (XEXP (note, 0))))
 	    && ! refers_to_regno_for_reload_p (regno,
 					       (regno
@@ -1973,7 +1973,7 @@ find_dummy_reload (rtx real_in, rtx real
 	   as they would clobber the other live pseudo using the same.
 	   See also PR20973.  */
       && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
-          || ! bitmap_bit_p (DF_RA_LIVE_OUT (rtl_df, ENTRY_BLOCK_PTR),
+          || ! bitmap_bit_p (DF_RA_LIVE_OUT (ra_df, ENTRY_BLOCK_PTR),
 			     ORIGINAL_REGNO (in))))
     {
       unsigned int regno = REGNO (in) + in_offset;
Index: global.c
===================================================================
--- global.c	(revision 112855)
+++ global.c	(working copy)
@@ -334,20 +334,18 @@ global_alloc (void)
 
   size_t i;
   rtx x;
-  df_finish (rtl_df);
-  rtl_df = NULL;
 
   max_regno = max_reg_num ();
   compact_blocks ();
 
   /* Create a new version of df that has the special version of UR.  */
-  rtl_df = df_init (DF_HARD_REGS);
-  df_lr_add_problem (rtl_df, 0);
-  df_urec_add_problem (rtl_df, 0);
+  ra_df = df_init (DF_HARD_REGS);
+  df_lr_add_problem (ra_df, 0);
+  df_urec_add_problem (ra_df, 0);
 
-  df_analyze (rtl_df);
+  df_analyze (ra_df);
   if (dump_file)
-    df_dump (rtl_df, dump_file);
+    df_dump (ra_df, dump_file);
   max_allocno = 0;
 
 #if 0
@@ -706,7 +704,7 @@ global_conflicts (void)
 	 be explicitly marked in basic_block_live_at_start.  */
 
       {
-	regset old = DF_RA_LIVE_IN (rtl_df, b);
+	regset old = DF_RA_LIVE_IN (ra_df, b);
 	int ax = 0;
 	reg_set_iterator rsi;
 
@@ -1759,7 +1757,7 @@ mark_elimination (int from, int to)
 
   FOR_EACH_BB (bb)
     {
-      regset r = DF_RA_LIVE_IN (rtl_df, bb);
+      regset r = DF_RA_LIVE_IN (ra_df, bb);
       if (REGNO_REG_SET_P (r, from))
 	{
 	  CLEAR_REGNO_REG_SET (r, from);
@@ -1849,7 +1847,7 @@ build_insn_chain (rtx first)
 
 	  CLEAR_REG_SET (live_relevant_regs);
 
-	  EXECUTE_IF_SET_IN_BITMAP (DF_RA_LIVE_IN (rtl_df, b), 0, i, bi)
+	  EXECUTE_IF_SET_IN_BITMAP (DF_RA_LIVE_IN (ra_df, b), 0, i, bi)
 	    {
 	      if (i < FIRST_PSEUDO_REGISTER
 		  ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
@@ -2026,15 +2024,14 @@ rest_of_handle_global_alloc (void)
     failure = global_alloc ();
   else
     {
-      df_finish (rtl_df);
-      rtl_df = NULL;
-      rtl_df = df_init (DF_HARD_REGS);
-      df_lr_add_problem (rtl_df, 0);
-      df_urec_add_problem (rtl_df, 0);
-      df_analyze (rtl_df);
+      ra_df = df_init (DF_HARD_REGS);
+      df_lr_add_problem (ra_df, 0);
+      df_urec_add_problem (ra_df, 0);
+      df_analyze (ra_df);
 
       build_insn_chain (get_insns ());
       failure = reload (get_insns (), 0);
+      df_finish (ra_df);
     }
 
   if (dump_enabled_p (pass_global_alloc.static_pass_number))
Index: reload1.c
===================================================================
--- reload1.c	(revision 112855)
+++ reload1.c	(working copy)
@@ -544,7 +544,7 @@ compute_use_by_pseudos (HARD_REG_SET *to
       if (r < 0)
 	{
 	  /* reload_combine uses the information from
-	     DF_RA_LIVE_IN (rtl_df, BASIC_BLOCK), which might still
+	     DF_RA_LIVE_IN (ra_df, BASIC_BLOCK), which might still
 	     contain registers that have not actually been allocated
 	     since they have an equivalence.  */
 	  gcc_assert (reload_completed);
@@ -1090,8 +1090,8 @@ reload (rtx first, int global)
   if (! frame_pointer_needed)
     FOR_EACH_BB (bb)
       {
-	CLEAR_REGNO_REG_SET (DF_RA_LIVE_IN (rtl_df, bb), HARD_FRAME_POINTER_REGNUM);
-	/*	CLEAR_REGNO_REG_SET (DF_RA_LIVE_OUT (rtl_df, bb), HARD_FRAME_POINTER_REGNUM); */
+	CLEAR_REGNO_REG_SET (DF_RA_LIVE_IN (ra_df, bb), HARD_FRAME_POINTER_REGNUM);
+	/*	CLEAR_REGNO_REG_SET (DF_RA_LIVE_OUT (ra_df, bb), HARD_FRAME_POINTER_REGNUM); */
       }
 	
   /* Come here (with failure set nonzero) if we can't get enough spill
Index: local-alloc.c
===================================================================
--- local-alloc.c	(revision 112855)
+++ local-alloc.c	(working copy)
@@ -295,7 +295,7 @@ static int memref_referenced_p (rtx, rtx
 static int memref_used_between_p (rtx, rtx, rtx);
 static void update_equiv_regs (void);
 static void no_equiv (rtx, rtx, void *);
-static void block_alloc (int);
+static void block_alloc (struct df *, int);
 static int qty_sugg_compare (int, int);
 static int qty_sugg_compare_1 (const void *, const void *);
 static int qty_compare (int, int);
@@ -346,6 +346,12 @@ local_alloc (void)
   int i;
   int max_qty;
   basic_block b;
+  struct df *df = df_init (DF_HARD_REGS);
+
+  df_lr_add_problem (df, 0);
+  df_ur_add_problem (df, 0);
+  df_ri_add_problem (df, DF_RI_LIFE);
+  df_analyze (df);
 
   /* We need to keep track of whether or not we recorded a LABEL_REF so
      that we know if the jump optimizer needs to be rerun.  */
@@ -432,9 +438,10 @@ local_alloc (void)
 
       next_qty = 0;
 
-      block_alloc (b->index);
+      block_alloc (df, b->index);
     }
 
+  df_finish (df);
   free (qty);
   free (qty_phys_copy_sugg);
   free (qty_phys_num_copy_sugg);
@@ -1239,7 +1246,7 @@ no_equiv (rtx reg, rtx store ATTRIBUTE_U
    Only the pseudos that die but once can be handled.  */
 
 static void
-block_alloc (int b)
+block_alloc (struct df * df, int b)
 {
   int i, q;
   rtx insn;
@@ -1271,7 +1278,7 @@ block_alloc (int b)
 
   /* Initialize table of hardware registers currently live.  */
 
-  REG_SET_TO_HARD_REG_SET (regs_live, DF_LIVE_IN (rtl_df, BASIC_BLOCK (b)));
+  REG_SET_TO_HARD_REG_SET (regs_live, DF_LIVE_IN (df, BASIC_BLOCK (b)));
 
   /* This loop scans the instructions of the basic block
      and assigns quantities to registers.
Index: passes.c
===================================================================
--- passes.c	(revision 112985)
+++ passes.c	(working copy)
@@ -662,6 +662,7 @@ init_optimization_passes (void)
   NEXT_PASS (pass_recompute_reg_usage);
   NEXT_PASS (pass_sms);
   NEXT_PASS (pass_sched);
+  NEXT_PASS (pass_clear_df);
   /* NEXT_PASS (pass_web); */
   NEXT_PASS (pass_local_alloc);
   NEXT_PASS (pass_global_alloc);
@@ -670,7 +671,6 @@ init_optimization_passes (void)
 
   p = &pass_postreload.sub;
   NEXT_PASS (pass_postreload_cse);
-  NEXT_PASS (pass_clear_df);
   NEXT_PASS (pass_gcse2);
   NEXT_PASS (pass_rtl_dse);
   NEXT_PASS (pass_split_after_reload);
Index: tree-pass.h
===================================================================
--- tree-pass.h	(revision 112985)
+++ tree-pass.h	(working copy)
@@ -357,7 +357,6 @@ extern struct tree_opt_pass pass_life;
 extern struct tree_opt_pass pass_combine;
 extern struct tree_opt_pass pass_if_after_combine;
 extern struct tree_opt_pass pass_partition_blocks;
-extern struct tree_opt_pass pass_partition_blocks;
 extern struct tree_opt_pass pass_regmove;
 extern struct tree_opt_pass pass_split_all_insns;
 extern struct tree_opt_pass pass_mode_switching;

Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]