This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]

Re: Patch to make loop_depth start from 0


On Tue, Jan 04, 2000 at 01:32:59PM +0100, Jan Hubicka wrote:
> Unlike other loop_depths the loop_depth stored in basic block structure
> is set to 1 for insns outside loops.
> Because this has caused enough confusion so far, I believe it is best to
> change this to use 0, as other parts do.

I think this is a good change to make.  I've committed the
following patch to address it.


r~


        * flow.c (mark_set_1): Use loop_depth+1 as reference weight.
        (find_auto_inc, mark_used_regs, try_pre_increment_1): Likewise.
        (count_reg_sets_1, count_reg_references): Likewise.
        (flow_loops_level_compute): Start counting actual loop depth at 1.
        (flow_loops_find): Likewise.
        * local-alloc.c (update_equiv_regs): Likewise.
        * regclass.c (regclass): Re-instate Jan 4 0-based loop_depth change.

Index: flow.c
===================================================================
RCS file: /cvs/gcc/egcs/gcc/flow.c,v
retrieving revision 1.200
diff -c -p -d -r1.200 flow.c
*** flow.c	1999/12/27 23:01:12	1.200
--- flow.c	2000/01/06 20:24:47
*************** mark_set_1 (needed, dead, x, insn, signi
*** 4028,4034 ****
  	          /* Count (weighted) references, stores, etc.  This counts a
  		     register twice if it is modified, but that is correct.  */
  	          REG_N_SETS (regno)++;
! 	          REG_N_REFS (regno) += loop_depth;
  		  
  	          /* The insns where a reg is live are normally counted
  		     elsewhere, but we want the count to include the insn
--- 4028,4034 ----
  	          /* Count (weighted) references, stores, etc.  This counts a
  		     register twice if it is modified, but that is correct.  */
  	          REG_N_SETS (regno)++;
! 	          REG_N_REFS (regno) += loop_depth + 1;
  		  
  	          /* The insns where a reg is live are normally counted
  		     elsewhere, but we want the count to include the insn
*************** find_auto_inc (needed, x, insn)
*** 4281,4287 ****
  	      /* Count an extra reference to the reg.  When a reg is
  		 incremented, spilling it is worse, so we want to make
  		 that less likely.  */
! 	      REG_N_REFS (regno) += loop_depth;
  
  	      /* Count the increment as a setting of the register,
  		 even though it isn't a SET in rtl.  */
--- 4281,4287 ----
  	      /* Count an extra reference to the reg.  When a reg is
  		 incremented, spilling it is worse, so we want to make
  		 that less likely.  */
! 	      REG_N_REFS (regno) += loop_depth + 1;
  
  	      /* Count the increment as a setting of the register,
  		 even though it isn't a SET in rtl.  */
*************** mark_used_regs (needed, live, x, flags, 
*** 4502,4508 ****
  
  		/* Count (weighted) number of uses of each reg.  */
  
! 		REG_N_REFS (regno) += loop_depth;
  	      }
  	  }
  
--- 4502,4508 ----
  
  		/* Count (weighted) number of uses of each reg.  */
  
! 		REG_N_REFS (regno) += loop_depth + 1;
  	      }
  	  }
  
*************** try_pre_increment_1 (insn)
*** 4745,4751 ****
  	 less likely.  */
        if (regno >= FIRST_PSEUDO_REGISTER)
  	{
! 	  REG_N_REFS (regno) += loop_depth;
  	  REG_N_SETS (regno)++;
  	}
        return 1;
--- 4745,4751 ----
  	 less likely.  */
        if (regno >= FIRST_PSEUDO_REGISTER)
  	{
! 	  REG_N_REFS (regno) += loop_depth + 1;
  	  REG_N_SETS (regno)++;
  	}
        return 1;
*************** count_reg_sets_1 (x)
*** 5372,5379 ****
  	  /* Count (weighted) references, stores, etc.  This counts a
  	     register twice if it is modified, but that is correct.  */
  	  REG_N_SETS (regno)++;
! 
! 	  REG_N_REFS (regno) += loop_depth;
  	}
      }
  }
--- 5372,5378 ----
  	  /* Count (weighted) references, stores, etc.  This counts a
  	     register twice if it is modified, but that is correct.  */
  	  REG_N_SETS (regno)++;
! 	  REG_N_REFS (regno) += loop_depth + 1;
  	}
      }
  }
*************** count_reg_references (x)
*** 5452,5458 ****
  
      case REG:
        if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
! 	REG_N_REFS (REGNO (x)) += loop_depth;
        return;
  
      case SET:
--- 5451,5457 ----
  
      case REG:
        if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
! 	REG_N_REFS (REGNO (x)) += loop_depth + 1;
        return;
  
      case SET:
*************** count_reg_references (x)
*** 5551,5559 ****
     More accurate reference counts generally lead to better register allocation.
  
     F is the first insn to be scanned.
     LOOP_STEP denotes how much loop_depth should be incremented per
!    loop nesting level in order to increase the ref count more for references
!    in a loop.
  
     It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
     possibly other information which is used by the register allocators.  */
--- 5550,5559 ----
     More accurate reference counts generally lead to better register allocation.
  
     F is the first insn to be scanned.
+ 
     LOOP_STEP denotes how much loop_depth should be incremented per
!    loop nesting level in order to increase the ref count more for
!    references in a loop.
  
     It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
     possibly other information which is used by the register allocators.  */
*************** recompute_reg_usage (f, loop_step)
*** 5577,5583 ****
  
    /* Scan each insn in the chain and count how many times each register is
       set/used.  */
-   loop_depth = 1;
    for (index = 0; index < n_basic_blocks; index++)
      {
        basic_block bb = BASIC_BLOCK (index);
--- 5577,5582 ----
*************** static int 
*** 6819,6825 ****
  flow_loops_level_compute (loops)
       struct loops *loops;
  {
!   return flow_loop_level_compute (loops->tree, 0);
  }
  
  
--- 6818,6824 ----
  flow_loops_level_compute (loops)
       struct loops *loops;
  {
!   return flow_loop_level_compute (loops->tree, 1);
  }
  
  
*************** flow_loops_find (loops)
*** 6862,6868 ****
    num_loops = 0;
    for (b = 0; b < n_basic_blocks; b++)
      {
!       BASIC_BLOCK (b)->loop_depth = 1;
        for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
  	{
  	  basic_block latch = e->src;
--- 6861,6867 ----
    num_loops = 0;
    for (b = 0; b < n_basic_blocks; b++)
      {
!       BASIC_BLOCK (b)->loop_depth = 0;
        for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
  	{
  	  basic_block latch = e->src;
Index: local-alloc.c
===================================================================
RCS file: /cvs/gcc/egcs/gcc/local-alloc.c,v
retrieving revision 1.57
diff -c -p -d -r1.57 local-alloc.c
*** local-alloc.c	2000/01/04 16:29:40	1.57
--- local-alloc.c	2000/01/06 20:24:47
*************** update_equiv_regs ()
*** 679,685 ****
  
    init_alias_analysis ();
  
!   loop_depth = 1;
  
    /* Scan the insns and find which registers have equivalences.  Do this
       in a separate scan of the insns because (due to -fcse-follow-jumps)
--- 679,685 ----
  
    init_alias_analysis ();
  
!   loop_depth = 0;
  
    /* Scan the insns and find which registers have equivalences.  Do this
       in a separate scan of the insns because (due to -fcse-follow-jumps)
Index: regclass.c
===================================================================
RCS file: /cvs/gcc/egcs/gcc/regclass.c,v
retrieving revision 1.88
diff -c -p -d -r1.88 regclass.c
*** regclass.c	2000/01/04 18:46:46	1.88
--- regclass.c	2000/01/06 20:24:47
*************** regclass (f, nregs, dump)
*** 1100,1112 ****
  	    basic_block bb = BASIC_BLOCK (index);
  
  	    /* Show that an insn inside a loop is likely to be executed three
! 	       times more than insns outside a loop.  This is much more aggressive
! 	       than the assumptions made elsewhere and is being tried as an
! 	       experiment.  */
  	    if (optimize_size)
  	      loop_cost = 1;
  	    else
! 	      loop_cost = 1 << (2 * MIN (bb->loop_depth - 1, 5));
  	    for (insn = bb->head; ; insn = NEXT_INSN (insn))
  	      {
  		insn = scan_one_insn (insn, pass);
--- 1100,1112 ----
  	    basic_block bb = BASIC_BLOCK (index);
  
  	    /* Show that an insn inside a loop is likely to be executed three
! 	       times more than insns outside a loop.  This is much more
! 	       aggressive than the assumptions made elsewhere and is being
! 	       tried as an experiment.  */
  	    if (optimize_size)
  	      loop_cost = 1;
  	    else
! 	      loop_cost = 1 << (2 * MIN (bb->loop_depth, 5));
  	    for (insn = bb->head; ; insn = NEXT_INSN (insn))
  	      {
  		insn = scan_one_insn (insn, pass);

Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]