This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

Re: [PATCH] Make CSE path following use the CFG


On 12/11/06, Steven Bosscher <stevenb.gcc@gmail.com> wrote:
Unfortunately, there appears to be a more serious problem than the typo.

And another one, where I intended to traverse the CFG in topological sort order, but I actually asked for a DFS order.

 For CC0-targets, we clear prev_insn_cc0 too early now.  I've hacked
together the attached patch, but I can't test it right now (0:00 AM,
work tomorrow, etc.).  I wanted to post it anyway because maybe it
helps move things forward, and maybe someone can help me by testing
this on a CC0-target ;-)

The patch below *should* fix both the CC0 problems and the HPPA/MIPSEL problems. I have built an AVR cross-compiler with this, and I have verified that the test case from PR30149 does not ICE anymore with this patch. I have asked for some assistance to have this patch bootstrapped & tested properly.

For the Ada issue I'll wait for Eric to come up with a test case.
That failure is probably caused by a replacement that makes EH edges
unnecessary, but I have no idea where that would happen.

Gr.
Steven
Index: cse.c
===================================================================
*** cse.c	(revision 119741)
--- cse.c	(working copy)
*************** struct change_cc_mode_args
*** 269,285 ****
     table since its use is guaranteed to be the insn immediately following
     its definition and any other insn is presumed to invalidate it.
  
!    Instead, we store below the value last assigned to CC0.  If it should
!    happen to be a constant, it is stored in preference to the actual
!    assigned value.  In case it is a constant, we store the mode in which
!    the constant should be interpreted.  */
  
! static rtx prev_insn_cc0;
! static enum machine_mode prev_insn_cc0_mode;
! 
! /* Previous actual insn.  0 if at first insn of basic block.  */
! 
! static rtx prev_insn;
  #endif
  
  /* Insn being scanned.  */
--- 269,281 ----
     table since its use is guaranteed to be the insn immediately following
     its definition and any other insn is presumed to invalidate it.
  
!    Instead, we store below the current and last value assigned to CC0.
!    If it should happen to be a constant, it is stored in preference
!    to the actual assigned value.  In case it is a constant, we store
!    the mode in which the constant should be interpreted.  */
  
! static rtx this_insn_cc0, prev_insn_cc0;
! static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
  #endif
  
  /* Insn being scanned.  */
*************** new_basic_block (void)
*** 900,906 ****
      }
  
  #ifdef HAVE_cc0
-   prev_insn = 0;
    prev_insn_cc0 = 0;
  #endif
  }
--- 896,901 ----
*************** cse_insn (rtx insn, rtx libcall_insn)
*** 4022,4029 ****
  
  #ifdef HAVE_cc0
    /* Records what this insn does to set CC0.  */
!   rtx this_insn_cc0 = 0;
!   enum machine_mode this_insn_cc0_mode = VOIDmode;
  #endif
  
    rtx src_eqv = 0;
--- 4017,4024 ----
  
  #ifdef HAVE_cc0
    /* Records what this insn does to set CC0.  */
!   this_insn_cc0 = 0;
!   this_insn_cc0_mode = VOIDmode;
  #endif
  
    rtx src_eqv = 0;
*************** cse_insn (rtx insn, rtx libcall_insn)
*** 5644,5663 ****
      }
  
  done:;
- #ifdef HAVE_cc0
-   /* If the previous insn set CC0 and this insn no longer references CC0,
-      delete the previous insn.  Here we use the fact that nothing expects CC0
-      to be valid over an insn, which is true until the final pass.  */
-   if (prev_insn && NONJUMP_INSN_P (prev_insn)
-       && (tem = single_set (prev_insn)) != 0
-       && SET_DEST (tem) == cc0_rtx
-       && ! reg_mentioned_p (cc0_rtx, x))
-     delete_insn_and_edges (prev_insn);
- 
-   prev_insn_cc0 = this_insn_cc0;
-   prev_insn_cc0_mode = this_insn_cc0_mode;
-   prev_insn = insn;
- #endif
  }
  
  /* Remove from the hash table all expressions that reference memory.  */
--- 5639,5644 ----
*************** cse_find_path (basic_block first_bb, str
*** 5924,5934 ****
  	    {
  	      basic_block bb2 = e->dest;
  
- #if ENABLE_CHECKING
  	      /* We should only see blocks here that we have not
  		 visited yet.  */
  	      gcc_assert (!TEST_BIT (cse_visited_basic_blocks, bb2->index));
! #endif
  	      SET_BIT (cse_visited_basic_blocks, bb2->index);
  	      data->path[path_size++].bb = bb2;
  	      bb = bb2;
--- 5905,5914 ----
  	    {
  	      basic_block bb2 = e->dest;
  
  	      /* We should only see blocks here that we have not
  		 visited yet.  */
  	      gcc_assert (!TEST_BIT (cse_visited_basic_blocks, bb2->index));
! 
  	      SET_BIT (cse_visited_basic_blocks, bb2->index);
  	      data->path[path_size++].bb = bb2;
  	      bb = bb2;
*************** cse_extended_basic_block (struct cse_bas
*** 6096,6114 ****
  		  && for_each_rtx (&PATTERN (insn), check_for_label_ref,
  				   (void *) insn))
  		recorded_label_ref = 1;
  	    }
  	}
  
        /* Make sure that libcalls don't span multiple basic blocks.  */
        gcc_assert (libcall_insn == NULL_RTX);
  
- #ifdef HAVE_cc0
-       /* Clear the CC0-tracking related insns, they can't provide
- 	 useful information across basic block boundaries.  */
-       prev_insn_cc0 = 0;
-       prev_insn = 0;
- #endif
- 
        /* If we changed a conditional jump, we may have terminated
  	 the path we are following.  Check that by verifying that
  	 the edge we would take still exists.  If the edge does
--- 6076,6114 ----
  		  && for_each_rtx (&PATTERN (insn), check_for_label_ref,
  				   (void *) insn))
  		recorded_label_ref = 1;
+ 
+ #ifdef HAVE_cc0
+ 	      /* If the previous insn set CC0 and this insn no longer
+ 		 references CC0, delete the previous insn.  Here we use
+ 		 the fact that nothing expects CC0 to be valid over an
+ 		 insn, which is true until the final pass.  */
+ 	      {
+ 		rtx prev_insn, tem;
+ 
+ 		prev_insn = PREV_INSN (insn);
+ 		if (prev_insn && NONJUMP_INSN_P (prev_insn)
+ 		    && (tem = single_set (prev_insn)) != 0
+ 		    && SET_DEST (tem) == cc0_rtx
+ 		    && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
+ 		  delete_insn (prev_insn);
+ 	      }
+ 
+ 	      /* If this insn is not the last insn in the basic block,
+ 		 it will be PREV_INSN(insn) in the next iteration.  If
+ 		 we recorded any CC0-related information for this insn,
+ 		 remember it.  */
+ 	      if (insn != BB_END (bb))
+ 		{
+ 		  prev_insn_cc0 = this_insn_cc0;
+ 		  prev_insn_cc0_mode = this_insn_cc0_mode;
+ 		}
+ #endif
  	    }
  	}
  
        /* Make sure that libcalls don't span multiple basic blocks.  */
        gcc_assert (libcall_insn == NULL_RTX);
  
        /* If we changed a conditional jump, we may have terminated
  	 the path we are following.  Check that by verifying that
  	 the edge we would take still exists.  If the edge does
*************** cse_extended_basic_block (struct cse_bas
*** 6133,6138 ****
--- 6133,6144 ----
  	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
  	  record_jump_equiv (insn, taken);
  	}
+ 
+ #ifdef HAVE_cc0
+       /* Clear the CC0-tracking related insns, they can't provide
+ 	 useful information across basic block boundaries.  */
+       prev_insn_cc0 = 0;
+ #endif
      }
  
    gcc_assert (next_qty <= max_qty);
*************** cse_main (rtx f ATTRIBUTE_UNUSED, int nr
*** 6152,6158 ****
  {
    struct cse_basic_block_data ebb_data;
    basic_block bb;
!   int *dfs_order = XNEWVEC (int, last_basic_block);
    int i, n_blocks;
  
    init_cse_reg_info (nregs);
--- 6158,6164 ----
  {
    struct cse_basic_block_data ebb_data;
    basic_block bb;
!   int *rc_order = XNEWVEC (int, last_basic_block);
    int i, n_blocks;
  
    init_cse_reg_info (nregs);
*************** cse_main (rtx f ATTRIBUTE_UNUSED, int nr
*** 6192,6198 ****
  
    /* Loop over basic blocks in DFS order,
       excluding the ENTRY and EXIT blocks.  */
!   n_blocks = pre_and_rev_post_order_compute (dfs_order, NULL, false);
    i = 0;
    while (i < n_blocks)
      {
--- 6198,6204 ----
  
    /* Loop over basic blocks in DFS order,
       excluding the ENTRY and EXIT blocks.  */
!   n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
    i = 0;
    while (i < n_blocks)
      {
*************** cse_main (rtx f ATTRIBUTE_UNUSED, int nr
*** 6200,6206 ****
  	 processed before.  */
        do
  	{
! 	  bb = BASIC_BLOCK (dfs_order[i++]);
  	}
        while (TEST_BIT (cse_visited_basic_blocks, bb->index)
  	     && i < n_blocks);
--- 6206,6212 ----
  	 processed before.  */
        do
  	{
! 	  bb = BASIC_BLOCK (rc_order[i++]);
  	}
        while (TEST_BIT (cse_visited_basic_blocks, bb->index)
  	     && i < n_blocks);
*************** cse_main (rtx f ATTRIBUTE_UNUSED, int nr
*** 6236,6242 ****
    free (reg_eqv_table);
    free (ebb_data.path);
    sbitmap_free (cse_visited_basic_blocks);
!   free (dfs_order);
    rtl_hooks = general_rtl_hooks;
  
    return cse_jumps_altered || recorded_label_ref;
--- 6242,6248 ----
    free (reg_eqv_table);
    free (ebb_data.path);
    sbitmap_free (cse_visited_basic_blocks);
!   free (rc_order);
    rtl_hooks = general_rtl_hooks;
  
    return cse_jumps_altered || recorded_label_ref;

Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]