
operand scan speedups for PR 18587


In response to the excessive time spent in operand scanning in PR 18587,
the following two patches help.

First, we eliminate the linear search for duplicates when building VUSEs
and V_MAY_DEFs. Instead, two bits are added to the var annotation, and
we use those bits to determine whether a given var is already in the
list or not.
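
Roughly, the technique looks like this (a minimal, self-contained sketch:
the flag names mirror the patch, but the fixed-size array and the
*_sketch helpers are illustrative stand-ins for the real varray code in
the diff below):

    #include <stddef.h>

    /* Simplified annotation: one flag bit per build list.  */
    struct var_ann
    {
      unsigned in_vuse_list : 1;
      unsigned in_v_may_def_list : 1;
    };

    #define MAX_OPS 128
    static struct var_ann *build_vuses[MAX_OPS];
    static size_t num_vuses;

    /* O(1) duplicate check: test the bit instead of scanning the list.  */
    static void
    append_vuse_sketch (struct var_ann *ann)
    {
      if (ann->in_vuse_list)
        return;                 /* Already queued; the old code scanned
                                   build_vuses to discover this.  */
      ann->in_vuse_list = 1;
      build_vuses[num_vuses++] = ann;
    }

    /* The bits must be cleared when the build list is emptied, or the
       next stmt's operand scan would wrongly skip these variables.  */
    static void
    cleanup_vuses_sketch (void)
    {
      size_t i;
      for (i = 0; i < num_vuses; i++)
        build_vuses[i]->in_vuse_list = 0;
      num_vuses = 0;
    }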

I tried a few approaches, including bitmaps, but the overhead of
searching and inserting into a bitmap usually works out to be about the
same as the linear search: the average list is 7 items or fewer, and
quite sparse. In any case, the annotation bits gave the best all-round
results.

The other patch speeds up the code that adds virtual operands for the
call-clobbered bit vector at each call site. Every time we had a call,
we iterated through the bit vector and built up an operand vector for
those variables.  Those variables don't change very often, so now we
keep a cache of the operand vector that is built. The next time we need
the clobbers for a call, we simply use the cache, provided it hasn't
been invalidated by a variable being added to or removed from the
call-clobbered set.
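
The caching pattern, in a simplified sketch (the validity flag mirrors
ssa_call_clobbered_cache_valid from the patch; the fixed-size array,
the element type, and the *_sketch names are illustrative stand-ins):

    #include <stddef.h>

    /* Stand-in for the real operand (tree) type.  */
    typedef int clobber_op;

    #define MAX_CLOBBERS 128

    /* Cached operand vector for call-clobbered variables, plus a
       validity flag.  */
    static clobber_op clobbered_cache[MAX_CLOBBERS];
    static size_t clobbered_cache_len;
    static int clobbered_cache_valid;

    /* Any change to the call-clobbered set may make the cached vector
       stale; the patch clears the flag in mark_call_clobbered and
       mark_non_addressable.  */
    static void
    invalidate_clobber_cache (void)
    {
      clobbered_cache_valid = 0;
    }

    /* Called at each call site; OPS/*N_OPS is the per-stmt operand
       vector being assembled.  */
    static void
    add_call_clobber_ops_sketch (clobber_op *ops, size_t *n_ops)
    {
      size_t i;

      if (clobbered_cache_valid)
        {
          /* Fast path: copy the cached vector instead of walking the
             call-clobbered bitmap again.  */
          for (i = 0; i < clobbered_cache_len; i++)
            ops[(*n_ops)++] = clobbered_cache[i];
          return;
        }

      /* Slow path: walk the bitmap (elided) to fill OPS, then snapshot
         the result into the cache and mark it valid.  */
      for (i = clobbered_cache_len = 0; i < *n_ops; i++)
        clobbered_cache[clobbered_cache_len++] = ops[i];
      clobbered_cache_valid = 1;
    }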

The combined speedups from these two patches are pretty decent.

Average USER time of two runs for the operand scanning phase on a 1.8GHz P4.

				  orig		  patched
cplusplus-grammer.ii		22.58 sec	10.35 sec
tramp3d.ii			 6.14 sec	 4.03 sec
generate3.4.ii			 1.69 sec	 1.23 sec
all gcc .i files		 6.72 sec	 5.83 sec

This has been bootstrapped on i686-pc-linux-gnu, and causes no new
regressions on that platform.

Andrew


PS. The var-ann patch is responsible for the lion's share of the speedup
on cplusplus-grammer.ii, and the call clobber patch for most of the rest
of the gains.

Attachment: speed.change

Index: tree-flow.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-flow.h,v
retrieving revision 2.71
diff -c -p -r2.71 tree-flow.h
*** tree-flow.h	23 Nov 2004 05:25:11 -0000	2.71
--- tree-flow.h	25 Nov 2004 18:53:57 -0000
*************** struct var_ann_d GTY(())
*** 166,171 ****
--- 166,179 ----
       states.  */
    ENUM_BITFIELD (need_phi_state) need_phi_state : 2;
  
+   /* Used during operand processing to determine if this variable is already 
+      in the vuse list.  */
+   unsigned in_vuse_list : 1;
+ 
+   /* Used during operand processing to determine if this variable is already 
+      in the v_may_def list.  */
+   unsigned in_v_may_def_list : 1;
+ 
    /* An artificial variable representing the memory location pointed-to by
       all the pointers that TBAA (type-based alias analysis) considers
       to be aliased.  If the variable is not a pointer or if it is never
Index: tree-ssa-operands.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-ssa-operands.c,v
retrieving revision 2.56
diff -c -p -r2.56 tree-ssa-operands.c
*** tree-ssa-operands.c	8 Nov 2004 21:56:03 -0000	2.56
--- tree-ssa-operands.c	25 Nov 2004 18:53:58 -0000
*************** finalize_ssa_v_may_defs (v_may_def_optyp
*** 490,495 ****
--- 513,535 ----
  }
  
  
+ /* Clear the in_list bits and empty the build array for v_may_defs.  */
+ 
+ static inline void
+ cleanup_v_may_defs (void)
+ {
+   unsigned x, num;
+   num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
+ 
+   for (x = 0; x < num; x++)
+     {
+       tree t = VARRAY_TREE (build_v_may_defs, x);
+       var_ann_t ann = var_ann (t);
+       ann->in_v_may_def_list = 0;
+     }
+   VARRAY_POP_ALL (build_v_may_defs);
+ }
+ 
  /* Return a new vuse operand vector, comparing to OLD_OPS_P.  */
  
  static vuse_optype
*************** finalize_ssa_vuses (vuse_optype *old_ops
*** 502,508 ****
    num = VARRAY_ACTIVE_SIZE (build_vuses);
    if (num == 0)
      {
!       VARRAY_POP_ALL (build_v_may_defs);
        return NULL;
      }
  
--- 542,548 ----
    num = VARRAY_ACTIVE_SIZE (build_vuses);
    if (num == 0)
      {
!       cleanup_v_may_defs ();
        return NULL;
      }
  
*************** finalize_ssa_vuses (vuse_optype *old_ops
*** 522,565 ****
  
    if (num_v_may_defs > 0)
      {
!       size_t i, j;
        tree vuse;
        for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
  	{
  	  vuse = VARRAY_TREE (build_vuses, i);
! 	  for (j = 0; j < num_v_may_defs; j++)
  	    {
! 	      if (vuse == VARRAY_TREE (build_v_may_defs, j))
! 		break;
! 	    }
! 
! 	  /* If we found a useless VUSE operand, remove it from the
! 	     operand array by replacing it with the last active element
! 	     in the operand array (unless the useless VUSE was the
! 	     last operand, in which case we simply remove it.  */
! 	  if (j != num_v_may_defs)
! 	    {
! 	      if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
! 		{
! 		  VARRAY_TREE (build_vuses, i)
! 		    = VARRAY_TREE (build_vuses,
! 				   VARRAY_ACTIVE_SIZE (build_vuses) - 1);
  		}
- 	      VARRAY_POP (build_vuses);
- 
- 	      /* We want to rescan the element at this index, unless
- 		 this was the last element, in which case the loop
- 		 terminates.  */
- 	      i--;
  	    }
  	}
      }
  
    num = VARRAY_ACTIVE_SIZE (build_vuses);
    /* We could have reduced the size to zero now, however.  */
    if (num == 0)
      {
!       VARRAY_POP_ALL (build_v_may_defs);
        return NULL;
      }
  
--- 562,616 ----
  
    if (num_v_may_defs > 0)
      {
!       size_t i;
        tree vuse;
        for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
  	{
  	  vuse = VARRAY_TREE (build_vuses, i);
! 	  if (TREE_CODE (vuse) != SSA_NAME)
  	    {
! 	      var_ann_t ann = var_ann (vuse);
! 	      ann->in_vuse_list = 0;
! 	      if (ann->in_v_may_def_list)
! 	        {
! 		  /* If we found a useless VUSE operand, remove it from the
! 		     operand array by replacing it with the last active element
! 		     in the operand array (unless the useless VUSE was the
! 		     last operand, in which case we simply remove it).  */
! 		  if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
! 		    {
! 		      VARRAY_TREE (build_vuses, i)
! 			= VARRAY_TREE (build_vuses,
! 				       VARRAY_ACTIVE_SIZE (build_vuses) - 1);
! 		    }
! 		  VARRAY_POP (build_vuses);
! 
! 		  /* We want to rescan the element at this index, unless
! 		     this was the last element, in which case the loop
! 		     terminates.  */
! 		  i--;
  		}
  	    }
  	}
      }
+   else
+     /* Clear out the in_list bits.  */
+     for (x = 0; x < num; x++)
+       {
+ 	tree t = VARRAY_TREE (build_vuses, x);
+ 	if (TREE_CODE (t) != SSA_NAME)
+ 	  {
+ 	    var_ann_t ann = var_ann (t);
+ 	    ann->in_vuse_list = 0;
+ 	  }
+       }
+ 
  
    num = VARRAY_ACTIVE_SIZE (build_vuses);
    /* We could have reduced the size to zero now, however.  */
    if (num == 0)
      {
!       cleanup_v_may_defs ();
        return NULL;
      }
  
*************** finalize_ssa_vuses (vuse_optype *old_ops
*** 618,624 ****
    /* The v_may_def build vector wasn't freed because we needed it here.
       Free it now with the vuses build vector.  */
    VARRAY_POP_ALL (build_vuses);
!   VARRAY_POP_ALL (build_v_may_defs);
  
    return vuse_ops;
  }
--- 669,675 ----
    /* The v_may_def build vector wasn't freed because we needed it here.
       Free it now with the vuses build vector.  */
    VARRAY_POP_ALL (build_vuses);
!   cleanup_v_may_defs ();
  
    return vuse_ops;
  }
*************** append_use (tree *use_p)
*** 751,762 ****
  static inline void
  append_v_may_def (tree var)
  {
!   unsigned i;
  
    /* Don't allow duplicate entries.  */
!   for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
!     if (var == VARRAY_TREE (build_v_may_defs, i))
!       return;
  
    VARRAY_PUSH_TREE (build_v_may_defs, var);
  }
--- 802,813 ----
  static inline void
  append_v_may_def (tree var)
  {
!   var_ann_t ann = get_var_ann (var);
  
    /* Don't allow duplicate entries.  */
!   if (ann->in_v_may_def_list)
!     return;
!   ann->in_v_may_def_list = 1;
  
    VARRAY_PUSH_TREE (build_v_may_defs, var);
  }
*************** append_v_may_def (tree var)
*** 767,778 ****
  static inline void
  append_vuse (tree var)
  {
-   size_t i;
  
    /* Don't allow duplicate entries.  */
!   for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
!     if (var == VARRAY_TREE (build_vuses, i))
!       return;
  
    VARRAY_PUSH_TREE (build_vuses, var);
  }
--- 818,833 ----
  static inline void
  append_vuse (tree var)
  {
  
    /* Don't allow duplicate entries.  */
!   if (TREE_CODE (var) != SSA_NAME)
!     {
!       var_ann_t ann = get_var_ann (var);
! 
!       if (ann->in_vuse_list || ann->in_v_may_def_list)
!         return;
!       ann->in_vuse_list = 1;
!     }
  
    VARRAY_PUSH_TREE (build_vuses, var);
  }
Index: tree-flow-inline.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-flow-inline.h,v
retrieving revision 2.26
diff -c -p -r2.26 tree-flow-inline.h
*** tree-flow-inline.h	23 Nov 2004 17:45:41 -0000	2.26
--- tree-flow-inline.h	25 Nov 2004 18:53:57 -0000
*************** mark_call_clobbered (tree var)
*** 621,626 ****
--- 621,628 ----
    if (ann->mem_tag_kind != NOT_A_TAG)
      DECL_EXTERNAL (var) = 1;
    bitmap_set_bit (call_clobbered_vars, ann->uid);
+   ssa_call_clobbered_cache_valid = false;
+   ssa_ro_call_cache_valid = false;
  }
  
  /* Mark variable VAR as being non-addressable.  */
*************** mark_non_addressable (tree var)
*** 629,634 ****
--- 631,638 ----
  {
    bitmap_clear_bit (call_clobbered_vars, var_ann (var)->uid);
    TREE_ADDRESSABLE (var) = 0;
+   ssa_call_clobbered_cache_valid = false;
+   ssa_ro_call_cache_valid = false;
  }
  
  /* Return the common annotation for T.  Return NULL if the annotation
Index: tree-ssa-operands.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-ssa-operands.c,v
retrieving revision 2.56
diff -c -p -r2.56 tree-ssa-operands.c
*** tree-ssa-operands.c	8 Nov 2004 21:56:03 -0000	2.56
--- tree-ssa-operands.c	25 Nov 2004 18:53:58 -0000
*************** static GTY (()) varray_type build_vuses;
*** 116,121 ****
--- 116,132 ----
  /* Array for building all the v_must_def operands.  */
  static GTY (()) varray_type build_v_must_defs;
  
+ /* True if the cached operand vectors for call clobbered/pure calls are valid.  */
+ bool ssa_call_clobbered_cache_valid;
+ bool ssa_ro_call_cache_valid;
+ 
+ /* These arrays are the cached operand vectors for call clobbered calls.  */
+ static GTY (()) varray_type clobbered_v_may_defs;
+ static GTY (()) varray_type clobbered_vuses;
+ static GTY (()) varray_type ro_call_vuses;
+ static bool clobbered_aliased_loads;
+ static bool clobbered_aliased_stores;
+ static bool ro_call_aliased_loads;
  
  #ifdef ENABLE_CHECKING
  /* Used to make sure operand construction is working on the proper stmt.  */
*************** static void append_v_may_def (tree);
*** 136,142 ****
  static void append_v_must_def (tree);
  static void add_call_clobber_ops (tree);
  static void add_call_read_ops (tree);
! static void add_stmt_operand (tree *, tree, int);
  
  /* Return a vector of contiguous memory for NUM def operands.  */
  
--- 147,153 ----
  static void append_v_must_def (tree);
  static void add_call_clobber_ops (tree);
  static void add_call_read_ops (tree);
! static void add_stmt_operand (tree *, stmt_ann_t, int);
  
  /* Return a vector of contiguous memory for NUM def operands.  */
  
*************** fini_ssa_operands (void)
*** 302,307 ****
--- 313,330 ----
    build_v_may_defs = NULL;
    build_vuses = NULL;
    build_v_must_defs = NULL;
+   if (clobbered_v_may_defs)
+     {
+       ggc_free (clobbered_v_may_defs);
+       ggc_free (clobbered_vuses);
+       clobbered_v_may_defs = NULL;
+       clobbered_vuses = NULL;
+     }
+   if (ro_call_vuses)
+     {
+       ggc_free (ro_call_vuses);
+       ro_call_vuses = NULL;
+     }
  }
  
  
*************** get_expr_operands (tree stmt, tree *expr
*** 972,977 ****
--- 1027,1033 ----
    enum tree_code code;
    enum tree_code_class class;
    tree expr = *expr_p;
+   stmt_ann_t s_ann = stmt_ann (stmt);
  
    if (expr == NULL || expr == error_mark_node)
      return;
*************** get_expr_operands (tree stmt, tree *expr
*** 987,993 ****
        /* Taking the address of a variable does not represent a
  	 reference to it, but the fact that the stmt takes its address will be
  	 of interest to some passes (e.g. alias resolution).  */
!       add_stmt_operand (expr_p, stmt, 0);
  
        /* If the address is invariant, there may be no interesting variable
  	 references inside.  */
--- 1043,1049 ----
        /* Taking the address of a variable does not represent a
  	 reference to it, but the fact that the stmt takes its address will be
  	 of interest to some passes (e.g. alias resolution).  */
!       add_stmt_operand (expr_p, s_ann, 0);
  
        /* If the address is invariant, there may be no interesting variable
  	 references inside.  */
*************** get_expr_operands (tree stmt, tree *expr
*** 1010,1016 ****
      case CONST_DECL:
        /* If we found a variable, add it to DEFS or USES depending
  	 on the operand flags.  */
!       add_stmt_operand (expr_p, stmt, flags);
        return;
  
      case MISALIGNED_INDIRECT_REF:
--- 1066,1072 ----
      case CONST_DECL:
        /* If we found a variable, add it to DEFS or USES depending
  	 on the operand flags.  */
!       add_stmt_operand (expr_p, s_ann, flags);
        return;
  
      case MISALIGNED_INDIRECT_REF:
*************** get_expr_operands (tree stmt, tree *expr
*** 1032,1038 ****
  	 according to the value of IS_DEF.  Recurse if the LHS of the
  	 ARRAY_REF node is not a regular variable.  */
        if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
! 	add_stmt_operand (expr_p, stmt, flags);
        else
  	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
  
--- 1088,1094 ----
  	 according to the value of IS_DEF.  Recurse if the LHS of the
  	 ARRAY_REF node is not a regular variable.  */
        if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
! 	add_stmt_operand (expr_p, s_ann, flags);
        else
  	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
  
*************** get_expr_operands (tree stmt, tree *expr
*** 1060,1066 ****
        /* If the LHS of the compound reference is not a regular variable,
  	 recurse to keep looking for more operands in the subexpression.  */
        if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
! 	add_stmt_operand (expr_p, stmt, flags);
        else
  	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
  
--- 1116,1122 ----
        /* If the LHS of the compound reference is not a regular variable,
  	 recurse to keep looking for more operands in the subexpression.  */
        if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
! 	add_stmt_operand (expr_p, s_ann, flags);
        else
  	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
  
*************** get_asm_expr_operands (tree stmt)
*** 1273,1291 ****
  	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
  	   decided to group them).  */
  	if (global_var)
! 	  add_stmt_operand (&global_var, stmt, opf_is_def);
  	else
  	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
  	      {
  		tree var = referenced_var (i);
! 		add_stmt_operand (&var, stmt, opf_is_def);
  	      }
  
  	/* Now clobber all addressables.  */
  	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
  	    {
  	      tree var = referenced_var (i);
! 	      add_stmt_operand (&var, stmt, opf_is_def);
  	    }
  
  	break;
--- 1329,1347 ----
  	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
  	   decided to group them).  */
  	if (global_var)
! 	  add_stmt_operand (&global_var, s_ann, opf_is_def);
  	else
  	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
  	      {
  		tree var = referenced_var (i);
! 		add_stmt_operand (&var, s_ann, opf_is_def);
  	      }
  
  	/* Now clobber all addressables.  */
  	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
  	    {
  	      tree var = referenced_var (i);
! 	      add_stmt_operand (&var, s_ann, opf_is_def);
  	    }
  
  	break;
*************** get_indirect_ref_operands (tree stmt, tr
*** 1300,1306 ****
  {
    tree *pptr = &TREE_OPERAND (expr, 0);
    tree ptr = *pptr;
!   stmt_ann_t ann = stmt_ann (stmt);
  
    /* Stores into INDIRECT_REF operands are never killing definitions.  */
    flags &= ~opf_kill_def;
--- 1356,1362 ----
  {
    tree *pptr = &TREE_OPERAND (expr, 0);
    tree ptr = *pptr;
!   stmt_ann_t s_ann = stmt_ann (stmt);
  
    /* Stores into INDIRECT_REF operands are never killing definitions.  */
    flags &= ~opf_kill_def;
*************** get_indirect_ref_operands (tree stmt, tr
*** 1327,1339 ****
  	  && pi->name_mem_tag)
  	{
  	  /* PTR has its own memory tag.  Use it.  */
! 	  add_stmt_operand (&pi->name_mem_tag, stmt, flags);
  	}
        else
  	{
  	  /* If PTR is not an SSA_NAME or it doesn't have a name
  	     tag, use its type memory tag.  */
! 	  var_ann_t ann;
  
  	  /* If we are emitting debugging dumps, display a warning if
  	     PTR is an SSA_NAME with no flow-sensitive alias
--- 1383,1395 ----
  	  && pi->name_mem_tag)
  	{
  	  /* PTR has its own memory tag.  Use it.  */
! 	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
  	}
        else
  	{
  	  /* If PTR is not an SSA_NAME or it doesn't have a name
  	     tag, use its type memory tag.  */
! 	  var_ann_t v_ann;
  
  	  /* If we are emitting debugging dumps, display a warning if
  	     PTR is an SSA_NAME with no flow-sensitive alias
*************** get_indirect_ref_operands (tree stmt, tr
*** 1352,1360 ****
  
  	  if (TREE_CODE (ptr) == SSA_NAME)
  	    ptr = SSA_NAME_VAR (ptr);
! 	  ann = var_ann (ptr);
! 	  if (ann->type_mem_tag)
! 	    add_stmt_operand (&ann->type_mem_tag, stmt, flags);
  	}
      }
  
--- 1408,1416 ----
  
  	  if (TREE_CODE (ptr) == SSA_NAME)
  	    ptr = SSA_NAME_VAR (ptr);
! 	  v_ann = var_ann (ptr);
! 	  if (v_ann->type_mem_tag)
! 	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
  	}
      }
  
*************** get_indirect_ref_operands (tree stmt, tr
*** 1363,1370 ****
       optimizations from messing things up.  */
    else if (TREE_CODE (ptr) == INTEGER_CST)
      {
!       if (ann)
! 	ann->has_volatile_ops = true;
        return;
      }
  
--- 1419,1426 ----
       optimizations from messing things up.  */
    else if (TREE_CODE (ptr) == INTEGER_CST)
      {
!       if (s_ann)
! 	s_ann->has_volatile_ops = true;
        return;
      }
  
*************** get_indirect_ref_operands (tree stmt, tr
*** 1379,1385 ****
      {
        /* Make sure we know the object is addressable.  */
        pptr = &TREE_OPERAND (ptr, 0);
!       add_stmt_operand (pptr, stmt, 0);
  
        /* Mark the object itself with a VUSE.  */
        pptr = &TREE_OPERAND (*pptr, 0);
--- 1435,1441 ----
      {
        /* Make sure we know the object is addressable.  */
        pptr = &TREE_OPERAND (ptr, 0);
!       add_stmt_operand (pptr, s_ann, 0);
  
        /* Mark the object itself with a VUSE.  */
        pptr = &TREE_OPERAND (*pptr, 0);
*************** get_call_expr_operands (tree stmt, tree 
*** 1403,1416 ****
    tree op;
    int call_flags = call_expr_flags (expr);
  
-   /* Find uses in the called function.  */
-   get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
- 
-   for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
-     get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
- 
-   get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
- 
    if (!bitmap_empty_p (call_clobbered_vars))
      {
        /* A 'pure' or a 'const' functions never call clobber anything. 
--- 1459,1464 ----
*************** get_call_expr_operands (tree stmt, tree 
*** 1422,1427 ****
--- 1470,1484 ----
        else if (!(call_flags & ECF_CONST))
  	add_call_read_ops (stmt);
      }
+ 
+   /* Find uses in the called function.  */
+   get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
+ 
+   for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
+     get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
+ 
+   get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ 
  }
  
  
*************** get_call_expr_operands (tree stmt, tree 
*** 1431,1441 ****
     operands.  */
  
  static void
! add_stmt_operand (tree *var_p, tree stmt, int flags)
  {
    bool is_real_op;
    tree var, sym;
-   stmt_ann_t s_ann = stmt_ann (stmt);
    var_ann_t v_ann;
  
    var = *var_p;
--- 1488,1497 ----
     operands.  */
  
  static void
! add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
  {
    bool is_real_op;
    tree var, sym;
    var_ann_t v_ann;
  
    var = *var_p;
*************** note_addressable (tree var, stmt_ann_t s
*** 1586,1617 ****
  static void
  add_call_clobber_ops (tree stmt)
  {
    /* Functions that are not const, pure or never return may clobber
       call-clobbered variables.  */
!   if (stmt_ann (stmt))
!     stmt_ann (stmt)->makes_clobbering_call = true;
  
!   /* If we had created .GLOBAL_VAR earlier, use it.  Otherwise, add 
!      a V_MAY_DEF operand for every call clobbered variable.  See 
!      compute_may_aliases for the heuristic used to decide whether 
!      to create .GLOBAL_VAR or not.  */
    if (global_var)
-     add_stmt_operand (&global_var, stmt, opf_is_def);
-   else
      {
!       unsigned i;
!       bitmap_iterator bi;
  
!       EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
  	{
! 	  tree var = referenced_var (i);
! 	  if (TREE_READONLY (var)
! 	      && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
! 	    add_stmt_operand (&var, stmt, opf_none);
! 	  else
! 	    add_stmt_operand (&var, stmt, opf_is_def);
  	}
      }
  }
  
  
--- 1642,1733 ----
  static void
  add_call_clobber_ops (tree stmt)
  {
+   unsigned i;
+   tree t;
+   bitmap_iterator bi;
+   stmt_ann_t s_ann = stmt_ann (stmt);
+   struct stmt_ann_d empty_ann;
+ 
    /* Functions that are not const, pure or never return may clobber
       call-clobbered variables.  */
!   if (s_ann)
!     s_ann->makes_clobbering_call = true;
  
!   /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases 
!      for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
    if (global_var)
      {
!       add_stmt_operand (&global_var, s_ann, opf_is_def);
!       return;
!     }
  
!   /* If the cache is valid, copy the elements into the build vectors.  */
!   if (ssa_call_clobbered_cache_valid)
!     {
!       for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_vuses); i++)
  	{
! 	  t = VARRAY_TREE (clobbered_vuses, i);
! 	  gcc_assert (TREE_CODE (t) != SSA_NAME);
! 	  var_ann (t)->in_vuse_list = 1;
! 	  VARRAY_PUSH_TREE (build_vuses, t);
! 	}
!       for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_v_may_defs); i++)
! 	{
! 	  t = VARRAY_TREE (clobbered_v_may_defs, i);
! 	  gcc_assert (TREE_CODE (t) != SSA_NAME);
! 	  var_ann (t)->in_v_may_def_list = 1;
! 	  VARRAY_PUSH_TREE (build_v_may_defs, t);
  	}
+       if (s_ann)
+ 	{
+ 	  s_ann->makes_aliased_loads = clobbered_aliased_loads;
+ 	  s_ann->makes_aliased_stores = clobbered_aliased_stores;
+ 	}
+       return;
+     }
+ 
+   memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
+ 
+   /* Add a V_MAY_DEF operand for every call clobbered variable.  */
+   EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+     {
+       tree var = referenced_var (i);
+       if (TREE_READONLY (var)
+ 	  && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
+ 	add_stmt_operand (&var, &empty_ann, opf_none);
+       else
+ 	add_stmt_operand (&var, &empty_ann, opf_is_def);
+     }
+ 
+   clobbered_aliased_loads = empty_ann.makes_aliased_loads;
+   clobbered_aliased_stores = empty_ann.makes_aliased_stores;
+ 
+   /* Set the flags for a stmt's annotation.  */
+   if (s_ann)
+     {
+       s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
+       s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
+     }
+ 
+   /* Prepare empty cache vectors.  */
+   if (clobbered_v_may_defs)
+     {
+       VARRAY_POP_ALL (clobbered_vuses);
+       VARRAY_POP_ALL (clobbered_v_may_defs);
      }
+   else
+     {
+       VARRAY_TREE_INIT (clobbered_v_may_defs, 10, "clobbered_v_may_defs");
+       VARRAY_TREE_INIT (clobbered_vuses, 10, "clobbered_vuses");
+     }
+ 
+   /* Now fill the clobbered cache with the values that have been found.  */
+   for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
+     VARRAY_PUSH_TREE (clobbered_vuses, VARRAY_TREE (build_vuses, i));
+   for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
+     VARRAY_PUSH_TREE (clobbered_v_may_defs, VARRAY_TREE (build_v_may_defs, i));
+ 
+   ssa_call_clobbered_cache_valid = true;
  }
  
  
*************** add_call_clobber_ops (tree stmt)
*** 1621,1644 ****
  static void
  add_call_read_ops (tree stmt)
  {
    bitmap_iterator bi;
  
!   /* Otherwise, if the function is not pure, it may reference memory.  Add
!      a VUSE for .GLOBAL_VAR if it has been created.  Otherwise, add a VUSE
!      for each call-clobbered variable.  See add_referenced_var for the
!      heuristic used to decide whether to create .GLOBAL_VAR.  */
    if (global_var)
-     add_stmt_operand (&global_var, stmt, opf_none);
-   else
      {
!       unsigned i;
!       
!       EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
  	{
! 	  tree var = referenced_var (i);
! 	  add_stmt_operand (&var, stmt, opf_none);
  	}
      }
  }
  
  /* Copies virtual operands from SRC to DST.  */
--- 1737,1796 ----
  static void
  add_call_read_ops (tree stmt)
  {
+   unsigned i;
+   tree t;
    bitmap_iterator bi;
+   stmt_ann_t s_ann = stmt_ann (stmt);
+   struct stmt_ann_d empty_ann;
  
!   /* If the function is not pure, it may reference memory.  Add
!      a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
!      for the heuristic used to decide whether to create .GLOBAL_VAR.  */
    if (global_var)
      {
!       add_stmt_operand (&global_var, s_ann, opf_none);
!       return;
!     }
!   
!   /* If the cache is valid, copy the elements into the build vector.  */
!   if (ssa_ro_call_cache_valid)
!     {
!       for (i = 0; i < VARRAY_ACTIVE_SIZE (ro_call_vuses); i++)
  	{
! 	  t = VARRAY_TREE (ro_call_vuses, i);
! 	  gcc_assert (TREE_CODE (t) != SSA_NAME);
! 	  var_ann (t)->in_vuse_list = 1;
! 	  VARRAY_PUSH_TREE (build_vuses, t);
  	}
+       if (s_ann)
+ 	s_ann->makes_aliased_loads = ro_call_aliased_loads;
+       return;
+     }
+ 
+   memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
+ 
+   /* Add a VUSE for each call-clobbered variable.  */
+   EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+     {
+       tree var = referenced_var (i);
+       add_stmt_operand (&var, &empty_ann, opf_none);
      }
+ 
+   ro_call_aliased_loads = empty_ann.makes_aliased_loads;
+   if (s_ann)
+     s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
+ 
+   /* Prepare empty cache vectors.  */
+   if (ro_call_vuses)
+     VARRAY_POP_ALL (ro_call_vuses);
+   else
+     VARRAY_TREE_INIT (ro_call_vuses, 10, "ro_call_vuses");
+ 
+   /* Now fill the read-only call cache with the values that have been found.  */
+   for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
+     VARRAY_PUSH_TREE (ro_call_vuses, VARRAY_TREE (build_vuses, i));
+ 
+   ssa_ro_call_cache_valid = true;
  }
  
  /* Copies virtual operands from SRC to DST.  */
Index: tree-ssa-operands.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-ssa-operands.h,v
retrieving revision 2.8
diff -c -p -r2.8 tree-ssa-operands.h
*** tree-ssa-operands.h	27 Oct 2004 17:45:21 -0000	2.8
--- tree-ssa-operands.h	25 Nov 2004 18:53:58 -0000
*************** extern void get_stmt_operands (tree);
*** 188,193 ****
--- 188,195 ----
  extern void copy_virtual_operands (tree, tree);
  extern void create_ssa_artficial_load_stmt (stmt_operands_p, tree);
  
+ extern bool ssa_call_clobbered_cache_valid;
+ extern bool ssa_ro_call_cache_valid;
  
  /* This structure is used in the operand iterator loops.  It contains the 
     items required to determine which operand is retrieved next.  During
