This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

Re: Patches ping


Hello,

> > http://gcc.gnu.org/ml/gcc-patches/2005-02/msg00205.html
> >   -- tree level store motion speed up (PR 17790)
> > 
> Send me an updated patch?  This one doesn't apply for me.

here is the updated patch, bootstrapped & regtested on i686 and ia64.
PR17790 no longer reproduces for me (we spend no significant amount
of time in lim now); probably the problem got fixed or masked in
the meantime by some unrelated change.  Still, I like the new
implementation more than the old one (it gets the same results,
but I find it easier to read, and it is also about 100 lines shorter).

Zdenek

	* tree-ssa-loop-im.c: Include hashtab.h.
	(struct mem_ref_loc): New.
	(struct mem_ref): Describe the set of references with the same
	shape.
	(max_stmt_uid, get_stmt_uid, record_mem_ref, free_mem_refs,
	maybe_queue_var, fem_single_reachable_address,
	for_each_memref, single_reachable_address,
	is_call_clobbered_ref, determine_lsm_reg): Removed.
	(record_mem_ref_loc, free_mem_ref_locs, determine_lsm_ref,
	hoist_memory_reference, memref_hash, memref_eq, memref_del,
	gather_mem_refs_stmt, gather_mem_refs, find_more_ref_vops):
	New functions.
	(rewrite_mem_refs, schedule_sm): Use mem_ref_loc list.
	(determine_lsm_loop): Rewritten.
	(determine_lsm): Do not set stmt uids.

Index: tree-ssa-loop-im.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-ssa-loop-im.c,v
retrieving revision 2.39
diff -c -3 -p -r2.39 tree-ssa-loop-im.c
*** tree-ssa-loop-im.c	3 May 2005 20:18:32 -0000	2.39
--- tree-ssa-loop-im.c	5 May 2005 20:27:27 -0000
*************** Software Foundation, 59 Temple Place - S
*** 38,43 ****
--- 38,44 ----
  #include "tree-pass.h"
  #include "flags.h"
  #include "real.h"
+ #include "hashtab.h"
  
  /* TODO:  Support for predicated code motion.  I.e.
  
*************** struct lim_aux_data
*** 103,115 ****
  			? NULL \
  			: (struct lim_aux_data *) (stmt_ann (STMT)->aux))
  
! /* Description of a memory reference for store motion.  */
  
! struct mem_ref
  {
    tree *ref;			/* The reference itself.  */
    tree stmt;			/* The statement in that it occurs.  */
!   struct mem_ref *next;		/* Next use in the chain.  */
  };
  
  /* Minimum cost of an expensive expression.  */
--- 104,129 ----
  			? NULL \
  			: (struct lim_aux_data *) (stmt_ann (STMT)->aux))
  
! /* Description of a memory reference location for store motion.  */
  
! struct mem_ref_loc
  {
    tree *ref;			/* The reference itself.  */
    tree stmt;			/* The statement in that it occurs.  */
!   struct mem_ref_loc *next;	/* Next use in the chain.  */
! };
! 
! /* Description of a memory reference for store motion.  */
! 
! struct mem_ref
! {
!   tree mem;			/* The memory itself.  */
!   hashval_t hash;		/* Its hash value.  */
!   bool is_stored;		/* True if there is a store to the location
! 				   in the loop.  */
!   struct mem_ref_loc *locs;	/* The locations where it is found.  */
!   bitmap vops;			/* Vops corresponding to this memory
! 				   location.  */
  };
  
  /* Minimum cost of an expensive expression.  */
*************** struct mem_ref
*** 119,139 ****
     block will be executed.  */
  #define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
  
- static unsigned max_stmt_uid;	/* Maximal uid of a statement.  Uids to phi
- 				   nodes are assigned using the versions of
- 				   ssa names they define.  */
- 
- /* Returns uid of statement STMT.  */
- 
- static unsigned
- get_stmt_uid (tree stmt)
- {
-   if (TREE_CODE (stmt) == PHI_NODE)
-     return SSA_NAME_VERSION (PHI_RESULT (stmt)) + max_stmt_uid;
- 
-   return stmt_ann (stmt)->uid;
- }
- 
  /* Calls CBCK for each index in memory reference ADDR_P.  There are two
     kinds situations handled; in each of these cases, the memory reference
     and DATA are passed to the callback:
--- 133,138 ----
*************** force_move_till (tree ref, tree *index, 
*** 859,871 ****
    return true;
  }
  
! /* Records memory reference *REF (that occurs in statement STMT)
!    to the list MEM_REFS.  */
  
  static void
! record_mem_ref (struct mem_ref **mem_refs, tree stmt, tree *ref)
  {
!   struct mem_ref *aref = xmalloc (sizeof (struct mem_ref));
  
    aref->stmt = stmt;
    aref->ref = ref;
--- 858,870 ----
    return true;
  }
  
! /* Records memory reference location *REF to the list MEM_REFS.  The reference
!    occurs in statement STMT.  */
  
  static void
! record_mem_ref_loc (struct mem_ref_loc **mem_refs, tree stmt, tree *ref)
  {
!   struct mem_ref_loc *aref = xmalloc (sizeof (struct mem_ref_loc));
  
    aref->stmt = stmt;
    aref->ref = ref;
*************** record_mem_ref (struct mem_ref **mem_ref
*** 874,885 ****
    *mem_refs = aref;
  }
  
! /* Releases list of memory references MEM_REFS.  */
  
  static void
! free_mem_refs (struct mem_ref *mem_refs)
  {
!   struct mem_ref *act;
  
    while (mem_refs)
      {
--- 873,884 ----
    *mem_refs = aref;
  }
  
! /* Releases list of memory reference locations MEM_REFS.  */
  
  static void
! free_mem_ref_locs (struct mem_ref_loc *mem_refs)
  {
!   struct mem_ref_loc *act;
  
    while (mem_refs)
      {
*************** free_mem_refs (struct mem_ref *mem_refs)
*** 889,1124 ****
      }
  }
  
- /* If VAR is defined in LOOP and the statement it is defined in does not belong
-    to the set SEEN, add the statement to QUEUE of length IN_QUEUE and
-    to the set SEEN.  */
- 
- static void
- maybe_queue_var (tree var, struct loop *loop,
- 		 sbitmap seen, tree *queue, unsigned *in_queue)
- {
-   tree stmt = SSA_NAME_DEF_STMT (var);
-   basic_block def_bb = bb_for_stmt (stmt);
- 	      
-   if (!def_bb
-       || !flow_bb_inside_loop_p (loop, def_bb)
-       || TEST_BIT (seen, get_stmt_uid (stmt)))
-     return;
- 	  
-   SET_BIT (seen, get_stmt_uid (stmt));
-   queue[(*in_queue)++] = stmt;
- }
- 
- /* If COMMON_REF is NULL, set COMMON_REF to *OP and return true.
-    Otherwise return true if the memory reference *OP is equal to COMMON_REF.
-    Record the reference OP to list MEM_REFS.  STMT is the statement in that
-    the reference occurs.  */
- 
- struct sra_data
- {
-   struct mem_ref **mem_refs;
-   tree common_ref;
-   tree stmt;
- };
- 
- static bool
- fem_single_reachable_address (tree *op, void *data)
- {
-   struct sra_data *sra_data = data;
- 
-   if (sra_data->common_ref
-       && !operand_equal_p (*op, sra_data->common_ref, 0))
-     return false;
-   sra_data->common_ref = *op;
- 
-   record_mem_ref (sra_data->mem_refs, sra_data->stmt, op);
-   return true;
- }
- 
- /* Runs CALLBACK for each operand of STMT that is a memory reference.  DATA
-    is passed to the CALLBACK as well.  The traversal stops if CALLBACK
-    returns false, for_each_memref then returns false as well.  Otherwise
-    for_each_memref returns true.  */
- 
- static bool
- for_each_memref (tree stmt, bool (*callback)(tree *, void *), void *data)
- {
-   tree *op;
- 
-   if (TREE_CODE (stmt) == RETURN_EXPR)
-     stmt = TREE_OPERAND (stmt, 1);
- 
-   if (TREE_CODE (stmt) == MODIFY_EXPR)
-     {
-       op = &TREE_OPERAND (stmt, 0);
-       if (TREE_CODE (*op) != SSA_NAME
- 	  && !callback (op, data))
- 	return false;
- 
-       op = &TREE_OPERAND (stmt, 1);
-       if (TREE_CODE (*op) != SSA_NAME
- 	  && is_gimple_lvalue (*op)
- 	  && !callback (op, data))
- 	return false;
- 
-       stmt = TREE_OPERAND (stmt, 1);
-     }
- 
-   if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
-     stmt = TREE_OPERAND (stmt, 0);
- 
-   if (TREE_CODE (stmt) == CALL_EXPR)
-     {
-       tree args;
- 
-       for (args = TREE_OPERAND (stmt, 1); args; args = TREE_CHAIN (args))
- 	{
- 	  op = &TREE_VALUE (args);
- 
- 	  if (TREE_CODE (*op) != SSA_NAME
- 	      && is_gimple_lvalue (*op)
- 	      && !callback (op, data))
- 	    return false;
- 	}
-     }
- 
-   return true;
- }
- 
- /* Determine whether all memory references inside the LOOP that correspond
-    to virtual ssa names defined in statement STMT are equal.
-    If so, store the list of the references to MEM_REFS, and return one
-    of them.  Otherwise store NULL to MEM_REFS and return NULL_TREE.
-    *SEEN_CALL_STMT is set to true if the virtual operands suggest
-    that the reference might be clobbered by a call inside the LOOP.  */
- 
- static tree
- single_reachable_address (struct loop *loop, tree stmt,
- 			  struct mem_ref **mem_refs,
- 			  bool *seen_call_stmt)
- {
-   unsigned max_uid = max_stmt_uid + num_ssa_names;
-   tree *queue = xmalloc (sizeof (tree) * max_uid);
-   sbitmap seen = sbitmap_alloc (max_uid);
-   unsigned in_queue = 1;
-   unsigned i;
-   struct sra_data sra_data;
-   tree call;
-   tree val;
-   ssa_op_iter iter;
-   imm_use_iterator imm_iter;
-   use_operand_p use_p;
- 
-   sbitmap_zero (seen);
- 
-   *mem_refs = NULL;
-   sra_data.mem_refs = mem_refs;
-   sra_data.common_ref = NULL_TREE;
- 
-   queue[0] = stmt;
-   SET_BIT (seen, get_stmt_uid (stmt));
-   *seen_call_stmt = false;
- 
-   while (in_queue)
-     {
-       stmt = queue[--in_queue];
-       sra_data.stmt = stmt;
- 
-       if (LIM_DATA (stmt)
- 	  && LIM_DATA (stmt)->sm_done)
- 	goto fail;
- 
-       switch (TREE_CODE (stmt))
- 	{
- 	case MODIFY_EXPR:
- 	case CALL_EXPR:
- 	case RETURN_EXPR:
- 	  if (!for_each_memref (stmt, fem_single_reachable_address,
- 				&sra_data))
- 	    goto fail;
- 
- 	  /* If this is a function that may depend on the memory location,
- 	     record the fact.  We cannot directly refuse call clobbered
- 	     operands here, since sra_data.common_ref did not have
- 	     to be set yet.  */
- 	  call = get_call_expr_in (stmt);
- 	  if (call
- 	      && !(call_expr_flags (call) & ECF_CONST))
- 	    *seen_call_stmt = true;
- 
- 	  /* Traverse also definitions of the VUSES (there may be other
- 	     distinct from the one we used to get to this statement).  */
- 	  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
- 	    maybe_queue_var (val, loop, seen, queue, &in_queue);
- 
- 	  break;
- 
- 	case PHI_NODE:
- 	  for (i = 0; i < (unsigned) PHI_NUM_ARGS (stmt); i++)
- 	    if (TREE_CODE (PHI_ARG_DEF (stmt, i)) == SSA_NAME)
- 	      maybe_queue_var (PHI_ARG_DEF (stmt, i), loop,
- 		               seen, queue, &in_queue);
- 	  break;
- 
- 	default:
- 	  goto fail;
- 	}
- 
-       /* Find uses of virtual names.  */
-       if (TREE_CODE (stmt) == PHI_NODE)
-         {
- 	  if (!is_gimple_reg (SSA_NAME_VAR (PHI_RESULT (stmt))))
- 	    FOR_EACH_IMM_USE_FAST (use_p, imm_iter, PHI_RESULT (stmt))
- 	      {	      
- 		tree imm_stmt = USE_STMT (use_p);
- 
- 		if (TEST_BIT (seen, get_stmt_uid (imm_stmt)))
- 		  continue;
- 
- 		if (!flow_bb_inside_loop_p (loop, bb_for_stmt (imm_stmt)))
- 		  continue;
- 
- 		SET_BIT (seen, get_stmt_uid (imm_stmt));
- 
- 		queue[in_queue++] = imm_stmt;
- 	      }
- 	}
-       else
- 	FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_DEFS)
- 	  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, val)
- 	    {
- 	      tree imm_stmt = USE_STMT (use_p);
- 
- 	      if (TEST_BIT (seen, get_stmt_uid (imm_stmt)))
- 		continue;
- 
- 	      if (!flow_bb_inside_loop_p (loop, bb_for_stmt (imm_stmt)))
- 		continue;
- 
- 	      SET_BIT (seen, get_stmt_uid (imm_stmt));
- 
- 	      queue[in_queue++] = imm_stmt;
- 	    }
-     }
- 
-   free (queue);
-   sbitmap_free (seen);
- 
-   return sra_data.common_ref;
- 
- fail:
-   free_mem_refs (*mem_refs);
-   *mem_refs = NULL;
-   free (queue);
-   sbitmap_free (seen);
- 
-   return NULL;
- }
- 
  /* Rewrites memory references in list MEM_REFS by variable TMP_VAR.  */
  
  static void
! rewrite_mem_refs (tree tmp_var, struct mem_ref *mem_refs)
  {
    tree var;
    ssa_op_iter iter;
--- 888,897 ----
      }
  }
  
  /* Rewrites memory references in list MEM_REFS by variable TMP_VAR.  */
  
  static void
! rewrite_mem_refs (tree tmp_var, struct mem_ref_loc *mem_refs)
  {
    tree var;
    ssa_op_iter iter;
*************** rewrite_mem_refs (tree tmp_var, struct m
*** 1143,1151 ****
  
  static void
  schedule_sm (struct loop *loop, edge *exits, unsigned n_exits, tree ref,
! 	     struct mem_ref *mem_refs)
  {
!   struct mem_ref *aref;
    tree tmp_var;
    unsigned i;
    tree load, store;
--- 916,924 ----
  
  static void
  schedule_sm (struct loop *loop, edge *exits, unsigned n_exits, tree ref,
! 	     struct mem_ref_loc *mem_refs)
  {
!   struct mem_ref_loc *aref;
    tree tmp_var;
    unsigned i;
    tree load, store;
*************** schedule_sm (struct loop *loop, edge *ex
*** 1187,1290 ****
      }
  }
  
! /* Returns true if REF may be clobbered by calls.  */
! 
! static bool
! is_call_clobbered_ref (tree ref)
! {
!   tree base;
!   HOST_WIDE_INT offset, size;
!   subvar_t sv;
!   subvar_t svars;
!   tree sref = ref;
! 
!   if (TREE_CODE (sref) == COMPONENT_REF
!       && (sref = okay_component_ref_for_subvars (sref, &offset, &size)))
!     {
!       svars = get_subvars_for_var (sref);
!       for (sv = svars; sv; sv = sv->next)
! 	{
! 	  if (overlap_subvar (offset, size, sv, NULL)
! 	      && is_call_clobbered (sv->var))
! 	    return true;
! 	}
!     }
! 	      
!   base = get_base_address (ref);
!   if (!base)
!     return true;
! 
!   if (DECL_P (base))
!     {
!       if (var_can_have_subvars (base)
! 	  && (svars = get_subvars_for_var (base)))
! 	{
! 	  for (sv = svars; sv; sv = sv->next)
! 	    if (is_call_clobbered (sv->var))
! 	      return true;
! 	  return false;
! 	}
!       else
! 	return is_call_clobbered (base);
!     }
! 
!   if (INDIRECT_REF_P (base))
!     {
!       /* Check whether the alias tags associated with the pointer
! 	 are call clobbered.  */
!       tree ptr = TREE_OPERAND (base, 0);
!       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
!       tree nmt = (pi) ? pi->name_mem_tag : NULL_TREE;
!       tree tmt = var_ann (SSA_NAME_VAR (ptr))->type_mem_tag;
! 
!       if ((nmt && is_call_clobbered (nmt))
! 	  || (tmt && is_call_clobbered (tmt)))
! 	return true;
! 
!       return false;
!     }
! 
!   gcc_unreachable ();
! }
! 
! /* Determine whether all memory references inside LOOP corresponding to the
!    virtual ssa name REG are equal to each other, and whether the address of
!    this common reference can be hoisted outside of the loop.  If this is true,
!    prepare the statements that load the value of the memory reference to a
!    temporary variable in the loop preheader, store it back on the loop exits,
!    and replace all the references inside LOOP by this temporary variable.
!    LOOP has N_EXITS stored in EXITS.  */
  
  static void
! determine_lsm_reg (struct loop *loop, edge *exits, unsigned n_exits, tree reg)
  {
!   tree ref;
!   struct mem_ref *mem_refs, *aref;
    struct loop *must_exec;
-   bool sees_call;
-   
-   if (is_gimple_reg (reg))
-     return;
-   
-   ref = single_reachable_address (loop, SSA_NAME_DEF_STMT (reg), &mem_refs,
- 				  &sees_call);
-   if (!ref)
-     return;
  
!   /* If we cannot create a ssa name for the result, give up.  */
!   if (!is_gimple_reg_type (TREE_TYPE (ref))
!       || TREE_THIS_VOLATILE (ref))
!     goto fail;
! 
!   /* If there is a call that may use the location, give up as well.  */
!   if (sees_call
!       && is_call_clobbered_ref (ref))
!     goto fail;
  
!   if (!for_each_index (&ref, may_move_till, loop))
!     goto fail;
  
!   if (tree_could_trap_p (ref))
      {
        /* If the memory access is unsafe (i.e. it might trap), ensure that some
  	 of the statements in that it occurs is always executed when the loop
--- 960,990 ----
      }
  }
  
! /* Check whether memory reference REF can be hoisted out of the LOOP.  If this
!    is true, prepare the statements that load the value of the memory reference
!    to a temporary variable in the loop preheader, store it back on the loop
!    exits, and replace all the references inside LOOP by this temporary variable.
!    LOOP has N_EXITS stored in EXITS.  CLOBBERED_VOPS is the bitmap of virtual
!    operands that are clobbered by a call or accessed through multiple references
!    in loop.  */
  
  static void
! determine_lsm_ref (struct loop *loop, edge *exits, unsigned n_exits,
! 		   bitmap clobbered_vops, struct mem_ref *ref)
  {
!   struct mem_ref_loc *aref;
    struct loop *must_exec;
  
!   /* In case the memory is not stored to, there is nothing for SM to do.  */
!   if (!ref->is_stored)
!     return;
  
!   /* If the reference is aliased with any different ref, or killed by call
!      in function, then fail.  */
!   if (bitmap_intersect_p (ref->vops, clobbered_vops))
!     return;
  
!   if (tree_could_trap_p (ref->mem))
      {
        /* If the memory access is unsafe (i.e. it might trap), ensure that some
  	 of the statements in that it occurs is always executed when the loop
*************** determine_lsm_reg (struct loop *loop, ed
*** 1297,1303 ****
  	 least one of the statements containing the memory reference is
  	 executed.  */
  
!       for (aref = mem_refs; aref; aref = aref->next)
  	{
  	  if (!LIM_DATA (aref->stmt))
  	    continue;
--- 997,1003 ----
  	 least one of the statements containing the memory reference is
  	 executed.  */
  
!       for (aref = ref->locs; aref; aref = aref->next)
  	{
  	  if (!LIM_DATA (aref->stmt))
  	    continue;
*************** determine_lsm_reg (struct loop *loop, ed
*** 1312,1324 ****
  	}
  
        if (!aref)
! 	goto fail;
      }
  
!   schedule_sm (loop, exits, n_exits, ref, mem_refs);
  
! fail: ;
!   free_mem_refs (mem_refs);
  }
  
  /* Checks whether LOOP (with N_EXITS exits stored in EXITS array) is suitable
--- 1012,1045 ----
  	}
  
        if (!aref)
! 	return;
      }
  
!   schedule_sm (loop, exits, n_exits, ref->mem, ref->locs);
! }
! 
! /* Attempts to hoist memory reference described in SLOT out of loop
!    described in DATA.  Callback for htab_traverse.  */
! 
! struct hmr_data
! {
!   struct loop *loop;	/* Loop from that the reference should be hoisted.  */
!   edge *exits;		/* Exits of the loop.  */
!   unsigned n_exits;	/* Length of the exits array.  */
!   bitmap clobbered_vops;/* The vops clobbered by call in loop or accessed by
! 			   multiple memory references.  */
! };
! 
! static int
! hoist_memory_reference (void **slot, void *data)
! {
!   struct mem_ref *ref = *slot;
!   struct hmr_data *hmr_data = data;
! 
!   determine_lsm_ref (hmr_data->loop, hmr_data->exits, hmr_data->n_exits,
! 		     hmr_data->clobbered_vops, ref);
  
!   return 1;
  }
  
  /* Checks whether LOOP (with N_EXITS exits stored in EXITS array) is suitable
*************** loop_suitable_for_sm (struct loop *loop 
*** 1338,1352 ****
    return true;
  }
  
  /* Try to perform store motion for all memory references modified inside
     LOOP.  */
  
  static void
  determine_lsm_loop (struct loop *loop)
  {
-   tree phi;
    unsigned n_exits;
    edge *exits = get_loop_exit_edges (loop, &n_exits);
  
    if (!loop_suitable_for_sm (loop, exits, n_exits))
      {
--- 1059,1245 ----
    return true;
  }
  
+ /* A hash function for struct mem_ref object OBJ.  */
+ 
+ static hashval_t
+ memref_hash (const void *obj)
+ {
+   const struct mem_ref *mem = obj;
+ 
+   return mem->hash;
+ }
+ 
+ /* An equality function for struct mem_ref object OBJ1 with
+    memory reference OBJ2.  */
+ 
+ static int
+ memref_eq (const void *obj1, const void *obj2)
+ {
+   const struct mem_ref *mem1 = obj1;
+ 
+   return operand_equal_p (mem1->mem, (tree) obj2, 0);
+ }
+ 
+ /* A function to free the struct mem_ref object OBJ.  */
+ 
+ static void
+ memref_del (void *obj)
+ {
+   struct mem_ref *mem = obj;
+ 
+   free_mem_ref_locs (mem->locs);
+   BITMAP_FREE (mem->vops);
+   free (mem);
+ }
+ 
+ /* Gathers memory references in statement STMT in LOOP, storing the
+    information about them in MEM_REFS hash table.  Note vops accessed through
+    unrecognized statements in CLOBBERED_VOPS.  */
+ 
+ static void
+ gather_mem_refs_stmt (struct loop *loop, htab_t mem_refs,
+ 		      bitmap clobbered_vops, tree stmt)
+ {
+   tree *lhs, *rhs, *mem = NULL;
+   hashval_t hash;
+   PTR *slot;
+   struct mem_ref *ref = NULL;
+   ssa_op_iter oi;
+   tree vname;
+   bool is_stored;
+ 
+   if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
+     return;
+ 
+   /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
+   if (TREE_CODE (stmt) != MODIFY_EXPR)
+     goto fail;
+ 
+   lhs = &TREE_OPERAND (stmt, 0);
+   rhs = &TREE_OPERAND (stmt, 1);
+ 
+   if (TREE_CODE (*lhs) == SSA_NAME)
+     {
+       if (!is_gimple_addressable (*rhs))
+ 	goto fail;
+ 
+       mem = rhs;
+       is_stored = false;
+     }
+   else if (TREE_CODE (*rhs) == SSA_NAME
+ 	   || is_gimple_min_invariant (*rhs))
+     {
+       mem = lhs;
+       is_stored = true;
+     }
+   else
+     goto fail;
+ 
+   /* If we cannot create a ssa name for the result, give up.  */
+   if (!is_gimple_reg_type (TREE_TYPE (*mem))
+       || TREE_THIS_VOLATILE (*mem))
+     goto fail;
+ 
+   /* If we cannot move the reference from the loop, fail.  */
+   if (!for_each_index (mem, may_move_till, loop))
+     goto fail;
+ 
+   hash = iterative_hash_expr (*mem, 0);
+   slot = htab_find_slot_with_hash (mem_refs, *mem, hash, INSERT);
+ 
+   if (*slot)
+     ref = *slot;
+   else
+     {
+       ref = xmalloc (sizeof (struct mem_ref));
+       ref->mem = *mem;
+       ref->hash = hash;
+       ref->locs = NULL;
+       ref->is_stored = false;
+       ref->vops = BITMAP_ALLOC (NULL);
+       *slot = ref;
+     }
+   ref->is_stored |= is_stored;
+ 
+   FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi,
+ 			     SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
+     {
+       bitmap_set_bit (ref->vops,
+ 		      var_ann (SSA_NAME_VAR (vname))->uid);
+     }
+   record_mem_ref_loc (&ref->locs, stmt, mem);
+   return;
+ 
+ fail:
+   FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi,
+ 			     SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
+     {
+       bitmap_set_bit (clobbered_vops,
+ 		      var_ann (SSA_NAME_VAR (vname))->uid);
+     }
+ }
+ 
+ /* Gathers memory references in LOOP, storing the information about them
+    in MEM_REFS hash table.  Note vops accessed through unrecognized
+    statements in CLOBBERED_VOPS.  */
+ 
+ static void
+ gather_mem_refs (struct loop *loop, htab_t mem_refs, bitmap clobbered_vops)
+ {
+   basic_block *body = get_loop_body (loop);
+   block_stmt_iterator bsi;
+   unsigned i;
+ 
+   for (i = 0; i < loop->num_nodes; i++)
+     {
+       for (bsi = bsi_start (body[i]); !bsi_end_p (bsi); bsi_next (&bsi))
+ 	gather_mem_refs_stmt (loop, mem_refs, clobbered_vops, bsi_stmt (bsi));
+     }
+ 
+   free (body);
+ }
+ 
+ /* Finds the vops accessed by memory reference described in SLOT as well as
+    some other reference(s) and marks them in DATA->clobbered_vops.
+    Callback for htab_traverse.  */
+ 
+ struct fmrv_data
+ {
+   bitmap clobbered_vops;	/* The vops clobbered by call in loop or accessed by
+ 			   multiple memory references.  */
+   bitmap all_vops;	/* All vops referenced in the loop.  */
+ };
+ 
+ static int
+ find_more_ref_vops (void **slot, void *data)
+ {
+   struct mem_ref *ref = *slot;
+   struct fmrv_data *fmrv_data = data;
+   bitmap_head tmp;
+ 
+   /* The vops that are already in all_vops are accessed by more than
+      one memory reference.  */
+   bitmap_initialize (&tmp, &bitmap_default_obstack);
+   bitmap_and (&tmp, fmrv_data->all_vops, ref->vops);
+   bitmap_ior_into (fmrv_data->clobbered_vops, &tmp);
+   bitmap_clear (&tmp);
+ 
+   bitmap_ior_into (fmrv_data->all_vops, ref->vops);
+   return 1;
+ }
+ 
  /* Try to perform store motion for all memory references modified inside
     LOOP.  */
  
  static void
  determine_lsm_loop (struct loop *loop)
  {
    unsigned n_exits;
    edge *exits = get_loop_exit_edges (loop, &n_exits);
+   htab_t mem_refs;
+   struct hmr_data hmr_data;
+   struct fmrv_data fmrv_data;
+   bitmap clobbered_vops;
  
    if (!loop_suitable_for_sm (loop, exits, n_exits))
      {
*************** determine_lsm_loop (struct loop *loop)
*** 1354,1363 ****
        return;
      }
  
!   for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
!     determine_lsm_reg (loop, exits, n_exits, PHI_RESULT (phi));
  
    free (exits);
  }
  
  /* Try to perform store motion for all memory references modified inside
--- 1247,1274 ----
        return;
      }
  
!   mem_refs = htab_create (100, memref_hash, memref_eq, memref_del);
! 
!   /* Find the memory references in LOOP.  */
!   clobbered_vops = BITMAP_ALLOC (NULL);
!   gather_mem_refs (loop, mem_refs, clobbered_vops);
! 
!   /* Find the vops that are used for more than one reference.  */
!   fmrv_data.all_vops = BITMAP_ALLOC (NULL);
!   fmrv_data.clobbered_vops = clobbered_vops;
!   htab_traverse (mem_refs, find_more_ref_vops, &fmrv_data);
!   BITMAP_FREE (fmrv_data.all_vops);
! 
!   /* Hoist all suitable memory references.  */
!   hmr_data.loop = loop;
!   hmr_data.exits = exits;
!   hmr_data.n_exits = n_exits;
!   hmr_data.clobbered_vops = clobbered_vops;
!   htab_traverse (mem_refs, hoist_memory_reference, &hmr_data);
  
+   htab_delete (mem_refs);
    free (exits);
+   BITMAP_FREE (clobbered_vops);
  }
  
  /* Try to perform store motion for all memory references modified inside
*************** static void
*** 1367,1391 ****
  determine_lsm (struct loops *loops)
  {
    struct loop *loop;
-   basic_block bb;
  
    if (!loops->tree_root->inner)
      return;
  
!   /* Create a UID for each statement in the function.  Ordering of the
!      UIDs is not important for this pass.  */
!   max_stmt_uid = 0;
!   FOR_EACH_BB (bb)
!     {
!       block_stmt_iterator bsi;
! 
!       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
! 	stmt_ann (bsi_stmt (bsi))->uid = max_stmt_uid++;
!     }
! 
!   /* Pass the loops from the outermost.  For each virtual operand loop phi node
!      check whether all the references inside the loop correspond to a single
!      address, and if so, move them.  */
  
    loop = loops->tree_root->inner;
    while (1)
--- 1278,1289 ----
  determine_lsm (struct loops *loops)
  {
    struct loop *loop;
  
    if (!loops->tree_root->inner)
      return;
  
!   /* Pass the loops from the outermost and perform the store motion as
!      suitable.  */
  
    loop = loops->tree_root->inner;
    while (1)


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]