[patch 4/4 v3] Allow loop prefetch code to speculatively prefetch non-constant steps


On Friday 07 May 2010 17:00:10, Zdenek Dvorak wrote:
> OK, modulo coding style fixes:
Let's hope that I found everything.

> [...]
> It might be useful to dump the step in this case, anyway.
Ok, I now use print_node_brief and the output looks like:
  group 0x80affad0 (base *arc_45, step is non-constant and loop-invariant: <nop_expr 0x20000802150>)
> [...]




Allow loop prefetch code to speculatively prefetch non-constant steps

This patch changes the prefetch code to accept non-constant step sizes
for memory references in loops. If the step size is a constant integer,
the patch should not change the current behaviour.
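
As an illustration (this loop is my own sketch, not taken from the patch
or its testcases), the kind of reference this enables is one whose stride
is only known at run time but does not change inside the loop:

  /* Hypothetical example: n is unknown at compile time but loop-invariant,
     so the step of a[i * n] becomes the non-constant tree n * sizeof (double)
     instead of being discarded by the analysis.  */
  double
  sum_column (const double *a, long rows, long n)
  {
    double s = 0.0;
    long i;

    for (i = 0; i < rows; i++)
      s += a[i * n];
    return s;
  }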

For non-constant but loop-invariant steps, this patch uses a heuristic to
activate prefetching speculatively: it prefetches the address the
reference will touch "ahead" iterations in advance.
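
In source form the heuristic amounts to something like the sketch below
(illustrative only; "ahead" is shown as a parameter here, while the pass
derives it from the prefetch latency and the estimated loop iteration
time, and the address computation is of course emitted as GIMPLE by
issue_prefetch_ref):

  /* Prefetch the address the reference will touch "ahead" iterations
     from now; with a loop-invariant step that is current + ahead * step.  */
  void
  walk (char *p, char *end, long step, long ahead)
  {
    for (; p < end; p += step)
      {
        __builtin_prefetch (p + ahead * step, 0, 3);  /* read, high locality */
        /* ... use *p ...  */
      }
  }

Since the step is loop-invariant, the prefetch address can be built by
reusing the step tree, which is what the forward computation in
issue_prefetch_ref below does.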

This patch improves some testcases where the old prefetch code could not
improve performance (e.g. mcf).

Bootstrapped and tested on s390x-ibm-linux-gnu.

Christian.

2010-05-07  Christian Borntraeger  <borntraeger@de.ibm.com>

        * tree-ssa-loop-prefetch.c (mem_ref_group): Change step to tree.
        (ar_data): Likewise.
        (dump_mem_ref): Adapt debug code to handle a tree as step.  Also
        distinguish a constant int step from a non-constant but
        loop-invariant one.
        (find_or_create_group): Change the sort algorithm to only consider
        steps that are constant ints.
        (idx_analyze_ref): Adapt code to handle a tree instead of a
        HOST_WIDE_INT for step.
        (analyze_ref): Change step parameter to tree.
        (gather_memory_references_ref): Handle a tree instead of an int and
        be prepared to see a NULL_TREE step.
        (prune_ref_by_self_reuse): Do not prune prefetches if the step
        cannot be calculated at compile time.
        (prune_ref_by_group_reuse): Likewise.
        (issue_prefetch_ref): Issue prefetches for non-constant but
        loop-invariant steps.
 
Index: gcc/tree-ssa-loop-prefetch.c
===================================================================
*** gcc/tree-ssa-loop-prefetch.c.orig
--- gcc/tree-ssa-loop-prefetch.c
*************** along with GCC; see the file COPYING3.  
*** 204,210 ****
  struct mem_ref_group
  {
    tree base;			/* Base of the reference.  */
!   HOST_WIDE_INT step;		/* Step of the reference.  */
    struct mem_ref *refs;		/* References in the group.  */
    struct mem_ref_group *next;	/* Next group of references.  */
  };
--- 204,210 ----
  struct mem_ref_group
  {
    tree base;			/* Base of the reference.  */
!   tree step;			/* Step of the reference.  */
    struct mem_ref *refs;		/* References in the group.  */
    struct mem_ref_group *next;	/* Next group of references.  */
  };
*************** dump_mem_ref (FILE *file, struct mem_ref
*** 248,254 ****
    fprintf (file, "  group %p (base ", (void *) ref->group);
    print_generic_expr (file, ref->group->base, TDF_SLIM);
    fprintf (file, ", step ");
!   fprintf (file, HOST_WIDE_INT_PRINT_DEC, ref->group->step);
    fprintf (file, ")\n");
  
    fprintf (file, "  delta ");
--- 248,258 ----
    fprintf (file, "  group %p (base ", (void *) ref->group);
    print_generic_expr (file, ref->group->base, TDF_SLIM);
    fprintf (file, ", step ");
!   if (cst_and_fits_in_hwi (ref->group->step))
!     fprintf (file, HOST_WIDE_INT_PRINT_DEC, int_cst_value (ref->group->step));
!   else
!     print_node_brief (file, "is non-constant and loop-invariant:",
! 		      ref->group->step, 0);
    fprintf (file, ")\n");
  
    fprintf (file, "  delta ");
*************** dump_mem_ref (FILE *file, struct mem_ref
*** 264,282 ****
     exist.  */
  
  static struct mem_ref_group *
! find_or_create_group (struct mem_ref_group **groups, tree base,
! 		      HOST_WIDE_INT step)
  {
    struct mem_ref_group *group;
  
    for (; *groups; groups = &(*groups)->next)
      {
!       if ((*groups)->step == step
  	  && operand_equal_p ((*groups)->base, base, 0))
  	return *groups;
  
!       /* Keep the list of groups sorted by decreasing step.  */
!       if ((*groups)->step < step)
  	break;
      }
  
--- 268,287 ----
     exist.  */
  
  static struct mem_ref_group *
! find_or_create_group (struct mem_ref_group **groups, tree base, tree step)
  {
    struct mem_ref_group *group;
  
    for (; *groups; groups = &(*groups)->next)
      {
!       if (operand_equal_p ((*groups)->step, step, 0)
  	  && operand_equal_p ((*groups)->base, base, 0))
  	return *groups;
  
!       /* If step is an integer constant, keep the list of groups sorted
!          by decreasing step.  */
!       if (cst_and_fits_in_hwi ((*groups)->step) && cst_and_fits_in_hwi (step)
!           && (int_cst_value ((*groups)->step) < int_cst_value (step)))
  	break;
      }
  
*************** struct ar_data
*** 361,367 ****
  {
    struct loop *loop;			/* Loop of the reference.  */
    gimple stmt;				/* Statement of the reference.  */
!   HOST_WIDE_INT *step;			/* Step of the memory reference.  */
    HOST_WIDE_INT *delta;			/* Offset of the memory reference.  */
  };
  
--- 366,372 ----
  {
    struct loop *loop;			/* Loop of the reference.  */
    gimple stmt;				/* Statement of the reference.  */
!   tree *step;				/* Step of the memory reference.  */
    HOST_WIDE_INT *delta;			/* Offset of the memory reference.  */
  };
  
*************** idx_analyze_ref (tree base, tree *index,
*** 373,379 ****
  {
    struct ar_data *ar_data = (struct ar_data *) data;
    tree ibase, step, stepsize;
!   HOST_WIDE_INT istep, idelta = 0, imult = 1;
    affine_iv iv;
  
    if (TREE_CODE (base) == MISALIGNED_INDIRECT_REF
--- 378,384 ----
  {
    struct ar_data *ar_data = (struct ar_data *) data;
    tree ibase, step, stepsize;
!   HOST_WIDE_INT idelta = 0, imult = 1;
    affine_iv iv;
  
    if (TREE_CODE (base) == MISALIGNED_INDIRECT_REF
*************** idx_analyze_ref (tree base, tree *index,
*** 381,395 ****
      return false;
  
    if (!simple_iv (ar_data->loop, loop_containing_stmt (ar_data->stmt),
! 		  *index, &iv, false))
      return false;
    ibase = iv.base;
    step = iv.step;
  
-   if (!cst_and_fits_in_hwi (step))
-     return false;
-   istep = int_cst_value (step);
- 
    if (TREE_CODE (ibase) == POINTER_PLUS_EXPR
        && cst_and_fits_in_hwi (TREE_OPERAND (ibase, 1)))
      {
--- 386,396 ----
      return false;
  
    if (!simple_iv (ar_data->loop, loop_containing_stmt (ar_data->stmt),
! 		  *index, &iv, true))
      return false;
    ibase = iv.base;
    step = iv.step;
  
    if (TREE_CODE (ibase) == POINTER_PLUS_EXPR
        && cst_and_fits_in_hwi (TREE_OPERAND (ibase, 1)))
      {
*************** idx_analyze_ref (tree base, tree *index,
*** 402,407 ****
--- 403,414 ----
        ibase = build_int_cst (TREE_TYPE (ibase), 0);
      }
  
+   if (*ar_data->step == NULL_TREE)
+     *ar_data->step = step;
+   else
+     *ar_data->step = fold_build2 (PLUS_EXPR, sizetype,
+ 				  fold_convert (sizetype, *ar_data->step),
+ 				  fold_convert (sizetype, step));
    if (TREE_CODE (base) == ARRAY_REF)
      {
        stepsize = array_ref_element_size (base);
*************** idx_analyze_ref (tree base, tree *index,
*** 409,419 ****
  	return false;
        imult = int_cst_value (stepsize);
  
!       istep *= imult;
        idelta *= imult;
      }
  
-   *ar_data->step += istep;
    *ar_data->delta += idelta;
    *index = ibase;
  
--- 416,427 ----
  	return false;
        imult = int_cst_value (stepsize);
  
!       *ar_data->step = fold_build2 (MULT_EXPR, sizetype,
! 				    fold_convert (sizetype, *ar_data->step),
! 				    fold_convert (sizetype, stepsize));
        idelta *= imult;
      }
  
    *ar_data->delta += idelta;
    *index = ibase;
  
*************** idx_analyze_ref (tree base, tree *index,
*** 427,433 ****
  
  static bool
  analyze_ref (struct loop *loop, tree *ref_p, tree *base,
! 	     HOST_WIDE_INT *step, HOST_WIDE_INT *delta,
  	     gimple stmt)
  {
    struct ar_data ar_data;
--- 435,441 ----
  
  static bool
  analyze_ref (struct loop *loop, tree *ref_p, tree *base,
! 	     tree *step, HOST_WIDE_INT *delta,
  	     gimple stmt)
  {
    struct ar_data ar_data;
*************** analyze_ref (struct loop *loop, tree *re
*** 435,441 ****
    HOST_WIDE_INT bit_offset;
    tree ref = *ref_p;
  
!   *step = 0;
    *delta = 0;
  
    /* First strip off the component references.  Ignore bitfields.  */
--- 443,449 ----
    HOST_WIDE_INT bit_offset;
    tree ref = *ref_p;
  
!   *step = NULL_TREE;
    *delta = 0;
  
    /* First strip off the component references.  Ignore bitfields.  */
*************** static bool
*** 470,477 ****
  gather_memory_references_ref (struct loop *loop, struct mem_ref_group **refs,
  			      tree ref, bool write_p, gimple stmt)
  {
!   tree base;
!   HOST_WIDE_INT step, delta;
    struct mem_ref_group *agrp;
  
    if (get_base_address (ref) == NULL)
--- 478,485 ----
  gather_memory_references_ref (struct loop *loop, struct mem_ref_group **refs,
  			      tree ref, bool write_p, gimple stmt)
  {
!   tree base, step;
!   HOST_WIDE_INT delta;
    struct mem_ref_group *agrp;
  
    if (get_base_address (ref) == NULL)
*************** gather_memory_references_ref (struct loo
*** 479,484 ****
--- 487,495 ----
  
    if (!analyze_ref (loop, &ref, &base, &step, &delta, stmt))
      return false;
+   /* Analysis may leave STEP as NULL_TREE; stop here in that case.  */
+   if (step == NULL_TREE)
+     return false;
  
    /* Now we know that REF = &BASE + STEP * iter + DELTA, where DELTA and STEP
       are integer constants.  */
*************** gather_memory_references (struct loop *l
*** 553,560 ****
  static void
  prune_ref_by_self_reuse (struct mem_ref *ref)
  {
!   HOST_WIDE_INT step = ref->group->step;
!   bool backward = step < 0;
  
    if (step == 0)
      {
--- 564,579 ----
  static void
  prune_ref_by_self_reuse (struct mem_ref *ref)
  {
!   HOST_WIDE_INT step;
!   bool backward;
! 
!   /* If the step size is non-constant, we cannot calculate prefetch_mod.  */
!   if (!cst_and_fits_in_hwi (ref->group->step))
!     return;
! 
!   step = int_cst_value (ref->group->step);
! 
!   backward = step < 0;
  
    if (step == 0)
      {
*************** static void
*** 638,645 ****
  prune_ref_by_group_reuse (struct mem_ref *ref, struct mem_ref *by,
  			  bool by_is_before)
  {
!   HOST_WIDE_INT step = ref->group->step;
!   bool backward = step < 0;
    HOST_WIDE_INT delta_r = ref->delta, delta_b = by->delta;
    HOST_WIDE_INT delta = delta_b - delta_r;
    HOST_WIDE_INT hit_from;
--- 657,664 ----
  prune_ref_by_group_reuse (struct mem_ref *ref, struct mem_ref *by,
  			  bool by_is_before)
  {
!   HOST_WIDE_INT step;
!   bool backward;
    HOST_WIDE_INT delta_r = ref->delta, delta_b = by->delta;
    HOST_WIDE_INT delta = delta_b - delta_r;
    HOST_WIDE_INT hit_from;
*************** prune_ref_by_group_reuse (struct mem_ref
*** 650,655 ****
--- 669,682 ----
    tree ref_type;
    int align_unit;
  
+   /* If the step is non-constant, we cannot calculate prefetch_before.  */
+   if (!cst_and_fits_in_hwi (ref->group->step))
+     return;
+ 
+   step = int_cst_value (ref->group->step);
+ 
+   backward = step < 0;
+ 
    if (delta == 0)
      {
        /* If the references has the same address, only prefetch the
*************** static void
*** 957,963 ****
  issue_prefetch_ref (struct mem_ref *ref, unsigned unroll_factor, unsigned ahead)
  {
    HOST_WIDE_INT delta;
!   tree addr, addr_base, write_p, local;
    gimple prefetch;
    gimple_stmt_iterator bsi;
    unsigned n_prefetches, ap;
--- 984,990 ----
  issue_prefetch_ref (struct mem_ref *ref, unsigned unroll_factor, unsigned ahead)
  {
    HOST_WIDE_INT delta;
!   tree addr, addr_base, write_p, local, forward;
    gimple prefetch;
    gimple_stmt_iterator bsi;
    unsigned n_prefetches, ap;
*************** issue_prefetch_ref (struct mem_ref *ref,
*** 980,992 ****
  
    for (ap = 0; ap < n_prefetches; ap++)
      {
!       /* Determine the address to prefetch.  */
!       delta = (ahead + ap * ref->prefetch_mod) * ref->group->step;
!       addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
! 			  addr_base, size_int (delta));
!       addr = force_gimple_operand_gsi (&bsi, unshare_expr (addr), true, NULL,
! 				       true, GSI_SAME_STMT);
! 
        /* Create the prefetch instruction.  */
        prefetch = gimple_build_call (built_in_decls[BUILT_IN_PREFETCH],
  				    3, addr, write_p, local);
--- 1007,1034 ----
  
    for (ap = 0; ap < n_prefetches; ap++)
      {
!       if (cst_and_fits_in_hwi (ref->group->step))
!         {
!           /* Determine the address to prefetch.  */
!           delta = ((ahead + ap * ref->prefetch_mod)
!                    * int_cst_value (ref->group->step));
!           addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
!                               addr_base, size_int (delta));
!           addr = force_gimple_operand_gsi (&bsi, unshare_expr (addr), true, NULL,
!                                            true, GSI_SAME_STMT);
!         }
!       else
!         {
!           /* The step size is non-constant but loop-invariant.  We use the
!              heuristic of simply prefetching AHEAD iterations ahead.  */
!           forward = fold_build2 (MULT_EXPR, sizetype,
!                                  fold_convert (sizetype, ref->group->step),
!                                  fold_convert (sizetype, size_int (ahead)));
!           addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, addr_base,
! 			      forward);
!           addr = force_gimple_operand_gsi (&bsi, unshare_expr (addr), true,
! 					   NULL, true, GSI_SAME_STMT);
!         }
        /* Create the prefetch instruction.  */
        prefetch = gimple_build_call (built_in_decls[BUILT_IN_PREFETCH],
  				    3, addr, write_p, local);

