This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

[tree-ssa] revise sra datastructures


The vast majority of variables in a function will not need scalarization.
It is therefore a waste to allocate arrays covering all variable uids.
We now use hash tables instead of arrays, and bitmaps instead of sbitmaps.

I hope this will also give me the flexibility needed to handle aggregates
nested inside other aggregates, as well as scalarization of arrays.


r~


        * tree-sra.c (REALPART_INDEX, IMAGPART_INDEX): Remove.
        (sra_candidates, needs_copy_in): Use a bitmap.  Update all users.
        (struct sra_elt, sra_elt_hash, sra_elt_eq): New.
        (sra_map_size): Remove.
        (sra_map): Use a htab_t.
        (lookup_scalar): Update to match.
        (get_scalar_for_field, get_scalar_for_complex_part): Likewise.
        (scalarize_structure_assignment): Use annotate_all_with_locus.
        (csc_build_component_ref): Remove index argument.
        (csc_build_complex_part): Take tree_code, not index.
        (create_scalar_copies): Don't collect indices.
        (emit_scalar_copies): New.
        (scalarize_modify_expr, scalarize_tree_list): Use it.
        (scalarize_return_expr): Likewise.
        (scalarize_structures): Simplify needs_copy_in iteration.
        (scalarize_call_expr): Use annotate_all_with_locus.
        (dump_sra_map_trav): Split from ...
        (dump_sra_map): ... here.  Update for hash table.
        (tree_sra): Update for new datastructures.

Index: tree-sra.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/Attic/tree-sra.c,v
retrieving revision 1.1.2.15
diff -c -p -d -r1.1.2.15 tree-sra.c
*** tree-sra.c	17 Jan 2004 20:20:57 -0000	1.1.2.15
--- tree-sra.c	18 Jan 2004 22:55:21 -0000
*************** Software Foundation, 59 Temple Place - S
*** 51,66 ****
  	  sensible setting.  */
  #define MAX_NFIELDS_FOR_SRA	5
  
- /* Matrix indicies for the real and imaginary parts of a complex variable. */
- #define REALPART_INDEX        0
- #define IMAGPART_INDEX        1  
- 
  /* Codes indicating how to copy one structure into another.  */
  enum sra_copy_mode { SCALAR_SCALAR, FIELD_SCALAR, SCALAR_FIELD };
  
  /* Local functions.  */
  static inline bool can_be_scalarized_p (tree);
- static inline tree lookup_scalar (int var_ix, int field_ix, tree);
  static inline void insert_edge_copies (tree stmt, basic_block bb);
  static tree create_scalar_copies (tree lhs, tree rhs, enum sra_copy_mode mode);
  static inline void scalarize_component_ref (tree, tree *tp);
--- 51,61 ----
*************** static void scalarize_asm_expr (block_st
*** 72,91 ****
  static void scalarize_return_expr (block_stmt_iterator *);
  
  /* The set of aggregate variables that are candidates for scalarization.  */
! static sbitmap sra_candidates;
  
  /* Set of scalarizable PARM_DECLs that need copy-in operations at the
     beginning of the function.  */
! static sbitmap needs_copy_in;
  
! /* A matrix of NUM_REFERENCED_VARIABLES x MAX_NFIELDS_FOR_SRA to map the
!    temporary variables to the aggregate reference that they represent.
!    For example, suppose that variable 'A' is a scalarizable aggregate with
!    fields 'a', 'b' and 'c'.  If the UID of 'A' is 6, then SRA_MAP[6][1]
!    will contain the temporary variable representing 'A.b'.  */
! static tree **sra_map;
! static size_t sra_map_size;
  
  
  /* Build a temporary.  Make sure and register it to be renamed.  */
  
--- 67,116 ----
  static void scalarize_return_expr (block_stmt_iterator *);
  
  /* The set of aggregate variables that are candidates for scalarization.  */
! static bitmap sra_candidates;
  
  /* Set of scalarizable PARM_DECLs that need copy-in operations at the
     beginning of the function.  */
! static bitmap needs_copy_in;
  
! /* This structure holds the mapping between an element of an aggregate
!    and the scalar replacement variable.  */
! struct sra_elt
! {
!   enum tree_code kind;
!   tree base;
!   tree field;
!   tree replace;
! };
!     
! static htab_t sra_map;
  
+ static hashval_t
+ sra_elt_hash (const void *x)
+ {
+   const struct sra_elt *e = x;
+   hashval_t h = (size_t) e->base * e->kind;
+   if (e->kind == COMPONENT_REF)
+     h ^= (size_t) e->field;
+   return h;
+ }
+ 
+ static int
+ sra_elt_eq (const void *x, const void *y)
+ {
+   const struct sra_elt *a = x;
+   const struct sra_elt *b = y;
+ 
+   if (a->kind != b->kind)
+     return false;
+   if (a->base != b->base)
+     return false;
+   if (a->kind == COMPONENT_REF)
+     if (a->field != b->field)
+       return false;
+ 
+   return true;
+ }
  
  /* Build a temporary.  Make sure and register it to be renamed.  */
  
*************** mark_all_vdefs (tree stmt)
*** 123,129 ****
  static bool
  is_sra_candidate_decl (tree decl)
  {
!   return DECL_P (decl) && TEST_BIT (sra_candidates, var_ann (decl)->uid);
  }
  
  /* Return true if EXP is of the form <ref decl>, where REF is one of the
--- 148,154 ----
  static bool
  is_sra_candidate_decl (tree decl)
  {
!   return DECL_P (decl) && bitmap_bit_p (sra_candidates, var_ann (decl)->uid);
  }
  
  /* Return true if EXP is of the form <ref decl>, where REF is one of the
*************** is_sra_candidate_ref (tree exp, bool all
*** 158,182 ****
     a new scalar with type TYPE.  */
  
  static tree
! lookup_scalar (int var_ix, int field_ix, tree type)
  {
! #ifdef ENABLE_CHECKING
!   if (var_ix < 0 || (size_t) var_ix >= sra_map_size)
!     abort ();
!   if (field_ix < 0 || field_ix >= MAX_NFIELDS_FOR_SRA)
!     abort ();
! #endif
! 
!   /* Create a new row for VAR in SRA_MAP, if necessary.  */
!   if (sra_map[var_ix] == NULL)
!     sra_map[var_ix] = xcalloc (MAX_NFIELDS_FOR_SRA, sizeof (tree));
  
!   /* If we still have not created a new scalar for FIELD, create one and
!      add it to the list of referenced variables.  */
!   if (sra_map[var_ix][field_ix] == NULL_TREE)
!     sra_map[var_ix][field_ix] = make_temp (type, "SR");
  
!   return sra_map[var_ix][field_ix];
  }
  
  
--- 183,203 ----
     a new scalar with type TYPE.  */
  
  static tree
! lookup_scalar (struct sra_elt *key, tree type)
  {
!   struct sra_elt **slot, *res;
  
!   slot = (struct sra_elt **) htab_find_slot (sra_map, key, INSERT);
!   res = *slot;
!   if (!res)
!     {
!       res = xmalloc (sizeof (*res));
!       *slot = res;
!       *res = *key;
!       res->replace = make_temp (type, "SR");
!     }
  
!   return res->replace;
  }
  
  
*************** lookup_scalar (int var_ix, int field_ix,
*** 187,225 ****
  static tree
  get_scalar_for_field (tree var, tree field)
  {
!   int var_ix, f_ix;
!   tree f, type;
! 
!   var_ix = var_ann (var)->uid;
! 
!   /* Find the index number for FIELD.  */
!   type = TREE_TYPE (var);
!   f_ix = 0;
!   for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
!     {
!       if (TREE_CODE (f) != FIELD_DECL)
! 	continue;
! 
!       if (field == f)
! 	break;
! 
!       f_ix++;
!     }
  
!   /* We should have found FIELD.  */
!   if (f == NULL_TREE)
!     abort ();
  
!   return lookup_scalar (var_ix, f_ix, TREE_TYPE (field));
  }
  
  
  /* Similarly for the parts of a complex type.  */
  
  static tree
! get_scalar_for_complex_part (tree var, int part)
  {
!   return lookup_scalar (var_ann (var)->uid, part, TREE_TYPE (TREE_TYPE (var)));
  }
  
  /* Return true if the fields of VAR can be replaced by scalar temporaries.
--- 208,234 ----
  static tree
  get_scalar_for_field (tree var, tree field)
  {
!   struct sra_elt key;
  
!   key.kind = COMPONENT_REF;
!   key.base = var;
!   key.field = field;
  
!   return lookup_scalar (&key, TREE_TYPE (field));
  }
  
  
  /* Similarly for the parts of a complex type.  */
  
  static tree
! get_scalar_for_complex_part (tree var, enum tree_code part)
  {
!   struct sra_elt key;
! 
!   key.kind = part;
!   key.base = var;
! 
!   return lookup_scalar (&key, TREE_TYPE (TREE_TYPE (var)));
  }
  
  /* Return true if the fields of VAR can be replaced by scalar temporaries.
*************** scalarize_component_ref (tree stmt, tree
*** 351,357 ****
       operations may end up being dead, but we won't know until we rename
       the new variables into SSA.  */
    if (TREE_CODE (obj) == PARM_DECL)
!     SET_BIT (needs_copy_in, var_ann (obj)->uid);
  
    switch (TREE_CODE (t))
      {
--- 360,366 ----
       operations may end up being dead, but we won't know until we rename
       the new variables into SSA.  */
    if (TREE_CODE (obj) == PARM_DECL)
!     bitmap_set_bit (needs_copy_in, var_ann (obj)->uid);
  
    switch (TREE_CODE (t))
      {
*************** scalarize_component_ref (tree stmt, tree
*** 359,368 ****
        t = get_scalar_for_field (obj, TREE_OPERAND (t, 1));
        break;
      case REALPART_EXPR:
-       t = get_scalar_for_complex_part (obj, REALPART_INDEX);
-       break;
      case IMAGPART_EXPR:
!       t = get_scalar_for_complex_part (obj, IMAGPART_INDEX);
        break;
      default:
        abort ();
--- 368,375 ----
        t = get_scalar_for_field (obj, TREE_OPERAND (t, 1));
        break;
      case REALPART_EXPR:
      case IMAGPART_EXPR:
!       t = get_scalar_for_complex_part (obj, TREE_CODE (t));
        break;
      default:
        abort ();
*************** scalarize_structure_assignment (block_st
*** 407,414 ****
      abort ();
  #endif
  
!   lhs_can = lhs_ann && TEST_BIT (sra_candidates, lhs_ann->uid);
!   rhs_can = rhs_ann && TEST_BIT (sra_candidates, rhs_ann->uid);
  
    /* Both LHS and RHS are scalarizable.  */
    if (lhs_can && rhs_can)
--- 414,421 ----
      abort ();
  #endif
  
!   lhs_can = lhs_ann && bitmap_bit_p (sra_candidates, lhs_ann->uid);
!   rhs_can = rhs_ann && bitmap_bit_p (sra_candidates, rhs_ann->uid);
  
    /* Both LHS and RHS are scalarizable.  */
    if (lhs_can && rhs_can)
*************** scalarize_structure_assignment (block_st
*** 423,431 ****
      list = create_scalar_copies (lhs, rhs, SCALAR_FIELD);
  
    /* If neither side is scalarizable, do nothing else.  */
!   if (list == NULL_TREE)
      return;
  
    /* Replace the existing statement with the newly created list of
       scalarized copies.  When replacing the original statement, the first
       copy replaces it and the remaining copies are inserted either after
--- 430,442 ----
      list = create_scalar_copies (lhs, rhs, SCALAR_FIELD);
  
    /* If neither side is scalarizable, do nothing else.  */
!   else
      return;
  
+   /* Set line number information for our replacements.  */
+   if (EXPR_LOCUS (orig_stmt))
+     annotate_all_with_locus (&list, *EXPR_LOCUS (orig_stmt));
+ 
    /* Replace the existing statement with the newly created list of
       scalarized copies.  When replacing the original statement, the first
       copy replaces it and the remaining copies are inserted either after
*************** find_candidates_for_sra (void)
*** 460,466 ****
  	   || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
  	  && can_be_scalarized_p (var))
  	{
! 	  SET_BIT (sra_candidates, var_ann (var)->uid);
  	  any_set = true;
  	}
      }
--- 471,477 ----
  	   || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
  	  && can_be_scalarized_p (var))
  	{
! 	  bitmap_set_bit (sra_candidates, var_ann (var)->uid);
  	  any_set = true;
  	}
      }
*************** csc_assign (tree_stmt_iterator *tsi, tre
*** 514,520 ****
     expression for BASE referencing FIELD.  INDEX is the field index.  */
  
  static tree
! csc_build_component_ref (tree base, tree field, int index)
  {
    switch (TREE_CODE (base))
      {
--- 525,531 ----
     expression for BASE referencing FIELD.  INDEX is the field index.  */
  
  static tree
! csc_build_component_ref (tree base, tree field)
  {
    switch (TREE_CODE (base))
      {
*************** csc_build_component_ref (tree base, tree
*** 528,535 ****
  
      default:
        /* Avoid sharing BASE when building the different COMPONENT_REFs.
! 	 We let the field with index zero have the original version.  */
!       if (index > 0)
  	base = unshare_expr (base);
        break;
  
--- 539,546 ----
  
      default:
        /* Avoid sharing BASE when building the different COMPONENT_REFs.
! 	 We let the first field have the original version.  */
!       if (field != TYPE_FIELDS (TREE_TYPE (base)))
  	base = unshare_expr (base);
        break;
  
*************** csc_build_component_ref (tree base, tree
*** 545,567 ****
  /* Similarly for REALPART_EXPR and IMAGPART_EXPR for complex types.  */
  
  static tree
! csc_build_complex_part (tree base, int part)
  {
    switch (TREE_CODE (base))
      {
      case COMPLEX_CST:
!       if (part == REALPART_INDEX)
  	return TREE_REALPART (base);
        else
  	return TREE_IMAGPART (base);
  
      case COMPLEX_EXPR:
!       return TREE_OPERAND (base, part);
  
      default:
        /* Avoid sharing BASE when building the different references.
  	 We let the real part have the original version.  */
!       if (part > 0)
  	base = unshare_expr (base);
        break;
  
--- 556,581 ----
  /* Similarly for REALPART_EXPR and IMAGPART_EXPR for complex types.  */
  
  static tree
! csc_build_complex_part (tree base, enum tree_code part)
  {
    switch (TREE_CODE (base))
      {
      case COMPLEX_CST:
!       if (part == REALPART_EXPR)
  	return TREE_REALPART (base);
        else
  	return TREE_IMAGPART (base);
  
      case COMPLEX_EXPR:
!       if (part == REALPART_EXPR)
!         return TREE_OPERAND (base, 0);
!       else
!         return TREE_OPERAND (base, 1);
  
      default:
        /* Avoid sharing BASE when building the different references.
  	 We let the real part have the original version.  */
!       if (part != REALPART_EXPR)
  	base = unshare_expr (base);
        break;
  
*************** csc_build_complex_part (tree base, int p
*** 571,578 ****
        break;
      }
  
!   return build1 (part == REALPART_INDEX ? REALPART_EXPR : IMAGPART_EXPR,
! 		 TREE_TYPE (TREE_TYPE (base)), base);
  }
  
  /* Create and return a list of assignments to perform a scalarized
--- 585,591 ----
        break;
      }
  
!   return build1 (part, TREE_TYPE (TREE_TYPE (base)), base);
  }
  
  /* Create and return a list of assignments to perform a scalarized
*************** create_scalar_copies (tree lhs, tree rhs
*** 593,599 ****
  {
    tree type, list;
    tree_stmt_iterator tsi;
-   int lhs_ix, rhs_ix;
  
  #if defined ENABLE_CHECKING
    /* Sanity checking.  Check that we are not trying to scalarize a
--- 606,611 ----
*************** create_scalar_copies (tree lhs, tree rhs
*** 604,613 ****
      abort ();
  #endif
  
-   lhs_ix = DECL_P (lhs) ? (int) var_ann (lhs)->uid : -1;
-   rhs_ix = DECL_P (rhs) ? (int) var_ann (rhs)->uid : -1;
    type = TREE_TYPE (lhs);
- 
    list = alloc_stmt_list ();
    tsi = tsi_start (list);
  
--- 616,622 ----
*************** create_scalar_copies (tree lhs, tree rhs
*** 636,659 ****
       we rename the new variables into SSA.  */
    if ((mode == SCALAR_SCALAR || mode == FIELD_SCALAR)
        && TREE_CODE (rhs) == PARM_DECL)
!     SET_BIT (needs_copy_in, rhs_ix);
  
    /* Now create scalar copies for each individual field according to MODE.  */
    if (TREE_CODE (type) == COMPLEX_TYPE)
      {
        /* Create scalar copies of both the real and imaginary parts.  */
        tree real_lhs, real_rhs, imag_lhs, imag_rhs;
-       tree inner_type = TREE_TYPE (type);
  
        if (mode == SCALAR_FIELD)
  	{
! 	  real_rhs = csc_build_complex_part (rhs, REALPART_INDEX);
! 	  imag_rhs = csc_build_complex_part (rhs, IMAGPART_INDEX);
  	}
        else
  	{
! 	  real_rhs = lookup_scalar (rhs_ix, REALPART_INDEX, inner_type);
! 	  imag_rhs = lookup_scalar (rhs_ix, IMAGPART_INDEX, inner_type);
  	}
  
        if (mode == FIELD_SCALAR)
--- 645,667 ----
       we rename the new variables into SSA.  */
    if ((mode == SCALAR_SCALAR || mode == FIELD_SCALAR)
        && TREE_CODE (rhs) == PARM_DECL)
!     bitmap_set_bit (needs_copy_in, var_ann (rhs)->uid);
  
    /* Now create scalar copies for each individual field according to MODE.  */
    if (TREE_CODE (type) == COMPLEX_TYPE)
      {
        /* Create scalar copies of both the real and imaginary parts.  */
        tree real_lhs, real_rhs, imag_lhs, imag_rhs;
  
        if (mode == SCALAR_FIELD)
  	{
! 	  real_rhs = csc_build_complex_part (rhs, REALPART_EXPR);
! 	  imag_rhs = csc_build_complex_part (rhs, IMAGPART_EXPR);
  	}
        else
  	{
! 	  real_rhs = get_scalar_for_complex_part (rhs, REALPART_EXPR);
! 	  imag_rhs = get_scalar_for_complex_part (rhs, IMAGPART_EXPR);
  	}
  
        if (mode == FIELD_SCALAR)
*************** create_scalar_copies (tree lhs, tree rhs
*** 669,676 ****
  	}
        else
  	{
! 	  real_lhs = lookup_scalar (lhs_ix, REALPART_INDEX, inner_type);
! 	  imag_lhs = lookup_scalar (lhs_ix, IMAGPART_INDEX, inner_type);
  
  	  csc_assign (&tsi, real_lhs, real_rhs);
  	  csc_assign (&tsi, imag_lhs, imag_rhs);
--- 677,684 ----
  	}
        else
  	{
! 	  real_lhs = get_scalar_for_complex_part (lhs, REALPART_EXPR);
! 	  imag_lhs = get_scalar_for_complex_part (lhs, IMAGPART_EXPR);
  
  	  csc_assign (&tsi, real_lhs, real_rhs);
  	  csc_assign (&tsi, imag_lhs, imag_rhs);
*************** create_scalar_copies (tree lhs, tree rhs
*** 678,711 ****
      }
    else
      {
-       int f_ix;
        tree f;
  
!       for (f_ix = 0, f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
  	{
! 	  tree lhs_var, rhs_var, inner_type;
  
  	  /* Only copy FIELD_DECLs.  */
  	  if (TREE_CODE (f) != FIELD_DECL)
  	    continue;
  
- 	  inner_type = TREE_TYPE (f);
- 
  	  if (mode == FIELD_SCALAR)
! 	    lhs_var = csc_build_component_ref (lhs, f, f_ix);
  	  else
! 	    lhs_var = lookup_scalar (lhs_ix, f_ix, inner_type);
  
  	  if (mode == SCALAR_FIELD)
! 	    rhs_var = csc_build_component_ref (rhs, f, f_ix);
  	  else
! 	    rhs_var = lookup_scalar (rhs_ix, f_ix, inner_type);
  
  	  csc_assign (&tsi, lhs_var, rhs_var);
- 
- 	  /* Note that we cannot increase the field index in the
- 	     loop header because we skip non-decl fields.  */
- 	  f_ix++;
  	}
      }
  
--- 686,712 ----
      }
    else
      {
        tree f;
  
!       for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
  	{
! 	  tree lhs_var, rhs_var;
  
  	  /* Only copy FIELD_DECLs.  */
  	  if (TREE_CODE (f) != FIELD_DECL)
  	    continue;
  
  	  if (mode == FIELD_SCALAR)
! 	    lhs_var = csc_build_component_ref (lhs, f);
  	  else
! 	    lhs_var = get_scalar_for_field (lhs, f);
  
  	  if (mode == SCALAR_FIELD)
! 	    rhs_var = csc_build_component_ref (rhs, f);
  	  else
! 	    rhs_var = get_scalar_for_field (rhs, f);
  
  	  csc_assign (&tsi, lhs_var, rhs_var);
  	}
      }
  
*************** create_scalar_copies (tree lhs, tree rhs
*** 733,738 ****
--- 734,754 ----
    return list;
  }
  
+ /* A helper function that creates the copies, updates line info,
+    and emits the code either before or after BSI.  */
+ 
+ static void
+ emit_scalar_copies (block_stmt_iterator *bsi, tree lhs, tree rhs,
+ 		    enum sra_copy_mode mode)
+ {
+   tree list = create_scalar_copies (lhs, rhs, mode);
+   tree stmt = bsi_stmt (*bsi);
+ 
+   if (EXPR_LOCUS (stmt))
+     annotate_all_with_locus (&list, *EXPR_LOCUS (stmt));
+ 
+   bsi_insert_before (bsi, list, BSI_SAME_STMT);
+ }
  
  /* Traverse all the statements in the function replacing references to
     scalarizable structures with their corresponding scalar temporaries.  */
*************** scalarize_structures (void)
*** 771,785 ****
  
    /* Initialize the scalar replacements for every structure that is a
       function argument.  */
!   EXECUTE_IF_SET_IN_SBITMAP (sra_candidates, 0, i,
      {
        tree var = referenced_var (i);
!       if (TREE_CODE (var) == PARM_DECL
! 	  && TEST_BIT (needs_copy_in, var_ann (var)->uid))
! 	{
! 	  tree list = create_scalar_copies (var, var, SCALAR_FIELD);
! 	  bsi_insert_on_edge (ENTRY_BLOCK_PTR->succ, list);
! 	}
      });
  
    /* Commit edge insertions.  */
--- 787,797 ----
  
    /* Initialize the scalar replacements for every structure that is a
       function argument.  */
!   EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i,
      {
        tree var = referenced_var (i);
!       tree list = create_scalar_copies (var, var, SCALAR_FIELD);
!       bsi_insert_on_edge (ENTRY_BLOCK_PTR->succ, list);
      });
  
    /* Commit edge insertions.  */
*************** scalarize_modify_expr (block_stmt_iterat
*** 861,868 ****
    else if (is_sra_candidate_ref (rhs, true))
      {
        tree var = TREE_OPERAND (rhs, 0);
!       tree list = create_scalar_copies (var, var, FIELD_SCALAR);
!       bsi_insert_before (si_p, list, BSI_SAME_STMT);
      }
  
    /* Found AGGREGATE = ... or ... = AGGREGATE  */
--- 873,879 ----
    else if (is_sra_candidate_ref (rhs, true))
      {
        tree var = TREE_OPERAND (rhs, 0);
!       emit_scalar_copies (si_p, var, var, FIELD_SCALAR);
      }
  
    /* Found AGGREGATE = ... or ... = AGGREGATE  */
*************** scalarize_tree_list (tree list, block_st
*** 887,894 ****
  	  int index = var_ann (arg)->uid;
  	  if (!bitmap_bit_p (done, index))
  	    {
! 	      tree list = create_scalar_copies (arg, arg, FIELD_SCALAR);
! 	      bsi_insert_before (si_p, list, BSI_SAME_STMT);
  	      bitmap_set_bit (done, index);
  	    }
  	}
--- 898,904 ----
  	  int index = var_ann (arg)->uid;
  	  if (!bitmap_bit_p (done, index))
  	    {
! 	      emit_scalar_copies (si_p, arg, arg, FIELD_SCALAR);
  	      bitmap_set_bit (done, index);
  	    }
  	}
*************** scalarize_call_expr (block_stmt_iterator
*** 927,932 ****
--- 937,944 ----
        if (is_sra_candidate_decl (var))
  	{
  	  tree list = create_scalar_copies (var, var, SCALAR_FIELD);
+ 	  if (EXPR_LOCUS (stmt))
+ 	    annotate_all_with_locus (&list, *EXPR_LOCUS (stmt));
  	  if (stmt_ends_bb_p (stmt))
  	    insert_edge_copies (list, bb_for_stmt (stmt));
  	  else
*************** scalarize_return_expr (block_stmt_iterat
*** 967,988 ****
    /* Handle a bare RESULT_DECL.  This will handle for types needed
       constructors, or possibly after NRV type optimizations.  */
    if (is_sra_candidate_decl (op))
!     {
!       tree list = create_scalar_copies (op, op, FIELD_SCALAR);
!       bsi_insert_before (si_p, list, BSI_SAME_STMT);
!     }
    else if (TREE_CODE (op) == MODIFY_EXPR)
      {
        tree *rhs_p = &TREE_OPERAND (op, 1);
        tree rhs = *rhs_p;
  
- 
        /* Handle 'return STRUCTURE;'  */
        if (is_sra_candidate_decl (rhs))
! 	{
! 	  tree list = create_scalar_copies (rhs, rhs, FIELD_SCALAR);
! 	  bsi_insert_before (si_p, list, BSI_SAME_STMT);
! 	}
  
        /* Handle 'return STRUCTURE.FIELD;'  */
        else if (is_sra_candidate_ref (rhs, false))
--- 979,993 ----
    /* Handle a bare RESULT_DECL.  This will handle for types needed
       constructors, or possibly after NRV type optimizations.  */
    if (is_sra_candidate_decl (op))
!     emit_scalar_copies (si_p, op, op, FIELD_SCALAR);
    else if (TREE_CODE (op) == MODIFY_EXPR)
      {
        tree *rhs_p = &TREE_OPERAND (op, 1);
        tree rhs = *rhs_p;
  
        /* Handle 'return STRUCTURE;'  */
        if (is_sra_candidate_decl (rhs))
! 	emit_scalar_copies (si_p, rhs, rhs, FIELD_SCALAR);
  
        /* Handle 'return STRUCTURE.FIELD;'  */
        else if (is_sra_candidate_ref (rhs, false))
*************** scalarize_return_expr (block_stmt_iterat
*** 1002,1058 ****
  
  /* Debugging dump for the scalar replacement map.  */
  
! static void
! dump_sra_map (FILE *f)
  {
!   size_t i, j;
! 
!   if (!sra_map)
!     return;
! 
!   fprintf (f, "Scalar replacements for %s:\n\n",
! 	   (*lang_hooks.decl_printable_name) (current_function_decl, 2));
  
!   for (i = 0; i < sra_map_size; i++)
      {
!       tree var, type;
! 
!       if (!sra_map[i])
! 	continue;
! 
!       var = referenced_var (i);
!       type = TREE_TYPE (var);
  
!       if (TREE_CODE (type) == COMPLEX_TYPE)
! 	{
! 	  for (j = 0; j < 2; j++)
! 	    if (sra_map[i][j])
! 	      {
! 		fputs (j == REALPART_INDEX ? "__real__ " : "__imag__ ", f);
! 		print_generic_expr (tree_dump_file, var, 0);
! 		fprintf (f, " -> %s\n", get_name (sra_map[i][j]));
! 	      }
! 	}
!       else
! 	{
! 	  tree field;
! 	  for (j = 0, field = TYPE_FIELDS (type); field;
! 	       field = TREE_CHAIN (field))
! 	    {
! 	      if (TREE_CODE (field) != FIELD_DECL)
! 		continue;
! 	      if (sra_map[i][j])
! 		{
! 		  print_generic_expr (tree_dump_file, var, 0);
! 		  fprintf (f, ".%s -> %s\n", get_name (field),
! 			   get_name (sra_map[i][j]));
! 		}
! 	      j++;
! 	    }
! 	}
  
!       fprintf (f, "\n");
!     }
  }
  
  /* Main entry point to Scalar Replacement of Aggregates (SRA).  This pass
--- 1007,1047 ----
  
  /* Debugging dump for the scalar replacement map.  */
  
! static int
! dump_sra_map_trav (void **slot, void *data)
  {
!   struct sra_elt *e = *slot;
!   FILE *f = data;
  
!   switch (e->kind)
      {
!     case REALPART_EXPR:
!       fputs ("__real__ ", f);
!       print_generic_expr (tree_dump_file, e->base, 0);
!       fprintf (f, " -> %s\n", get_name (e->replace));
!       break;
!     case IMAGPART_EXPR:
!       fputs ("__imag__ ", f);
!       print_generic_expr (tree_dump_file, e->base, 0);
!       fprintf (f, " -> %s\n", get_name (e->replace));
!       break;
!     case COMPONENT_REF:
!       print_generic_expr (tree_dump_file, e->base, 0);
!       fprintf (f, ".%s -> %s\n", get_name (e->field), get_name (e->replace));
!       break;
!     default:
!       abort ();
!     }
  
!   return 1;
! }
  
! static void
! dump_sra_map (FILE *f)
! {
!   fputs ("Scalar replacements:\n", f);
!   htab_traverse_noresize (sra_map, dump_sra_map_trav, f);
!   fputs ("\n\n", f);
  }
  
  /* Main entry point to Scalar Replacement of Aggregates (SRA).  This pass
*************** dump_sra_map (FILE *f)
*** 1081,1108 ****
  static void
  tree_sra (void)
  {
-   size_t i;
- 
    /* Initialize local variables.  */
!   sra_candidates = sbitmap_alloc (num_referenced_vars);
!   sbitmap_zero (sra_candidates);
    sra_map = NULL;
    needs_copy_in = NULL;
  
    /* Find structures to be scalarized.  */
    if (!find_candidates_for_sra ())
      {
!       sbitmap_free (sra_candidates);
        return;
      }
  
    /* If we found any, re-write structure references with their
       corresponding scalar replacement.  */
!   sra_map = xcalloc (num_referenced_vars, sizeof (tree *));
!   sra_map_size = num_referenced_vars;
! 
!   needs_copy_in = sbitmap_alloc (num_referenced_vars);
!   sbitmap_zero (needs_copy_in);
  
    scalarize_structures ();
  
--- 1070,1091 ----
  static void
  tree_sra (void)
  {
    /* Initialize local variables.  */
!   sra_candidates = BITMAP_XMALLOC ();
    sra_map = NULL;
    needs_copy_in = NULL;
  
    /* Find structures to be scalarized.  */
    if (!find_candidates_for_sra ())
      {
!       BITMAP_XFREE (sra_candidates);
        return;
      }
  
    /* If we found any, re-write structure references with their
       corresponding scalar replacement.  */
!   sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, free);
!   needs_copy_in = BITMAP_XMALLOC ();
  
    scalarize_structures ();
  
*************** tree_sra (void)
*** 1110,1120 ****
      dump_sra_map (tree_dump_file);
  
    /* Free allocated memory.  */
!   for (i = 0; i < sra_map_size; i++)
!     free (sra_map[i]);
!   free (sra_map);
!   sbitmap_free (needs_copy_in);
!   sbitmap_free (sra_candidates);
  }
  
  static bool
--- 1093,1102 ----
      dump_sra_map (tree_dump_file);
  
    /* Free allocated memory.  */
!   htab_delete (sra_map);
!   sra_map = NULL;
!   BITMAP_XFREE (needs_copy_in);
!   BITMAP_XFREE (sra_candidates);
  }
  
  static bool


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]