Re: RFC/RFA: patch for PR 22156: improve SRA for bit-fields


On Mar 20, 2007, Alexandre Oliva <aoliva@redhat.com> wrote:

> I should probably have used a separate flag, but I started out
> misunderstanding how groups worked, and then, when I noticed I was
> looking for something entirely different, I had already started
> using the same flag.  I can change it if it bothers you.

Done in the patch below.

I've also fixed a couple of bugs that were present in the previous
version of the patch, one of which definitely caused a bootstrap
failure on x86 (surprisingly, not on x86_64): generate_one_element_ref
modified field instead of ret.  The other was an improvement to the
recursion logic in generate_element_zero, which might refrain from
initializing field blocks unless called explicitly for them, and
would needlessly loop over their members instead of skipping them
altogether.
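
To make the first fix concrete: in the BIT_FIELD_REF case of
generate_one_element_ref, the previous revision effectively rewrote
the shared element node in place, along these lines (my reconstruction
of the broken version, for illustration only; the fixed version is
what's in the patch):

    /* Broken (previous revision, reconstructed): clobbers the element
       node, which is shared across all uses.  */
    TREE_OPERAND (field, 0) = base;
    return field;

    /* Fixed (this patch): modify a fresh copy instead.  */
    tree ret = copy_node (field);
    TREE_OPERAND (ret, 0) = base;
    return ret;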

A further change improves assignments to BIT_FIELD_REFs:
when assigning to the whole field block, we attempt to assign to the
entire variable, even if this means copying extraneous bits from the
source.  This should greatly improve the code generated to copy
partially-grouped data structures.
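
As a hypothetical illustration of what this buys (the struct and names
below are mine, not from the patch): in

    struct s { unsigned a : 3, b : 5, c : 24; };

    struct s
    f (struct s x)
    {
      struct s y = x;  /* x and y are scalarized; a, b and c are
                          grouped into one 32-bit block variable.  */
      y.b++;
      return y;
    }

copying y's block from x previously went field by field, each copy
with its own shift-and-mask pair; with this change the grouped word is
copied in a single whole-variable assignment, even though that also
drags along any extraneous bits.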

This is still undergoing testing, but I'd appreciate it if people
could give it a spin on big-endian iron.  I've tried some testing of
embedded targets with simulators in a unified build tree, but it seems
that current dejagnu and the gcc/testsuite/<subdir> arrangements
aren't getting along, and compilation fails to find newlib headers in
the newlib source tree.  I haven't been able to fix that without
breaking the GCC testsuite entirely, unfortunately.

for  gcc/ChangeLog
from  Alexandre Oliva  <aoliva@redhat.com>

	PR middle-end/22156
	* tree-sra.c (struct sra_elt): Add in_bitfld_block.
	(sra_hash_tree): Handle BIT_FIELD_REFs.
	(sra_elt_hash): Don't hash bitfld blocks.
	(sra_elt_eq): Skip them in parent compares as well.  Handle
	BIT_FIELD_REFs.
	(build_element_name_1): Handle BIT_FIELD_REFs.
	(instantiate_missing_elements_1): Return the sra_elt.
	(canon_type_for_field): New.
	(try_instantiate_multiple_fields): New.
	(instantiate_missing_elements): Use them.
	(generate_one_element_ref): Handle BIT_FIELD_REFs.
	(REPLDUP, sra_build_elt_assignment): New.
	(generate_copy_inout): Use them.
	(generate_element_copy): Likewise.  Handle bitfld differences.
	(generate_element_zero): Don't recurse for blocks.  Use
	sra_build_elt_assignment.
	(generate_one_element_int): Take elt instead of var.  Use
	sra_build_elt_assignment.
	(generate_element_init_1): Adjust.
	(scalarize_use, scalarize_copy): Use REPLDUP.
	(scalarize_ldst): Move assert before dereference.
	(dump_sra_elt_name): Handle BIT_FIELD_REFs.
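
For reference, here's how the new try_instantiate_multiple_fields is
meant to group fields, worked by hand on a hypothetical layout
(assuming a typical target with 32-bit alignment words; none of this
is from the patch itself):

    struct s
    {
      unsigned a : 3;    /* bit  0, size  3 */
      unsigned b : 5;    /* bit  3, size  5 */
      unsigned c : 8;    /* bit  8, size  8 */
      unsigned d : 16;   /* bit 16, size 16 */
      int whole;         /* next alignment word; not grouped */
    };

a, b, c and d are adjacent, sit in the same 32-bit alignment word and
never cross it, so the scan extends the (bit, size) range from (0, 3)
to (0, 8), (0, 16) and finally (0, 32); smallest_mode_for_size then
picks SImode, and a single unsigned 32-bit replacement variable stands
in for all four fields, each member accessed as a BIT_FIELD_REF into
it.  whole starts a new alignment word, so the scan stops there.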

Index: gcc/tree-sra.c
===================================================================
--- gcc/tree-sra.c.orig	2007-03-20 01:26:13.000000000 -0300
+++ gcc/tree-sra.c	2007-03-20 06:40:39.000000000 -0300
@@ -147,6 +147,10 @@ struct sra_elt
 
   /* True if there is BIT_FIELD_REF on the lhs with a vector. */
   bool is_vector_lhs;
+
+  /* 1 if the element is a field that is part of a block, 2 if the field
+     is the block itself, 0 if it's neither.  */
+  char in_bitfld_block;
 };
 
 #define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)
@@ -461,6 +465,12 @@ sra_hash_tree (tree t)
       h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
       break;
 
+    case BIT_FIELD_REF:
+      /* Don't take operand 0 into account, that's our parent.  */
+      h = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
+      h = iterative_hash_expr (TREE_OPERAND (t, 2), h);
+      break;
+
     default:
       gcc_unreachable ();
     }
@@ -479,12 +489,14 @@ sra_elt_hash (const void *x)
 
   h = sra_hash_tree (e->element);
 
-  /* Take into account everything back up the chain.  Given that chain
-     lengths are rarely very long, this should be acceptable.  If we
-     truly identify this as a performance problem, it should work to
-     hash the pointer value "e->parent".  */
+  /* Take into account everything except bitfield blocks back up the
+     chain.  Given that chain lengths are rarely very long, this
+     should be acceptable.  If we truly identify this as a performance
+     problem, it should work to hash the pointer value
+     "e->parent".  */
   for (p = e->parent; p ; p = p->parent)
-    h = (h * 65521) ^ sra_hash_tree (p->element);
+    if (!p->in_bitfld_block)
+      h = (h * 65521) ^ sra_hash_tree (p->element);
 
   return h;
 }
@@ -497,8 +509,17 @@ sra_elt_eq (const void *x, const void *y
   const struct sra_elt *a = x;
   const struct sra_elt *b = y;
   tree ae, be;
+  const struct sra_elt *ap = a->parent;
+  const struct sra_elt *bp = b->parent;
+
+  if (ap)
+    while (ap->in_bitfld_block)
+      ap = ap->parent;
+  if (bp)
+    while (bp->in_bitfld_block)
+      bp = bp->parent;
 
-  if (a->parent != b->parent)
+  if (ap != bp)
     return false;
 
   ae = a->element;
@@ -533,6 +554,11 @@ sra_elt_eq (const void *x, const void *y
 	return false;
       return fields_compatible_p (ae, be);
 
+    case BIT_FIELD_REF:
+      return
+	tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1))
+	&& tree_int_cst_equal (TREE_OPERAND (ae, 2), TREE_OPERAND (be, 2));
+
     default:
       gcc_unreachable ();
     }
@@ -1177,6 +1203,15 @@ build_element_name_1 (struct sra_elt *el
       sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
       obstack_grow (&sra_obstack, buffer, strlen (buffer));
     }
+  else if (TREE_CODE (t) == BIT_FIELD_REF)
+    {
+      sprintf (buffer, "B" HOST_WIDE_INT_PRINT_DEC,
+	       tree_low_cst (TREE_OPERAND (t, 2), 1));
+      obstack_grow (&sra_obstack, buffer, strlen (buffer));
+      sprintf (buffer, "F" HOST_WIDE_INT_PRINT_DEC,
+	       tree_low_cst (TREE_OPERAND (t, 1), 1));
+      obstack_grow (&sra_obstack, buffer, strlen (buffer));
+    }
   else
     {
       tree name = DECL_NAME (t);
@@ -1337,7 +1372,7 @@ sum_instantiated_sizes (struct sra_elt *
 
 static void instantiate_missing_elements (struct sra_elt *elt);
 
-static void
+static struct sra_elt *
 instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
 {
   struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
@@ -1348,6 +1383,256 @@ instantiate_missing_elements_1 (struct s
     }
   else
     instantiate_missing_elements (sub);
+  return sub;
+}
+
+/* Obtain the canonical type for field F of ELEMENT.  */
+
+static tree
+canon_type_for_field (tree f, tree element)
+{
+  tree field_type = TREE_TYPE (f);
+
+  /* canonicalize_component_ref() unwidens some bit-field types (not
+     marked as DECL_BIT_FIELD in C++), so we must do the same, lest we
+     may introduce type mismatches.  */
+  if (INTEGRAL_TYPE_P (field_type)
+      && DECL_MODE (f) != TYPE_MODE (field_type))
+    field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
+						   field_type,
+						   element,
+						   f, NULL_TREE),
+					   NULL_TREE));
+
+  return field_type;
+}
+
+/* Look for adjacent fields of ELT starting at F that we'd like to
+   scalarize as a single variable.  Return the last field of the
+   group.  */
+
+static tree
+try_instantiate_multiple_fields (struct sra_elt *elt, tree f)
+{
+  unsigned HOST_WIDE_INT align, oalign, word, bit, size, alchk;
+  enum machine_mode mode;
+  tree first = f, prev;
+  tree type, var;
+  struct sra_elt *block;
+
+  if (!is_sra_scalar_type (TREE_TYPE (f))
+      || !host_integerp (DECL_FIELD_OFFSET (f), 1)
+      || !host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
+      || !host_integerp (DECL_SIZE (f), 1)
+      || lookup_element (elt, f, NULL, NO_INSERT))
+    return f;
+
+  align = DECL_ALIGN (elt->element);
+  oalign = DECL_OFFSET_ALIGN (f);
+  word = tree_low_cst (DECL_FIELD_OFFSET (f), 1);
+  bit = tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
+  size = tree_low_cst (DECL_SIZE (f), 1);
+
+  if (align > oalign)
+    align = oalign;
+
+  alchk = align - 1;
+  alchk = ~alchk;
+
+  if ((bit & alchk) != ((bit + size - 1) & alchk))
+    return f;
+
+  /* Find adjacent fields in the same alignment word.  */
+
+  for (prev = f, f = TREE_CHAIN (f);
+       f && TREE_CODE (f) == FIELD_DECL
+	 && is_sra_scalar_type (TREE_TYPE (f))
+	 && host_integerp (DECL_FIELD_OFFSET (f), 1)
+	 && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
+	 && host_integerp (DECL_SIZE (f), 1)
+	 && (HOST_WIDE_INT)word == tree_low_cst (DECL_FIELD_OFFSET (f), 1)
+	 && !lookup_element (elt, f, NULL, NO_INSERT);
+       prev = f, f = TREE_CHAIN (f))
+    {
+      unsigned HOST_WIDE_INT nbit, nsize;
+
+      nbit = tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
+      nsize = tree_low_cst (DECL_SIZE (f), 1);
+
+      if (bit + size == nbit)
+	{
+	  if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
+	    break;
+	  size += nsize;
+	}
+      else if (nbit + nsize == bit)
+	{
+	  if ((nbit & alchk) != ((bit + size - 1) & alchk))
+	    break;
+	  bit = nbit;
+	}
+      else
+	break;
+    }
+
+  f = prev;
+
+  if (f == first)
+    return f;
+
+  gcc_assert ((bit & alchk) == ((bit + size - 1) & alchk));
+
+  /* Try to widen the bit range so as to cover padding bits as well.  */
+
+  if ((bit & ~alchk) || size != align)
+    {
+      unsigned HOST_WIDE_INT mbit = bit & alchk;
+      unsigned HOST_WIDE_INT msize = align;
+
+      for (f = TYPE_FIELDS (elt->type);
+	   f; f = TREE_CHAIN (f))
+	{
+	  unsigned HOST_WIDE_INT fword, fbit, fsize;
+
+	  /* Skip the fields from first to prev.  */
+	  if (f == first)
+	    {
+	      f = prev;
+	      continue;
+	    }
+
+	  if (!(TREE_CODE (f) == FIELD_DECL
+		&& host_integerp (DECL_FIELD_OFFSET (f), 1)
+		&& host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)))
+	    continue;
+
+	  fword = tree_low_cst (DECL_FIELD_OFFSET (f), 1);
+	  /* If we're past the selected word, we're fine.  */
+	  if (word < fword)
+	    continue;
+
+	  fbit = tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
+
+	  if (host_integerp (DECL_SIZE (f), 1))
+	    fsize = tree_low_cst (DECL_SIZE (f), 1);
+	  else
+	    /* Assume a variable-sized field takes up all space till
+	       the end of the word.  ??? Endianness issues?  */
+	    fsize = align - fbit;
+
+	  if (fword < word)
+	    {
+	      /* A large field might start at a previous word and
+		 extend into the selected word.  Exclude those
+		 bits.  ??? Endianness issues? */
+	      HOST_WIDE_INT diff = fbit + fsize
+		- (HOST_WIDE_INT)((word - fword) * BITS_PER_UNIT + mbit);
+
+	      if (diff <= 0)
+		continue;
+
+	      mbit += diff;
+	      msize -= diff;
+	    }
+	  else
+	    {
+	      gcc_assert (fword == word);
+
+	      /* Non-overlapping, great.  */
+	      if (fbit + fsize <= mbit
+		  || mbit + msize <= fbit)
+		continue;
+
+	      if (fbit <= mbit)
+		{
+		  unsigned HOST_WIDE_INT diff = fbit + fsize - mbit;
+		  mbit += diff;
+		  msize -= diff;
+		}
+	      else if (fbit > mbit)
+		msize -= (mbit + msize - fbit);
+	      else
+		gcc_unreachable ();
+	    }
+	}
+
+      bit = mbit;
+      size = msize;
+    }
+
+  /* Now we know the bit range we're interested in.  Find the smallest
+     machine mode we can use to access it.  */
+
+  for (mode = smallest_mode_for_size (size, MODE_INT);
+       ;
+       mode = GET_MODE_WIDER_MODE (mode))
+    {
+      gcc_assert (mode != VOIDmode);
+
+      alchk = GET_MODE_PRECISION (mode) - 1;
+      alchk = ~alchk;
+
+      if ((bit & alchk) == ((bit + size - 1) & alchk))
+	break;
+    }
+
+  gcc_assert (~alchk < align);
+
+  /* Create the field group as a single variable.  */
+
+  type = lang_hooks.types.type_for_mode (mode, 1);
+
+  var = build3 (BIT_FIELD_REF, lang_hooks.types.type_for_size (size, 1),
+		NULL_TREE,
+		build_int_cst_wide (bitsizetype, size, 0),
+		build_int_cst_wide (bitsizetype,
+				    word * BITS_PER_UNIT + bit, 0));
+  BIT_FIELD_REF_UNSIGNED (var) = 1;
+
+  block = instantiate_missing_elements_1 (elt, var, type);
+  gcc_assert (block && block->is_scalar);
+
+  var = block->replacement;
+
+  if (((word * BITS_PER_UNIT + bit) & ~alchk)
+      || (HOST_WIDE_INT)size != tree_low_cst (DECL_SIZE (var), 1))
+    {
+      block->replacement = build3 (BIT_FIELD_REF, TREE_TYPE (elt->element), var,
+				   build_int_cst_wide (bitsizetype, size, 0),
+				   build_int_cst_wide (bitsizetype,
+						       (word * BITS_PER_UNIT
+							+ bit)
+						       & ~alchk, 0));
+      BIT_FIELD_REF_UNSIGNED (block->replacement) = 1;
+      TREE_NO_WARNING (block->replacement) = 1;
+    }
+
+  block->in_bitfld_block = 2;
+
+  /* Add the member fields to the group, such that they access
+     portions of the group variable.  */
+
+  for (f = first; f != TREE_CHAIN (prev); f = TREE_CHAIN (f))
+    {
+      tree field_type = canon_type_for_field (f, elt->element);
+      struct sra_elt *fld = lookup_element (block, f, field_type, INSERT);
+
+      gcc_assert (fld && fld->is_scalar && !fld->replacement);
+
+      fld->replacement = build3 (BIT_FIELD_REF, field_type, var,
+				 DECL_SIZE (f),
+				 build_int_cst_wide
+				 (bitsizetype,
+				  (word * BITS_PER_UNIT
+				   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f),
+						   1))
+				  & ~alchk, 0));
+      BIT_FIELD_REF_UNSIGNED (fld->replacement) = TYPE_UNSIGNED (field_type);
+      TREE_NO_WARNING (fld->replacement) = 1;
+      fld->in_bitfld_block = 1;
+    }
+
+  return prev;
 }
 
 static void
@@ -1363,21 +1648,17 @@ instantiate_missing_elements (struct sra
 	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
 	  if (TREE_CODE (f) == FIELD_DECL)
 	    {
-	      tree field_type = TREE_TYPE (f);
+	      tree last = try_instantiate_multiple_fields (elt, f);
 
-	      /* canonicalize_component_ref() unwidens some bit-field
-		 types (not marked as DECL_BIT_FIELD in C++), so we
-		 must do the same, lest we may introduce type
-		 mismatches.  */
-	      if (INTEGRAL_TYPE_P (field_type)
-		  && DECL_MODE (f) != TYPE_MODE (field_type))
-		field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
-							       field_type,
-							       elt->element,
-							       f, NULL_TREE),
-						       NULL_TREE));
+	      if (last != f)
+		{
+		  f = last;
+		  continue;
+		}
 
-	      instantiate_missing_elements_1 (elt, f, field_type);
+	      instantiate_missing_elements_1 (elt, f,
+					      canon_type_for_field
+					      (f, elt->element));
 	    }
 	break;
       }
@@ -1689,6 +1970,16 @@ generate_one_element_ref (struct sra_elt
       {
 	tree field = elt->element;
 
+	/* We can't test elt->in_bitfld_block here because, when this is
+	   called from instantiate_element, we haven't set this field
+	   yet.  */
+	if (TREE_CODE (field) == BIT_FIELD_REF)
+	  {
+	    tree ret = copy_node (field);
+	    TREE_OPERAND (ret, 0) = base;
+	    return ret;
+	  }
+
 	/* Watch out for compatible records with differing field lists.  */
 	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
 	  field = find_compatible_field (TREE_TYPE (base), field);
@@ -1741,6 +2032,112 @@ sra_build_assignment (tree dst, tree src
   return build_gimple_modify_stmt (dst, src);
 }
 
+/* BIT_FIELD_REFs must not be shared.  sra_build_elt_assignment()
+   takes care of assignments, but we must create copies for uses.  */
+#define REPLDUP(t) (TREE_CODE (t) != BIT_FIELD_REF ? (t) : copy_node (t))
+
+static tree
+sra_build_elt_assignment (struct sra_elt *elt, tree src)
+{
+  tree dst = elt->replacement;
+  tree var, type, tmp, tmp2, tmp3;
+  tree list, stmt;
+  tree cst, cst2, mask;
+  tree minshift, maxshift;
+
+  if (TREE_CODE (dst) != BIT_FIELD_REF
+      || !elt->in_bitfld_block)
+    return sra_build_assignment (REPLDUP (dst), src);
+
+  var = TREE_OPERAND (dst, 0);
+
+  /* Try to widen the assignment to the entire variable.
+     We need the source to be a BIT_FIELD_REF as well, such that, for
+     BIT_FIELD_REF<d,sz,dp> = BIT_FIELD_REF<s,sz,sp>,
+     if sp >= dp and sp+width(d) >= width(s), we can turn it into
+     d = BIT_FIELD_REF<s,sp+sz,sp-dp>.  */
+  if (elt->in_bitfld_block == 2
+      && TREE_CODE (src) == BIT_FIELD_REF
+      && !tree_int_cst_lt (TREE_OPERAND (src, 2), TREE_OPERAND (dst, 2)))
+    {
+      src = build3 (BIT_FIELD_REF, TREE_TYPE (var),
+		    TREE_OPERAND (src, 0),
+		    int_const_binop (PLUS_EXPR, TREE_OPERAND (src, 1),
+				     TREE_OPERAND (src, 2), 1),
+		    int_const_binop (MINUS_EXPR, TREE_OPERAND (src, 2),
+				     TREE_OPERAND (dst, 2), 1));
+      BIT_FIELD_REF_UNSIGNED (src) = 1;
+      return sra_build_assignment (var, src);
+    }
+
+  if (!is_gimple_reg (var))
+    return sra_build_assignment (REPLDUP (dst), src);
+
+  list = alloc_stmt_list ();
+
+  cst = TREE_OPERAND (dst, 2);
+  if (WORDS_BIG_ENDIAN)
+    {
+      cst = int_const_binop (MINUS_EXPR, DECL_SIZE (var), cst, 1);
+      maxshift = cst;
+    }
+  else
+    minshift = cst;
+
+  cst2 = int_const_binop (PLUS_EXPR, TREE_OPERAND (dst, 1),
+			  TREE_OPERAND (dst, 2), 1);
+  if (WORDS_BIG_ENDIAN)
+    {
+      cst2 = int_const_binop (MINUS_EXPR, DECL_SIZE (var), cst2, 1);
+      minshift = cst2;
+    }
+  else
+    maxshift = cst2;
+
+  type = TREE_TYPE (var);
+
+  mask = build_int_cst_wide (type, 1, 0);
+  cst = int_const_binop (LSHIFT_EXPR, mask, maxshift, 1);
+  cst2 = int_const_binop (LSHIFT_EXPR, mask, minshift, 1);
+  mask = int_const_binop (MINUS_EXPR, cst, cst2, 1);
+  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
+
+  if (WORDS_BIG_ENDIAN)
+    ;
+  else
+    cst2 = TREE_OPERAND (dst, 2);
+
+  tmp = make_rename_temp (type, "SR");
+  stmt = build_gimple_modify_stmt (tmp,
+				   build2 (BIT_AND_EXPR, type,
+					   var, mask));
+  append_to_statement_list (stmt, &list);
+
+  tmp2 = make_rename_temp (TREE_TYPE (src), "SR");
+  stmt = sra_build_assignment (tmp2, src);
+  append_to_statement_list (stmt, &list);
+
+  tmp3 = make_rename_temp (type, "SR");
+  stmt = build_gimple_modify_stmt (tmp3,
+				   build1 (NOP_EXPR, type, tmp2));
+  append_to_statement_list (stmt, &list);
+  tmp2 = tmp3;
+
+  tmp3 = make_rename_temp (type, "SR");
+  stmt = build_gimple_modify_stmt (tmp3,
+				   build2 (LSHIFT_EXPR, type,
+					   tmp2, minshift));
+  append_to_statement_list (stmt, &list);
+  tmp2 = tmp3;
+
+  stmt = build_gimple_modify_stmt (var,
+				   build2 (BIT_IOR_EXPR, type,
+					   tmp, tmp2));
+  append_to_statement_list (stmt, &list);
+
+  return list;
+}
+
 /* Generate a set of assignment statements in *LIST_P to copy all
    instantiated elements under ELT to or from the equivalent structure
    rooted at EXPR.  COPY_OUT controls the direction of the copy, with
@@ -1771,9 +2168,9 @@ generate_copy_inout (struct sra_elt *elt
   else if (elt->replacement)
     {
       if (copy_out)
-	t = sra_build_assignment (elt->replacement, expr);
+	t = sra_build_elt_assignment (elt, expr);
       else
-	t = sra_build_assignment (expr, elt->replacement);
+	t = sra_build_assignment (expr, REPLDUP (elt->replacement));
       append_to_statement_list (t, list_p);
     }
   else
@@ -1798,6 +2195,19 @@ generate_element_copy (struct sra_elt *d
   FOR_EACH_ACTUAL_CHILD (dc, dst)
     {
       sc = lookup_element (src, dc->element, NULL, NO_INSERT);
+      if (!sc && dc->in_bitfld_block == 2)
+	{
+	  struct sra_elt *dcs;
+
+	  FOR_EACH_ACTUAL_CHILD (dcs, dc)
+	    {
+	      sc = lookup_element (src, dcs->element, NULL, NO_INSERT);
+	      gcc_assert (sc);
+	      generate_element_copy (dcs, sc, list_p);
+	    }
+
+	  continue;
+	}
       gcc_assert (sc);
       generate_element_copy (dc, sc, list_p);
     }
@@ -1808,7 +2218,7 @@ generate_element_copy (struct sra_elt *d
 
       gcc_assert (src->replacement);
 
-      t = sra_build_assignment (dst->replacement, src->replacement);
+      t = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
       append_to_statement_list (t, list_p);
     }
 }
@@ -1829,8 +2239,9 @@ generate_element_zero (struct sra_elt *e
       return;
     }
 
-  FOR_EACH_ACTUAL_CHILD (c, elt)
-    generate_element_zero (c, list_p);
+  if (!elt->in_bitfld_block)
+    FOR_EACH_ACTUAL_CHILD (c, elt)
+      generate_element_zero (c, list_p);
 
   if (elt->replacement)
     {
@@ -1839,7 +2250,7 @@ generate_element_zero (struct sra_elt *e
       gcc_assert (elt->is_scalar);
       t = fold_convert (elt->type, integer_zero_node);
 
-      t = sra_build_assignment (elt->replacement, t);
+      t = sra_build_elt_assignment (elt, t);
       append_to_statement_list (t, list_p);
     }
 }
@@ -1848,10 +2259,10 @@ generate_element_zero (struct sra_elt *e
    Add the result to *LIST_P.  */
 
 static void
-generate_one_element_init (tree var, tree init, tree *list_p)
+generate_one_element_init (struct sra_elt *elt, tree init, tree *list_p)
 {
   /* The replacement can be almost arbitrarily complex.  Gimplify.  */
-  tree stmt = sra_build_assignment (var, init);
+  tree stmt = sra_build_elt_assignment (elt, init);
   gimplify_and_add (stmt, list_p);
 }
 
@@ -1880,7 +2291,7 @@ generate_element_init_1 (struct sra_elt 
     {
       if (elt->replacement)
 	{
-	  generate_one_element_init (elt->replacement, init, list_p);
+	  generate_one_element_init (elt, init, list_p);
 	  elt->visited = true;
 	}
       return result;
@@ -2049,7 +2460,7 @@ scalarize_use (struct sra_elt *elt, tree
 	 simple as modifying the existing statement in place.  */
       if (is_output)
 	mark_all_v_defs (stmt);
-      *expr_p = elt->replacement;
+      *expr_p = REPLDUP (elt->replacement);
       update_stmt (stmt);
     }
   else
@@ -2101,7 +2512,7 @@ scalarize_copy (struct sra_elt *lhs_elt,
       gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
 
       GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
-      GIMPLE_STMT_OPERAND (stmt, 1) = rhs_elt->replacement;
+      GIMPLE_STMT_OPERAND (stmt, 1) = REPLDUP (rhs_elt->replacement);
       update_stmt (stmt);
     }
   else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
@@ -2255,8 +2666,8 @@ scalarize_ldst (struct sra_elt *elt, tre
 
       mark_all_v_defs (stmt);
       generate_copy_inout (elt, is_output, other, &list);
-      mark_all_v_defs (list);
       gcc_assert (list);
+      mark_all_v_defs (list);
 
       /* Preserve EH semantics.  */
       if (stmt_ends_bb_p (stmt))
@@ -2352,6 +2763,10 @@ dump_sra_elt_name (FILE *f, struct sra_e
 	    fputc ('.', f);
 	  print_generic_expr (f, elt->element, dump_flags);
 	}
+      else if (TREE_CODE (elt->element) == BIT_FIELD_REF)
+	fprintf (f, "$B" HOST_WIDE_INT_PRINT_DEC "F" HOST_WIDE_INT_PRINT_DEC,
+		 tree_low_cst (TREE_OPERAND (elt->element, 2), 1),
+		 tree_low_cst (TREE_OPERAND (elt->element, 1), 1));
       else if (TREE_CODE (elt->element) == RANGE_EXPR)
 	fprintf (f, "["HOST_WIDE_INT_PRINT_DEC".."HOST_WIDE_INT_PRINT_DEC"]",
 		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
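
For readers skimming the patch: the statement list that
sra_build_elt_assignment builds for a store into one field of a
grouped block amounts to the masked read-modify-write below, sketched
as plain C for the little-endian case (all names are illustrative, not
identifiers from the patch; the big-endian path just computes the
shifts from the other end of the word):

    /* Minimal sketch, assuming an unsigned 32-bit block variable, a
       field that doesn't span the whole word, and a src value that
       already fits in the field, as an unsigned field value does.
       minshift/maxshift mirror the patch: the field occupies bits
       [minshift, maxshift) of the block.  */
    unsigned int
    store_into_block (unsigned int var, unsigned int src,
                      unsigned int minshift, unsigned int maxshift)
    {
      /* mask is zero over the field's bits, one everywhere else.  */
      unsigned int mask = ~((1u << maxshift) - (1u << minshift));
      unsigned int tmp  = var & mask;       /* clear the old field */
      unsigned int tmp2 = src << minshift;  /* position the new bits */
      return tmp | tmp2;                    /* merged result for var */
    }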
-- 
Alexandre Oliva         http://www.lsd.ic.unicamp.br/~oliva/
FSF Latin America Board Member         http://www.fsfla.org/
Red Hat Compiler Engineer   aoliva@{redhat.com, gcc.gnu.org}
Free Software Evangelist  oliva@{lsd.ic.unicamp.br, gnu.org}
