Re: [tuples][patch] Tuplified pass_sra


On Fri, Apr 11, 2008 at 1:43 PM, Diego Novillo <dnovillo@google.com> wrote:
>
> On Tue, Apr 8, 2008 at 19:57, Oleg Ryjkov <olegr@google.com> wrote:
>
>  >  This patch tuplifies pass_sra. Tested on i686-linux. A couple of
>  >  test cases in the GCC testsuite, which tested for certain
>  >  optimizations to occur, had to be fixed. I also noticed 2 new
>  >  failures in the Fortran testsuite; however, after some analysis I
>  >  concluded that pass_sra simply exposed bugs that were already
>  >  there.
>
>  Yes, I've seen these failures in other Fortran tests.  This is
>  the ICE in output_die, right?  The inliner is also tickling it.
>  I'll have a look.
Yes, the failures were in output_die.

>
>  > Index: ChangeLog.tuples
>  >  ===================================================================
>  >  --- ChangeLog.tuples    (revision 134115)
>  >  +++ ChangeLog.tuples    (working copy)
>  >  @@ -1,3 +1,35 @@
>  >  +2008-04-08  Oleg Ryjkov  <olegr@google.com>
>  >  +
>  >  +       * tree-sra.c (sra_build_assignment): Tuplified.
>  >  +       (mark_all_v_defs): Removed.
>  >  +       (mark_all_v_defs_seq, mark_all_v_defs_stmt): New functions.
>  >  +       (sra_walk_expr): Tuplified.
>  >  +       (sra_walk_tree_list): Removed.
>  >  +       (sra_walk_call_expr, sra_walk_gimple_asm,
>  >  +       sra_walk_gimple_modify_stmt): Tuplified and renamed.
>  >  +       (sra_walk_gimple_call, sra_walk_gimple_asm,
>  >  +       sra_walk_gimple_assign): New names for tuplified functions.
>  >  +       (sra_walk_function, find_candidates_for_sra, scan_use, scan_copy,
>  >  +       scan_ldst, instantiate_element, decide_instantiations,
>  >  +       mark_all_v_defs_1, sra_build_assignment, sra_build_bf_assignment,
>  >  +       sra_build_elt_assignment, generate_copy_inout,
>  >  +       generate_element_copy, generate_element_zero,
>  >  +       generate_one_element_init, generate_element_init_1): Tuplified.
>  >  +       (insert_edge_copies): Removed.
>  >  +       (insert_edge_copies_stmt, insert_edge_copies_seq): New functions.
>  >  +       (sra_insert_before, sra_insert_after, sra_replace,
>  >  +       sra_explode_bitfield_assignment, sra_sync_for_bitfield_assignment,
>  >  +       scalarize_use, scalarize_copy, scalarize_init, mark_no_trap,
>  >  +       scalarize_ldst, scalarize_parms, scalarize_function): Tuplified.
>  >  +       (tree_sra, tree_sra_early): Enabled.
>  >  +       (sra_init_cache): Removed extra space.
>  >  +       * tree-flow.h (insert_edge_copies_stmt, insert_edge_copies_seq):
>  >  +       Synced with the definitions.
>  >  +       * gimple.h (gimple_asm_input_op_ptr, gimple_asm_output_op_ptr,
>  >  +       gimple_return_retval_ptr): New functions.
>  >  +       * passes.c (init_optimization_passes): Enabled pass_sra,
>  >  +       pass_early_sra.
>  >  +
>
>  OK with:
>
>  >  @@ -2825,70 +2801,71 @@ generate_element_init (struct sra_elt *e
>  >     abnormal edges will be ignored.  */
>  >
>  >   void
>  >  -insert_edge_copies (tree stmt, basic_block bb)
>  >  +insert_edge_copies_stmt (gimple stmt, basic_block bb)
>  >   {
>  >    edge e;
>  >    edge_iterator ei;
>  >  -  bool first_copy;
>  >
>  >  -  first_copy = true;
>  >    FOR_EACH_EDGE (e, ei, bb->succs)
>  >  -    {
>  >  -      /* We don't need to insert copies on abnormal edges.  The
>  >  -        value of the scalar replacement is not guaranteed to
>  >  -        be valid through an abnormal edge.  */
>  >  -      if (!(e->flags & EDGE_ABNORMAL))
>  >  -       {
>  >  -         if (first_copy)
>  >  -           {
>  >  -             bsi_insert_on_edge (e, stmt);
>  >  -             first_copy = false;
>  >  -           }
>  >  -         else
>  >  -           bsi_insert_on_edge (e, unsave_expr_now (stmt));
>  >  -       }
>  >  -    }
>  >  +    /* We don't need to insert copies on abnormal edges.  The
>  >  +       value of the scalar replacement is not guaranteed to
>  >  +       be valid through an abnormal edge.  */
>  >  +    if (!(e->flags & EDGE_ABNORMAL))
>  >  +      gsi_insert_on_edge (e, gimple_deep_copy (stmt));
>
>  s/gimple_deep_copy/gimple_copy/  (this was changed after your
>  patch).
>
>  Why are you undoing the logic for not creating a copy the first
>  time?  That should be kept the same.
Actually, I removed the first_copy flag when I converted this
subroutine to insert a gimple_seq, which is destroyed when
gsi_insert_seq_on_edge () is called on it, so every edge needs its own
copy. I then carried that code over to the single-statement variant
(I'm not sure why). It turns out that insert_edge_copies_stmt is never
used anywhere, so I removed it in the new patch.
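
For reference, the helper that survives in the new patch boils down to
the shape below (the full hunk is in the diff further down); because
the inserted sequence is consumed, every non-abnormal edge gets its own
gimple_seq_copy:

  void
  insert_edge_copies_seq (gimple_seq seq, basic_block bb)
  {
    edge e;
    edge_iterator ei;

    /* No copies are needed on abnormal edges: the value of the scalar
       replacement is not guaranteed to be valid through an abnormal
       edge.  */
    FOR_EACH_EDGE (e, ei, bb->succs)
      if (!(e->flags & EDGE_ABNORMAL))
        gsi_insert_seq_on_edge (e, gimple_seq_copy (seq));
  }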

>
>
>  >  +          /* RHS must be a single operand. */
>  >  +          gcc_assert (!gimple_assign_rhs2 (stmt));
>
>  s/!gimple_assign_rhs2/gimple_assign_single_p/
Fixed.
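
In the new patch the check uses the predicate instead of peeking at the
second operand slot (see the scalarize_use hunk below):

  /* RHS must be a single operand.  */
  gcc_assert (gimple_assign_single_p (stmt));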

>
>  >  @@ -1945,6 +1945,16 @@ gimple_asm_input_op (const_gimple gs, si
>  >    return gimple_op (gs, index);
>  >   }
>  >
>  >  +/* Return input operand INDEX of GIMPLE_ASM GS.  */
>
>  s/input operand/a pointer to input operand/
Fixed.
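
(That is, the comment now reads:

  /* Return a pointer to input operand INDEX of GIMPLE_ASM GS.  */

and likewise for the output operand accessor below.)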

>
>  >  @@ -1968,6 +1978,16 @@ gimple_asm_output_op (const_gimple gs, s
>  >    return gimple_op (gs, index + gs->gimple_asm.ni);
>  >   }
>  >
>  >  +/* Return output operand INDEX of GIMPLE_ASM GS.  */
>
>  s/output operand/a pointer to output operand/
Fixed.

>
>  >  @@ -3030,6 +3050,16 @@ gimple_omp_continue_set_control_use (gim
>  >
>  >   /* Return the return value for GIMPLE_RETURN GS.  */
>  >
>  >  +static inline tree *
>  >  +gimple_return_retval_ptr (const_gimple gs)
>
>  Needs comment.
I added a comment.

I've tested the new patch and did not see any new failures, so I will
commit it.

Thanks,
Oleg
Index: ChangeLog.tuples
===================================================================
--- ChangeLog.tuples	(revision 134214)
+++ ChangeLog.tuples	(working copy)
@@ -1,3 +1,34 @@
+2008-04-11  Oleg Ryjkov  <olegr@google.com>
+
+	* tree-sra.c (sra_build_assignment): Tuplified.
+	(mark_all_v_defs): Removed.
+	(mark_all_v_defs_seq, mark_all_v_defs_stmt): New functions.
+	(sra_walk_expr): Tuplified.
+	(sra_walk_tree_list): Removed.
+	(sra_walk_call_expr, sra_walk_gimple_asm,
+	sra_walk_gimple_modify_stmt): Tuplified and renamed.
+	(sra_walk_gimple_call, sra_walk_gimple_asm,
+	sra_walk_gimple_assign): New names for tuplified functions.
+	(sra_walk_function, find_candidates_for_sra, scan_use, scan_copy,
+	scan_ldst, instantiate_element, decide_instantiations,
+	mark_all_v_defs_1, sra_build_assignment, sra_build_bf_assignment,
+	sra_build_elt_assignment, generate_copy_inout,
+	generate_element_copy, generate_element_zero,
+	generate_one_element_init, generate_element_init_1): Tuplified.
+	(insert_edge_copies): Removed.
+	(insert_edge_copies_seq): New function.
+	(sra_insert_before, sra_insert_after, sra_replace,
+	sra_explode_bitfield_assignment, sra_sync_for_bitfield_assignment,
+	scalarize_use, scalarize_copy, scalarize_init, mark_no_trap,
+	scalarize_ldst, scalarize_parms, scalarize_function): Tuplified.
+	(tree_sra, tree_sra_early): Enabled.
+	(sra_init_cache): Removed extra space.
+	* tree-flow.h (insert_edge_copies_seq): New declaration.
+	* gimple.h (gimple_asm_input_op_ptr, gimple_asm_output_op_ptr,
+	gimple_return_retval_ptr): New functions.
+	* passes.c (init_optimization_passes): Enabled pass_sra,
+	pass_early_sra.
+
 2008-04-11  Doug Kwan  <dougkwan@google.com>
 
 	* ipa-pure-const.c (get_asm_expr_operands): Tuplify.
Index: tree-sra.c
===================================================================
--- tree-sra.c	(revision 134214)
+++ tree-sra.c	(working copy)
@@ -50,8 +50,6 @@ along with GCC; see the file COPYING3.  
 #include "params.h"
 
 
-/* FIXME tuples.  */
-#if 0
 /* This object of this pass is to replace a non-addressable aggregate with a
    set of independent variables.  Most of the time, all of these variables
    will be scalars.  But a secondary objective is to break up larger
@@ -210,8 +208,9 @@ extern void debug_sra_elt_name (struct s
 
 /* Forward declarations.  */
 static tree generate_element_ref (struct sra_elt *);
-static tree sra_build_assignment (tree dst, tree src);
-static void mark_all_v_defs (tree list);
+static gimple_seq sra_build_assignment (tree dst, tree src);
+static void mark_all_v_defs_seq (gimple_seq);
+static void mark_all_v_defs_stmt (gimple);
 
 
 /* Return true if DECL is an SRA candidate.  */
@@ -702,7 +701,7 @@ maybe_lookup_element_for_expr (tree expr
    references, and categorize them.  */
 
 /* A set of callbacks for phases 2 and 4.  They'll be invoked for the
-   various kinds of references seen.  In all cases, *BSI is an iterator
+   various kinds of references seen.  In all cases, *GSI is an iterator
    pointing to the statement being processed.  */
 struct sra_walk_fns
 {
@@ -712,21 +711,21 @@ struct sra_walk_fns
      is a left-hand-side reference.  USE_ALL is true if we saw something we
      couldn't quite identify and had to force the use of the entire object.  */
   void (*use) (struct sra_elt *elt, tree *expr_p,
-	       block_stmt_iterator *bsi, bool is_output, bool use_all);
+	       gimple_stmt_iterator *gsi, bool is_output, bool use_all);
 
   /* Invoked when we have a copy between two scalarizable references.  */
   void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
-		block_stmt_iterator *bsi);
+		gimple_stmt_iterator *gsi);
 
   /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
      in which case it should be treated as an empty CONSTRUCTOR.  */
-  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);
+  void (*init) (struct sra_elt *elt, tree value, gimple_stmt_iterator *gsi);
 
   /* Invoked when we have a copy between one scalarizable reference ELT
      and one non-scalarizable reference OTHER without side-effects. 
      IS_OUTPUT is true if ELT is on the left-hand side.  */
   void (*ldst) (struct sra_elt *elt, tree other,
-		block_stmt_iterator *bsi, bool is_output);
+		gimple_stmt_iterator *gsi, bool is_output);
 
   /* True during phase 2, false during phase 4.  */
   /* ??? This is a hack.  */
@@ -760,7 +759,7 @@ sra_find_candidate_decl (tree *tp, int *
    If we find one, invoke FNS->USE.  */
 
 static void
-sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
+sra_walk_expr (tree *expr_p, gimple_stmt_iterator *gsi, bool is_output,
 	       const struct sra_walk_fns *fns)
 {
   tree expr = *expr_p;
@@ -787,7 +786,7 @@ sra_walk_expr (tree *expr_p, block_stmt_
 	    if (disable_scalarization)
 	      elt->cannot_scalarize = true;
 	    else
-	      fns->use (elt, expr_p, bsi, is_output, use_all_p);
+	      fns->use (elt, expr_p, gsi, is_output, use_all_p);
 	  }
 	return;
 
@@ -889,60 +888,64 @@ sra_walk_expr (tree *expr_p, block_stmt_
       }
 }
 
-/* Walk a TREE_LIST of values looking for scalarizable aggregates.
-   If we find one, invoke FNS->USE.  */
-
-static void
-sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
-		    const struct sra_walk_fns *fns)
-{
-  tree op;
-  for (op = list; op ; op = TREE_CHAIN (op))
-    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
-}
-
-/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
+/* Walk the arguments of a GIMPLE_CALL looking for scalarizable aggregates.
    If we find one, invoke FNS->USE.  */
 
 static void
-sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
+sra_walk_gimple_call (gimple stmt, gimple_stmt_iterator *gsi,
 		    const struct sra_walk_fns *fns)
 {
   int i;
-  int nargs = call_expr_nargs (expr);
+  int nargs = gimple_call_num_args (stmt);
+
   for (i = 0; i < nargs; i++)
-    sra_walk_expr (&CALL_EXPR_ARG (expr, i), bsi, false, fns);
+    sra_walk_expr (gimple_call_arg_ptr (stmt, i), gsi, false, fns);
+
+  if (gimple_call_lhs (stmt))
+    sra_walk_expr (gimple_call_lhs_ptr (stmt), gsi, true, fns);
 }
 
-/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
+/* Walk the inputs and outputs of a GIMPLE_ASM looking for scalarizable
    aggregates.  If we find one, invoke FNS->USE.  */
 
 static void
-sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
+sra_walk_gimple_asm (gimple stmt, gimple_stmt_iterator *gsi,
 		   const struct sra_walk_fns *fns)
 {
-  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
-  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
+  size_t i;
+  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
+    sra_walk_expr (&TREE_VALUE (gimple_asm_input_op (stmt, i)), gsi, false, fns);
+  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
+    sra_walk_expr (&TREE_VALUE (gimple_asm_output_op (stmt, i)), gsi, true, fns);
 }
 
-/* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately.  */
+/* Walk a GIMPLE_ASSIGN and categorize the assignment appropriately.  */
 
 static void
-sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
-			     const struct sra_walk_fns *fns)
+sra_walk_gimple_assign (gimple stmt, gimple_stmt_iterator *gsi,
+			const struct sra_walk_fns *fns)
 {
-  struct sra_elt *lhs_elt, *rhs_elt;
-  tree lhs, rhs;
+  struct sra_elt *lhs_elt = NULL, *rhs_elt = NULL;
+  tree lhs, rhs, rhs2;
+
+  lhs = gimple_assign_lhs (stmt);
+  rhs = gimple_assign_rhs1 (stmt);
+  rhs2 = gimple_assign_rhs2 (stmt);
+
+  /* If there is more than 1 element on the RHS, only walk the lhs.  */
+  if (rhs2)
+    {
+      sra_walk_expr (gimple_assign_lhs_ptr (stmt), gsi, true, fns);
+      return;
+    }
 
-  lhs = GIMPLE_STMT_OPERAND (expr, 0);
-  rhs = GIMPLE_STMT_OPERAND (expr, 1);
   lhs_elt = maybe_lookup_element_for_expr (lhs);
   rhs_elt = maybe_lookup_element_for_expr (rhs);
 
   /* If both sides are scalarizable, this is a COPY operation.  */
   if (lhs_elt && rhs_elt)
     {
-      fns->copy (lhs_elt, rhs_elt, bsi);
+      fns->copy (lhs_elt, rhs_elt, gsi);
       return;
     }
 
@@ -950,9 +953,9 @@ sra_walk_gimple_modify_stmt (tree expr, 
   if (rhs_elt)
     {
       if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
-	fns->ldst (rhs_elt, lhs, bsi, false);
+	fns->ldst (rhs_elt, lhs, gsi, false);
       else
-	fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
+	fns->use (rhs_elt, gimple_assign_rhs1_ptr (stmt), gsi, false, false);
     }
 
   /* If it isn't scalarizable, there may be scalarizable variables within, so
@@ -961,24 +964,18 @@ sra_walk_gimple_modify_stmt (tree expr, 
      that the statements get inserted in the proper place, before any
      copy-out operations.  */
   else
-    {
-      tree call = get_call_expr_in (rhs);
-      if (call)
-	sra_walk_call_expr (call, bsi, fns);
-      else
-	sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 1), bsi, false, fns);
-    }
+    sra_walk_expr (gimple_assign_rhs1_ptr (stmt), gsi, false, fns);
 
   /* Likewise, handle the LHS being scalarizable.  We have cases similar
      to those above, but also want to handle RHS being constant.  */
-  if (lhs_elt)
+  if (lhs_elt && !rhs2)
     {
       /* If this is an assignment from a constant, or constructor, then
 	 we have access to all of the elements individually.  Invoke INIT.  */
       if (TREE_CODE (rhs) == COMPLEX_EXPR
 	  || TREE_CODE (rhs) == COMPLEX_CST
 	  || TREE_CODE (rhs) == CONSTRUCTOR)
-	fns->init (lhs_elt, rhs, bsi);
+	fns->init (lhs_elt, rhs, gsi);
 
       /* If this is an assignment from read-only memory, treat this as if
 	 we'd been passed the constructor directly.  Invoke INIT.  */
@@ -986,7 +983,7 @@ sra_walk_gimple_modify_stmt (tree expr, 
 	       && TREE_STATIC (rhs)
 	       && TREE_READONLY (rhs)
 	       && targetm.binds_local_p (rhs))
-	fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);
+	fns->init (lhs_elt, DECL_INITIAL (rhs), gsi);
 
       /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
 	 The lvalue requirement prevents us from trying to directly scalarize
@@ -994,19 +991,19 @@ sra_walk_gimple_modify_stmt (tree expr, 
 	 the function multiple times, and other evil things.  */
       else if (!lhs_elt->is_scalar
 	       && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
-	fns->ldst (lhs_elt, rhs, bsi, true);
+	fns->ldst (lhs_elt, rhs, gsi, true);
 
       /* Otherwise we're being used in some context that requires the
 	 aggregate to be seen as a whole.  Invoke USE.  */
       else
-	fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true, false);
+	fns->use (lhs_elt, gimple_assign_lhs_ptr (stmt), gsi, true, false);
     }
 
   /* Similarly to above, LHS_ELT being null only means that the LHS as a
      whole is not a scalarizable reference.  There may be occurrences of
      scalarizable variables within, which implies a USE.  */
   else
-    sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 0), bsi, true, fns);
+    sra_walk_expr (gimple_assign_lhs_ptr (stmt), gsi, true, fns);
 }
 
 /* Entry point to the walk functions.  Search the entire function,
@@ -1017,22 +1014,20 @@ static void
 sra_walk_function (const struct sra_walk_fns *fns)
 {
   basic_block bb;
-  block_stmt_iterator si, ni;
+  gimple_stmt_iterator si, ni;
 
   /* ??? Phase 4 could derive some benefit to walking the function in
      dominator tree order.  */
 
   FOR_EACH_BB (bb)
-    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
+    for (si = gsi_start_bb (bb); !gsi_end_p (si); si = ni)
       {
-	tree stmt, t;
-	stmt_ann_t ann;
+	gimple stmt;
 
-	stmt = bsi_stmt (si);
-	ann = stmt_ann (stmt);
+	stmt = gsi_stmt (si);
 
 	ni = si;
-	bsi_next (&ni);
+	gsi_next (&ni);
 
 	/* If the statement has no virtual operands, then it doesn't
 	   make any structure references that we care about.  */
@@ -1040,35 +1035,28 @@ sra_walk_function (const struct sra_walk
 	    && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
 	      continue;
 
-	switch (TREE_CODE (stmt))
+	switch (gimple_code (stmt))
 	  {
-	  case RETURN_EXPR:
+	  case GIMPLE_RETURN:
 	    /* If we have "return <retval>" then the return value is
 	       already exposed for our pleasure.  Walk it as a USE to
 	       force all the components back in place for the return.
-
-	       If we have an embedded assignment, then <retval> is of
-	       a type that gets returned in registers in this ABI, and
-	       we do not wish to extend their lifetimes.  Treat this
-	       as a USE of the variable on the RHS of this assignment.  */
-
-	    t = TREE_OPERAND (stmt, 0);
-	    if (t == NULL_TREE)
+	       */
+	    if (gimple_return_retval (stmt)  == NULL_TREE)
 	      ;
-	    else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
-	      sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
 	    else
-	      sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
+	      sra_walk_expr (gimple_return_retval_ptr (stmt), &si, false,
+                             fns);
 	    break;
 
-	  case GIMPLE_MODIFY_STMT:
-	    sra_walk_gimple_modify_stmt (stmt, &si, fns);
+	  case GIMPLE_ASSIGN:
+	    sra_walk_gimple_assign (stmt, &si, fns);
 	    break;
-	  case CALL_EXPR:
-	    sra_walk_call_expr (stmt, &si, fns);
+	  case GIMPLE_CALL:
+	    sra_walk_gimple_call (stmt, &si, fns);
 	    break;
-	  case ASM_EXPR:
-	    sra_walk_asm_expr (stmt, &si, fns);
+	  case GIMPLE_ASM:
+	    sra_walk_gimple_asm (stmt, &si, fns);
 	    break;
 
 	  default:
@@ -1109,7 +1097,7 @@ find_candidates_for_sra (void)
 
 static void
 scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
-	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
+	  gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
 	  bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
 {
   elt->n_uses += 1;
@@ -1117,7 +1105,7 @@ scan_use (struct sra_elt *elt, tree *exp
 
 static void
 scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
-	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
+	   gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
 {
   lhs_elt->n_copies += 1;
   rhs_elt->n_copies += 1;
@@ -1125,14 +1113,14 @@ scan_copy (struct sra_elt *lhs_elt, stru
 
 static void
 scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
-	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
+	   gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
 {
   lhs_elt->n_copies += 1;
 }
 
 static void
 scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
-	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
+	   gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
 	   bool is_output ATTRIBUTE_UNUSED)
 {
   elt->n_copies += 1;
@@ -1323,10 +1311,12 @@ instantiate_element (struct sra_elt *elt
       || (var != elt->replacement
 	  && TREE_CODE (elt->replacement) == BIT_FIELD_REF))
     {
-      tree init = sra_build_assignment (var, fold_convert (TREE_TYPE (var),
-							   integer_zero_node));
-      insert_edge_copies (init, ENTRY_BLOCK_PTR);
-      mark_all_v_defs (init);
+      gimple_seq init = sra_build_assignment (var,
+                                              fold_convert (TREE_TYPE (var),
+                                                            integer_zero_node)
+                                             );
+      insert_edge_copies_seq (init, ENTRY_BLOCK_PTR);
+      mark_all_v_defs_seq (init);
     }
 
   if (dump_file)
@@ -1986,12 +1976,7 @@ decide_instantiations (void)
     }
   bitmap_clear (&done_head);
   
-  /* FIXME tuples.  */
-#if 0
   mark_set_for_renaming (sra_candidates);
-#else
-  gimple_unreachable ();
-#endif
 
   if (dump_file)
     fputc ('\n', dump_file);
@@ -2005,7 +1990,7 @@ decide_instantiations (void)
    non-scalar.  */
 
 static void
-mark_all_v_defs_1 (tree stmt)
+mark_all_v_defs_stmt (gimple stmt)
 {
   tree sym;
   ssa_op_iter iter;
@@ -2016,12 +2001,7 @@ mark_all_v_defs_1 (tree stmt)
     {
       if (TREE_CODE (sym) == SSA_NAME)
 	sym = SSA_NAME_VAR (sym);
-      /* FIXME tuples.  */
-#if 0
       mark_sym_for_renaming (sym);
-#else
-      gimple_unreachable ();
-#endif
     }
 }
 
@@ -2030,18 +2010,13 @@ mark_all_v_defs_1 (tree stmt)
    LIST for renaming.  */
 
 static void
-mark_all_v_defs (tree list)
+mark_all_v_defs_seq (gimple_seq seq)
 {
-  if (TREE_CODE (list) != STATEMENT_LIST)
-    mark_all_v_defs_1 (list);
-  else
-    {
-      tree_stmt_iterator i;
-      for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
-	mark_all_v_defs_1 (tsi_stmt (i));
-    }
-}
+  gimple_stmt_iterator gsi;
 
+  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
+    mark_all_v_defs_stmt (gsi_stmt (gsi));
+}
 
 /* Mark every replacement under ELT with TREE_NO_WARNING.  */
 
@@ -2135,9 +2110,11 @@ scalar_bitfield_p (tree bf)
 
 /* Create an assignment statement from SRC to DST.  */
 
-static tree
+static gimple_seq
 sra_build_assignment (tree dst, tree src)
 {
+  gimple stmt;
+  gimple_seq seq = NULL;
   /* Turning BIT_FIELD_REFs into bit operations enables other passes
      to do a much better job at optimizing the code.
      From dst = BIT_FIELD_REF <var, sz, off> we produce
@@ -2152,7 +2129,6 @@ sra_build_assignment (tree dst, tree src
     {
       tree var, shift, width;
       tree utype, stype, stmp, utmp, dtmp;
-      tree list, stmt;
       bool unsignedp = (INTEGRAL_TYPE_P (TREE_TYPE (src))
 		        ? TYPE_UNSIGNED (TREE_TYPE (src)) : true);
 
@@ -2185,7 +2161,6 @@ sra_build_assignment (tree dst, tree src
       else if (!TYPE_UNSIGNED (utype))
 	utype = unsigned_type_for (utype);
 
-      list = NULL;
       stmp = make_rename_temp (stype, "SR");
 
       /* Convert the base var of the BIT_FIELD_REF to the scalar type
@@ -2193,22 +2168,19 @@ sra_build_assignment (tree dst, tree src
       if (!useless_type_conversion_p (stype, TREE_TYPE (var)))
 	{
 	  if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
-	    stmt = build_gimple_modify_stmt (stmp,
-					     fold_convert (stype, var));
+	    stmt = gimple_build_assign (stmp, fold_convert (stype, var));
 	  else
-	    stmt = build_gimple_modify_stmt (stmp,
-					     fold_build1 (VIEW_CONVERT_EXPR,
-							  stype, var));
-	  append_to_statement_list (stmt, &list);
+	    stmt = gimple_build_assign (stmp, fold_build1 (VIEW_CONVERT_EXPR,
+							   stype, var));
+	  gimple_seq_add_stmt (&seq, stmt);
 	  var = stmp;
 	}
 
       if (!integer_zerop (shift))
 	{
-	  stmt = build_gimple_modify_stmt (stmp,
-					   fold_build2 (RSHIFT_EXPR, stype,
-							var, shift));
-	  append_to_statement_list (stmt, &list);
+	  stmt = gimple_build_assign (stmp, fold_build2 (RSHIFT_EXPR, stype,
+							 var, shift));
+	  gimple_seq_add_stmt (&seq, stmt);
 	  var = stmp;
 	}
 
@@ -2221,10 +2193,9 @@ sra_build_assignment (tree dst, tree src
 	  tree mask = int_const_binop (LSHIFT_EXPR, one, width, 0);
 	  mask = int_const_binop (MINUS_EXPR, mask, one, 0);
 
-	  stmt = build_gimple_modify_stmt (stmp,
-					   fold_build2 (BIT_AND_EXPR, stype,
-							var, mask));
-	  append_to_statement_list (stmt, &list);
+	  stmt = gimple_build_assign (stmp, fold_build2 (BIT_AND_EXPR, stype,
+							 var, mask));
+	  gimple_seq_add_stmt (&seq, stmt);
 	  var = stmp;
 	}
 
@@ -2234,8 +2205,8 @@ sra_build_assignment (tree dst, tree src
 	{
 	  utmp = make_rename_temp (utype, "SR");
 
-	  stmt = build_gimple_modify_stmt (utmp, fold_convert (utype, var));
-	  append_to_statement_list (stmt, &list);
+	  stmt = gimple_build_assign (utmp, fold_convert (utype, var));
+	  gimple_seq_add_stmt (&seq, stmt);
 
 	  var = utmp;
 	}
@@ -2249,15 +2220,13 @@ sra_build_assignment (tree dst, tree src
 					  size_binop (MINUS_EXPR, width,
 						      bitsize_int (1)), 0);
 
-	  stmt = build_gimple_modify_stmt (utmp,
-					   fold_build2 (BIT_XOR_EXPR, utype,
-							var, signbit));
-	  append_to_statement_list (stmt, &list);
+	  stmt = gimple_build_assign (utmp, fold_build2 (BIT_XOR_EXPR, utype,
+							 var, signbit));
+	  gimple_seq_add_stmt (&seq, stmt);
 
-	  stmt = build_gimple_modify_stmt (utmp,
-					   fold_build2 (MINUS_EXPR, utype,
-							utmp, signbit));
-	  append_to_statement_list (stmt, &list);
+	  stmt = gimple_build_assign (utmp, fold_build2 (MINUS_EXPR, utype,
+							 utmp, signbit));
+	  gimple_seq_add_stmt (&seq, stmt);
 
 	  var = utmp;
 	}
@@ -2275,15 +2244,15 @@ sra_build_assignment (tree dst, tree src
 	  if (!is_gimple_reg (dst))
 	    {
 	      dtmp = make_rename_temp (TREE_TYPE (dst), "SR");
-	      stmt = build_gimple_modify_stmt (dtmp, var);
-	      append_to_statement_list (stmt, &list);
+	      stmt = gimple_build_assign (dtmp, var);
+	      gimple_seq_add_stmt (&seq, stmt);
 	      var = dtmp;
 	    }
 	}
-      stmt = build_gimple_modify_stmt (dst, var);
-      append_to_statement_list (stmt, &list);
+      stmt = gimple_build_assign (dst, var);
+      gimple_seq_add_stmt (&seq, stmt);
 
-      return list;
+      return seq;
     }
 
   /* It was hoped that we could perform some type sanity checking
@@ -2300,7 +2269,9 @@ sra_build_assignment (tree dst, tree src
       && !useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (src)))
     src = fold_convert (TREE_TYPE (dst), src);
 
-  return build_gimple_modify_stmt (dst, src);
+  stmt = gimple_build_assign (dst, src);
+  gimple_seq_add_stmt (&seq, stmt);
+  return seq;
 }
 
 /* BIT_FIELD_REFs must not be shared.  sra_build_elt_assignment()
@@ -2310,11 +2281,12 @@ sra_build_assignment (tree dst, tree src
 /* Emit an assignment from SRC to DST, but if DST is a scalarizable
    BIT_FIELD_REF, turn it into bit operations.  */
 
-static tree
+static gimple_seq
 sra_build_bf_assignment (tree dst, tree src)
 {
   tree var, type, utype, tmp, tmp2, tmp3;
-  tree list, stmt;
+  gimple_seq seq;
+  gimple stmt;
   tree cst, cst2, mask;
   tree minshift, maxshift;
 
@@ -2326,7 +2298,7 @@ sra_build_bf_assignment (tree dst, tree 
   if (!scalar_bitfield_p (dst))
     return sra_build_assignment (REPLDUP (dst), src);
 
-  list = NULL;
+  seq = NULL;
 
   cst = fold_convert (bitsizetype, TREE_OPERAND (dst, 2));
   cst2 = size_binop (PLUS_EXPR,
@@ -2375,11 +2347,11 @@ sra_build_bf_assignment (tree dst, tree 
       tmp2 = make_rename_temp (utype, "SR");
 
       if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
-	stmt = build_gimple_modify_stmt (tmp2, fold_convert (utype, tmp));
+	stmt = gimple_build_assign (tmp2, fold_convert (utype, tmp));
       else
-	stmt = build_gimple_modify_stmt (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
-							    utype, tmp));
-      append_to_statement_list (stmt, &list);
+	stmt = gimple_build_assign (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
+						       utype, tmp));
+      gimple_seq_add_stmt (&seq, stmt);
     }
   else
     tmp2 = var;
@@ -2387,10 +2359,9 @@ sra_build_bf_assignment (tree dst, tree 
   if (!integer_zerop (mask))
     {
       tmp = make_rename_temp (utype, "SR");
-      stmt = build_gimple_modify_stmt (tmp,
-				       fold_build2 (BIT_AND_EXPR, utype,
+      stmt = gimple_build_assign (tmp, fold_build2 (BIT_AND_EXPR, utype,
 						    tmp2, mask));
-      append_to_statement_list (stmt, &list);
+      gimple_seq_add_stmt (&seq, stmt);
     }
   else
     tmp = mask;
@@ -2399,28 +2370,31 @@ sra_build_bf_assignment (tree dst, tree 
     tmp2 = src;
   else if (INTEGRAL_TYPE_P (TREE_TYPE (src)))
     {
+      gimple_seq tmp_seq;
       tmp2 = make_rename_temp (TREE_TYPE (src), "SR");
-      stmt = sra_build_assignment (tmp2, src);
-      append_to_statement_list (stmt, &list);
+      tmp_seq = sra_build_assignment (tmp2, src);
+      gimple_seq_add_seq (&seq, tmp_seq);
     }
   else
     {
+      gimple_seq tmp_seq;
       tmp2 = make_rename_temp
 	(lang_hooks.types.type_for_size
 	 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (src))),
 	  1), "SR");
-      stmt = sra_build_assignment (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
+      tmp_seq = sra_build_assignment (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
 						      TREE_TYPE (tmp2), src));
-      append_to_statement_list (stmt, &list);
+      gimple_seq_add_seq (&seq, tmp_seq);
     }
 
   if (!TYPE_UNSIGNED (TREE_TYPE (tmp2)))
     {
+      gimple_seq tmp_seq;
       tree ut = unsigned_type_for (TREE_TYPE (tmp2));
       tmp3 = make_rename_temp (ut, "SR");
       tmp2 = fold_convert (ut, tmp2);
-      stmt = sra_build_assignment (tmp3, tmp2);
-      append_to_statement_list (stmt, &list);
+      tmp_seq = sra_build_assignment (tmp3, tmp2);
+      gimple_seq_add_seq (&seq, tmp_seq);
 
       tmp2 = fold_build1 (BIT_NOT_EXPR, utype, mask);
       tmp2 = int_const_binop (RSHIFT_EXPR, tmp2, minshift, true);
@@ -2430,8 +2404,8 @@ sra_build_bf_assignment (tree dst, tree 
       if (tmp3 != tmp2)
 	{
 	  tmp3 = make_rename_temp (ut, "SR");
-	  stmt = sra_build_assignment (tmp3, tmp2);
-	  append_to_statement_list (stmt, &list);
+	  tmp_seq = sra_build_assignment (tmp3, tmp2);
+          gimple_seq_add_seq (&seq, tmp_seq);
 	}
 
       tmp2 = tmp3;
@@ -2439,20 +2413,20 @@ sra_build_bf_assignment (tree dst, tree 
 
   if (TYPE_MAIN_VARIANT (TREE_TYPE (tmp2)) != TYPE_MAIN_VARIANT (utype))
     {
+      gimple_seq tmp_seq;
       tmp3 = make_rename_temp (utype, "SR");
       tmp2 = fold_convert (utype, tmp2);
-      stmt = sra_build_assignment (tmp3, tmp2);
-      append_to_statement_list (stmt, &list);
+      tmp_seq = sra_build_assignment (tmp3, tmp2);
+      gimple_seq_add_seq (&seq, tmp_seq);
       tmp2 = tmp3;
     }
 
   if (!integer_zerop (minshift))
     {
       tmp3 = make_rename_temp (utype, "SR");
-      stmt = build_gimple_modify_stmt (tmp3,
-				       fold_build2 (LSHIFT_EXPR, utype,
-						    tmp2, minshift));
-      append_to_statement_list (stmt, &list);
+      stmt = gimple_build_assign (tmp3, fold_build2 (LSHIFT_EXPR, utype,
+						     tmp2, minshift));
+      gimple_seq_add_stmt (&seq, stmt);
       tmp2 = tmp3;
     }
 
@@ -2460,35 +2434,34 @@ sra_build_bf_assignment (tree dst, tree 
     tmp3 = make_rename_temp (utype, "SR");
   else
     tmp3 = var;
-  stmt = build_gimple_modify_stmt (tmp3,
-				   fold_build2 (BIT_IOR_EXPR, utype,
-						tmp, tmp2));
-  append_to_statement_list (stmt, &list);
+  stmt = gimple_build_assign (tmp3, fold_build2 (BIT_IOR_EXPR, utype,
+						 tmp, tmp2));
+      gimple_seq_add_stmt (&seq, stmt);
 
   if (tmp3 != var)
     {
       if (TREE_TYPE (var) == type)
-	stmt = build_gimple_modify_stmt (var,
-					 fold_convert (type, tmp3));
+	stmt = gimple_build_assign (var, fold_convert (type, tmp3));
       else
-	stmt = build_gimple_modify_stmt (var,
-					 fold_build1 (VIEW_CONVERT_EXPR,
+	stmt = gimple_build_assign (var, fold_build1 (VIEW_CONVERT_EXPR,
 						      TREE_TYPE (var), tmp3));
-      append_to_statement_list (stmt, &list);
+      gimple_seq_add_stmt (&seq, stmt);
     }
 
-  return list;
+  return seq;
 }
 
 /* Expand an assignment of SRC to the scalarized representation of
    ELT.  If it is a field group, try to widen the assignment to cover
    the full variable.  */
 
-static tree
+static gimple_seq
 sra_build_elt_assignment (struct sra_elt *elt, tree src)
 {
   tree dst = elt->replacement;
-  tree var, tmp, cst, cst2, list, stmt;
+  tree var, tmp, cst, cst2;
+  gimple stmt;
+  gimple_seq seq;
 
   if (TREE_CODE (dst) != BIT_FIELD_REF
       || !elt->in_bitfld_block)
@@ -2524,7 +2497,8 @@ sra_build_elt_assignment (struct sra_elt
 	  if (TYPE_MAIN_VARIANT (TREE_TYPE (var))
 	      != TYPE_MAIN_VARIANT (TREE_TYPE (src)))
 	    {
-	      list = NULL;
+              gimple_seq tmp_seq;
+	      seq = NULL;
 
 	      if (!INTEGRAL_TYPE_P (TREE_TYPE (src)))
 		src = fold_build1 (VIEW_CONVERT_EXPR,
@@ -2535,15 +2509,15 @@ sra_build_elt_assignment (struct sra_elt
 	      gcc_assert (TYPE_UNSIGNED (TREE_TYPE (src)));
 
 	      tmp = make_rename_temp (TREE_TYPE (src), "SR");
-	      stmt = build_gimple_modify_stmt (tmp, src);
-	      append_to_statement_list (stmt, &list);
+	      stmt = gimple_build_assign (tmp, src);
+	      gimple_seq_add_stmt (&seq, stmt);
 
-	      stmt = sra_build_assignment (var,
-					   fold_convert (TREE_TYPE (var),
-							 tmp));
-	      append_to_statement_list (stmt, &list);
+	      tmp_seq = sra_build_assignment (var,
+					      fold_convert (TREE_TYPE (var),
+							    tmp));
+	      gimple_seq_add_seq (&seq, tmp_seq);
 
-	      return list;
+	      return seq;
 	    }
 
 	  src = fold_convert (TREE_TYPE (var), src);
@@ -2566,9 +2540,10 @@ sra_build_elt_assignment (struct sra_elt
 
 static void
 generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
-		     tree *list_p)
+		     gimple_seq *seq_p)
 {
   struct sra_elt *c;
+  gimple_seq tmp_seq;
   tree t;
 
   if (!copy_out && TREE_CODE (expr) == SSA_NAME
@@ -2582,24 +2557,25 @@ generate_copy_inout (struct sra_elt *elt
       i = c->replacement;
 
       t = build2 (COMPLEX_EXPR, elt->type, r, i);
-      t = sra_build_bf_assignment (expr, t);
-      SSA_NAME_DEF_STMT (expr) = t;
-      append_to_statement_list (t, list_p);
+      tmp_seq = sra_build_bf_assignment (expr, t);
+      gcc_assert (gimple_seq_singleton_p (tmp_seq));
+      SSA_NAME_DEF_STMT (expr) = gimple_seq_first_stmt (tmp_seq);
+      gimple_seq_add_seq (seq_p, tmp_seq);
     }
   else if (elt->replacement)
     {
       if (copy_out)
-	t = sra_build_elt_assignment (elt, expr);
+	tmp_seq = sra_build_elt_assignment (elt, expr);
       else
-	t = sra_build_bf_assignment (expr, REPLDUP (elt->replacement));
-      append_to_statement_list (t, list_p);
+	tmp_seq = sra_build_bf_assignment (expr, REPLDUP (elt->replacement));
+      gimple_seq_add_seq (seq_p, tmp_seq);
     }
   else
     {
       FOR_EACH_ACTUAL_CHILD (c, elt)
 	{
 	  t = generate_one_element_ref (c, unshare_expr (expr));
-	  generate_copy_inout (c, copy_out, t, list_p);
+	  generate_copy_inout (c, copy_out, t, seq_p);
 	}
     }
 }
@@ -2609,7 +2585,7 @@ generate_copy_inout (struct sra_elt *elt
    correspondence of instantiated elements.  */
 
 static void
-generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
+generate_element_copy (struct sra_elt *dst, struct sra_elt *src, gimple_seq *seq_p)
 {
   struct sra_elt *dc, *sc;
 
@@ -2624,7 +2600,7 @@ generate_element_copy (struct sra_elt *d
 	    {
 	      sc = lookup_element (src, dcs->element, NULL, NO_INSERT);
 	      gcc_assert (sc);
-	      generate_element_copy (dcs, sc, list_p);
+	      generate_element_copy (dcs, sc, seq_p);
 	    }
 
 	  continue;
@@ -2656,17 +2632,17 @@ generate_element_copy (struct sra_elt *d
 	  sc = lookup_element (src, f, NULL, NO_INSERT);
 	}
 
-      generate_element_copy (dc, sc, list_p);
+      generate_element_copy (dc, sc, seq_p);
     }
 
   if (dst->replacement)
     {
-      tree t;
+      gimple_seq tmp_seq;
 
       gcc_assert (src->replacement);
 
-      t = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
-      append_to_statement_list (t, list_p);
+      tmp_seq = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
+      gimple_seq_add_seq (seq_p, tmp_seq);
     }
 }
 
@@ -2676,7 +2652,7 @@ generate_element_copy (struct sra_elt *d
    with generate_element_init.  */
 
 static void
-generate_element_zero (struct sra_elt *elt, tree *list_p)
+generate_element_zero (struct sra_elt *elt, gimple_seq *seq_p)
 {
   struct sra_elt *c;
 
@@ -2688,17 +2664,18 @@ generate_element_zero (struct sra_elt *e
 
   if (!elt->in_bitfld_block)
     FOR_EACH_ACTUAL_CHILD (c, elt)
-      generate_element_zero (c, list_p);
+      generate_element_zero (c, seq_p);
 
   if (elt->replacement)
     {
       tree t;
+      gimple_seq tmp_seq;
 
       gcc_assert (elt->is_scalar);
       t = fold_convert (elt->type, integer_zero_node);
 
-      t = sra_build_elt_assignment (elt, t);
-      append_to_statement_list (t, list_p);
+      tmp_seq = sra_build_elt_assignment (elt, t);
+      gimple_seq_add_seq (seq_p, tmp_seq);
     }
 }
 
@@ -2706,11 +2683,10 @@ generate_element_zero (struct sra_elt *e
    Add the result to *LIST_P.  */
 
 static void
-generate_one_element_init (struct sra_elt *elt, tree init, tree *list_p)
+generate_one_element_init (struct sra_elt *elt, tree init, gimple_seq *seq_p)
 {
-  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
-  tree stmt = sra_build_elt_assignment (elt, init);
-  gimplify_and_add (stmt, list_p);
+  gimple_seq tmp_seq = sra_build_elt_assignment (elt, init);
+  gimple_seq_add_seq (seq_p, tmp_seq);
 }
 
 /* Generate a set of assignment statements in *LIST_P to set all instantiated
@@ -2720,7 +2696,7 @@ generate_one_element_init (struct sra_el
    handle.  */
 
 static bool
-generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
+generate_element_init_1 (struct sra_elt *elt, tree init, gimple_seq *seq_p)
 {
   bool result = true;
   enum tree_code init_code;
@@ -2738,7 +2714,7 @@ generate_element_init_1 (struct sra_elt 
     {
       if (elt->replacement)
 	{
-	  generate_one_element_init (elt, init, list_p);
+	  generate_one_element_init (elt, init, seq_p);
 	  elt->visited = true;
 	}
       return result;
@@ -2756,7 +2732,7 @@ generate_element_init_1 (struct sra_elt 
 	  else
 	    t = (init_code == COMPLEX_EXPR
 		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
-	  result &= generate_element_init_1 (sub, t, list_p);
+	  result &= generate_element_init_1 (sub, t, seq_p);
 	}
       break;
 
@@ -2772,7 +2748,7 @@ generate_element_init_1 (struct sra_elt 
 		{
 	  	  sub = lookup_element (elt, lower, NULL, NO_INSERT);
 		  if (sub != NULL)
-		    result &= generate_element_init_1 (sub, value, list_p);
+		    result &= generate_element_init_1 (sub, value, seq_p);
 		  if (tree_int_cst_equal (lower, upper))
 		    break;
 		  lower = int_const_binop (PLUS_EXPR, lower,
@@ -2783,7 +2759,7 @@ generate_element_init_1 (struct sra_elt 
 	    {
 	      sub = lookup_element (elt, purpose, NULL, NO_INSERT);
 	      if (sub != NULL)
-		result &= generate_element_init_1 (sub, value, list_p);
+		result &= generate_element_init_1 (sub, value, seq_p);
 	    }
 	}
       break;
@@ -2800,95 +2776,81 @@ generate_element_init_1 (struct sra_elt 
    gimplification.  */
 
 static bool
-generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
+generate_element_init (struct sra_elt *elt, tree init, gimple_seq *seq_p)
 {
   bool ret;
 
   push_gimplify_context ();
-  ret = generate_element_init_1 (elt, init, list_p);
+  ret = generate_element_init_1 (elt, init, seq_p);
   pop_gimplify_context (NULL);
 
   /* The replacement can expose previously unreferenced variables.  */
-  if (ret && *list_p)
+  if (ret && *seq_p)
     {
-      tree_stmt_iterator i;
+      gimple_stmt_iterator i;
 
-      for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
-	find_new_referenced_vars (tsi_stmt_ptr (i));
+      for (i = gsi_start (*seq_p); !gsi_end_p (i); gsi_next (&i))
+	find_new_referenced_vars (gsi_stmt (i));
     }
 
   return ret;
 }
 
-/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
-   has more than one edge, STMT will be replicated for each edge.  Also,
-   abnormal edges will be ignored.  */
+/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
+   if BB has more than one edge, SEQ will be replicated for each edge.
+   Also, abnormal edges will be ignored.  */
 
 void
-insert_edge_copies (tree stmt, basic_block bb)
+insert_edge_copies_seq (gimple_seq seq, basic_block bb)
 {
   edge e;
   edge_iterator ei;
-  bool first_copy;
+  bool first_copy = true;
 
-  first_copy = true;
   FOR_EACH_EDGE (e, ei, bb->succs)
-    {
-      /* We don't need to insert copies on abnormal edges.  The
-	 value of the scalar replacement is not guaranteed to
-	 be valid through an abnormal edge.  */
-      if (!(e->flags & EDGE_ABNORMAL))
-	{
-	  if (first_copy)
-	    {
-	      bsi_insert_on_edge (e, stmt);
-	      first_copy = false;
-	    }
-	  else
-	    bsi_insert_on_edge (e, unsave_expr_now (stmt));
-	}
-    }
+    if (!(e->flags & EDGE_ABNORMAL)) 
+      gsi_insert_seq_on_edge (e, gimple_seq_copy (seq));
 }
 
-/* Helper function to insert LIST before BSI, and set up line number info.  */
+/* Helper function to insert LIST before GSI, and set up line number info.  */
 
 void
-sra_insert_before (block_stmt_iterator *bsi, tree list)
+sra_insert_before (gimple_stmt_iterator *gsi, gimple_seq seq)
 {
-  tree stmt = bsi_stmt (*bsi);
+  gimple stmt = gsi_stmt (*gsi);
 
-  if (EXPR_HAS_LOCATION (stmt))
-    annotate_all_with_location (&list, EXPR_LOCATION (stmt));
-  bsi_insert_before (bsi, list, BSI_SAME_STMT);
+  if (gimple_has_location (stmt))
+    annotate_all_with_location (seq, gimple_location (stmt));
+  gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
 }
 
-/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */
+/* Similarly, but insert after GSI.  Handles insertion onto edges as well.  */
 
 void
-sra_insert_after (block_stmt_iterator *bsi, tree list)
+sra_insert_after (gimple_stmt_iterator *gsi, gimple_seq seq)
 {
-  tree stmt = bsi_stmt (*bsi);
+  gimple stmt = gsi_stmt (*gsi);
 
-  if (EXPR_HAS_LOCATION (stmt))
-    annotate_all_with_location (&list, EXPR_LOCATION (stmt));
+  if (gimple_has_location (stmt))
+    annotate_all_with_location (seq, gimple_location (stmt));
 
   if (stmt_ends_bb_p (stmt))
-    insert_edge_copies (list, bsi->bb);
+    insert_edge_copies_seq (seq, gsi_bb (*gsi));
   else
-    bsi_insert_after (bsi, list, BSI_SAME_STMT);
+    gsi_insert_seq_after (gsi, seq, GSI_SAME_STMT);
 }
 
-/* Similarly, but replace the statement at BSI.  */
+/* Similarly, but replace the statement at GSI.  */
 
 static void
-sra_replace (block_stmt_iterator *bsi, tree list)
+sra_replace (gimple_stmt_iterator *gsi, gimple_seq seq)
 {
-  sra_insert_before (bsi, list);
-  bsi_remove (bsi, false);
-  if (bsi_end_p (*bsi))
-    *bsi = bsi_last (bsi->bb);
+  sra_insert_before (gsi, seq);
+  gsi_remove (gsi, false);
+  if (gsi_end_p (*gsi))
+    *gsi = gsi_last (gsi_seq (*gsi));
   else
-    bsi_prev (bsi);
+    gsi_prev (gsi);
 }
 
 /* Data structure that bitfield_overlaps_p fills in with information
@@ -3002,7 +2964,7 @@ bitfield_overlaps_p (tree blen, tree bpo
 
 static void
 sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
-				 tree *listp, tree blen, tree bpos,
+				 gimple_seq *seq_p, tree blen, tree bpos,
 				 struct sra_elt *elt)
 {
   struct sra_elt *fld;
@@ -3020,7 +2982,8 @@ sra_explode_bitfield_assignment (tree va
 
       if (fld->replacement)
 	{
-	  tree infld, invar, st, type;
+	  tree infld, invar, type;
+          gimple_seq st;
 
 	  infld = fld->replacement;
 
@@ -3059,7 +3022,7 @@ sra_explode_bitfield_assignment (tree va
 	  else
 	    st = sra_build_bf_assignment (infld, invar);
 
-	  append_to_statement_list (st, listp);
+	  gimple_seq_add_seq (seq_p, st);
 	}
       else
 	{
@@ -3068,7 +3031,7 @@ sra_explode_bitfield_assignment (tree va
 	  if (flp.overlap_pos)
 	    sub = size_binop (PLUS_EXPR, sub, flp.overlap_pos);
 
-	  sra_explode_bitfield_assignment (var, sub, to_var, listp,
+	  sra_explode_bitfield_assignment (var, sub, to_var, seq_p,
 					   flen, fpos, fld);
 	}
     }
@@ -3081,7 +3044,8 @@ sra_explode_bitfield_assignment (tree va
    full variable back to the scalarized variables.  */
 
 static void
-sra_sync_for_bitfield_assignment (tree *listbeforep, tree *listafterp,
+sra_sync_for_bitfield_assignment (gimple_seq *seq_before_p,
+                                  gimple_seq *seq_after_p,
 				  tree blen, tree bpos,
 				  struct sra_elt *elt)
 {
@@ -3094,18 +3058,18 @@ sra_sync_for_bitfield_assignment (tree *
 	if (fld->replacement || (!flp.overlap_len && !flp.overlap_pos))
 	  {
 	    generate_copy_inout (fld, false, generate_element_ref (fld),
-				 listbeforep);
+				 seq_before_p);
 	    mark_no_warning (fld);
-	    if (listafterp)
+	    if (seq_after_p)
 	      generate_copy_inout (fld, true, generate_element_ref (fld),
-				   listafterp);
+				   seq_after_p);
 	  }
 	else
 	  {
 	    tree flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
 	    tree fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);
 
-	    sra_sync_for_bitfield_assignment (listbeforep, listafterp,
+	    sra_sync_for_bitfield_assignment (seq_before_p, seq_after_p,
 					      flen, fpos, fld);
 	  }
       }
@@ -3116,10 +3080,10 @@ sra_sync_for_bitfield_assignment (tree *
    aggregate.  IS_OUTPUT is true if ELT is being modified.  */
 
 static void
-scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
+scalarize_use (struct sra_elt *elt, tree *expr_p, gimple_stmt_iterator *gsi,
 	       bool is_output, bool use_all)
 {
-  tree stmt = bsi_stmt (*bsi);
+  gimple stmt = gsi_stmt (*gsi);
   tree bfexpr;
 
   if (elt->replacement)
@@ -3131,52 +3095,43 @@ scalarize_use (struct sra_elt *elt, tree
       if (is_output
 	  && TREE_CODE (elt->replacement) == BIT_FIELD_REF
 	  && is_gimple_reg (TREE_OPERAND (elt->replacement, 0))
-	  && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-	  && &GIMPLE_STMT_OPERAND (stmt, 0) == expr_p)
+	  && gimple_code (stmt) == GIMPLE_ASSIGN
+	  && gimple_assign_lhs_ptr (stmt) == expr_p)
 	{
-	  tree newstmt = sra_build_elt_assignment
-	    (elt, GIMPLE_STMT_OPERAND (stmt, 1));
-	  if (TREE_CODE (newstmt) != STATEMENT_LIST)
-	    {
-	      tree list = NULL;
-	      append_to_statement_list (newstmt, &list);
-	      newstmt = list;
-	    }
-	  sra_replace (bsi, newstmt);
+          /* RHS must be a single operand. */
+          gcc_assert (gimple_assign_single_p (stmt));
+	  gimple_seq newseq = sra_build_elt_assignment
+	    (elt, gimple_assign_rhs1 (stmt));
+	  sra_replace (gsi, newseq);
 	  return;
 	}
       else if (!is_output
 	       && TREE_CODE (elt->replacement) == BIT_FIELD_REF
-	       && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-	       && &GIMPLE_STMT_OPERAND (stmt, 1) == expr_p)
+	       && gimple_code (stmt) == GIMPLE_ASSIGN
+	       && gimple_assign_rhs1_ptr (stmt) == expr_p)
 	{
 	  tree tmp = make_rename_temp
-	    (TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 0)), "SR");
-	  tree newstmt = sra_build_assignment (tmp, REPLDUP (elt->replacement));
+	    (TREE_TYPE (gimple_assign_lhs (stmt)), "SR");
+	  gimple_seq newseq = sra_build_assignment (tmp, REPLDUP (elt->replacement));
 
-	  if (TREE_CODE (newstmt) != STATEMENT_LIST)
-	    {
-	      tree list = NULL;
-	      append_to_statement_list (newstmt, &list);
-	      newstmt = list;
-	    }
-	  sra_insert_before (bsi, newstmt);
+	  sra_insert_before (gsi, newseq);
 	  replacement = tmp;
 	}
       if (is_output)
-	  mark_all_v_defs (stmt);
+	  mark_all_v_defs_stmt (stmt);
       *expr_p = REPLDUP (replacement);
       update_stmt (stmt);
     }
   else if (use_all && is_output
-	   && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+	   && gimple_code (stmt) == GIMPLE_ASSIGN
 	   && TREE_CODE (bfexpr
-			 = GIMPLE_STMT_OPERAND (stmt, 0)) == BIT_FIELD_REF
+			 = gimple_assign_lhs (stmt)) == BIT_FIELD_REF
 	   && &TREE_OPERAND (bfexpr, 0) == expr_p
 	   && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
 	   && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
     {
-      tree listbefore = NULL, listafter = NULL;
+      gimple_seq seq_before = NULL;
+      gimple_seq seq_after = NULL;
       tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
       tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
       bool update = false;
@@ -3184,18 +3139,18 @@ scalarize_use (struct sra_elt *elt, tree
       if (!elt->use_block_copy)
 	{
 	  tree type = TREE_TYPE (bfexpr);
-	  tree var = make_rename_temp (type, "SR"), tmp, st, vpos;
+	  tree var = make_rename_temp (type, "SR"), tmp, vpos;
+          gimple st;
 
-	  GIMPLE_STMT_OPERAND (stmt, 0) = var;
+	  gimple_assign_set_lhs (stmt, var);
 	  update = true;
 
 	  if (!TYPE_UNSIGNED (type))
 	    {
 	      type = unsigned_type_for (type);
 	      tmp = make_rename_temp (type, "SR");
-	      st = build_gimple_modify_stmt (tmp,
-					     fold_convert (type, var));
-	      append_to_statement_list (st, &listafter);
+	      st = gimple_build_assign (tmp, fold_convert (type, var));
+	      gimple_seq_add_stmt (&seq_after, st);
 	      var = tmp;
 	    }
 
@@ -3208,35 +3163,35 @@ scalarize_use (struct sra_elt *elt, tree
 	  else
 	    vpos = bitsize_int (0);
 	  sra_explode_bitfield_assignment
-	    (var, vpos, false, &listafter, blen, bpos, elt);
+	    (var, vpos, false, &seq_after, blen, bpos, elt);
 	}
       else
 	sra_sync_for_bitfield_assignment
-	  (&listbefore, &listafter, blen, bpos, elt);
+	  (&seq_before, &seq_after, blen, bpos, elt);
 
-      if (listbefore)
+      if (seq_before)
 	{
-	  mark_all_v_defs (listbefore);
-	  sra_insert_before (bsi, listbefore);
+	  mark_all_v_defs_seq (seq_before);
+	  sra_insert_before (gsi, seq_before);
 	}
-      if (listafter)
+      if (seq_after)
 	{
-	  mark_all_v_defs (listafter);
-	  sra_insert_after (bsi, listafter);
+	  mark_all_v_defs_seq (seq_after);
+	  sra_insert_after (gsi, seq_after);
 	}
 
       if (update)
 	update_stmt (stmt);
     }
   else if (use_all && !is_output
-	   && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+	   && gimple_code (stmt) == GIMPLE_ASSIGN
 	   && TREE_CODE (bfexpr
-			 = GIMPLE_STMT_OPERAND (stmt, 1)) == BIT_FIELD_REF
-	   && &TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0) == expr_p
+			 = gimple_assign_rhs1 (stmt)) == BIT_FIELD_REF
+	   && &TREE_OPERAND (gimple_assign_rhs1 (stmt), 0) == expr_p
 	   && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
 	   && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
     {
-      tree list = NULL;
+      gimple_seq seq = NULL;
       tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
       tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
       bool update = false;
@@ -3251,9 +3206,9 @@ scalarize_use (struct sra_elt *elt, tree
 
 	  var = make_rename_temp (type, "SR");
 
-	  append_to_statement_list (build_gimple_modify_stmt
-				    (var, build_int_cst_wide (type, 0, 0)),
-				    &list);
+	  gimple_seq_add_stmt (&seq,
+                               gimple_build_assign
+				 (var, build_int_cst_wide (type, 0, 0)));
 
 	  /* If VAR is wider than BLEN bits, it is padded at the
 	     most-significant end.  We want to set VPOS such that
@@ -3264,19 +3219,19 @@ scalarize_use (struct sra_elt *elt, tree
 	  else
 	    vpos = bitsize_int (0);
 	  sra_explode_bitfield_assignment
-	    (var, vpos, true, &list, blen, bpos, elt);
+	    (var, vpos, true, &seq, blen, bpos, elt);
 
-	  GIMPLE_STMT_OPERAND (stmt, 1) = var;
+	  gimple_assign_set_rhs1 (stmt, var);
 	  update = true;
 	}
       else
 	sra_sync_for_bitfield_assignment
-	  (&list, NULL, blen, bpos, elt);
+	  (&seq, NULL, blen, bpos, elt);
 
-      if (list)
+      if (seq)
 	{
-	  mark_all_v_defs (list);
-	  sra_insert_before (bsi, list);
+	  mark_all_v_defs_seq (seq);
+	  sra_insert_before (gsi, seq);
 	}
 
       if (update)
@@ -3284,7 +3239,7 @@ scalarize_use (struct sra_elt *elt, tree
     }
   else
     {
-      tree list = NULL;
+      gimple_seq seq = NULL;
 
       /* Otherwise we need some copies.  If ELT is being read, then we
 	 want to store all (modified) sub-elements back into the
@@ -3300,15 +3255,15 @@ scalarize_use (struct sra_elt *elt, tree
 	 This optimization would be most effective if sra_walk_function
 	 processed the blocks in dominator order.  */
 
-      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
-      if (list == NULL)
+      generate_copy_inout (elt, is_output, generate_element_ref (elt), &seq);
+      if (seq == NULL)
 	return;
-      mark_all_v_defs (list);
+      mark_all_v_defs_seq (seq);
       if (is_output)
-	sra_insert_after (bsi, list);
+	sra_insert_after (gsi, seq);
       else
 	{
-	  sra_insert_before (bsi, list);
+	  sra_insert_before (gsi, seq);
 	  if (use_all)
 	    mark_no_warning (elt);
 	}
@@ -3320,21 +3275,24 @@ scalarize_use (struct sra_elt *elt, tree
 
 static void
 scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
-		block_stmt_iterator *bsi)
+		gimple_stmt_iterator *gsi)
 {
-  tree list, stmt;
+  gimple_seq seq;
+  gimple stmt;
 
   if (lhs_elt->replacement && rhs_elt->replacement)
     {
       /* If we have two scalar operands, modify the existing statement.  */
-      stmt = bsi_stmt (*bsi);
+      stmt = gsi_stmt (*gsi);
 
       /* See the commentary in sra_walk_function concerning
 	 RETURN_EXPR, and why we should never see one here.  */
-      gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
+      gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN);
+      gcc_assert (gimple_assign_copy_p (stmt));
 
-      GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
-      GIMPLE_STMT_OPERAND (stmt, 1) = REPLDUP (rhs_elt->replacement);
+
+      gimple_assign_set_lhs (stmt, lhs_elt->replacement);
+      gimple_assign_set_rhs1 (stmt, REPLDUP (rhs_elt->replacement));
       update_stmt (stmt);
     }
   else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
@@ -3347,22 +3305,22 @@ scalarize_copy (struct sra_elt *lhs_elt,
 	 would at least allow those elements that are instantiated in
 	 both structures to be optimized well.  */
 
-      list = NULL;
+      seq = NULL;
       generate_copy_inout (rhs_elt, false,
-			   generate_element_ref (rhs_elt), &list);
-      if (list)
+			   generate_element_ref (rhs_elt), &seq);
+      if (seq)
 	{
-	  mark_all_v_defs (list);
-	  sra_insert_before (bsi, list);
+	  mark_all_v_defs_seq (seq);
+	  sra_insert_before (gsi, seq);
 	}
 
-      list = NULL;
+      seq = NULL;
       generate_copy_inout (lhs_elt, true,
-			   generate_element_ref (lhs_elt), &list);
-      if (list)
+			   generate_element_ref (lhs_elt), &seq);
+      if (seq)
 	{
-	  mark_all_v_defs (list);
-	  sra_insert_after (bsi, list);
+	  mark_all_v_defs_seq (seq);
+	  sra_insert_after (gsi, seq);
 	}
     }
   else
@@ -3371,14 +3329,14 @@ scalarize_copy (struct sra_elt *lhs_elt,
 	 case perform pair-wise element assignments and replace the
 	 original block copy statement.  */
 
-      stmt = bsi_stmt (*bsi);
-      mark_all_v_defs (stmt);
+      stmt = gsi_stmt (*gsi);
+      mark_all_v_defs_stmt (stmt);
 
-      list = NULL;
-      generate_element_copy (lhs_elt, rhs_elt, &list);
-      gcc_assert (list);
-      mark_all_v_defs (list);
-      sra_replace (bsi, list);
+      seq = NULL;
+      generate_element_copy (lhs_elt, rhs_elt, &seq);
+      gcc_assert (seq);
+      mark_all_v_defs_seq (seq);
+      sra_replace (gsi, seq);
     }
 }
 
@@ -3388,23 +3346,23 @@ scalarize_copy (struct sra_elt *lhs_elt,
    CONSTRUCTOR.  */
 
 static void
-scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
+scalarize_init (struct sra_elt *lhs_elt, tree rhs, gimple_stmt_iterator *gsi)
 {
   bool result = true;
-  tree list = NULL, init_list = NULL;
+  gimple_seq seq = NULL, init_seq = NULL;
 
   /* Generate initialization statements for all members extant in the RHS.  */
   if (rhs)
     {
       /* Unshare the expression just in case this is from a decl's initial.  */
       rhs = unshare_expr (rhs);
-      result = generate_element_init (lhs_elt, rhs, &init_list);
+      result = generate_element_init (lhs_elt, rhs, &init_seq);
     }
 
   /* CONSTRUCTOR is defined such that any member not mentioned is assigned
      a zero value.  Initialize the rest of the instantiated elements.  */
-  generate_element_zero (lhs_elt, &list);
-  append_to_statement_list (init_list, &list);
+  generate_element_zero (lhs_elt, &seq);
+  gimple_seq_add_seq (&seq, init_seq);
 
   if (!result)
     {
@@ -3414,11 +3372,11 @@ scalarize_init (struct sra_elt *lhs_elt,
 	 constants.  The easiest way to do this is to generate a complete
 	 copy-out, and then follow that with the constant assignments
 	 that we were able to build.  DCE will clean things up.  */
-      tree list0 = NULL;
+      gimple_seq seq0 = NULL;
       generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
-			   &list0);
-      append_to_statement_list (list, &list0);
-      list = list0;
+			   &seq0);
+      gimple_seq_add_seq (&seq0, seq);
+      seq = seq0;
     }
 
   if (lhs_elt->use_block_copy || !result)
@@ -3426,20 +3384,20 @@ scalarize_init (struct sra_elt *lhs_elt,
       /* Since LHS is not fully instantiated, we must leave the structure
 	 assignment in place.  Treating this case differently from a USE
 	 exposes constants to later optimizations.  */
-      if (list)
+      if (seq)
 	{
-	  mark_all_v_defs (list);
-	  sra_insert_after (bsi, list);
+	  mark_all_v_defs_seq (seq);
+	  sra_insert_after (gsi, seq);
 	}
     }
   else
     {
       /* The LHS is fully instantiated.  The list of initializations
 	 replaces the original structure assignment.  */
-      gcc_assert (list);
-      mark_all_v_defs (bsi_stmt (*bsi));
-      mark_all_v_defs (list);
-      sra_replace (bsi, list);
+      gcc_assert (seq);
+      mark_all_v_defs_stmt (gsi_stmt (*gsi));
+      mark_all_v_defs_seq (seq);
+      sra_replace (gsi, seq);
     }
 }
 
@@ -3468,7 +3426,7 @@ mark_notrap (tree *tp, int *walk_subtree
 
 static void
 scalarize_ldst (struct sra_elt *elt, tree other,
-		block_stmt_iterator *bsi, bool is_output)
+		gimple_stmt_iterator *gsi, bool is_output)
 {
   /* Shouldn't have gotten called for a scalar.  */
   gcc_assert (!elt->replacement);
@@ -3477,7 +3435,7 @@ scalarize_ldst (struct sra_elt *elt, tre
     {
       /* Since ELT is not fully instantiated, we have to leave the
 	 block copy in place.  Treat this as a USE.  */
-      scalarize_use (elt, NULL, bsi, is_output, false);
+      scalarize_use (elt, NULL, gsi, is_output, false);
     }
   else
     {
@@ -3485,19 +3443,21 @@ scalarize_ldst (struct sra_elt *elt, tre
 	 case we can have each element stored/loaded directly to/from the
 	 corresponding slot in OTHER.  This avoids a block copy.  */
 
-      tree list = NULL, stmt = bsi_stmt (*bsi);
+      gimple_seq seq = NULL;
+      gimple stmt = gsi_stmt (*gsi);
 
-      mark_all_v_defs (stmt);
-      generate_copy_inout (elt, is_output, other, &list);
-      gcc_assert (list);
-      mark_all_v_defs (list);
+      mark_all_v_defs_stmt (stmt);
+      generate_copy_inout (elt, is_output, other, &seq);
+      gcc_assert (seq);
+      mark_all_v_defs_seq (seq);
 
       /* Preserve EH semantics.  */
       if (stmt_ends_bb_p (stmt))
 	{
-	  tree_stmt_iterator tsi;
-	  tree first, blist = NULL;
-	  bool thr = tree_could_throw_p (stmt);
+	  gimple_stmt_iterator si;
+	  gimple first;
+	  gimple_seq blist = NULL;
+	  bool thr = stmt_could_throw_p (stmt);
 
 	  /* If the last statement of this BB created an EH edge
 	     before scalarization, we have to locate the first
@@ -3508,26 +3468,26 @@ scalarize_ldst (struct sra_elt *elt, tre
 	     list will be added to normal outgoing edges of the same
 	     BB.  If they access any memory, it's the same memory, so
 	     we can assume they won't throw.  */
-	  tsi = tsi_start (list);
-	  for (first = tsi_stmt (tsi);
-	       thr && !tsi_end_p (tsi) && !tree_could_throw_p (first);
-	       first = tsi_stmt (tsi))
+	  si = gsi_start (seq);
+	  for (first = gsi_stmt (si);
+	       thr && !gsi_end_p (si) && !stmt_could_throw_p (first);
+	       first = gsi_stmt (si))
 	    {
-	      tsi_delink (&tsi);
-	      append_to_statement_list (first, &blist);
+	      gsi_remove (&si, false);
+	      gimple_seq_add_stmt (&blist, first);
 	    }
 
 	  /* Extract the first remaining statement from LIST, this is
 	     the EH statement if there is one.  */
-	  tsi_delink (&tsi);
+	  gsi_remove (&si, false);
 
 	  if (blist)
-	    sra_insert_before (bsi, blist);
+	    sra_insert_before (gsi, blist);
 
 	  /* Replace the old statement with this new representative.  */
-	  bsi_replace (bsi, first, true);
+	  gsi_replace (gsi, first, true);
 
-	  if (!tsi_end_p (tsi))
+	  if (!gsi_end_p (si))
 	    {
 	      /* If any reference would trap, then they all would.  And more
 		 to the point, the first would.  Therefore none of the rest
@@ -3536,16 +3496,16 @@ scalarize_ldst (struct sra_elt *elt, tre
 		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
 	      do
 		{
-		  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
-		  tsi_next (&tsi);
+		  walk_gimple_stmt (&si, NULL, mark_notrap, NULL);
+		  gsi_next (&si);
 		}
-	      while (!tsi_end_p (tsi));
+	      while (!gsi_end_p (si));
 
-	      insert_edge_copies (list, bsi->bb);
+	      insert_edge_copies_seq (seq, gsi_bb (*gsi));
 	    }
 	}
       else
-	sra_replace (bsi, list);
+	sra_replace (gsi, seq);
     }
 }
 
@@ -3554,7 +3514,7 @@ scalarize_ldst (struct sra_elt *elt, tre
 static void
 scalarize_parms (void)
 {
-  tree list = NULL;
+  gimple_seq seq = NULL;
   unsigned i;
   bitmap_iterator bi;
 
@@ -3562,13 +3522,13 @@ scalarize_parms (void)
     {
       tree var = referenced_var (i);
       struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
-      generate_copy_inout (elt, true, var, &list);
+      generate_copy_inout (elt, true, var, &seq);
     }
 
-  if (list)
+  if (seq)
     {
-      insert_edge_copies (list, ENTRY_BLOCK_PTR);
-      mark_all_v_defs (list);
+      insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
+      mark_all_v_defs_seq (seq);
     }
 }
 
@@ -3583,7 +3543,7 @@ scalarize_function (void)
 
   sra_walk_function (&fns);
   scalarize_parms ();
-  bsi_commit_edge_inserts ();
+  gsi_commit_edge_inserts ();
 }
 
 
@@ -3633,13 +3593,12 @@ debug_sra_elt_name (struct sra_elt *elt)
 void 
 sra_init_cache (void)
 {
-  if (sra_type_decomp_cache) 
+  if (sra_type_decomp_cache)
     return;
 
   sra_type_decomp_cache = BITMAP_ALLOC (NULL);
   sra_type_inst_cache = BITMAP_ALLOC (NULL);
 }
-#endif
 
 
 /* Main entry point.  */
@@ -3647,8 +3606,6 @@ sra_init_cache (void)
 static unsigned int
 tree_sra (void)
 {
-  /* FIXME tuples.  */
-#if 0
   /* Initialize local variables.  */
   todoflags = 0;
   gcc_obstack_init (&sra_obstack);
@@ -3676,17 +3633,11 @@ tree_sra (void)
   BITMAP_FREE (sra_type_inst_cache);
   obstack_free (&sra_obstack, NULL);
   return todoflags;
-#else
-  gimple_unreachable ();
-  return 0;
-#endif
 }
 
 static unsigned int
 tree_sra_early (void)
 {
-  /* FIXME tuples.  */
-#if 0
   unsigned int ret;
 
   early_sra = true;
@@ -3694,10 +3645,6 @@ tree_sra_early (void)
   early_sra = false;
 
   return ret & ~TODO_rebuild_alias;
-#else
-  gimple_unreachable ();
-  return 0;
-#endif
 }
 
 static bool
Index: tree-flow.h
===================================================================
--- tree-flow.h	(revision 134214)
+++ tree-flow.h	(working copy)
@@ -1097,9 +1097,9 @@ tree vn_lookup_with_vuses (tree, VEC (tr
 bool is_hidden_global_store (gimple);
 
 /* In tree-sra.c  */
-void insert_edge_copies (tree, basic_block);
-void sra_insert_before (gimple_stmt_iterator *, tree);
-void sra_insert_after (gimple_stmt_iterator *, tree);
+void insert_edge_copies_seq (gimple_seq, basic_block);
+void sra_insert_before (gimple_stmt_iterator *, gimple_seq);
+void sra_insert_after (gimple_stmt_iterator *, gimple_seq);
 void sra_init_cache (void);
 bool sra_type_can_be_decomposed_p (tree);
 
Index: gimple.h
===================================================================
--- gimple.h	(revision 134214)
+++ gimple.h	(working copy)
@@ -1984,6 +1984,16 @@ gimple_asm_input_op (const_gimple gs, si
   return gimple_op (gs, index);
 }
 
+/* Return a pointer to input operand INDEX of GIMPLE_ASM GS.  */
+
+static inline tree *
+gimple_asm_input_op_ptr (const_gimple gs, size_t index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  gcc_assert (index <= gs->gimple_asm.ni);
+  return gimple_op_ptr (gs, index);
+}
+
 
 /* Set IN_OP to be input operand INDEX in GIMPLE_ASM GS.  */
 
@@ -2007,6 +2017,16 @@ gimple_asm_output_op (const_gimple gs, s
   return gimple_op (gs, index + gs->gimple_asm.ni);
 }
 
+/* Return a pointer to output operand INDEX of GIMPLE_ASM GS.  */
+
+static inline tree *
+gimple_asm_output_op_ptr (const_gimple gs, size_t index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  gcc_assert (index <= gs->gimple_asm.no);
+  return gimple_op_ptr (gs, index + gs->gimple_asm.ni);
+}
+
 
 /* Set OUT_OP to be output operand INDEX in GIMPLE_ASM GS.  */
 
@@ -3067,6 +3087,16 @@ gimple_omp_continue_set_control_use (gim
 }
 
 
+/* Return a pointer to the return value for GIMPLE_RETURN GS.  */
+
+static inline tree *
+gimple_return_retval_ptr (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_RETURN);
+  gcc_assert (gs->with_ops.num_ops == 1);
+  return gimple_op_ptr (gs, 0);
+}
+
 /* Return the return value for GIMPLE_RETURN GS.  */
 
 static inline tree
Index: passes.c
===================================================================
--- passes.c	(revision 134214)
+++ passes.c	(working copy)
@@ -540,11 +540,8 @@ init_optimization_passes (void)
 	  NEXT_PASS (pass_forwprop);
 #endif
 	  NEXT_PASS (pass_update_address_taken);
-/* FIXME tuples.  */
 	  NEXT_PASS (pass_simple_dse);
-#if 0
 	  NEXT_PASS (pass_sra_early);
-#endif
 	  NEXT_PASS (pass_copy_prop);
 /* FIXME tuples.  */
 #if 0
@@ -636,9 +633,9 @@ init_optimization_passes (void)
       NEXT_PASS (pass_stdarg);
 #endif
       NEXT_PASS (pass_lower_complex);
+      NEXT_PASS (pass_sra);
       /* FIXME tuples.  */
 #if 0
-      NEXT_PASS (pass_sra);
       NEXT_PASS (pass_rename_ssa_copies);
       NEXT_PASS (pass_dominator);
 
