Re: IPA merge 4: SSA inliner


Hi,

this patch needed some editing, so here is an updated patch; I hope I got
the updates all right.

In addition to, I hope, all your suggestions, I've removed the two problematic
chunks (the &""[0] hack and the return slot optimization) and added an ICE in
place of the second.  I will post a fix for that separately and we can discuss
the proper fix then.  Those are needed "only" for C++ and Fortran :)

Also SSA_NAME_IS_DEFAULT_DEF needed some care.

I went all the way and enabled the SSA inliner now, and it sort of works
(it bootstraps, at least C only, but I get many testsuite failures, all of the form:
/root/trunk/gcc/testsuite/gcc.c-torture/execute/20020810-1.c:20: error: variable marked DECL_CALL_CLOBBERED but not in call_clobbered_vars bitmap.
R, UID 1963, struct R, is global, call clobbered ( ), sub-vars: { SFT.5 SFT.4 }

I will be looking into that tomorrow - quite obviously we want to make
that flag go into the variable annotation so it is not shared across functions.

I still think it is quite smooth, and it sanity-checks that the inliner
basically works even after the update to the current infrastructure ;)

The patch was bootstrapped/regtested without the SSA inliner together with the
other changes I posted; I am testing it separately.  I am not sure whether to
interpret "fine" as approval.  I will definitely do more testing tomorrow and
then I would be inclined to commit.

Thank you,
Honza

	* tree-inline.c (remap_ssa_name): New function.
	(remap_decl): Update SSA datastructures for DECLs.
	(copy_body_r): Deal with SSA_NAMEs; add referenced global vars.
	(copy_bb): Set SSA_NAME def stmts.
	(update_ssa_across_eh_edges): New function.
	(copy_edge_for_bb): Call it; mark new vars for renaming.
	(copy_phis_for_bb): New function.
	(initialize_cfun): Break out from ...
	(copy_cfg_body): ... here; maintain AUX map for both directions;
	call SSA updating workers.
	(setup_one_parameter): Do propagation across SSA form.
	(declare_return_variable): Work on SSA.
	(expand_call_inline): Update SSA form on return values.
	(optimize_inline_calls): Do sanity checking, dead blocks removal,
	update SSA form.
	(tree_function_versioning): Use initialize_cfun.
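
For readers skimming the patch, here is a toy stand-alone sketch of the
memoized remapping idea that remap_ssa_name below implements - none of the
types or helpers here are GCC's, they are made up for illustration: the first
time an SSA name from the inlined body is seen, a fresh copy is created for
the caller and recorded in the map, and every later use gets that same copy.

#include <stdio.h>

/* Illustration only; not GCC's data structures.  */
struct toy_ssa_name { int version; const char *var; };

#define TOY_MAX 32
static const struct toy_ssa_name *toy_keys[TOY_MAX];
static struct toy_ssa_name toy_copies[TOY_MAX];
static int toy_len;
static int toy_next_version = 100;

/* Return the caller's copy of OLD, creating it on first use.  */
static struct toy_ssa_name *
toy_remap (const struct toy_ssa_name *old)
{
  int i;
  for (i = 0; i < toy_len; i++)
    if (toy_keys[i] == old)
      return &toy_copies[i];		/* memoized: same copy every time */
  toy_keys[toy_len] = old;
  toy_copies[toy_len].version = toy_next_version++;
  toy_copies[toy_len].var = old->var;	/* same underlying variable */
  return &toy_copies[toy_len++];
}

int
main (void)
{
  struct toy_ssa_name x3 = { 3, "x" };
  printf ("%s_%d maps to %s_%d, and again to %s_%d\n",
	  x3.var, x3.version,
	  toy_remap (&x3)->var, toy_remap (&x3)->version,
	  toy_remap (&x3)->var, toy_remap (&x3)->version);
  return 0;
}
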
Index: tree-inline.c
===================================================================
*** tree-inline.c	(revision 120083)
--- tree-inline.c	(working copy)
*************** Boston, MA 02110-1301, USA.  */
*** 50,55 ****
--- 50,56 ----
  #include "pointer-set.h"
  #include "ipa-prop.h"
  #include "value-prof.h"
+ #include "tree-pass.h"
  
  /* I'm not real happy about this, but we need to handle gimple and
     non-gimple trees.  */
*************** insert_decl_map (copy_body_data *id, tre
*** 141,146 ****
--- 142,191 ----
  		       (splay_tree_value) value);
  }
  
+ /* Construct a new SSA name for the old NAME.  ID is the inline context.  */
+ 
+ static tree
+ remap_ssa_name (tree name, copy_body_data *id)
+ {
+   tree new;
+   splay_tree_node n;
+ 
+   gcc_assert (TREE_CODE (name) == SSA_NAME);
+ 
+   n = splay_tree_lookup (id->decl_map, (splay_tree_key) name);
+   if (n)
+     return (tree) n->value;
+ 
+   /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
+      that in copy_bb.  */
+   new = remap_decl (SSA_NAME_VAR (name), id);
+   /* We might've substituted a constant or another SSA_NAME for
+      the variable.
+ 
+      Replace the SSA name representing RESULT_DECL by the variable during
+      inlining: this saves us from the need to introduce a PHI node in case
+      the return value is only partly initialized.  */
+   if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
+       && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
+ 	  || !id->transform_return_to_modify))
+     {
+       new = make_ssa_name (new, NULL);
+       insert_decl_map (id, name, new);
+       if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
+ 	{
+ 	  SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
+ 	  if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
+ 	    set_default_def (SSA_NAME_VAR (new), new);
+ 	}
+       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
+ 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
+     }
+   else
+     insert_decl_map (id, name, new);
+   TREE_TYPE (new) = remap_type (TREE_TYPE (name), id);
+   return new;
+ }
+ 
  /* Remap DECL during the copying of the BLOCK tree for the function.  */
  
  tree
*************** remap_decl (tree decl, copy_body_data *i
*** 188,193 ****
--- 233,254 ----
  	    walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
  	}
  
+       if (cfun && gimple_in_ssa_p (cfun)
+ 	  && (TREE_CODE (t) == VAR_DECL
+ 	      || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
+ 	{
+           tree def = gimple_default_def (id->src_cfun, decl);
+ 	  get_var_ann (t);
+ 	  if (TREE_CODE (decl) != PARM_DECL && def)
+ 	    {
+ 	      tree map = remap_ssa_name (def, id);
+ 	      /* Watch out for RESULT_DECLs whose SSA names map directly
+ 		 to them.  */
+ 	      if (TREE_CODE (map) == SSA_NAME)
+ 	        set_default_def (t, map);
+ 	    }
+ 	  add_referenced_var (t);
+ 	}
        return t;
      }
  
*************** copy_body_r (tree *tp, int *walk_subtree
*** 500,505 ****
--- 561,572 ----
  	  return (tree) (void *)1;
  	}
      }
+   else if (TREE_CODE (*tp) == SSA_NAME)
+     {
+       *tp = remap_ssa_name (*tp, id);
+       *walk_subtrees = 0;
+       return NULL;
+     }
  
    /* Local variables and labels need to be replaced by equivalent
       variables.  We don't want to copy static variables; there's only
*************** copy_body_r (tree *tp, int *walk_subtree
*** 621,626 ****
--- 688,698 ----
        /* Here is the "usual case".  Copy this tree node, and then
  	 tweak some special cases.  */
        copy_tree_r (tp, walk_subtrees, NULL);
+ 
+       /* Global variables we haven't seen yet need to go into referenced
+ 	 vars.  */
+       if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL)
+ 	add_referenced_var (*tp);
         
        /* If EXPR has block defined, map it to newly constructed block.
           When inlining we want EXPRs without block appear in the block
*************** copy_bb (copy_body_data *id, basic_block
*** 781,791 ****
--- 853,911 ----
  		  && tree_could_throw_p (stmt))
  		add_stmt_to_eh_region (stmt, id->eh_region);
  	    }
+ 	  if (gimple_in_ssa_p (cfun))
+ 	    {
+ 	       ssa_op_iter i;
+ 	       tree def;
+ 
+ 	       FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
+ 		if (TREE_CODE (def) == SSA_NAME)
+ 		  SSA_NAME_DEF_STMT (def) = stmt;
+ 	    }
  	}
      }
    return copy_basic_block;
  }
  
+ /* Inserting a Single Entry Multiple Exit region in SSA form into code in
+    SSA form is quite easy, since the dominator relationship for the old
+    basic blocks does not change.
+ 
+    There is however an exception where inlining might change the dominator
+    relation across EH edges from basic blocks within the inlined function
+    to landing pads in the function we inline into.
+ 
+    The function marks PHI_RESULT of such PHI nodes for renaming; this is
+    safe because the EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI
+    must be set.  This means that there will be no overlapping live ranges
+    for the underlying symbol.
+ 
+    This might change in the future if we allow redirecting of EH edges and
+    we might want to change the way we build the CFG pre-inlining to include
+    all the possible edges then.  */
+ static void
+ update_ssa_across_eh_edges (basic_block bb)
+ {
+   edge e;
+   edge_iterator ei;
+ 
+   FOR_EACH_EDGE (e, ei, bb->succs)
+     if (!e->dest->aux
+ 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
+       {
+ 	tree phi;
+ 
+ 	gcc_assert (e->flags & EDGE_EH);
+ 	for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
+ 	  {
+ 	    gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
+ 			(PHI_RESULT (phi)));
+ 	    mark_sym_for_renaming
+ 	      (SSA_NAME_VAR (PHI_RESULT (phi)));
+ 	  }
+       }
+ }
+ 
  /* Copy edges from BB into its copy constructed earlier, scale profile
     accordingly.  Edges will be taken care of later.  Assume aux
     pointers to point to the copies of each BB.  */
*************** copy_edges_for_bb (basic_block bb, int c
*** 825,830 ****
--- 945,952 ----
  
        copy_stmt = bsi_stmt (bsi);
        update_stmt (copy_stmt);
+       if (gimple_in_ssa_p (cfun))
+         mark_symbols_for_renaming (copy_stmt);
        /* Do this before the possible split_block.  */
        bsi_next (&bsi);
  
*************** copy_edges_for_bb (basic_block bb, int c
*** 847,857 ****
--- 969,1022 ----
  	       right at this point; split_block doesn't care.  */
  	    {
  	      edge e = split_block (new_bb, copy_stmt);
+ 
  	      new_bb = e->dest;
+ 	      new_bb->aux = e->src->aux;
  	      bsi = bsi_start (new_bb);
  	    }
  
             make_eh_edges (copy_stmt);
+ 
+ 	   if (gimple_in_ssa_p (cfun))
+ 	     update_ssa_across_eh_edges (bb_for_stmt (copy_stmt));
+ 	}
+     }
+ }
+ 
+ /* Copy the PHIs.  All blocks and edges have been copied, some blocks
+    may have been split and new outgoing EH edges inserted.
+    BB points to the block of the original function and AUX pointers link
+    the original and newly copied blocks.  */
+ 
+ static void
+ copy_phis_for_bb (basic_block bb, copy_body_data *id)
+ {
+   basic_block new_bb = bb->aux;
+   edge_iterator ei;
+   tree phi;
+ 
+   for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+     {
+       tree res = PHI_RESULT (phi);
+       tree new_res = res;
+       tree new_phi;
+       edge new_edge;
+ 
+       if (is_gimple_reg (res))
+ 	{
+ 	  walk_tree (&new_res, copy_body_r, id, NULL);
+ 	  SSA_NAME_DEF_STMT (new_res)
+ 	    = new_phi = create_phi_node (new_res, new_bb);
+ 	  FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
+ 	    {
+ 	      edge old_edge = find_edge (new_edge->src->aux, bb);
+ 	      tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
+ 	      tree new_arg = arg;
+ 
+ 	      walk_tree (&new_arg, copy_body_r, id, NULL);
+ 	      gcc_assert (new_arg);
+ 	      add_phi_arg (new_phi, new_arg, new_edge);
+ 	    }
  	}
      }
  }
*************** remap_decl_1 (tree decl, void *data)
*** 863,868 ****
--- 1028,1093 ----
    return remap_decl (decl, (copy_body_data *) data);
  }
  
+ /* Build struct function and associated datastructures for the new clone
+    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */
+ 
+ static void
+ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
+ 		 int frequency)
+ {
+   struct function *new_cfun
+      = (struct function *) ggc_alloc_cleared (sizeof (struct function));
+   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
+   int count_scale, frequency_scale;
+ 
+   if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
+     count_scale = (REG_BR_PROB_BASE * count
+ 		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
+   else
+     count_scale = 1;
+ 
+   if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
+     frequency_scale = (REG_BR_PROB_BASE * frequency
+ 		       /
+ 		       ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
+   else
+     frequency_scale = count_scale;
+ 
+   /* Register specific tree functions.  */
+   tree_register_cfg_hooks ();
+   *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
+   new_cfun->unexpanded_var_list = NULL;
+   new_cfun->cfg = NULL;
+   new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
+   new_cfun->ib_boundaries_block = NULL;
+   DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
+   push_cfun (new_cfun);
+   init_empty_tree_cfg ();
+ 
+   ENTRY_BLOCK_PTR->count =
+     (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
+      REG_BR_PROB_BASE);
+   ENTRY_BLOCK_PTR->frequency =
+     (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
+      frequency_scale / REG_BR_PROB_BASE);
+   EXIT_BLOCK_PTR->count =
+     (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
+      REG_BR_PROB_BASE);
+   EXIT_BLOCK_PTR->frequency =
+     (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
+      frequency_scale / REG_BR_PROB_BASE);
+   if (src_cfun->eh)
+     init_eh_for_function ();
+ 
+   if (src_cfun->gimple_df)
+     {
+       init_tree_ssa ();
+       cfun->gimple_df->in_ssa_p = true;
+       init_ssa_operands ();
+     }
+   pop_cfun ();
+ }
+ 
  /* Make a copy of the body of FN so that it can be inserted inline in
     another function.  Walks FN via CFG, returns new fndecl.  */
  
*************** copy_cfg_body (copy_body_data * id, gcov
*** 873,880 ****
    tree callee_fndecl = id->src_fn;
    /* Original cfun for the callee, doesn't change.  */
    struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
-   /* Copy, built by this function.  */
-   struct function *new_cfun;
    /* Place to copy from; when a copy of the function was saved off earlier,
       use that instead of the main copy.  */
    struct function *cfun_to_copy =
--- 1098,1103 ----
*************** copy_cfg_body (copy_body_data * id, gcov
*** 882,887 ****
--- 1105,1111 ----
    basic_block bb;
    tree new_fndecl = NULL;
    int count_scale, frequency_scale;
+   int last;
  
    if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
      count_scale = (REG_BR_PROB_BASE * count
*************** copy_cfg_body (copy_body_data * id, gcov
*** 907,966 ****
  
    id->src_cfun = cfun_to_copy;
  
-   /* If requested, create new basic_block_info and label_to_block_maps.
-      Otherwise, insert our new blocks and labels into the existing cfg.  */
-   if (id->transform_new_cfg)
-     {
-       new_cfun =
- 	(struct function *) ggc_alloc_cleared (sizeof (struct function));
-       *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
-       new_cfun->cfg = NULL;
-       new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
-       new_cfun->ib_boundaries_block = NULL;
-       DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
-       push_cfun (new_cfun);
-       init_empty_tree_cfg ();
- 
-       ENTRY_BLOCK_PTR->count =
- 	(ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
- 	 REG_BR_PROB_BASE);
-       ENTRY_BLOCK_PTR->frequency =
- 	(ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
- 	 frequency_scale / REG_BR_PROB_BASE);
-       EXIT_BLOCK_PTR->count =
- 	(EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
- 	 REG_BR_PROB_BASE);
-       EXIT_BLOCK_PTR->frequency =
- 	(EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
- 	 frequency_scale / REG_BR_PROB_BASE);
- 
-       entry_block_map = ENTRY_BLOCK_PTR;
-       exit_block_map = EXIT_BLOCK_PTR;
-     }
  
    ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
    EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  
    /* Duplicate any exception-handling regions.  */
    if (cfun->eh)
      {
-       if (id->transform_new_cfg)
-         init_eh_for_function ();
        id->eh_region_offset
  	= duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
  				0, id->eh_region);
      }
    /* Use aux pointers to map the original blocks to copy.  */
    FOR_EACH_BB_FN (bb, cfun_to_copy)
!     bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
    /* Now that we've duplicated the blocks, duplicate their edges.  */
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      copy_edges_for_bb (bb, count_scale);
    FOR_ALL_BB_FN (bb, cfun_to_copy)
!     bb->aux = NULL;
! 
!   if (id->transform_new_cfg)
!     pop_cfun ();
  
    return new_fndecl;
  }
--- 1131,1175 ----
  
    id->src_cfun = cfun_to_copy;
  
  
    ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
    EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
+   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
+   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  
    /* Duplicate any exception-handling regions.  */
    if (cfun->eh)
      {
        id->eh_region_offset
  	= duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
  				0, id->eh_region);
      }
    /* Use aux pointers to map the original blocks to copy.  */
    FOR_EACH_BB_FN (bb, cfun_to_copy)
!     {
!       basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
!       bb->aux = new;
!       new->aux = bb;
!     }
! 
!   last = n_basic_blocks;
    /* Now that we've duplicated the blocks, duplicate their edges.  */
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      copy_edges_for_bb (bb, count_scale);
+   if (gimple_in_ssa_p (cfun))
+     FOR_ALL_BB_FN (bb, cfun_to_copy)
+       copy_phis_for_bb (bb, id);
    FOR_ALL_BB_FN (bb, cfun_to_copy)
!     {
!       ((basic_block)bb->aux)->aux = NULL;
!       bb->aux = NULL;
!     }
!   /* Zero out AUX fields of newly created blocks during EH edge
!      insertion.  */
!   for (; last < n_basic_blocks; last++)
!     BASIC_BLOCK (last)->aux = NULL;
!   entry_block_map->aux = NULL;
!   exit_block_map->aux = NULL;
  
    return new_fndecl;
  }
*************** setup_one_parameter (copy_body_data *id,
*** 1017,1029 ****
    tree init_stmt;
    tree var;
    tree var_sub;
! 
!   /* If the parameter is never assigned to, we may not need to
!      create a new variable here at all.  Instead, we may be able
!      to just use the argument value.  */
    if (TREE_READONLY (p)
        && !TREE_ADDRESSABLE (p)
!       && value && !TREE_SIDE_EFFECTS (value))
      {
        /* We may produce non-gimple trees by adding NOPs or introduce
  	 invalid sharing when operand is not really constant.
--- 1226,1242 ----
    tree init_stmt;
    tree var;
    tree var_sub;
!   tree rhs = value ? fold_convert (TREE_TYPE (p), value) : NULL;
!   tree def = (gimple_in_ssa_p (cfun)
! 	      ? gimple_default_def (id->src_cfun, p) : NULL);
! 
!   /* If the parameter is never assigned to and has no SSA_NAMEs created,
!      we may not need to create a new variable here at all.  Instead, we may
!      be able to just use the argument value.  */
    if (TREE_READONLY (p)
        && !TREE_ADDRESSABLE (p)
!       && value && !TREE_SIDE_EFFECTS (value)
!       && !def)
      {
        /* We may produce non-gimple trees by adding NOPs or introduce
  	 invalid sharing when operand is not really constant.
*************** setup_one_parameter (copy_body_data *id,
*** 1047,1052 ****
--- 1260,1270 ----
       here since the type of this decl must be visible to the calling
       function.  */
    var = copy_decl_to_var (p, id);
+   if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
+     {
+       get_var_ann (var);
+       add_referenced_var (var);
+     }
  
    /* See if the frontend wants to pass this by invisible reference.  If
       so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
*************** setup_one_parameter (copy_body_data *id,
*** 1085,1105 ****
    if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
      TREE_READONLY (var) = 0;
  
    /* Initialize this VAR_DECL from the equivalent argument.  Convert
       the argument to the proper type in case it was promoted.  */
    if (value)
      {
-       tree rhs = fold_convert (TREE_TYPE (var), value);
        block_stmt_iterator bsi = bsi_last (bb);
  
        if (rhs == error_mark_node)
! 	return;
  
        STRIP_USELESS_TYPE_CONVERSION (rhs);
  
        /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
  	 keep our trees in gimple form.  */
!       init_stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), var, rhs);
  
        /* If we did not create a gimple value and we did not create a gimple
  	 cast of a gimple value, then we will need to gimplify INIT_STMTS
--- 1303,1356 ----
    if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
      TREE_READONLY (var) = 0;
  
+   /* If there is no setup required and we are in SSA, take the easy route
+      and replace all SSA names representing the function parameter by the
+      SSA name passed to the function.
+ 
+      We need to construct a map for the variable anyway, as it might be used
+      in different SSA names when the parameter is set in the function.
+ 
+      FIXME: This usually kills the last connection between the inlined
+      function parameter and the actual value in debug info.  Can we do
+      better here?  If we just inserted the statement, copy propagation
+      would kill it anyway as it always did in older versions of GCC.
+ 
+      We might want to introduce a notion that a single SSA_NAME might
+      represent multiple variables for purposes of debugging.  */
+   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
+       && (TREE_CODE (rhs) == SSA_NAME
+ 	  || is_gimple_min_invariant (rhs)))
+     {
+       insert_decl_map (id, def, rhs);
+       return;
+     }
+ 
    /* Initialize this VAR_DECL from the equivalent argument.  Convert
       the argument to the proper type in case it was promoted.  */
    if (value)
      {
        block_stmt_iterator bsi = bsi_last (bb);
  
        if (rhs == error_mark_node)
! 	{
!   	  insert_decl_map (id, p, var_sub);
! 	  return;
! 	}
  
        STRIP_USELESS_TYPE_CONVERSION (rhs);
  
        /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
  	 keep our trees in gimple form.  */
!       if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
! 	{
! 	  def = remap_ssa_name (def, id);
!           init_stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), def, rhs);
! 	  SSA_NAME_DEF_STMT (def) = init_stmt;
! 	  SSA_NAME_IS_DEFAULT_DEF (def) = 0;
! 	  set_default_def (var, NULL);
! 	}
!       else
!         init_stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), var, rhs);
  
        /* If we did not create a gimple value and we did not create a gimple
  	 cast of a gimple value, then we will need to gimplify INIT_STMTS
*************** setup_one_parameter (copy_body_data *id,
*** 1110,1121 ****
  	  && (!is_gimple_cast (rhs)
  	      || !is_gimple_val (TREE_OPERAND (rhs, 0))))
  	  || !is_gimple_reg (var))
! 	gimplify_stmt (&init_stmt);
  
        /* If VAR represents a zero-sized variable, it's possible that the
  	 assignment statment may result in no gimple statements.  */
        if (init_stmt)
          bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
      }
  }
  
--- 1361,1389 ----
  	  && (!is_gimple_cast (rhs)
  	      || !is_gimple_val (TREE_OPERAND (rhs, 0))))
  	  || !is_gimple_reg (var))
! 	{
!           tree_stmt_iterator i;
! 
! 	  push_gimplify_context ();
! 	  gimplify_stmt (&init_stmt);
! 	  if (gimple_in_ssa_p (cfun)
!               && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
! 	    {
! 	      /* The replacement can expose previously unreferenced
! 		 variables.  */
! 	      for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
! 		find_new_referenced_vars (tsi_stmt_ptr (i));
! 	     }
! 	  pop_gimplify_context (NULL);
! 	}
  
        /* If VAR represents a zero-sized variable, it's possible that the
  	 assignment statment may result in no gimple statements.  */
        if (init_stmt)
          bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
+       if (gimple_in_ssa_p (cfun))
+ 	for (;!bsi_end_p (bsi); bsi_next (&bsi))
+ 	  mark_symbols_for_renaming (bsi_stmt (bsi));
      }
  }
  
*************** declare_return_variable (copy_body_data 
*** 1205,1213 ****
  	 a modify expression.  */
        gcc_assert (!modify_dest);
        if (DECL_BY_REFERENCE (result))
! 	var = return_slot_addr;
        else
! 	var = build_fold_indirect_ref (return_slot_addr);
        if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
             || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
  	  && !DECL_GIMPLE_REG_P (result)
--- 1473,1521 ----
  	 a modify expression.  */
        gcc_assert (!modify_dest);
        if (DECL_BY_REFERENCE (result))
! 	{
! 	  /* The address might be folded to direct assignment already.  */
! 	  if (TREE_CODE (return_slot_addr) == ADDR_EXPR)
! 	    {
! 	      tree base_var = TREE_OPERAND (return_slot_addr, 0);
! 
! 	      if (TREE_CODE (base_var) == SSA_NAME)
! 		gcc_unreachable ();
! 	      /* We are going to construct *&base_var and we can't do that
! 		 for variables believed not to be addressable.
! 
! 		 FIXME: This check can possibly trigger, because values returned
! 		 via the return slot optimization are not believed by alias
! 		 analysis to have their address taken.  */
! 	      gcc_assert (TREE_CODE (base_var) != SSA_NAME);
! 	      if (gimple_in_ssa_p (cfun))
! 		{
! 		  HOST_WIDE_INT bitsize;
! 		  HOST_WIDE_INT bitpos;
! 		  tree offset;
! 		  enum machine_mode mode;
! 		  int unsignedp;
! 		  int volatilep;
! 		  tree base;
! 		  base = get_inner_reference (base_var, &bitsize, &bitpos,
! 					      &offset,
! 					      &mode, &unsignedp, &volatilep,
! 					      false);
! 		  if (TREE_CODE (base) == INDIRECT_REF)
! 		    base = TREE_OPERAND (base, 0);
! 		  if (TREE_CODE (base) == SSA_NAME)
! 		    base = SSA_NAME_VAR (base);
! 		  mark_sym_for_renaming (base);
! 		}
! 	    }
! 	  var = return_slot_addr;
! 	}
        else
! 	{
! 	  if (gimple_in_ssa_p (cfun))
! 	    mark_sym_for_renaming (TREE_OPERAND (return_slot_addr, 0));
! 	  var = build_fold_indirect_ref (return_slot_addr);
! 	}
        if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
             || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
  	  && !DECL_GIMPLE_REG_P (result)
*************** declare_return_variable (copy_body_data 
*** 1221,1227 ****
    gcc_assert (!TREE_ADDRESSABLE (callee_type));
  
    /* Attempt to avoid creating a new temporary variable.  */
!   if (modify_dest)
      {
        bool use_it = false;
  
--- 1529,1536 ----
    gcc_assert (!TREE_ADDRESSABLE (callee_type));
  
    /* Attempt to avoid creating a new temporary variable.  */
!   if (modify_dest
!       && TREE_CODE (modify_dest) != SSA_NAME)
      {
        bool use_it = false;
  
*************** declare_return_variable (copy_body_data 
*** 1270,1275 ****
--- 1579,1589 ----
    gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
  
    var = copy_result_decl_to_var (result, id);
+   if (gimple_in_ssa_p (cfun))
+     {
+       get_var_ann (var);
+       add_referenced_var (var);
+     }
  
    DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
    DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
*************** expand_call_inline (basic_block bb, tree
*** 2095,2100 ****
--- 2409,2415 ----
    /* Record the function we are about to inline.  */
    id->src_fn = fn;
    id->src_node = cg_edge->callee;
+   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
  
    initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
  
*************** expand_call_inline (basic_block bb, tree
*** 2164,2175 ****
    if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
      {
        *tp = use_retvar;
        maybe_clean_or_replace_eh_stmt (stmt, stmt);
      }
    else
      /* We're modifying a TSI owned by gimple_expand_calls_inline();
         tsi_delink() will leave the iterator in a sane state.  */
!     bsi_remove (&stmt_bsi, true);
  
    if (purge_dead_abnormal_edges)
      tree_purge_dead_abnormal_call_edges (return_block);
--- 2479,2522 ----
    if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
      {
        *tp = use_retvar;
+       if (gimple_in_ssa_p (cfun))
+ 	{
+           update_stmt (stmt);
+           mark_symbols_for_renaming (stmt);
+ 	}
        maybe_clean_or_replace_eh_stmt (stmt, stmt);
      }
    else
      /* We're modifying a TSI owned by gimple_expand_calls_inline();
         tsi_delink() will leave the iterator in a sane state.  */
!     {
!       /* Handle the case of inlining a function that is missing a return
!          statement, so the return value becomes undefined.  */
!       if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
! 	  && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
! 	{
! 	  tree name = TREE_OPERAND (stmt, 0);
! 	  tree var = SSA_NAME_VAR (TREE_OPERAND (stmt, 0));
! 	  tree def = gimple_default_def (cfun, var);
! 
! 	  /* If the variable is used undefined, make this name undefined via
! 	     a move.  */
! 	  if (def)
! 	    {
! 	      TREE_OPERAND (stmt, 1) = def;
! 	      update_stmt (stmt);
! 	    }
! 	  /* Otherwise make this variable undefined.  */
! 	  else
! 	    {
! 	      bsi_remove (&stmt_bsi, true);
! 	      set_default_def (var, name);
! 	      SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
! 	    }
! 	}
!       else
!         bsi_remove (&stmt_bsi, true);
!     }
  
    if (purge_dead_abnormal_edges)
      tree_purge_dead_abnormal_call_edges (return_block);
*************** optimize_inline_calls (tree fn)
*** 2290,2296 ****
       as inlining loops might increase the maximum.  */
    if (ENTRY_BLOCK_PTR->count)
      counts_to_freqs ();
!   fold_cond_expr_cond ();
  }
  
  /* FN is a function that has a complete body, and CLONE is a function whose
--- 2637,2658 ----
       as inlining loops might increase the maximum.  */
    if (ENTRY_BLOCK_PTR->count)
      counts_to_freqs ();
!   if (gimple_in_ssa_p (cfun))
!     {
!       delete_unreachable_blocks ();
!       update_ssa (TODO_update_ssa);
! #ifdef ENABLE_CHECKING
!       verify_ssa (true);
! #endif
!       fold_cond_expr_cond ();
!       cleanup_tree_cfg ();
!       if (need_ssa_update_p ())
!         update_ssa (TODO_update_ssa);
!       free_dominance_info (CDI_DOMINATORS);
!       free_dominance_info (CDI_POST_DOMINATORS);
!     }
!   else
!     fold_cond_expr_cond ();
  }
  
  /* FN is a function that has a complete body, and CLONE is a function whose
*************** tree_function_versioning (tree old_decl,
*** 2782,2788 ****
    struct cgraph_node *old_version_node;
    struct cgraph_node *new_version_node;
    copy_body_data id;
!   tree p, new_fndecl;
    unsigned i;
    struct ipa_replace_map *replace_info;
    basic_block old_entry_block;
--- 3144,3150 ----
    struct cgraph_node *old_version_node;
    struct cgraph_node *new_version_node;
    copy_body_data id;
!   tree p;
    unsigned i;
    struct ipa_replace_map *replace_info;
    basic_block old_entry_block;
*************** tree_function_versioning (tree old_decl,
*** 2828,2833 ****
--- 3190,3201 ----
    id.transform_lang_insert_block = false;
  
    current_function_decl = new_decl;
+   old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
+     (DECL_STRUCT_FUNCTION (old_decl));
+   initialize_cfun (new_decl, old_decl,
+ 		   old_entry_block->count,
+ 		   old_entry_block->frequency);
+   push_cfun (DECL_STRUCT_FUNCTION (new_decl));
    
    /* Copy the function's static chain.  */
    p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
*************** tree_function_versioning (tree old_decl,
*** 2871,2892 ****
        }
    
    /* Copy the Function's body.  */
!   old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
!     (DECL_STRUCT_FUNCTION (old_decl));
!   new_fndecl = copy_body (&id,
! 			  old_entry_block->count,
! 			  old_entry_block->frequency, NULL, NULL);
    
-   DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);
- 
-   DECL_STRUCT_FUNCTION (new_decl)->cfg =
-     DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
-   DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
-   DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
-     DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
-   DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
-     DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;
- 
    if (DECL_RESULT (old_decl) != NULL_TREE)
      {
        tree *res_decl = &DECL_RESULT (old_decl);
--- 3239,3246 ----
        }
    
    /* Copy the Function's body.  */
!   copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
    
    if (DECL_RESULT (old_decl) != NULL_TREE)
      {
        tree *res_decl = &DECL_RESULT (old_decl);
*************** tree_function_versioning (tree old_decl,
*** 2894,2906 ****
        lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      }
    
-   current_function_decl = NULL;
    /* Renumber the lexical scoping (non-code) blocks consecutively.  */
    number_blocks (new_decl);
  
    /* Clean up.  */
    splay_tree_delete (id.decl_map);
    fold_cond_expr_cond ();
    return;
  }
  
--- 3248,3270 ----
        lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      }
    
    /* Renumber the lexical scoping (non-code) blocks consecutively.  */
    number_blocks (new_decl);
  
    /* Clean up.  */
    splay_tree_delete (id.decl_map);
    fold_cond_expr_cond ();
+   if (gimple_in_ssa_p (cfun))
+     {
+       update_ssa (TODO_update_ssa);
+ #ifdef ENABLE_CHECKING
+       verify_ssa (true);
+ #endif
+     }
+   free_dominance_info (CDI_DOMINATORS);
+   free_dominance_info (CDI_POST_DOMINATORS);
+   pop_cfun ();
+   current_function_decl = NULL;
    return;
  }
  

