This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

Re: FW: [just for fun] patch to enable SSA inlining


Hi,
> On Tuesday 26 December 2006 12:33, Jan Hubicka wrote:
> > ^^^^ bootstraps and mostly regtests (ie my best call so far is patch
> > that bootstraps but fauls in all gomp and many fortran tests).
> 
> Best bet for the problem area: nested functions.

Actually nested functions are not that painful.  The fortran issues were
caused by new type checking firing on return values (the inliner picked
SSA_NAME type from the original type, instead of the type of newly
inserted variable), the gomp problems are caused by difficulties with
updating all the places where we introduce new functions.  We used to
add the new functions at expansion time, while I've moved gomp up in the
queue before inlining causing the existing code to fail.

Instead of merging all the IPA branch ad-hoc updates, I ended up with
plan to rewrite cgraph_add_function to be usable at any time and
updating all places that introduce new functions late to it.  I am half
way through, still need to clean it up and get non-unit-at-a-time
working in all cases.

I am attaching my current version of patch that bootstraps on current
tree and regtests modulo still existent gomp failures with -O0 and the
following:

g++.dg/opt/devirt1.C scan-assembler xyzzy                                       
g++.dg/tree-ssa/pr28238.C (test for excess errors)                              
gcc.dg/funcorder.c scan-assembler-not link_error
gcc.dg/ipa/ipa-1.c (test for excess errors)                                     
gcc.dg/ipa/ipa-2.c (test for excess errors)                                     
gcc.dg/ipa/ipa-3.c (test for excess errors)                                     
gcc.dg/ipa/ipa-4.c (test for excess errors)                                     
gcc.dg/ipa/ipa-5.c (test for excess errors)                                     
gcc.dg/non-local-goto-1.c (test for excess errors)                              
gcc.dg/non-local-goto-2.c (test for excess errors)                              
gcc.dg/pr16194.c (test for excess errors)                                       
gcc.dg/pr23584.c (test for excess errors)  
gcc.dg/pr23584.c (test for excess errors)                                       
gcc.dg/pure-1.c (test for excess errors)                                        
gcc.dg/tree-ssa/20030714-1.c scan-tree-dump-times ->code 2                      
gcc.dg/tree-ssa/20030714-1.c scan-tree-dump-times if  4                         
gcc.dg/tree-ssa/alias-10.c scan-tree-dump return 3;                             
gcc.dg/tree-ssa/alias-2.c scan-tree-dump-times link_error 0                     
gcc.dg/tree-ssa/alias-3.c scan-tree-dump return 1;                              
gcc.dg/tree-ssa/alias-4.c scan-tree-dump return 1;                              
gcc.dg/tree-ssa/alias-5.c scan-tree-dump return 1;                              
gcc.dg/tree-ssa/loadpre5.c scan-tree-dump-times Eliminated: 1 1                 
gcc.dg/tree-ssa/loadpre8.c scan-tree-dump-times Eliminated: 1 1                 
gcc.dg/tree-ssa/ssa-dce-2.c scan-tree-dump-times if  0                          
gcc.dg/tree-ssa/structopt-1.c scan-tree-dump-times Executing store
motion of    
+global.y 1                                                                     
gcc.dg/tree-ssa/structopt-2.c scan-tree-dump-times a.e 0                        
gcc.dg/tree-ssa/structopt-2.c scan-tree-dump-times a.f 0                        
gcc.dg/tree-ssa/structopt-2.c scan-tree-dump-times a.g 0                        
gcc.dg/tree-ssa/structopt-3.c scan-tree-dump-times return 11 1                  

The list might look long, but most of the stuff is obviously caused by
disabling IPA passes, structure aliasing and reordering optimizations so
they no longer happen where the tests expect, I will investigate the
remaining four after finishing the cgraph cleanups.  So situation looks actually
slightly better than I expected, I would say ;))

Honza

Index: cgraph.c
===================================================================
*** cgraph.c	(revision 120209)
--- cgraph.c	(working copy)
*************** The callgraph:
*** 83,88 ****
--- 83,89 ----
  #include "intl.h"
  #include "tree-gimple.h"
  #include "tree-dump.h"
+ #include "tree-flow.h"
  
  static void cgraph_node_remove_callers (struct cgraph_node *node);
  static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
*************** int cgraph_max_uid;
*** 111,116 ****
--- 112,120 ----
  /* Set when whole unit has been analyzed so we can access global info.  */
  bool cgraph_global_info_ready = false;
  
+ /* What state callgraph is in right now.  */
+ enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
+ 
  /* Set when the cgraph is fully build and the basic flags are computed.  */
  bool cgraph_function_flags_ready = false;
  
*************** cgraph_function_body_availability (struc
*** 913,933 ****
  
  /* Add the function FNDECL to the call graph.  FNDECL is assumed to be
     in low GIMPLE form and ready to be processed by cgraph_finalize_function.
  
!    When operating in unit-at-a-time, a new callgraph node is added to
!    CGRAPH_EXPAND_QUEUE, which is processed after all the original
!    functions in the call graph .
! 
!    When not in unit-at-a-time, the new callgraph node is added to
!    CGRAPH_NODES_QUEUE for cgraph_assemble_pending_functions to
!    process.  */
  
  void
! cgraph_add_new_function (tree fndecl)
  {
!   struct cgraph_node *n = cgraph_node (fndecl);
!   n->next_needed = cgraph_expand_queue;
!   cgraph_expand_queue = n;
  }
  
  #include "gt-cgraph.h"
--- 917,971 ----
  
  /* Add the function FNDECL to the call graph.  FNDECL is assumed to be
     in low GIMPLE form and ready to be processed by cgraph_finalize_function.
+    Unlike cgraph_finalize_function, this function is intended to be used
+    by middle end and allows insertion of new function at arbitrary point
+    of compilation.
  
!    The function is assumed to be reachable and have address taken (so no
!    API breaking optimizations are performed on it).  */
  
  void
! cgraph_add_new_function (tree fndecl, bool lowered)
  {
!   struct cgraph_node *node;
!   switch (cgraph_state)
!     {
!       case CGRAPH_STATE_CONSTRUCTION:
!       case CGRAPH_STATE_IPA:
!       case CGRAPH_STATE_IPA_SSA:
!       case CGRAPH_STATE_EXPANSION:
! 	/* The functions are now being expanded into assembly.  We can't
! 	   just insert new into the already fixed order, so put it into
! 	   on-side list scanned after every expanded fucntion.
! 	   This make the new function close to the function currently	
! 	   being processed.  */
! 	node = cgraph_node (fndecl);
! 	node->local.finalized = true;
! 	node->local.local = false;
! 	node->reachable = node->needed = true;
! 	node->next_needed = cgraph_expand_queue;
! 	if (lowered)
! 	  node->lowered = true;
! 	cgraph_expand_queue = node;
!         break;
! 
! 	/* At the very end of compilation we have to do all the work up
! 	   to expansion.  */
!       case CGRAPH_STATE_FINISHED:
! 	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
! 	current_function_decl = fndecl;
! 	tree_register_cfg_hooks ();
! 	if (!lowered)
!           tree_lowering_passes (fndecl);
! 	bitmap_obstack_initialize (NULL);
! 	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)) && optimize)
! 	  execute_pass_list (pass_early_local_passes.sub);
! 	bitmap_obstack_release (NULL);
! 	tree_rest_of_compilation (fndecl);
! 	pop_cfun ();
! 	current_function_decl = NULL;
! 	break;
!     }
  }
  
  #include "gt-cgraph.h"
Index: cgraph.h
===================================================================
*** cgraph.h	(revision 120209)
--- cgraph.h	(working copy)
*************** extern GTY(()) struct cgraph_node *cgrap
*** 254,259 ****
--- 254,273 ----
  extern GTY(()) int cgraph_n_nodes;
  extern GTY(()) int cgraph_max_uid;
  extern bool cgraph_global_info_ready;
+ enum cgraph_state
+ {
+   /* Callgraph is being constructed.  It is safe to add new functions.  */
+   CGRAPH_STATE_CONSTRUCTION,
+   /* Callgraph is built and IPA passes are being run.  */
+   CGRAPH_STATE_IPA,
+   /* Callgraph is built and all functions are transformed to SSA form.  */
+   CGRAPH_STATE_IPA_SSA,
+   /* Functions are now ordered and being passed to RTL expanders.  */
+   CGRAPH_STATE_EXPANSION,
+   /* All cgraph expansion is done.  */
+   CGRAPH_STATE_FINISHED
+ };
+ extern enum cgraph_state cgraph_state;
  extern bool cgraph_function_flags_ready;
  extern GTY(()) struct cgraph_node *cgraph_nodes_queue;
  extern GTY(()) struct cgraph_node *cgraph_expand_queue;
*************** void cgraph_unnest_node (struct cgraph_n
*** 295,301 ****
  enum availability cgraph_function_body_availability (struct cgraph_node *);
  bool cgraph_is_master_clone (struct cgraph_node *);
  struct cgraph_node *cgraph_master_clone (struct cgraph_node *);
! void cgraph_add_new_function (tree);
  
  /* In cgraphunit.c  */
  void cgraph_finalize_function (tree, bool);
--- 309,315 ----
  enum availability cgraph_function_body_availability (struct cgraph_node *);
  bool cgraph_is_master_clone (struct cgraph_node *);
  struct cgraph_node *cgraph_master_clone (struct cgraph_node *);
! void cgraph_add_new_function (tree, bool);
  
  /* In cgraphunit.c  */
  void cgraph_finalize_function (tree, bool);
*************** struct cgraph_node *cgraph_function_vers
*** 316,321 ****
--- 330,336 ----
  void cgraph_analyze_function (struct cgraph_node *);
  struct cgraph_node *save_inline_function_body (struct cgraph_node *);
  void record_references_in_initializer (tree);
+ bool cgraph_expand_expand_queue (void);
  
  /* In ipa.c  */
  bool cgraph_remove_unreachable_nodes (bool, FILE *);
Index: tree.c
===================================================================
*** tree.c	(revision 120209)
--- tree.c	(working copy)
*************** range_in_array_bounds_p (tree ref)
*** 7295,7311 ****
    return true;
  }
  
- /* Return true if T (assumed to be a DECL) is a global variable.  */
- 
- bool
- is_global_var (tree t)
- {
-   if (MTAG_P (t))
-     return (TREE_STATIC (t) || MTAG_GLOBAL (t));
-   else
-     return (TREE_STATIC (t) || DECL_EXTERNAL (t));
- }
- 
  /* Return true if T (assumed to be a DECL) must be assigned a memory
     location.  */
  
--- 7295,7300 ----
Index: tree.h
===================================================================
*** tree.h	(revision 120209)
--- tree.h	(working copy)
*************** struct tree_memory_partition_tag GTY(())
*** 2611,2617 ****
     The result of this flag should always be the same as
     bitmap_bit_p (call_clobbered_vars, DECL_UID (decl)).  */
  #define DECL_CALL_CLOBBERED(DECL) \
!   DECL_COMMON_CHECK (DECL)->decl_common.call_clobbered_flag
  
  struct tree_decl_common GTY(())
  {
--- 2611,2617 ----
     The result of this flag should always be the same as
     bitmap_bit_p (call_clobbered_vars, DECL_UID (decl)).  */
  #define DECL_CALL_CLOBBERED(DECL) \
!   get_var_ann (DECL)->call_clobbered
  
  struct tree_decl_common GTY(())
  {
*************** struct tree_decl_common GTY(())
*** 2653,2659 ****
    /* Logically, these two would go in a theoretical base shared by var and
       parm decl. */
    unsigned gimple_reg_flag : 1;
-   unsigned call_clobbered_flag : 1;
  
    union tree_decl_u1 {
      /* In a FUNCTION_DECL for which DECL_BUILT_IN holds, this is
--- 2653,2658 ----
*************** extern void expand_function_start (tree)
*** 4422,4428 ****
  extern void stack_protect_prologue (void);
  extern void stack_protect_epilogue (void);
  extern void recompute_tree_invariant_for_addr_expr (tree);
- extern bool is_global_var (tree t);
  extern bool needs_to_live_in_memory (tree);
  extern tree reconstruct_complex_type (tree, tree);
  
--- 4421,4426 ----
*************** extern unsigned HOST_WIDE_INT compute_bu
*** 4727,4730 ****
--- 4725,4741 ----
  /* In expr.c.  */
  extern unsigned HOST_WIDE_INT highest_pow2_factor (tree);
  
+ /* Return true if T (assumed to be a DECL) is a global variable.
+    Do this inline, because it is used to decide whether annotation is in
+    hashtable or direct pointer.  */
+ 
+ static inline bool
+ is_global_var (tree t)
+ {
+   if (MTAG_P (t))
+     return (TREE_STATIC (t) || MTAG_GLOBAL (t));
+   else
+     return (TREE_STATIC (t) || DECL_EXTERNAL (t));
+ }
+ 
  #endif  /* GCC_TREE_H  */
Index: tree-pass.h
===================================================================
*** tree-pass.h	(revision 120209)
--- tree-pass.h	(working copy)
*************** extern struct tree_opt_pass pass_ipa_pur
*** 314,319 ****
--- 314,320 ----
  extern struct tree_opt_pass pass_ipa_type_escape;
  extern struct tree_opt_pass pass_ipa_pta;
  extern struct tree_opt_pass pass_early_local_passes;
+ extern struct tree_opt_pass pass_all_early_optimizations;
  
  extern struct tree_opt_pass pass_all_optimizations;
  extern struct tree_opt_pass pass_cleanup_cfg_post_optimizing;
Index: bitmap.c
===================================================================
*** bitmap.c	(revision 120209)
--- bitmap.c	(working copy)
*************** bitmap_obstack_release (bitmap_obstack *
*** 329,334 ****
--- 329,337 ----
    bit_obstack->elements = NULL;
    bit_obstack->heads = NULL;
    obstack_free (&bit_obstack->obstack, NULL);
+ #ifdef ENABLE_CHECKING
+   memset (&bit_obstack->obstack, 0xab, sizeof (*&bit_obstack->obstack));
+ #endif
  }
  
  /* Create a new bitmap on an obstack.  If BIT_OBSTACK is NULL, create
Index: omp-low.c
===================================================================
*** omp-low.c	(revision 120209)
--- omp-low.c	(working copy)
*************** expand_omp_parallel (struct omp_region *
*** 2533,2539 ****
  	single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
        DECL_STRUCT_FUNCTION (child_fn)->curr_properties
  	= cfun->curr_properties;
!       cgraph_add_new_function (child_fn);
  
        /* Convert OMP_RETURN into a RETURN_EXPR.  */
        if (exit_bb)
--- 2533,2539 ----
  	single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
        DECL_STRUCT_FUNCTION (child_fn)->curr_properties
  	= cfun->curr_properties;
!       cgraph_add_new_function (child_fn, true);
  
        /* Convert OMP_RETURN into a RETURN_EXPR.  */
        if (exit_bb)
Index: cgraphunit.c
===================================================================
*** cgraphunit.c	(revision 120209)
--- cgraphunit.c	(working copy)
*************** decide_is_function_needed (struct cgraph
*** 262,267 ****
--- 262,338 ----
    return false;
  }
  
+ /* Process CGRAPH_EXPAND_QUEUE, these are functions created during
+    the expansion process.  Note that this queue may grow as its
+    being processed, as the new functions may generate new ones.  */
+ bool
+ cgraph_expand_expand_queue (void)
+ {
+   bool output = false;
+   tree fndecl;
+   struct cgraph_node *node;
+   switch (cgraph_state)
+     {
+       case CGRAPH_STATE_CONSTRUCTION:
+ 	while (cgraph_expand_queue)
+ 	  {
+ 	    struct cgraph_node *n = cgraph_expand_queue;
+ 	    cgraph_expand_queue = cgraph_expand_queue->next_needed;
+ 	    n->next_needed = NULL;
+ 	    cgraph_finalize_function (n->decl, false);
+ 	    output = true;
+ 	  }
+       case CGRAPH_STATE_IPA:
+       case CGRAPH_STATE_IPA_SSA:
+ 	/* Process CGRAPH_EXPAND_QUEUE, these are functions created during
+ 	   the expansion process.  Note that this queue may grow as its
+ 	   being processed, as the new functions may generate new ones.  */
+ 	while (cgraph_expand_queue)
+ 	  {
+ 	    node = cgraph_expand_queue;
+ 	    fndecl = node->decl;
+ 	    cgraph_expand_queue = cgraph_expand_queue->next_needed;
+ 	      /* When IPA optimization already started, do all essential
+ 		 transformations that has been already performed on the whole
+ 		 cgraph but not on this function.  */
+ 
+ 	      if (!node->analyzed)
+ 		cgraph_analyze_function (node);
+ 	      if ((cgraph_state == CGRAPH_STATE_IPA_SSA
+ 		  && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
+ 		  /* When not optimizing, be sure we run early local passes anyway
+ 		     to expand OMP.  */
+ 		  || !optimize)
+ 		{
+ 		  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
+ 		  current_function_decl = fndecl;
+ 		  execute_pass_list (pass_early_local_passes.sub);
+ 		  free_dominance_info (CDI_POST_DOMINATORS);
+ 		  free_dominance_info (CDI_DOMINATORS);
+ 		  pop_cfun ();
+ 		  current_function_decl = NULL;
+ 		}
+ 	  }
+       case CGRAPH_STATE_EXPANSION:
+ 	/* Process CGRAPH_EXPAND_QUEUE, these are functions created during
+ 	   the expansion process.  Note that this queue may grow as its
+ 	   being processed, as the new functions may generate new ones.  */
+ 	while (cgraph_expand_queue)
+ 	  {
+ 	    node = cgraph_expand_queue;
+ 	    cgraph_expand_queue = cgraph_expand_queue->next_needed;
+ 	    node->next_needed = NULL;
+ 	    node->output = 0;
+ 	    node->lowered = DECL_STRUCT_FUNCTION (node->decl)->cfg != NULL;
+ 	    cgraph_expand_function (node);
+ 	  }
+       default:
+ 	gcc_assert (!cgraph_expand_queue);
+ 	break;
+     }
+   return output;
+ }
+ 
  /* When not doing unit-at-a-time, output all functions enqueued.
     Return true when such a functions were found.  */
  
*************** cgraph_assemble_pending_functions (void)
*** 288,305 ****
  	  cgraph_expand_function (n);
  	  output = true;
  	}
!     }
! 
!   /* Process CGRAPH_EXPAND_QUEUE, these are functions created during
!      the expansion process.  Note that this queue may grow as its
!      being processed, as the new functions may generate new ones.  */
!   while (cgraph_expand_queue)
!     {
!       struct cgraph_node *n = cgraph_expand_queue;
!       cgraph_expand_queue = cgraph_expand_queue->next_needed;
!       n->next_needed = NULL;
!       cgraph_finalize_function (n->decl, false);
!       output = true;
      }
  
    return output;
--- 359,365 ----
  	  cgraph_expand_function (n);
  	  output = true;
  	}
!       output |= cgraph_expand_expand_queue ();
      }
  
    return output;
*************** cgraph_analyze_function (struct cgraph_n
*** 816,821 ****
--- 876,889 ----
      node->local.inlinable = 0;
    /* Inlining characteristics are maintained by the cgraph_mark_inline.  */
    node->global.insns = node->local.self_insns;
+   if (!flag_unit_at_a_time)
+     {
+       bitmap_obstack_initialize (NULL);
+       execute_pass_list (pass_early_local_passes.sub);
+       free_dominance_info (CDI_POST_DOMINATORS);
+       free_dominance_info (CDI_DOMINATORS);
+       bitmap_obstack_release (NULL);
+     }
  
    node->analyzed = true;
    pop_cfun ();
*************** cgraph_expand_all_functions (void)
*** 1159,1181 ****
  	  gcc_assert (node->reachable);
  	  node->output = 0;
  	  cgraph_expand_function (node);
  	}
      }
  
    free (order);
  
-   /* Process CGRAPH_EXPAND_QUEUE, these are functions created during
-      the expansion process.  Note that this queue may grow as its
-      being processed, as the new functions may generate new ones.  */
-   while (cgraph_expand_queue)
-     {
-       node = cgraph_expand_queue;
-       cgraph_expand_queue = cgraph_expand_queue->next_needed;
-       node->next_needed = NULL;
-       node->output = 0;
-       node->lowered = DECL_STRUCT_FUNCTION (node->decl)->cfg != NULL;
-       cgraph_expand_function (node);
-     }
  }
  
  /* This is used to sort the node types by the cgraph order number.  */
--- 1227,1239 ----
  	  gcc_assert (node->reachable);
  	  node->output = 0;
  	  cgraph_expand_function (node);
+ 	  cgraph_expand_expand_queue ();
  	}
      }
+   cgraph_expand_expand_queue ();
  
    free (order);
  
  }
  
  /* This is used to sort the node types by the cgraph order number.  */
*************** cgraph_optimize (void)
*** 1383,1388 ****
--- 1441,1449 ----
  #endif
    if (!flag_unit_at_a_time)
      {
+       cgraph_assemble_pending_functions ();
+       cgraph_expand_expand_queue ();
+       cgraph_state = CGRAPH_STATE_FINISHED;
        cgraph_output_pending_asms ();
        varpool_assemble_pending_decls ();
        varpool_output_debug_info ();
*************** cgraph_optimize (void)
*** 1408,1413 ****
--- 1469,1475 ----
        fprintf (cgraph_dump_file, "Marked ");
        dump_cgraph (cgraph_dump_file);
      }
+   cgraph_state = CGRAPH_STATE_IPA;
      
    /* Don't run the IPA passes if there was any error or sorry messages.  */
    if (errorcount == 0 && sorrycount == 0)
*************** cgraph_optimize (void)
*** 1440,1445 ****
--- 1502,1508 ----
  
    cgraph_mark_functions_to_output ();
  
+   cgraph_state = CGRAPH_STATE_EXPANSION;
    if (!flag_toplevel_reorder)
      cgraph_output_in_order ();
    else
*************** cgraph_optimize (void)
*** 1452,1457 ****
--- 1515,1522 ----
        varpool_assemble_pending_decls ();
        varpool_output_debug_info ();
      }
+   cgraph_expand_expand_queue ();
+   cgraph_state = CGRAPH_STATE_FINISHED;
  
    if (cgraph_dump_file)
      {
*************** cgraph_build_static_cdtor (char which, t
*** 1581,1594 ****
--- 1646,1670 ----
  
    gimplify_function_tree (decl);
  
+ #if 0
+   cgraph_add_new_function (decl, false);
+   cgraph_mark_needed_node (cgraph_node (decl));
+ 
+ #else
+ 
    /* ??? We will get called LATE in the compilation process.  */
    if (cgraph_global_info_ready)
      {
        tree_lowering_passes (decl);
+       bitmap_obstack_initialize (NULL);
+       if (optimize)
+         execute_pass_list (pass_early_local_passes.sub);
+       bitmap_obstack_release (NULL);
        tree_rest_of_compilation (decl);
      }
    else
      cgraph_finalize_function (decl, 0);
+ #endif
  
    if (targetm.have_ctors_dtors)
      {
Index: ipa-inline.c
===================================================================
*** ipa-inline.c	(revision 120209)
--- ipa-inline.c	(working copy)
*************** cgraph_decide_inlining_incrementally (st
*** 1099,1104 ****
--- 1099,1113 ----
  
    /* First of all look for always inline functions.  */
    for (e = node->callees; e; e = e->next_callee)
+ #if 0
+ {
+   fprintf("%i %i %i %i\n",
+     e->callee->local.disregard_inline_limits,
+  e->inline_failed,
+  !cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed),
+ 	(DECL_SAVED_TREE (e->callee->decl) || e->callee->inline_decl)
+ );
+ #endif
      if (e->callee->local.disregard_inline_limits
  	&& e->inline_failed
          && !cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed)
*************** cgraph_decide_inlining_incrementally (st
*** 1115,1120 ****
--- 1124,1132 ----
  	cgraph_mark_inline (e);
  	inlined = true;
        }
+ #if 0
+ }
+ #endif
  
    /* Now do the automatic inlining.  */
    if (!flag_really_no_inline)
Index: opts.c
===================================================================
*** opts.c	(revision 120209)
--- opts.c	(working copy)
*************** decode_options (unsigned int argc, const
*** 454,460 ****
        flag_tree_fre = 1;
        flag_tree_copy_prop = 1;
        flag_tree_sink = 1;
!       flag_tree_salias = 1;
        if (!no_unit_at_a_time_default)
          flag_unit_at_a_time = 1;
  
--- 454,462 ----
        flag_tree_fre = 1;
        flag_tree_copy_prop = 1;
        flag_tree_sink = 1;
!       /* Structure vars code is not happy with new structure references
! 	 appearing in function during inlining, disable it for time being.  */
!       flag_tree_salias = 0;
        if (!no_unit_at_a_time_default)
          flag_unit_at_a_time = 1;
  
Index: tree-mudflap.c
===================================================================
*** tree-mudflap.c	(revision 120209)
--- tree-mudflap.c	(working copy)
*************** mf_decl_cache_locals (void)
*** 460,473 ****
       globals into the cache variables.  */
    t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (mf_cache_shift_decl_l),
                mf_cache_shift_decl_l, mf_cache_shift_decl);
-   add_referenced_var (mf_cache_shift_decl);
    SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (current_function_decl));
    gimplify_to_stmt_list (&t);
    shift_init_stmts = t;
  
    t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (mf_cache_mask_decl_l),
                mf_cache_mask_decl_l, mf_cache_mask_decl);
-   add_referenced_var (mf_cache_mask_decl);
    SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (current_function_decl));
    gimplify_to_stmt_list (&t);
    mask_init_stmts = t;
--- 460,471 ----
*************** mf_build_check_statement_for (tree base,
*** 573,589 ****
                                              & __mf_mask].  */
    t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
                (flag_mudflap_threads ? mf_cache_shift_decl : mf_cache_shift_decl_l));
-   add_referenced_var (mf_cache_shift_decl);
    t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
                (flag_mudflap_threads ? mf_cache_mask_decl : mf_cache_mask_decl_l));
-   add_referenced_var (mf_cache_mask_decl);
    t = build4 (ARRAY_REF,
                TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
                mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
-   add_referenced_var (mf_cache_array_decl);
    t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
    t = build2 (GIMPLE_MODIFY_STMT, void_type_node, mf_elem, t);
-   add_referenced_var (mf_elem);
    SET_EXPR_LOCUS (t, locus);
    gimplify_to_stmt_list (&t);
    tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
--- 571,583 ----
*************** mf_build_check_statement_for (tree base,
*** 607,613 ****
                build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
                TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
    t = build2 (GT_EXPR, boolean_type_node, t, mf_base);
-   add_referenced_var (mf_base);
  
    /* Construct '__mf_elem->high < __mf_limit'.
  
--- 601,606 ----
Index: tree-inline.c
===================================================================
*** tree-inline.c	(revision 120209)
--- tree-inline.c	(working copy)
*************** Boston, MA 02110-1301, USA.  */
*** 50,55 ****
--- 50,56 ----
  #include "pointer-set.h"
  #include "ipa-prop.h"
  #include "value-prof.h"
+ #include "tree-pass.h"
  
  /* I'm not real happy about this, but we need to handle gimple and
     non-gimple trees.  */
*************** insert_decl_map (copy_body_data *id, tre
*** 141,146 ****
--- 142,191 ----
  		       (splay_tree_value) value);
  }
  
+ /* Construct new SSA name for old NAME. ID is the inline context.  */
+ 
+ static tree
+ remap_ssa_name (tree name, copy_body_data *id)
+ {
+   tree new;
+   splay_tree_node n;
+ 
+   gcc_assert (TREE_CODE (name) == SSA_NAME);
+ 
+   n = splay_tree_lookup (id->decl_map, (splay_tree_key) name);
+   if (n)
+     return (tree) n->value;
+ 
+   /* Do not set DEF_STMT yet as statement is not copied yet. We do that
+      in copy_bb.  */
+   new = remap_decl (SSA_NAME_VAR (name), id);
+   /* We might've substituted constant or another SSA_NAME for
+      the variable. 
+ 
+      Replace the SSA name representing RESULT_DECL by variable during
+      inlining:  this saves us from need to introduce PHI node in a case
+      return value is just partly initialized.  */
+   if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
+       && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
+ 	  || !id->transform_return_to_modify))
+     {
+       new = make_ssa_name (new, NULL);
+       insert_decl_map (id, name, new);
+       if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
+ 	{
+ 	  SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
+ 	  if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
+ 	    set_default_def (SSA_NAME_VAR (new), new);
+ 	}
+       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
+ 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
+       TREE_TYPE (new) = TREE_TYPE (SSA_NAME_VAR (new));
+     }
+   else
+     insert_decl_map (id, name, new);
+   return new;
+ }
+ 
  /* Remap DECL during the copying of the BLOCK tree for the function.  */
  
  tree
*************** remap_decl (tree decl, copy_body_data *i
*** 188,193 ****
--- 233,254 ----
  	    walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
  	}
  
+       if (cfun && gimple_in_ssa_p (cfun)
+ 	  && (TREE_CODE (t) == VAR_DECL
+ 	      || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
+ 	{
+           tree def = gimple_default_def (id->src_cfun, decl);
+ 	  get_var_ann (t);
+ 	  if (TREE_CODE (decl) != PARM_DECL && def)
+ 	    {
+ 	      tree map = remap_ssa_name (def, id);
+ 	      /* Watch out RESULT_DECLs whose SSA names map directly
+ 		 to them.  */
+ 	      if (TREE_CODE (map) == SSA_NAME)
+ 	        set_default_def (t, map);
+ 	    }
+ 	  add_referenced_var (t);
+ 	}
        return t;
      }
  
*************** copy_body_r (tree *tp, int *walk_subtree
*** 500,505 ****
--- 561,572 ----
  	  return (tree) (void *)1;
  	}
      }
+   else if (TREE_CODE (*tp) == SSA_NAME)
+     {
+       *tp = remap_ssa_name (*tp, id);
+       *walk_subtrees = 0;
+       return NULL;
+     }
  
    /* Local variables and labels need to be replaced by equivalent
       variables.  We don't want to copy static variables; there's only
*************** copy_body_r (tree *tp, int *walk_subtree
*** 621,626 ****
--- 688,698 ----
        /* Here is the "usual case".  Copy this tree node, and then
  	 tweak some special cases.  */
        copy_tree_r (tp, walk_subtrees, NULL);
+ 
+       /* Global variables we didn't seen yet needs to go into referenced
+ 	 vars.  */
+       if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL)
+ 	add_referenced_var (*tp);
         
        /* If EXPR has block defined, map it to newly constructed block.
           When inlining we want EXPRs without block appear in the block
*************** copy_bb (copy_body_data *id, basic_block
*** 781,791 ****
--- 853,911 ----
  		  && tree_could_throw_p (stmt))
  		add_stmt_to_eh_region (stmt, id->eh_region);
  	    }
+ 	  if (gimple_in_ssa_p (cfun))
+ 	    {
+ 	       ssa_op_iter i;
+ 	       tree def;
+ 
+ 	       FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
+ 		if (TREE_CODE (def) == SSA_NAME)
+ 		  SSA_NAME_DEF_STMT (def) = stmt;
+ 	    }
  	}
      }
    return copy_basic_block;
  }
  
+ /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
+    form is quite easy, since dominator relationship for old basic blocks does
+    not change.
+ 
+    There is however exception where inlining might change dominator relation
+    across EH edges from basic block within inlined functions destinating
+    to landging pads in function we inline into.
+ 
+    The function mark PHI_RESULT of such PHI nodes for renaming; it is
+    safe the EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI
+    must be set.  This means, that there will be no overlapping live ranges
+    for the underlying symbol.
+ 
+    This might change in future if we allow redirecting of EH edges and
+    we might want to change way build CFG pre-inlining to include
+    all the possible edges then.  */
+ static void
+ update_ssa_across_eh_edges (basic_block bb)
+ {
+   edge e;
+   edge_iterator ei;
+ 
+   FOR_EACH_EDGE (e, ei, bb->succs)
+     if (!e->dest->aux
+ 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
+       {
+ 	tree phi;
+ 
+ 	gcc_assert (e->flags & EDGE_EH);
+ 	for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
+ 	  {
+ 	    gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
+ 			(PHI_RESULT (phi)));
+ 	    mark_sym_for_renaming
+ 	      (SSA_NAME_VAR (PHI_RESULT (phi)));
+ 	  }
+       }
+ }
+ 
  /* Copy edges from BB into its copy constructed earlier, scale profile
     accordingly.  Edges will be taken care of later.  Assume aux
     pointers to point to the copies of each BB.  */
*************** copy_edges_for_bb (basic_block bb, int c
*** 825,830 ****
--- 945,952 ----
  
        copy_stmt = bsi_stmt (bsi);
        update_stmt (copy_stmt);
+       if (gimple_in_ssa_p (cfun))
+         mark_symbols_for_renaming (copy_stmt);
        /* Do this before the possible split_block.  */
        bsi_next (&bsi);
  
*************** copy_edges_for_bb (basic_block bb, int c
*** 847,857 ****
--- 969,1022 ----
  	       right at this point; split_block doesn't care.  */
  	    {
  	      edge e = split_block (new_bb, copy_stmt);
+ 
  	      new_bb = e->dest;
+ 	      new_bb->aux = e->src->aux;
  	      bsi = bsi_start (new_bb);
  	    }
  
             make_eh_edges (copy_stmt);
+ 
+ 	   if (gimple_in_ssa_p (cfun))
+ 	     update_ssa_across_eh_edges (bb_for_stmt (copy_stmt));
+ 	}
+     }
+ }
+ 
+ /* Copy the PHIs.  All blocks and edges are copied, some blocks
+    were possibly split and new outgoing EH edges inserted.
+    BB points to the block of the original function and AUX pointers link
+    the original and newly copied blocks.  */
+ 
+ static void
+ copy_phis_for_bb (basic_block bb, copy_body_data *id)
+ {
+   basic_block new_bb = bb->aux;
+   edge_iterator ei;
+   tree phi;
+ 
+   for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+     {
+       tree res = PHI_RESULT (phi);
+       tree new_res = res;
+       tree new_phi;
+       edge new_edge;
+ 
+       if (is_gimple_reg (res))
+ 	{
+ 	  walk_tree (&new_res, copy_body_r, id, NULL);
+ 	  SSA_NAME_DEF_STMT (new_res)
+ 	    = new_phi = create_phi_node (new_res, new_bb);
+ 	  FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
+ 	    {
+ 	      edge old_edge = find_edge (new_edge->src->aux, bb);
+ 	      tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
+ 	      tree new_arg = arg;
+ 
+ 	      walk_tree (&new_arg, copy_body_r, id, NULL);
+ 	      gcc_assert (new_arg);
+ 	      add_phi_arg (new_phi, new_arg, new_edge);
+ 	    }
  	}
      }
  }
*************** remap_decl_1 (tree decl, void *data)
*** 863,868 ****
--- 1028,1094 ----
    return remap_decl (decl, (copy_body_data *) data);
  }
  
+ /* Build a struct function and associated datastructures for the new clone
+    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */
+ 
+ static void
+ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
+ 		 int frequency)
+ {
+   struct function *new_cfun
+      = (struct function *) ggc_alloc_cleared (sizeof (struct function));
+   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
+   int count_scale, frequency_scale;
+ 
+   if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
+     count_scale = (REG_BR_PROB_BASE * count
+ 		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
+   else
+     count_scale = 1;
+ 
+   if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
+     frequency_scale = (REG_BR_PROB_BASE * frequency
+ 		       /
+ 		       ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
+   else
+     frequency_scale = count_scale;
+ 
+   /* Register specific tree functions.  */
+   tree_register_cfg_hooks ();
+   *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
+   VALUE_HISTOGRAMS (new_cfun) = NULL;
+   new_cfun->unexpanded_var_list = NULL;
+   new_cfun->cfg = NULL;
+   new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
+   new_cfun->ib_boundaries_block = NULL;
+   DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
+   push_cfun (new_cfun);
+   init_empty_tree_cfg ();
+ 
+   ENTRY_BLOCK_PTR->count =
+     (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
+      REG_BR_PROB_BASE);
+   ENTRY_BLOCK_PTR->frequency =
+     (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
+      frequency_scale / REG_BR_PROB_BASE);
+   EXIT_BLOCK_PTR->count =
+     (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
+      REG_BR_PROB_BASE);
+   EXIT_BLOCK_PTR->frequency =
+     (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
+      frequency_scale / REG_BR_PROB_BASE);
+   if (src_cfun->eh)
+     init_eh_for_function ();
+ 
+   if (src_cfun->gimple_df)
+     {
+       init_tree_ssa ();
+       cfun->gimple_df->in_ssa_p = true;
+       init_ssa_operands ();
+     }
+   pop_cfun ();
+ }
+ 
  /* Make a copy of the body of FN so that it can be inserted inline in
     another function.  Walks FN via CFG, returns new fndecl.  */
  
*************** copy_cfg_body (copy_body_data * id, gcov
*** 873,887 ****
    tree callee_fndecl = id->src_fn;
    /* Original cfun for the callee, doesn't change.  */
    struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
!   /* Copy, built by this function.  */
!   struct function *new_cfun;
!   /* Place to copy from; when a copy of the function was saved off earlier,
!      use that instead of the main copy.  */
!   struct function *cfun_to_copy =
!     (struct function *) ggc_alloc_cleared (sizeof (struct function));
    basic_block bb;
    tree new_fndecl = NULL;
    int count_scale, frequency_scale;
  
    if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
      count_scale = (REG_BR_PROB_BASE * count
--- 1099,1109 ----
    tree callee_fndecl = id->src_fn;
    /* Original cfun for the callee, doesn't change.  */
    struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
!   struct function *cfun_to_copy;
    basic_block bb;
    tree new_fndecl = NULL;
    int count_scale, frequency_scale;
+   int last;
  
    if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
      count_scale = (REG_BR_PROB_BASE * count
*************** copy_cfg_body (copy_body_data * id, gcov
*** 903,966 ****
    gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
  	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
  
!   *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);
  
-   id->src_cfun = cfun_to_copy;
- 
-   /* If requested, create new basic_block_info and label_to_block_maps.
-      Otherwise, insert our new blocks and labels into the existing cfg.  */
-   if (id->transform_new_cfg)
-     {
-       new_cfun =
- 	(struct function *) ggc_alloc_cleared (sizeof (struct function));
-       *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
-       new_cfun->cfg = NULL;
-       new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
-       new_cfun->ib_boundaries_block = NULL;
-       DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
-       push_cfun (new_cfun);
-       init_empty_tree_cfg ();
- 
-       ENTRY_BLOCK_PTR->count =
- 	(ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
- 	 REG_BR_PROB_BASE);
-       ENTRY_BLOCK_PTR->frequency =
- 	(ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
- 	 frequency_scale / REG_BR_PROB_BASE);
-       EXIT_BLOCK_PTR->count =
- 	(EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
- 	 REG_BR_PROB_BASE);
-       EXIT_BLOCK_PTR->frequency =
- 	(EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
- 	 frequency_scale / REG_BR_PROB_BASE);
- 
-       entry_block_map = ENTRY_BLOCK_PTR;
-       exit_block_map = EXIT_BLOCK_PTR;
-     }
  
    ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
    EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  
    /* Duplicate any exception-handling regions.  */
    if (cfun->eh)
      {
-       if (id->transform_new_cfg)
-         init_eh_for_function ();
        id->eh_region_offset
  	= duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
  				0, id->eh_region);
      }
    /* Use aux pointers to map the original blocks to copy.  */
    FOR_EACH_BB_FN (bb, cfun_to_copy)
!     bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
    /* Now that we've duplicated the blocks, duplicate their edges.  */
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      copy_edges_for_bb (bb, count_scale);
    FOR_ALL_BB_FN (bb, cfun_to_copy)
!     bb->aux = NULL;
! 
!   if (id->transform_new_cfg)
!     pop_cfun ();
  
    return new_fndecl;
  }
--- 1125,1171 ----
    gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
  	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
  
!   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  
  
    ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
    EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
+   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
+   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  
    /* Duplicate any exception-handling regions.  */
    if (cfun->eh)
      {
        id->eh_region_offset
  	= duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
  				0, id->eh_region);
      }
    /* Use aux pointers to map the original blocks to copy.  */
    FOR_EACH_BB_FN (bb, cfun_to_copy)
!     {
!       basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
!       bb->aux = new;
!       new->aux = bb;
!     }
! 
!   last = n_basic_blocks;
    /* Now that we've duplicated the blocks, duplicate their edges.  */
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      copy_edges_for_bb (bb, count_scale);
+   if (gimple_in_ssa_p (cfun))
+     FOR_ALL_BB_FN (bb, cfun_to_copy)
+       copy_phis_for_bb (bb, id);
    FOR_ALL_BB_FN (bb, cfun_to_copy)
!     {
!       ((basic_block)bb->aux)->aux = NULL;
!       bb->aux = NULL;
!     }
!   /* Zero out AUX fields of blocks newly created during EH edge
!      insertion.  */
!   for (; last < n_basic_blocks; last++)
!     BASIC_BLOCK (last)->aux = NULL;
!   entry_block_map->aux = NULL;
!   exit_block_map->aux = NULL;
  
    return new_fndecl;
  }
*************** setup_one_parameter (copy_body_data *id,
*** 1017,1029 ****
    tree init_stmt;
    tree var;
    tree var_sub;
! 
!   /* If the parameter is never assigned to, we may not need to
!      create a new variable here at all.  Instead, we may be able
!      to just use the argument value.  */
    if (TREE_READONLY (p)
        && !TREE_ADDRESSABLE (p)
!       && value && !TREE_SIDE_EFFECTS (value))
      {
        /* We may produce non-gimple trees by adding NOPs or introduce
  	 invalid sharing when operand is not really constant.
--- 1222,1238 ----
    tree init_stmt;
    tree var;
    tree var_sub;
!   tree rhs = value ? fold_convert (TREE_TYPE (p), value) : NULL;
!   tree def = (gimple_in_ssa_p (cfun)
! 	      ? gimple_default_def (id->src_cfun, p) : NULL);
! 
!   /* If the parameter is never assigned to, has no SSA_NAMEs created,
!      we may not need to create a new variable here at all.  Instead, we may
!      be able to just use the argument value.  */
    if (TREE_READONLY (p)
        && !TREE_ADDRESSABLE (p)
!       && value && !TREE_SIDE_EFFECTS (value)
!       && !def)
      {
        /* We may produce non-gimple trees by adding NOPs or introduce
  	 invalid sharing when operand is not really constant.
*************** setup_one_parameter (copy_body_data *id,
*** 1047,1052 ****
--- 1256,1266 ----
       here since the type of this decl must be visible to the calling
       function.  */
    var = copy_decl_to_var (p, id);
+   if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
+     {
+       get_var_ann (var);
+       add_referenced_var (var);
+     }
  
    /* See if the frontend wants to pass this by invisible reference.  If
       so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
*************** setup_one_parameter (copy_body_data *id,
*** 1085,1105 ****
    if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
      TREE_READONLY (var) = 0;
  
    /* Initialize this VAR_DECL from the equivalent argument.  Convert
       the argument to the proper type in case it was promoted.  */
    if (value)
      {
-       tree rhs = fold_convert (TREE_TYPE (var), value);
        block_stmt_iterator bsi = bsi_last (bb);
  
        if (rhs == error_mark_node)
! 	return;
  
        STRIP_USELESS_TYPE_CONVERSION (rhs);
  
        /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
  	 keep our trees in gimple form.  */
!       init_stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), var, rhs);
  
        /* If we did not create a gimple value and we did not create a gimple
  	 cast of a gimple value, then we will need to gimplify INIT_STMTS
--- 1299,1352 ----
    if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
      TREE_READONLY (var) = 0;
  
+   /* If there is no setup required and we are in SSA, take the easy route
+      replacing all SSA names representing the function parameter by the
+      SSA name passed to the function.
+ 
+      We need to construct a map for the variable anyway as it might be
+      used in different SSA names when the parameter is set in the function.
+ 
+      FIXME: This usually kills the last connection between the inlined
+      function parameter and the actual value in debug info.  Can we do
+      better here?  If we just inserted the statement, copy propagation
+      would kill it anyway as it always did in older versions of GCC.
+ 
+      We might want to introduce a notion that a single SSA_NAME might
+      represent multiple variables for purposes of debugging.  */
+   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
+       && (TREE_CODE (rhs) == SSA_NAME
+ 	  || is_gimple_min_invariant (rhs)))
+     {
+       insert_decl_map (id, def, rhs);
+       return;
+     }
+ 
    /* Initialize this VAR_DECL from the equivalent argument.  Convert
       the argument to the proper type in case it was promoted.  */
    if (value)
      {
        block_stmt_iterator bsi = bsi_last (bb);
  
        if (rhs == error_mark_node)
! 	{
!   	  insert_decl_map (id, p, var_sub);
! 	  return;
! 	}
  
        STRIP_USELESS_TYPE_CONVERSION (rhs);
  
        /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
  	 keep our trees in gimple form.  */
!       if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
! 	{
! 	  def = remap_ssa_name (def, id);
!           init_stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), def, rhs);
! 	  SSA_NAME_DEF_STMT (def) = init_stmt;
! 	  SSA_NAME_IS_DEFAULT_DEF (def) = 0;
! 	  set_default_def (var, NULL);
! 	}
!       else
!         init_stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), var, rhs);
  
        /* If we did not create a gimple value and we did not create a gimple
  	 cast of a gimple value, then we will need to gimplify INIT_STMTS
*************** setup_one_parameter (copy_body_data *id,
*** 1110,1121 ****
  	  && (!is_gimple_cast (rhs)
  	      || !is_gimple_val (TREE_OPERAND (rhs, 0))))
  	  || !is_gimple_reg (var))
! 	gimplify_stmt (&init_stmt);
  
        /* If VAR represents a zero-sized variable, it's possible that the
  	 assignment statment may result in no gimple statements.  */
        if (init_stmt)
          bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
      }
  }
  
--- 1357,1385 ----
  	  && (!is_gimple_cast (rhs)
  	      || !is_gimple_val (TREE_OPERAND (rhs, 0))))
  	  || !is_gimple_reg (var))
! 	{
!           tree_stmt_iterator i;
! 
! 	  push_gimplify_context ();
! 	  gimplify_stmt (&init_stmt);
! 	  if (gimple_in_ssa_p (cfun)
!               && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
! 	    {
! 	      /* The replacement can expose previously unreferenced
! 		 variables.  */
! 	      for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
! 		find_new_referenced_vars (tsi_stmt_ptr (i));
! 	     }
! 	  pop_gimplify_context (NULL);
! 	}
  
        /* If VAR represents a zero-sized variable, it's possible that the
  	 assignment statment may result in no gimple statements.  */
        if (init_stmt)
          bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
+       if (gimple_in_ssa_p (cfun))
+ 	for (;!bsi_end_p (bsi); bsi_next (&bsi))
+ 	  mark_symbols_for_renaming (bsi_stmt (bsi));
      }
  }
  
*************** initialize_inlined_parameters (copy_body
*** 1170,1186 ****
     The USE_STMT is filled to contain a use of the declaration to
     indicate the return value of the function.
  
!    RETURN_SLOT_ADDR, if non-null, was a fake parameter that
!    took the address of the result.  MODIFY_DEST, if non-null, was the LHS of
!    the GIMPLE_MODIFY_STMT to which this call is the RHS.
  
     The return value is a (possibly null) value that is the result of the
     function as seen by the callee.  *USE_P is a (possibly null) value that
     holds the result as seen by the caller.  */
  
  static tree
! declare_return_variable (copy_body_data *id, tree return_slot_addr,
! 			 tree modify_dest, tree *use_p)
  {
    tree callee = id->src_fn;
    tree caller = id->dst_fn;
--- 1434,1450 ----
     The USE_STMT is filled to contain a use of the declaration to
     indicate the return value of the function.
  
!    RETURN_SLOT, if non-null is place where to store the result.  It
!    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
!    was the LHS of the GIMPLE_MODIFY_STMT to which this call is the RHS.
  
     The return value is a (possibly null) value that is the result of the
     function as seen by the callee.  *USE_P is a (possibly null) value that
     holds the result as seen by the caller.  */
  
  static tree
! declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
! 			 tree *use_p)
  {
    tree callee = id->src_fn;
    tree caller = id->dst_fn;
*************** declare_return_variable (copy_body_data 
*** 1199,1213 ****
  
    /* If there was a return slot, then the return value is the
       dereferenced address of that object.  */
!   if (return_slot_addr)
      {
!       /* The front end shouldn't have used both return_slot_addr and
  	 a modify expression.  */
        gcc_assert (!modify_dest);
        if (DECL_BY_REFERENCE (result))
! 	var = return_slot_addr;
        else
! 	var = build_fold_indirect_ref (return_slot_addr);
        if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
             || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
  	  && !DECL_GIMPLE_REG_P (result)
--- 1463,1511 ----
  
    /* If there was a return slot, then the return value is the
       dereferenced address of that object.  */
!   if (return_slot)
      {
!       /* The front end shouldn't have used both return_slot and
  	 a modify expression.  */
        gcc_assert (!modify_dest);
        if (DECL_BY_REFERENCE (result))
! 	{
! 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
! 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
! 
! 	  /* We are going to construct *&return_slot and we can't do that
! 	     for variables that are not believed to be addressable.
! 
! 	     FIXME: This check can possibly trigger, because values returned
! 	     via the return slot optimization are not believed to have their
! 	     address taken by alias analysis.  */
! 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
! 	  if (gimple_in_ssa_p (cfun))
! 	    {
! 	      HOST_WIDE_INT bitsize;
! 	      HOST_WIDE_INT bitpos;
! 	      tree offset;
! 	      enum machine_mode mode;
! 	      int unsignedp;
! 	      int volatilep;
! 	      tree base;
! 	      base = get_inner_reference (return_slot, &bitsize, &bitpos,
! 					  &offset,
! 					  &mode, &unsignedp, &volatilep,
! 					  false);
! 	      if (TREE_CODE (base) == INDIRECT_REF)
! 		base = TREE_OPERAND (base, 0);
! 	      if (TREE_CODE (base) == SSA_NAME)
! 		base = SSA_NAME_VAR (base);
! 	      mark_sym_for_renaming (base);
! 	    }
! 	  var = return_slot_addr;
! 	}
        else
! 	{
! 	  var = return_slot;
! 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
! 	}
        if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
             || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
  	  && !DECL_GIMPLE_REG_P (result)
*************** declare_return_variable (copy_body_data 
*** 1221,1227 ****
    gcc_assert (!TREE_ADDRESSABLE (callee_type));
  
    /* Attempt to avoid creating a new temporary variable.  */
!   if (modify_dest)
      {
        bool use_it = false;
  
--- 1519,1526 ----
    gcc_assert (!TREE_ADDRESSABLE (callee_type));
  
    /* Attempt to avoid creating a new temporary variable.  */
!   if (modify_dest
!       && TREE_CODE (modify_dest) != SSA_NAME)
      {
        bool use_it = false;
  
*************** declare_return_variable (copy_body_data 
*** 1270,1275 ****
--- 1569,1579 ----
    gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
  
    var = copy_result_decl_to_var (result, id);
+   if (gimple_in_ssa_p (cfun))
+     {
+       get_var_ann (var);
+       add_referenced_var (var);
+     }
  
    DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
    DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
*************** expand_call_inline (basic_block bb, tree
*** 1938,1944 ****
    tree fn;
    splay_tree st;
    tree args;
!   tree return_slot_addr;
    tree modify_dest;
    location_t saved_location;
    struct cgraph_edge *cg_edge;
--- 2242,2248 ----
    tree fn;
    splay_tree st;
    tree args;
!   tree return_slot;
    tree modify_dest;
    location_t saved_location;
    struct cgraph_edge *cg_edge;
*************** expand_call_inline (basic_block bb, tree
*** 2095,2100 ****
--- 2399,2405 ----
    /* Record the function we are about to inline.  */
    id->src_fn = fn;
    id->src_node = cg_edge->callee;
+   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
  
    initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
  
*************** expand_call_inline (basic_block bb, tree
*** 2108,2114 ****
    gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
  
    /* Find the lhs to which the result of this call is assigned.  */
!   return_slot_addr = NULL;
    if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
      {
        modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
--- 2413,2419 ----
    gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
  
    /* Find the lhs to which the result of this call is assigned.  */
!   return_slot = NULL;
    if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
      {
        modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
*************** expand_call_inline (basic_block bb, tree
*** 2123,2138 ****
  	TREE_NO_WARNING (modify_dest) = 1;
        if (CALL_EXPR_RETURN_SLOT_OPT (t))
  	{
  	  return_slot_addr = build_fold_addr_expr (modify_dest);
  	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
! 	  modify_dest = NULL;
  	}
      }
    else
      modify_dest = NULL;
  
    /* Declare the return variable for the function.  */
!   declare_return_variable (id, return_slot_addr,
  			   modify_dest, &use_retvar);
  
    /* This is it.  Duplicate the callee body.  Assume callee is
--- 2428,2446 ----
  	TREE_NO_WARNING (modify_dest) = 1;
        if (CALL_EXPR_RETURN_SLOT_OPT (t))
  	{
+ 	  return_slot = modify_dest;
+ 	  modify_dest = NULL;
+ #if 0
  	  return_slot_addr = build_fold_addr_expr (modify_dest);
  	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
! #endif
  	}
      }
    else
      modify_dest = NULL;
  
    /* Declare the return variable for the function.  */
!   declare_return_variable (id, return_slot,
  			   modify_dest, &use_retvar);
  
    /* This is it.  Duplicate the callee body.  Assume callee is
*************** expand_call_inline (basic_block bb, tree
*** 2164,2175 ****
    if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
      {
        *tp = use_retvar;
        maybe_clean_or_replace_eh_stmt (stmt, stmt);
      }
    else
      /* We're modifying a TSI owned by gimple_expand_calls_inline();
         tsi_delink() will leave the iterator in a sane state.  */
!     bsi_remove (&stmt_bsi, true);
  
    if (purge_dead_abnormal_edges)
      tree_purge_dead_abnormal_call_edges (return_block);
--- 2472,2515 ----
    if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
      {
        *tp = use_retvar;
+       if (gimple_in_ssa_p (cfun))
+ 	{
+           update_stmt (stmt);
+           mark_symbols_for_renaming (stmt);
+ 	}
        maybe_clean_or_replace_eh_stmt (stmt, stmt);
      }
    else
      /* We're modifying a TSI owned by gimple_expand_calls_inline();
         tsi_delink() will leave the iterator in a sane state.  */
!     {
!       /* Handle the case of inlining a function that is missing a return
!          statement, so the return value becomes undefined.  */
!       if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
! 	  && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
! 	{
! 	  tree name = TREE_OPERAND (stmt, 0);
! 	  tree var = SSA_NAME_VAR (TREE_OPERAND (stmt, 0));
! 	  tree def = gimple_default_def (cfun, var);
! 
! 	  /* If the variable is used undefined, make this name undefined via
! 	     move.  */
! 	  if (def)
! 	    {
! 	      TREE_OPERAND (stmt, 1) = def;
! 	      update_stmt (stmt);
! 	    }
! 	  /* Otherwise make this variable undefined.  */
! 	  else
! 	    {
! 	      bsi_remove (&stmt_bsi, true);
! 	      set_default_def (var, name);
! 	      SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
! 	    }
! 	}
!       else
!         bsi_remove (&stmt_bsi, true);
!     }
  
    if (purge_dead_abnormal_edges)
      tree_purge_dead_abnormal_call_edges (return_block);
*************** optimize_inline_calls (tree fn)
*** 2290,2296 ****
       as inlining loops might increase the maximum.  */
    if (ENTRY_BLOCK_PTR->count)
      counts_to_freqs ();
!   fold_cond_expr_cond ();
  }
  
  /* FN is a function that has a complete body, and CLONE is a function whose
--- 2630,2652 ----
       as inlining loops might increase the maximum.  */
    if (ENTRY_BLOCK_PTR->count)
      counts_to_freqs ();
!   if (gimple_in_ssa_p (cfun))
!     {
!       /* We make no attempts to keep dominance info up-to-date.  */
!       free_dominance_info (CDI_DOMINATORS);
!       free_dominance_info (CDI_POST_DOMINATORS);
!       delete_unreachable_blocks ();
!       update_ssa (TODO_update_ssa);
!       fold_cond_expr_cond ();
!       if (need_ssa_update_p ())
!         update_ssa (TODO_update_ssa);
!     }
!   else
!     fold_cond_expr_cond ();
!   /* It would be nice to check SSA/CFG/statement consistency here, but it is
!      not possible yet - the IPA passes might make various functions not to
!      throw and they don't care to proactively update local EH info.  This is
!      done later in the fixup_cfg pass that also executes the verification.  */
  }
  
  /* FN is a function that has a complete body, and CLONE is a function whose
*************** tree_function_versioning (tree old_decl,
*** 2782,2788 ****
    struct cgraph_node *old_version_node;
    struct cgraph_node *new_version_node;
    copy_body_data id;
!   tree p, new_fndecl;
    unsigned i;
    struct ipa_replace_map *replace_info;
    basic_block old_entry_block;
--- 3138,3144 ----
    struct cgraph_node *old_version_node;
    struct cgraph_node *new_version_node;
    copy_body_data id;
!   tree p;
    unsigned i;
    struct ipa_replace_map *replace_info;
    basic_block old_entry_block;
*************** tree_function_versioning (tree old_decl,
*** 2828,2833 ****
--- 3184,3195 ----
    id.transform_lang_insert_block = false;
  
    current_function_decl = new_decl;
+   old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
+     (DECL_STRUCT_FUNCTION (old_decl));
+   initialize_cfun (new_decl, old_decl,
+ 		   old_entry_block->count,
+ 		   old_entry_block->frequency);
+   push_cfun (DECL_STRUCT_FUNCTION (new_decl));
    
    /* Copy the function's static chain.  */
    p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
*************** tree_function_versioning (tree old_decl,
*** 2871,2892 ****
        }
    
    /* Copy the Function's body.  */
!   old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
!     (DECL_STRUCT_FUNCTION (old_decl));
!   new_fndecl = copy_body (&id,
! 			  old_entry_block->count,
! 			  old_entry_block->frequency, NULL, NULL);
    
-   DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);
- 
-   DECL_STRUCT_FUNCTION (new_decl)->cfg =
-     DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
-   DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
-   DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
-     DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
-   DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
-     DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;
- 
    if (DECL_RESULT (old_decl) != NULL_TREE)
      {
        tree *res_decl = &DECL_RESULT (old_decl);
--- 3233,3240 ----
        }
    
    /* Copy the Function's body.  */
!   copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
    
    if (DECL_RESULT (old_decl) != NULL_TREE)
      {
        tree *res_decl = &DECL_RESULT (old_decl);
*************** tree_function_versioning (tree old_decl,
*** 2894,2906 ****
        lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      }
    
-   current_function_decl = NULL;
    /* Renumber the lexical scoping (non-code) blocks consecutively.  */
    number_blocks (new_decl);
  
    /* Clean up.  */
    splay_tree_delete (id.decl_map);
    fold_cond_expr_cond ();
    return;
  }
  
--- 3242,3261 ----
        lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      }
    
    /* Renumber the lexical scoping (non-code) blocks consecutively.  */
    number_blocks (new_decl);
  
    /* Clean up.  */
    splay_tree_delete (id.decl_map);
    fold_cond_expr_cond ();
+   if (gimple_in_ssa_p (cfun))
+     {
+       update_ssa (TODO_update_ssa);
+     }
+   free_dominance_info (CDI_DOMINATORS);
+   free_dominance_info (CDI_POST_DOMINATORS);
+   pop_cfun ();
+   current_function_decl = NULL;
    return;
  }
  
Index: tree-optimize.c
===================================================================
*** tree-optimize.c	(revision 120209)
--- tree-optimize.c	(working copy)
*************** static bool
*** 57,64 ****
  gate_all_optimizations (void)
  {
    return (optimize >= 1
! 	  /* Don't bother doing anything if the program has errors.  */
! 	  && !(errorcount || sorrycount));
  }
  
  struct tree_opt_pass pass_all_optimizations =
--- 57,65 ----
  gate_all_optimizations (void)
  {
    return (optimize >= 1
! 	  /* Don't bother doing anything if the program has errors.
! 	     We have to pass down the queue if we already went into SSA.  */
! 	  && (!(errorcount || sorrycount) || gimple_in_ssa_p (cfun)));
  }
  
  struct tree_opt_pass pass_all_optimizations =
*************** struct tree_opt_pass pass_all_optimizati
*** 78,87 ****
    0					/* letter */
  };
  
  struct tree_opt_pass pass_early_local_passes =
  {
!   NULL,					/* name */
!   gate_all_optimizations,		/* gate */
    NULL,					/* execute */
    NULL,					/* sub */
    NULL,					/* next */
--- 79,97 ----
    0					/* letter */
  };
  
+ /* Gate: execute, or not, all of the non-trivial optimizations.  */
+ 
+ static bool
+ gate_all_early_local_passes (void)
+ {
+   /* Don't bother doing anything if the program has errors.  */
+   return (!(errorcount || sorrycount));
+ }
+ 
  struct tree_opt_pass pass_early_local_passes =
  {
!   "early_local_cleanups",		/* name */
!   gate_all_early_local_passes,		/* gate */
    NULL,					/* execute */
    NULL,					/* sub */
    NULL,					/* next */
*************** struct tree_opt_pass pass_early_local_pa
*** 95,100 ****
--- 105,144 ----
    0					/* letter */
  };
  
+ static unsigned int
+ execute_early_local_optimizations (void)
+ {
+   cgraph_state = CGRAPH_STATE_IPA_SSA;
+   return 0;
+ }
+ 
+ /* Gate: execute, or not, all of the non-trivial optimizations.  */
+ 
+ static bool
+ gate_all_early_optimizations (void)
+ {
+   return (optimize >= 1
+ 	  /* Don't bother doing anything if the program has errors.  */
+ 	  && !(errorcount || sorrycount));
+ }
+ 
+ struct tree_opt_pass pass_all_early_optimizations =
+ {
+   "early_optimizations",		/* name */
+   gate_all_early_optimizations,		/* gate */
+   execute_early_local_optimizations,	/* execute */
+   NULL,					/* sub */
+   NULL,					/* next */
+   0,					/* static_pass_number */
+   0,					/* tv_id */
+   0,					/* properties_required */
+   0,					/* properties_provided */
+   0,					/* properties_destroyed */
+   0,					/* todo_flags_start */
+   0,					/* todo_flags_finish */
+   0					/* letter */
+ };
+ 
  /* Pass: cleanup the CFG just before expanding trees to RTL.
     This is just a round of label cleanups and case node grouping
     because after the tree optimizers have run such cleanups may
*************** execute_cleanup_cfg_pre_ipa (void)
*** 107,116 ****
    return 0;
  }
  
  struct tree_opt_pass pass_cleanup_cfg =
  {
    "cleanup_cfg",			/* name */
!   NULL,					/* gate */
    execute_cleanup_cfg_pre_ipa,		/* execute */
    NULL,					/* sub */
    NULL,					/* next */
--- 151,168 ----
    return 0;
  }
  
+ /* Gate: execute, or not, the cfg cleanup before IPA.  */
+ 
+ static bool
+ gate_cleanup_cfg_pre_ipa (void)
+ {
+   return (true);
+ }
+ 
  struct tree_opt_pass pass_cleanup_cfg =
  {
    "cleanup_cfg",			/* name */
!   gate_cleanup_cfg_pre_ipa,		/* gate */
    execute_cleanup_cfg_pre_ipa,		/* execute */
    NULL,					/* sub */
    NULL,					/* next */
*************** execute_free_datastructures (void)
*** 170,176 ****
  
    /* Remove the ssa structures.  Do it here since this includes statement
       annotations that need to be intact during disband_implicit_edges.  */
!   delete_tree_ssa ();
    return 0;
  }
  
--- 222,229 ----
  
    /* Remove the ssa structures.  Do it here since this includes statement
       annotations that need to be intact during disband_implicit_edges.  */
!   if (cfun->gimple_df)
!     delete_tree_ssa ();
    return 0;
  }
  
*************** execute_fixup_cfg (void)
*** 264,269 ****
--- 317,324 ----
    basic_block bb;
    block_stmt_iterator bsi;
  
+   cfun->after_inlining = true;
+ 
    if (cfun->eh)
      FOR_EACH_BB (bb)
        {
*************** execute_init_datastructures (void)
*** 341,350 ****
    return 0;
  }
  
  struct tree_opt_pass pass_init_datastructures =
  {
    NULL,					/* name */
!   NULL,					/* gate */
    execute_init_datastructures,		/* execute */
    NULL,					/* sub */
    NULL,					/* next */
--- 396,413 ----
    return 0;
  }
  
+ /* Gate: initialize or not the SSA datastructures.  */
+ 
+ static bool
+ gate_init_datastructures (void)
+ {
+   return (optimize >= 1);
+ }
+ 
  struct tree_opt_pass pass_init_datastructures =
  {
    NULL,					/* name */
!   gate_init_datastructures,		/* gate */
    execute_init_datastructures,		/* execute */
    NULL,					/* sub */
    NULL,					/* next */
*************** tree_lowering_passes (tree fn)
*** 368,374 ****
--- 431,440 ----
    tree_register_cfg_hooks ();
    bitmap_obstack_initialize (NULL);
    execute_pass_list (all_lowering_passes);
+   if (optimize && cgraph_global_info_ready)
+     execute_pass_list (pass_early_local_passes.sub);
    free_dominance_info (CDI_POST_DOMINATORS);
+   free_dominance_info (CDI_DOMINATORS);
    compact_blocks ();
    current_function_decl = saved_current_function_decl;
    bitmap_obstack_release (NULL);
*************** tree_rest_of_compilation (tree fndecl)
*** 408,413 ****
--- 474,482 ----
  
    node = cgraph_node (fndecl);
  
+   /* Initialize the default bitmap obstack.  */
+   bitmap_obstack_initialize (NULL);
+ 
    /* We might need the body of this function so that we can expand
       it inline somewhere else.  */
    if (cgraph_preserve_function_body_p (fndecl))
*************** tree_rest_of_compilation (tree fndecl)
*** 424,430 ****
       We haven't necessarily assigned RTL to all variables yet, so it's
       not safe to try to expand expressions involving them.  */
    cfun->x_dont_save_pending_sizes_p = 1;
!   cfun->after_inlining = true;
  
    if (flag_inline_trees)
      {
--- 493,500 ----
       We haven't necessarily assigned RTL to all variables yet, so it's
       not safe to try to expand expressions involving them.  */
    cfun->x_dont_save_pending_sizes_p = 1;
!   
!   tree_register_cfg_hooks ();
  
    if (flag_inline_trees)
      {
*************** tree_rest_of_compilation (tree fndecl)
*** 453,464 ****
       Kill it so it won't confuse us.  */
    cgraph_node_remove_callees (node);
  
- 
-   /* Initialize the default bitmap obstack.  */
-   bitmap_obstack_initialize (NULL);
    bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
-   
-   tree_register_cfg_hooks ();
    /* Perform all tree transforms and optimizations.  */
    execute_pass_list (all_passes);
    
--- 523,529 ----
Index: tree-profile.c
===================================================================
*** tree-profile.c	(revision 120209)
--- tree-profile.c	(working copy)
*************** do_tree_profiling (void)
*** 237,242 ****
--- 237,246 ----
  static unsigned int
  tree_profiling (void)
  {
+   /* Don't profile functions produced at destruction time, particularly
+      the gcov datastructure initializer.  */
+   if (cgraph_state == CGRAPH_STATE_FINISHED)
+     return 0;
    branch_prob ();
    if (flag_branch_probabilities
        && flag_profile_values
*************** struct tree_opt_pass pass_tree_profile =
*** 267,299 ****
    0					/* letter */
  };
  
- /* Return 1 if tree-based profiling is in effect, else 0.
-    If it is, set up hooks for tree-based profiling.
-    Gate for pass_tree_profile.  */
- 
- static bool
- do_early_tree_profiling (void)
- {
-   return (do_tree_profiling () && (!flag_unit_at_a_time || !optimize));
- }
- 
- struct tree_opt_pass pass_early_tree_profile = 
- {
-   "early_tree_profile",			/* name */
-   do_early_tree_profiling,		/* gate */
-   tree_profiling,			/* execute */
-   NULL,					/* sub */
-   NULL,					/* next */
-   0,					/* static_pass_number */
-   TV_BRANCH_PROB,			/* tv_id */
-   PROP_gimple_leh | PROP_cfg,		/* properties_required */
-   PROP_gimple_leh | PROP_cfg,		/* properties_provided */
-   0,					/* properties_destroyed */
-   0,					/* todo_flags_start */
-   TODO_verify_stmts,			/* todo_flags_finish */
-   0					/* letter */
- };
- 
  struct profile_hooks tree_profile_hooks =
  {
    tree_init_edge_profiler,      /* init_edge_profiler */
--- 271,276 ----
Index: tree-flow.h
===================================================================
*** tree-flow.h	(revision 120209)
--- tree-flow.h	(working copy)
*************** struct var_ann_d GTY(())
*** 251,256 ****
--- 251,259 ----
    /* True for HEAP and PARM_NOALIAS artificial variables.  */
    unsigned is_heapvar : 1;
  
+   /* True for call clobbered variables.  */
+   unsigned call_clobbered : 1;
+ 
    /* Memory partition tag assigned to this symbol.  */
    tree mpt;
  
Index: tree-cfg.c
===================================================================
*** tree-cfg.c	(revision 120209)
--- tree-cfg.c	(working copy)
*************** bsi_for_stmt (tree stmt)
*** 2813,2818 ****
--- 2813,2820 ----
  static inline void
  update_modified_stmts (tree t)
  {
+   if (!ssa_operands_active ())
+     return;
    if (TREE_CODE (t) == STATEMENT_LIST)
      {
        tree_stmt_iterator i;
Index: passes.c
===================================================================
*** passes.c	(revision 120209)
--- passes.c	(working copy)
*************** init_optimization_passes (void)
*** 441,452 ****
--- 441,458 ----
    p = &all_ipa_passes;
    NEXT_PASS (pass_early_ipa_inline);
    NEXT_PASS (pass_early_local_passes);
+   /* The process of merging updates of IPA passes to operate on SSA is not
+      complete yet.  */
+ #if 0
    NEXT_PASS (pass_ipa_cp);
+ #endif
    NEXT_PASS (pass_ipa_inline);
+ #if 0
    NEXT_PASS (pass_ipa_reference);
    NEXT_PASS (pass_ipa_pure_const); 
    NEXT_PASS (pass_ipa_type_escape);
    NEXT_PASS (pass_ipa_pta);
+ #endif
    *p = NULL;
  
    /* All passes needed to lower the function into shape optimizers can
*************** init_optimization_passes (void)
*** 461,483 ****
    NEXT_PASS (pass_lower_complex_O0);
    NEXT_PASS (pass_lower_vector);
    NEXT_PASS (pass_warn_function_return);
-   NEXT_PASS (pass_early_tree_profile);
    *p = NULL;
  
    p = &pass_early_local_passes.sub;
    NEXT_PASS (pass_tree_profile);
    NEXT_PASS (pass_cleanup_cfg);
    NEXT_PASS (pass_rebuild_cgraph_edges);
    *p = NULL;
  
    p = &all_passes;
    NEXT_PASS (pass_fixup_cfg);
-   NEXT_PASS (pass_init_datastructures);
-   NEXT_PASS (pass_expand_omp);
    NEXT_PASS (pass_all_optimizations);
    NEXT_PASS (pass_warn_function_noreturn);
-   NEXT_PASS (pass_mudflap_2);
    NEXT_PASS (pass_free_datastructures);
    NEXT_PASS (pass_free_cfg_annotations);
    NEXT_PASS (pass_expand);
    NEXT_PASS (pass_rest_of_compilation);
--- 467,499 ----
    NEXT_PASS (pass_lower_complex_O0);
    NEXT_PASS (pass_lower_vector);
    NEXT_PASS (pass_warn_function_return);
    *p = NULL;
  
    p = &pass_early_local_passes.sub;
    NEXT_PASS (pass_tree_profile);
    NEXT_PASS (pass_cleanup_cfg);
+   NEXT_PASS (pass_init_datastructures);
+   NEXT_PASS (pass_expand_omp);
+   NEXT_PASS (pass_all_early_optimizations);
    NEXT_PASS (pass_rebuild_cgraph_edges);
    *p = NULL;
  
+   p = &pass_all_early_optimizations.sub;
+   NEXT_PASS (pass_referenced_vars);
+   NEXT_PASS (pass_reset_cc_flags);
+   NEXT_PASS (pass_create_structure_vars);
+   NEXT_PASS (pass_build_ssa);
+   NEXT_PASS (pass_early_warn_uninitialized);
+   NEXT_PASS (pass_cleanup_cfg);
+ 
+   *p = NULL;
+ 
    p = &all_passes;
    NEXT_PASS (pass_fixup_cfg);
    NEXT_PASS (pass_all_optimizations);
    NEXT_PASS (pass_warn_function_noreturn);
    NEXT_PASS (pass_free_datastructures);
+   NEXT_PASS (pass_mudflap_2);
    NEXT_PASS (pass_free_cfg_annotations);
    NEXT_PASS (pass_expand);
    NEXT_PASS (pass_rest_of_compilation);
*************** init_optimization_passes (void)
*** 485,494 ****
    *p = NULL;
  
    p = &pass_all_optimizations.sub;
-   NEXT_PASS (pass_referenced_vars);
-   NEXT_PASS (pass_reset_cc_flags);
-   NEXT_PASS (pass_create_structure_vars);
-   NEXT_PASS (pass_build_ssa);
    NEXT_PASS (pass_may_alias);
    NEXT_PASS (pass_return_slot);
    NEXT_PASS (pass_rename_ssa_copies);
--- 501,506 ----
*************** execute_one_pass (struct tree_opt_pass *
*** 947,952 ****
--- 959,967 ----
    /* Run post-pass cleanup and verification.  */
    execute_todo (todo_after | pass->todo_flags_finish);
  
+   if (!current_function_decl)
+     cgraph_expand_expand_queue ();
+ 
    /* Flush and close dump file.  */
    if (dump_file_name)
      {
*************** execute_ipa_pass_list (struct tree_opt_p
*** 986,991 ****
--- 1001,1008 ----
        gcc_assert (!cfun);
        if (execute_one_pass (pass) && pass->sub)
  	do_per_function ((void (*)(void *))execute_pass_list, pass->sub);
+       if (!current_function_decl)
+ 	cgraph_expand_expand_queue ();
        pass = pass->next;
      }
    while (pass);


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]