
[PATCH] Clean up early inliner


This cleans up the early inliner, simplifying it and making it the
pass that resolves all always-inline inlining.  It should also remove
the need to run the IPA inliner when not optimizing or not inlining.
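
As a concrete illustration (not part of the patch), the kind of call the
early inliner now has to resolve on its own, even at -O0 or with
-fno-inline, looks like the following.  This is a minimal sketch modeled
on the new testcase; with gnu_inline no out-of-line copy of foo is
emitted, so the program only links if the call really gets inlined:

/* Hypothetical sketch, not part of the patch.  */
extern inline void foo (void) __attribute__((always_inline,gnu_inline));

extern inline void __attribute__((always_inline,gnu_inline))
foo (void)
{
}

void
bar (void)
{
  foo ();
}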

The only early-inliner cleanup that is left is the interaction
with profiling.

Bootstrap and regtest running on x86_64-unknown-linux-gnu, ok for
trunk?

Thanks,
Richard.

2010-01-29  Richard Guenther  <rguenther@suse.de>

	* ipa.c (cgraph_postorder): Adjust postorder to guarantee
	single-iteration always-inline inlining.
	* ipa-inline.c (cgraph_mark_inline): Do not return anything.
	(cgraph_decide_inlining): Do not handle always-inline
	specially.
	(try_inline): Remove always-inline cycle detection special case.
	Do not recurse on always-inlines.
	(cgraph_early_inlining): Do not iterate if not optimizing.
	(cgraph_gate_early_inlining): Remove.
	(pass_early_inline): Run unconditionally.
	(gate_cgraph_decide_inlining): New function.
	(pass_ipa_inline): Use it.  Do not run the IPA inliner if
	not inlining or optimizing.

	* gcc.dg/torture/inline-2.c: New testcase.
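
For reference (again, not part of the patch itself), a minimal sketch of
the flatten case that the new cgraph_flatten path below deals with: a
function carrying the flatten attribute asks for every call in its body
to be inlined recursively, where possible.

/* Hypothetical example of a function to be flattened.  */
static int leaf (int x) { return x * 2; }
static int helper (int x) { return leaf (x) + 1; }

int __attribute__((flatten))
outer (int x)
{
  /* Both helper and, transitively, leaf are candidates to be
     inlined here.  */
  return helper (x);
}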

Index: gcc/ipa-inline.c
===================================================================
*** gcc/ipa-inline.c.orig	2010-01-28 17:41:27.000000000 +0100
--- gcc/ipa-inline.c	2010-02-03 13:35:37.000000000 +0100
*************** enum inlining_mode {
*** 159,167 ****
    INLINE_SIZE,
    INLINE_ALL
  };
  static bool
! cgraph_decide_inlining_incrementally (struct cgraph_node *, enum inlining_mode,
! 				      int);
  
  
  /* Statistics we collect about inlining algorithm.  */
--- 159,168 ----
    INLINE_SIZE,
    INLINE_ALL
  };
+ 
  static bool
! cgraph_decide_inlining_incrementally (struct cgraph_node *, enum inlining_mode);
! static void cgraph_flatten (struct cgraph_node *node);
  
  
  /* Statistics we collect about inlining algorithm.  */
*************** cgraph_mark_inline_edge (struct cgraph_e
*** 345,355 ****
      return false;
  }
  
! /* Mark all calls of EDGE->CALLEE inlined into EDGE->CALLER.
!    Return following unredirected edge in the list of callers
!    of EDGE->CALLEE  */
  
! static struct cgraph_edge *
  cgraph_mark_inline (struct cgraph_edge *edge)
  {
    struct cgraph_node *to = edge->caller;
--- 346,354 ----
      return false;
  }
  
! /* Mark all calls of EDGE->CALLEE inlined into EDGE->CALLER.  */
  
! static void
  cgraph_mark_inline (struct cgraph_edge *edge)
  {
    struct cgraph_node *to = edge->caller;
*************** cgraph_mark_inline (struct cgraph_edge *
*** 369,376 ****
  	    edge = next;
  	}
      }
- 
-   return edge;
  }
  
  /* Estimate the growth caused by inlining NODE into all callees.  */
--- 368,373 ----
*************** cgraph_decide_inlining_of_small_function
*** 1127,1132 ****
--- 1124,1211 ----
    BITMAP_FREE (updated_nodes);
  }
  
+ /* Flatten NODE from the IPA inliner.  */
+ 
+ static void
+ cgraph_flatten (struct cgraph_node *node)
+ {
+   struct cgraph_edge *e;
+   void *old_mode;
+ 
+   old_mode = node->aux;
+ 
+   node->aux = (void *)(size_t) INLINE_ALL;
+ 
+   for (e = node->callees; e; e = e->next_callee)
+     {
+       struct cgraph_node *callee = e->callee;
+       void *callee_mode = callee->aux;
+ 
+       if (e->call_stmt_cannot_inline_p)
+ 	continue;
+ 
+       /* We've hit a cycle?  It is time to give up.  */
+       if (callee_mode)
+ 	{
+ 	  if (dump_file)
+ 	    fprintf (dump_file,
+ 		     "Not inlining %s into %s to avoid cycle.\n",
+ 		     cgraph_node_name (callee),
+ 		     cgraph_node_name (e->caller));
+ 	  e->inline_failed = (e->callee->local.disregard_inline_limits
+ 			      ? CIF_RECURSIVE_INLINING : CIF_UNSPECIFIED);
+ 	  continue;
+ 	}
+ 
+       /* When the edge is already inlined, we just need to recurse into
+ 	 it in order to fully flatten the leaves.  */
+       if (!e->inline_failed)
+ 	{
+ 	  callee->aux = (void *)(size_t) INLINE_ALL;
+ 	  cgraph_flatten (e->callee);
+ 	  callee->aux = callee_mode;
+ 	  continue;
+ 	}
+ 
+       if (cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed))
+ 	{
+ 	  if (dump_file)
+ 	    fprintf (dump_file, "Not inlining: recursive call.\n");
+ 	  continue;
+ 	}
+       if (!tree_can_inline_p (e))
+ 	{
+ 	  if (dump_file)
+ 	    fprintf (dump_file, "Not inlining: %s",
+ 		     cgraph_inline_failed_string (e->inline_failed));
+ 	  continue;
+ 	}
+       if (!e->callee->analyzed)
+ 	{
+ 	  if (dump_file)
+ 	    fprintf (dump_file,
+ 		     "Not inlining: Function body no longer available.\n");
+ 	  continue;
+ 	}
+ 
+       if (dump_file)
+ 	fprintf (dump_file, " Inlining %s into %s.\n",
+ 		 cgraph_node_name (e->callee),
+ 		 cgraph_node_name (e->caller));
+       if (e->inline_failed)
+ 	{
+ 	  cgraph_mark_inline (e);
+ 
+ 	  /* Flattening needs to be done recursively.  */
+ 	  callee->aux = (void *)(size_t) INLINE_ALL;
+ 	  cgraph_flatten (e->callee);
+ 	  callee->aux = callee_mode;
+ 	}
+     }
+ 
+   node->aux = old_mode;
+ }
+ 
  /* Decide on the inlining.  We do so in the topological order to avoid
     expenses on updating data structures.  */
  
*************** cgraph_decide_inlining (void)
*** 1139,1145 ****
      XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
    int old_size = 0;
    int i;
-   bool redo_always_inline = true;
    int initial_size = 0;
  
    cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
--- 1218,1223 ----
*************** cgraph_decide_inlining (void)
*** 1177,1241 ****
      node->aux = 0;
  
    if (dump_file)
!     fprintf (dump_file, "\nInlining always_inline functions:\n");
  
!   /* In the first pass mark all always_inline edges.  Do this with a priority
!      so none of our later choices will make this impossible.  */
!   while (redo_always_inline)
!     {
!       redo_always_inline = false;
!       for (i = nnodes - 1; i >= 0; i--)
  	{
- 	  struct cgraph_edge *e, *next;
- 
- 	  node = order[i];
- 
- 	  /* Handle nodes to be flattened, but don't update overall unit
- 	     size.  */
- 	  if (lookup_attribute ("flatten",
- 				DECL_ATTRIBUTES (node->decl)) != NULL)
- 	    {
- 	      if (dump_file)
- 		fprintf (dump_file,
- 			 "Flattening %s\n", cgraph_node_name (node));
- 	      cgraph_decide_inlining_incrementally (node, INLINE_ALL, 0);
- 	    }
- 
- 	  if (!node->local.disregard_inline_limits)
- 	    continue;
- 	  if (dump_file)
- 	    fprintf (dump_file,
- 		     "\nConsidering %s size:%i (always inline)\n",
- 		     cgraph_node_name (node), node->global.size);
- 	  old_size = overall_size;
- 	  for (e = node->callers; e; e = next)
- 	    {
- 	      next = e->next_caller;
- 	      if (!e->inline_failed || e->call_stmt_cannot_inline_p)
- 		continue;
- 	      if (cgraph_recursive_inlining_p (e->caller, e->callee,
- 					       &e->inline_failed))
- 		continue;
- 	      if (!tree_can_inline_p (e))
-                 continue;
- 	      if (cgraph_mark_inline_edge (e, true, NULL))
- 		redo_always_inline = true;
- 	      if (dump_file)
- 		fprintf (dump_file,
- 			 " Inlined into %s which now has size %i.\n",
- 			 cgraph_node_name (e->caller),
- 			 e->caller->global.size);
- 	    }
- 	  /* Inlining self recursive function might introduce new calls to
- 	     themselves we didn't see in the loop above.  Fill in the proper
- 	     reason why inline failed.  */
- 	  for (e = node->callers; e; e = e->next_caller)
- 	    if (e->inline_failed)
- 	      e->inline_failed = CIF_RECURSIVE_INLINING;
  	  if (dump_file)
  	    fprintf (dump_file,
! 		     " Inlined for a net change of %+i size.\n",
! 		     overall_size - old_size);
  	}
      }
  
--- 1255,1283 ----
      node->aux = 0;
  
    if (dump_file)
!     fprintf (dump_file, "\nFlattening functions:\n");
  
!   /* In the first pass handle functions to be flattened.  Do this with
!      a priority so none of our later choices will make this impossible.  */
!   for (i = nnodes - 1; i >= 0; i--)
!     {
!       node = order[i];
! 
!       /* Handle nodes to be flattened, but don't update overall unit
! 	 size.  Calling the incremental inliner here is lame;
! 	 a simple worklist should be enough.  What should be left
! 	 here from the early inliner (if it runs) are the cyclic cases.
! 	 Ideally, when processing callees we would stop inlining at
! 	 the entry of a cycle, possibly cloning that entry point and
! 	 trying to flatten it, turning it into a self-recursive
! 	 function.  */
!       if (lookup_attribute ("flatten",
! 			    DECL_ATTRIBUTES (node->decl)) != NULL)
  	{
  	  if (dump_file)
  	    fprintf (dump_file,
! 		     "Flattening %s\n", cgraph_node_name (node));
! 	  cgraph_flatten (node);
  	}
      }
  
*************** cgraph_decide_inlining (void)
*** 1312,1397 ****
    return 0;
  }
  
- /* Try to inline edge E from incremental inliner.  MODE specifies mode
-    of inliner.
- 
-    We are detecting cycles by storing mode of inliner into cgraph_node last
-    time we visited it in the recursion.  In general when mode is set, we have
-    recursive inlining, but as an special case, we want to try harder inline
-    ALWAYS_INLINE functions: consider callgraph a->b->c->b, with a being
-    flatten, b being always inline.  Flattening 'a' will collapse
-    a->b->c before hitting cycle.  To accommodate always inline, we however
-    need to inline a->b->c->b.
- 
-    So after hitting cycle first time, we switch into ALWAYS_INLINE mode and
-    stop inlining only after hitting ALWAYS_INLINE in ALWAY_INLINE mode.  */
- static bool
- try_inline (struct cgraph_edge *e, enum inlining_mode mode, int depth)
- {
-   struct cgraph_node *callee = e->callee;
-   enum inlining_mode callee_mode = (enum inlining_mode) (size_t) callee->aux;
-   bool always_inline = e->callee->local.disregard_inline_limits;
-   bool inlined = false;
- 
-   /* We've hit cycle?  */
-   if (callee_mode)
-     {
-       /* It is first time we see it and we are not in ALWAY_INLINE only
- 	 mode yet.  and the function in question is always_inline.  */
-       if (always_inline && mode != INLINE_ALWAYS_INLINE)
- 	{
- 	  if (dump_file)
- 	    {
- 	      indent_to (dump_file, depth);
- 	      fprintf (dump_file,
- 		       "Hit cycle in %s, switching to always inline only.\n",
- 		       cgraph_node_name (callee));
- 	    }
- 	  mode = INLINE_ALWAYS_INLINE;
- 	}
-       /* Otherwise it is time to give up.  */
-       else
- 	{
- 	  if (dump_file)
- 	    {
- 	      indent_to (dump_file, depth);
- 	      fprintf (dump_file,
- 		       "Not inlining %s into %s to avoid cycle.\n",
- 		       cgraph_node_name (callee),
- 		       cgraph_node_name (e->caller));
- 	    }
- 	  e->inline_failed = (e->callee->local.disregard_inline_limits
- 		              ? CIF_RECURSIVE_INLINING : CIF_UNSPECIFIED);
-           return false;
- 	}
-     }
- 
-   callee->aux = (void *)(size_t) mode;
-   if (dump_file)
-     {
-       indent_to (dump_file, depth);
-       fprintf (dump_file, " Inlining %s into %s.\n",
- 	       cgraph_node_name (e->callee),
- 	       cgraph_node_name (e->caller));
-     }
-   if (e->inline_failed)
-     {
-       cgraph_mark_inline (e);
- 
-       /* In order to fully inline always_inline functions, we need to
- 	 recurse here, since the inlined functions might not be processed by
- 	 incremental inlining at all yet.
- 
- 	 Also flattening needs to be done recursively.  */
- 
-       if (mode == INLINE_ALL || always_inline)
- 	cgraph_decide_inlining_incrementally (e->callee, mode, depth + 1);
-       inlined = true;
-     }
-   callee->aux = (void *)(size_t) callee_mode;
-   return inlined;
- }
- 
  /* Return true when N is leaf function.  Accept cheap (pure&const) builtins
     in leaf functions.  */
  static bool
--- 1354,1359 ----
*************** leaf_node_p (struct cgraph_node *n)
*** 1407,1444 ****
  }
  
  /* Decide on the inlining.  We do so in the topological order to avoid
!    expenses on updating data structures.
!    DEPTH is depth of recursion, used only for debug output.  */
  
  static bool
  cgraph_decide_inlining_incrementally (struct cgraph_node *node,
! 				      enum inlining_mode mode,
! 				      int depth)
  {
    struct cgraph_edge *e;
    bool inlined = false;
    cgraph_inline_failed_t failed_reason;
-   enum inlining_mode old_mode;
  
  #ifdef ENABLE_CHECKING
    verify_cgraph_node (node);
  #endif
  
-   old_mode = (enum inlining_mode) (size_t)node->aux;
- 
    if (mode != INLINE_ALWAYS_INLINE && mode != INLINE_SIZE_NORECURSIVE
        && lookup_attribute ("flatten", DECL_ATTRIBUTES (node->decl)) != NULL)
      {
        if (dump_file)
! 	{
! 	  indent_to (dump_file, depth);
! 	  fprintf (dump_file, "Flattening %s\n", cgraph_node_name (node));
! 	}
        mode = INLINE_ALL;
      }
  
-   node->aux = (void *)(size_t) mode;
- 
    /* First of all look for always inline functions.  */
    if (mode != INLINE_SIZE_NORECURSIVE)
      for (e = node->callees; e; e = e->next_callee)
--- 1369,1397 ----
  }
  
  /* Decide on the inlining.  We do so in the topological order to avoid
!    expenses on updating data structures.  */
  
  static bool
  cgraph_decide_inlining_incrementally (struct cgraph_node *node,
! 				      enum inlining_mode mode)
  {
    struct cgraph_edge *e;
    bool inlined = false;
    cgraph_inline_failed_t failed_reason;
  
  #ifdef ENABLE_CHECKING
    verify_cgraph_node (node);
  #endif
  
    if (mode != INLINE_ALWAYS_INLINE && mode != INLINE_SIZE_NORECURSIVE
        && lookup_attribute ("flatten", DECL_ATTRIBUTES (node->decl)) != NULL)
      {
        if (dump_file)
! 	fprintf (dump_file, "Incrementally flattening %s\n",
! 		 cgraph_node_name (node));
        mode = INLINE_ALL;
      }
  
    /* First of all look for always inline functions.  */
    if (mode != INLINE_SIZE_NORECURSIVE)
      for (e = node->callees; e; e = e->next_callee)
*************** cgraph_decide_inlining_incrementally (st
*** 1448,1508 ****
  	  continue;
  	if (e->call_stmt_cannot_inline_p)
  	  continue;
- 	/* When the edge is already inlined, we just need to recurse into
- 	   it in order to fully flatten the leaves.  */
- 	if (!e->inline_failed && mode == INLINE_ALL)
- 	  {
- 	    inlined |= try_inline (e, mode, depth);
- 	    continue;
- 	  }
  	if (dump_file)
! 	  {
! 	    indent_to (dump_file, depth);
! 	    fprintf (dump_file,
! 		     "Considering to always inline inline candidate %s.\n",
! 		     cgraph_node_name (e->callee));
! 	  }
  	if (cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed))
  	  {
  	    if (dump_file)
! 	      {
! 		indent_to (dump_file, depth);
! 		fprintf (dump_file, "Not inlining: recursive call.\n");
! 	      }
  	    continue;
  	  }
  	if (!tree_can_inline_p (e))
  	  {
  	    if (dump_file)
! 	      {
! 		indent_to (dump_file, depth);
! 		fprintf (dump_file,
! 			 "Not inlining: %s",
!                          cgraph_inline_failed_string (e->inline_failed));
! 	      }
  	    continue;
  	  }
  	if (gimple_in_ssa_p (DECL_STRUCT_FUNCTION (node->decl))
  	    != gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->callee->decl)))
  	  {
  	    if (dump_file)
! 	      {
! 		indent_to (dump_file, depth);
! 		fprintf (dump_file, "Not inlining: SSA form does not match.\n");
! 	      }
  	    continue;
  	  }
  	if (!e->callee->analyzed)
  	  {
  	    if (dump_file)
! 	      {
! 		indent_to (dump_file, depth);
! 		fprintf (dump_file,
! 			 "Not inlining: Function body no longer available.\n");
! 	      }
  	    continue;
  	  }
! 	inlined |= try_inline (e, mode, depth);
        }
  
    /* Now do the automatic inlining.  */
--- 1401,1445 ----
  	  continue;
  	if (e->call_stmt_cannot_inline_p)
  	  continue;
  	if (dump_file)
! 	  fprintf (dump_file,
! 		   "Considering to always inline inline candidate %s.\n",
! 		   cgraph_node_name (e->callee));
  	if (cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed))
  	  {
  	    if (dump_file)
! 	      fprintf (dump_file, "Not inlining: recursive call.\n");
  	    continue;
  	  }
  	if (!tree_can_inline_p (e))
  	  {
  	    if (dump_file)
! 	      fprintf (dump_file,
! 		       "Not inlining: %s",
! 		       cgraph_inline_failed_string (e->inline_failed));
  	    continue;
  	  }
  	if (gimple_in_ssa_p (DECL_STRUCT_FUNCTION (node->decl))
  	    != gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->callee->decl)))
  	  {
  	    if (dump_file)
! 	      fprintf (dump_file, "Not inlining: SSA form does not match.\n");
  	    continue;
  	  }
  	if (!e->callee->analyzed)
  	  {
  	    if (dump_file)
! 	      fprintf (dump_file,
! 		       "Not inlining: Function body no longer available.\n");
  	    continue;
  	  }
! 
! 	if (dump_file)
! 	  fprintf (dump_file, " Inlining %s into %s.\n",
! 		   cgraph_node_name (e->callee),
! 		   cgraph_node_name (e->caller));
! 	cgraph_mark_inline (e);
! 	inlined = true;
        }
  
    /* Now do the automatic inlining.  */
*************** cgraph_decide_inlining_incrementally (st
*** 1529,1549 ****
  	  if (cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed))
  	    {
  	      if (dump_file)
! 		{
! 		  indent_to (dump_file, depth);
! 		  fprintf (dump_file, "Not inlining: recursive call.\n");
! 		}
  	      continue;
  	    }
  	  if (gimple_in_ssa_p (DECL_STRUCT_FUNCTION (node->decl))
  	      != gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->callee->decl)))
  	    {
  	      if (dump_file)
! 		{
! 		  indent_to (dump_file, depth);
! 		  fprintf (dump_file,
! 			   "Not inlining: SSA form does not match.\n");
! 		}
  	      continue;
  	    }
  
--- 1466,1480 ----
  	  if (cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed))
  	    {
  	      if (dump_file)
! 		fprintf (dump_file, "Not inlining: recursive call.\n");
  	      continue;
  	    }
  	  if (gimple_in_ssa_p (DECL_STRUCT_FUNCTION (node->decl))
  	      != gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->callee->decl)))
  	    {
  	      if (dump_file)
! 		fprintf (dump_file,
! 			 "Not inlining: SSA form does not match.\n");
  	      continue;
  	    }
  
*************** cgraph_decide_inlining_incrementally (st
*** 1562,1575 ****
  	      && cgraph_estimate_growth (e->callee) > allowed_growth)
  	    {
  	      if (dump_file)
! 		{
! 		  indent_to (dump_file, depth);
! 		  fprintf (dump_file,
! 			   "Not inlining: code size would grow by %i.\n",
! 			   cgraph_estimate_size_after_inlining (1, e->caller,
! 								e->callee)
! 			   - e->caller->global.size);
! 		}
  	      continue;
  	    }
  	  if (!cgraph_check_inline_limits (node, e->callee, &e->inline_failed,
--- 1493,1503 ----
  	      && cgraph_estimate_growth (e->callee) > allowed_growth)
  	    {
  	      if (dump_file)
! 		fprintf (dump_file,
! 			 "Not inlining: code size would grow by %i.\n",
! 			 cgraph_estimate_size_after_inlining (1, e->caller,
! 							      e->callee)
! 			 - e->caller->global.size);
  	      continue;
  	    }
  	  if (!cgraph_check_inline_limits (node, e->callee, &e->inline_failed,
*************** cgraph_decide_inlining_incrementally (st
*** 1577,1616 ****
  	      || e->call_stmt_cannot_inline_p)
  	    {
  	      if (dump_file)
! 		{
! 		  indent_to (dump_file, depth);
! 		  fprintf (dump_file, "Not inlining: %s.\n",
! 			   cgraph_inline_failed_string (e->inline_failed));
! 		}
  	      continue;
  	    }
  	  if (!e->callee->analyzed)
  	    {
  	      if (dump_file)
! 		{
! 		  indent_to (dump_file, depth);
! 		  fprintf (dump_file,
! 			   "Not inlining: Function body no longer available.\n");
! 		}
  	      continue;
  	    }
  	  if (!tree_can_inline_p (e))
  	    {
  	      if (dump_file)
! 		{
! 		  indent_to (dump_file, depth);
! 		  fprintf (dump_file,
! 			   "Not inlining: %s.",
! 			   cgraph_inline_failed_string (e->inline_failed));
! 		}
  	      continue;
  	    }
  	  if (cgraph_default_inline_p (e->callee, &failed_reason))
! 	    inlined |= try_inline (e, mode, depth);
  	}
        BITMAP_FREE (visited);
      }
-   node->aux = (void *)(size_t) old_mode;
    return inlined;
  }
  
--- 1505,1541 ----
  	      || e->call_stmt_cannot_inline_p)
  	    {
  	      if (dump_file)
! 		fprintf (dump_file, "Not inlining: %s.\n",
! 			 cgraph_inline_failed_string (e->inline_failed));
  	      continue;
  	    }
  	  if (!e->callee->analyzed)
  	    {
  	      if (dump_file)
! 		fprintf (dump_file,
! 			 "Not inlining: Function body no longer available.\n");
  	      continue;
  	    }
  	  if (!tree_can_inline_p (e))
  	    {
  	      if (dump_file)
! 		fprintf (dump_file,
! 			 "Not inlining: %s.",
! 			 cgraph_inline_failed_string (e->inline_failed));
  	      continue;
  	    }
  	  if (cgraph_default_inline_p (e->callee, &failed_reason))
! 	    {
! 	      if (dump_file)
! 		fprintf (dump_file, " Inlining %s into %s.\n",
! 			 cgraph_node_name (e->callee),
! 			 cgraph_node_name (e->caller));
! 	      cgraph_mark_inline (e);
! 	      inlined = true;
! 	    }
  	}
        BITMAP_FREE (visited);
      }
    return inlined;
  }
  
*************** cgraph_early_inlining (void)
*** 1632,1658 ****
  
    if (sorrycount || errorcount)
      return 0;
!   while (iterations < PARAM_VALUE (PARAM_EARLY_INLINER_MAX_ITERATIONS)
!          && cgraph_decide_inlining_incrementally (node,
!   					          iterations
! 					          ? INLINE_SIZE_NORECURSIVE : INLINE_SIZE, 0))
!     {
        timevar_push (TV_INTEGRATION);
        todo |= optimize_inline_calls (current_function_decl);
-       iterations++;
        timevar_pop (TV_INTEGRATION);
      }
!   if (dump_file)
!     fprintf (dump_file, "Iterations: %i\n", iterations);
    cfun->always_inline_functions_inlined = true;
-   return todo;
- }
  
! /* When inlining shall be performed.  */
! static bool
! cgraph_gate_early_inlining (void)
! {
!   return flag_early_inlining;
  }
  
  struct gimple_opt_pass pass_early_inline =
--- 1557,1596 ----
  
    if (sorrycount || errorcount)
      return 0;
! 
!   if (!optimize
!       || flag_no_inline
!       || !flag_early_inlining)
!     {
!       /* When not optimizing or not inlining, inline only always-inline
! 	 functions.  */
!       cgraph_decide_inlining_incrementally (node, INLINE_ALWAYS_INLINE);
        timevar_push (TV_INTEGRATION);
        todo |= optimize_inline_calls (current_function_decl);
        timevar_pop (TV_INTEGRATION);
      }
!   else
!     {
!       /* We iterate incremental inlining to get trivial cases of indirect
! 	 inlining.  */
!       while (iterations < PARAM_VALUE (PARAM_EARLY_INLINER_MAX_ITERATIONS)
! 	     && cgraph_decide_inlining_incrementally (node,
! 						      iterations
! 						      ? INLINE_SIZE_NORECURSIVE
! 						      : INLINE_SIZE))
! 	{
! 	  timevar_push (TV_INTEGRATION);
! 	  todo |= optimize_inline_calls (current_function_decl);
! 	  iterations++;
! 	  timevar_pop (TV_INTEGRATION);
! 	}
!       if (dump_file)
! 	fprintf (dump_file, "Iterations: %i\n", iterations);
!     }
! 
    cfun->always_inline_functions_inlined = true;
  
!   return todo;
  }
  
  struct gimple_opt_pass pass_early_inline =
*************** struct gimple_opt_pass pass_early_inline
*** 1660,1666 ****
   {
    GIMPLE_PASS,
    "einline",	 			/* name */
!   cgraph_gate_early_inlining,		/* gate */
    cgraph_early_inlining,		/* execute */
    NULL,					/* sub */
    NULL,					/* next */
--- 1598,1604 ----
   {
    GIMPLE_PASS,
    "einline",	 			/* name */
!   NULL,					/* gate */
    cgraph_early_inlining,		/* execute */
    NULL,					/* sub */
    NULL,					/* next */
*************** inline_write_summary (cgraph_node_set se
*** 2044,2055 ****
      ipa_prop_write_jump_functions (set);
  }
  
  struct ipa_opt_pass_d pass_ipa_inline =
  {
   {
    IPA_PASS,
    "inline",				/* name */
!   NULL,					/* gate */
    cgraph_decide_inlining,		/* execute */
    NULL,					/* sub */
    NULL,					/* next */
--- 1982,2011 ----
      ipa_prop_write_jump_functions (set);
  }
  
+ /* When to run IPA inlining.  Inlining of always-inline functions
+    happens during early inlining.  */
+ 
+ static bool
+ gate_cgraph_decide_inlining (void)
+ {
+   /* We'd like to skip this if not optimizing or not inlining as
+      all always-inline functions have been processed by early
+      inlining already.  But this breaks EH with C++ somehow with
+ 
+      g++.dg/torture/pr31863.C: In destructor 'Serializer<unsigned int, Loki::Typelist<ClassSpec<unsigned int, A040, 40u>, Loki::NullType> >::~Serializer()':
+      g++.dg/torture/pr31863.C:231:7: error: statement marked for throw, but doesn't
+      Serializer<unsigned int, ClassSpec<unsigned int, A040, 40u> >::~Serializer (this.18352_8, D.118411_7);
+ 
+      so leave it on unconditionally for now.  */
+   return 1;
+ }
+ 
  struct ipa_opt_pass_d pass_ipa_inline =
  {
   {
    IPA_PASS,
    "inline",				/* name */
!   gate_cgraph_decide_inlining,		/* gate */
    cgraph_decide_inlining,		/* execute */
    NULL,					/* sub */
    NULL,					/* next */
Index: gcc/ipa.c
===================================================================
*** gcc/ipa.c.orig	2009-12-18 15:17:09.000000000 +0100
--- gcc/ipa.c	2010-02-03 12:49:01.000000000 +0100
*************** cgraph_postorder (struct cgraph_node **o
*** 70,75 ****
--- 70,81 ----
  		    node2->aux = edge->next_caller;
  		  else
  		    node2->aux = &last;
+ 		  /* Break possible cycles involving always-inline
+ 		     functions by ignoring edges from always-inline
+ 		     functions to non-always-inline functions.  */
+ 		  if (edge->caller->local.disregard_inline_limits
+ 		      && !edge->callee->local.disregard_inline_limits)
+ 		    continue;
  		  if (!edge->caller->aux)
  		    {
  		      if (!edge->caller->callers)
Index: gcc/testsuite/gcc.dg/torture/inline-2.c
===================================================================
*** /dev/null	1970-01-01 00:00:00.000000000 +0000
--- gcc/testsuite/gcc.dg/torture/inline-2.c	2010-02-03 12:49:01.000000000 +0100
***************
*** 0 ****
--- 1,35 ----
+ /* { dg-do link } */
+ 
+ extern inline void foo2 (void) __attribute__((always_inline,gnu_inline));
+ extern inline void foo1 (void) __attribute__((always_inline,gnu_inline));
+ void bar1 (void);
+ void bar2 (void);
+ 
+ extern inline void __attribute__((always_inline,gnu_inline))
+ foo2 (void)
+ {
+   bar2 ();
+ }
+ 
+ void
+ bar1 (void)
+ {
+   foo2 ();
+ }
+ 
+ void
+ bar2 (void)
+ {
+   foo1 ();
+ }
+ 
+ extern inline void __attribute__((always_inline,gnu_inline))
+ foo1 (void)
+ {
+   bar1 ();
+ }
+ 
+ int main()
+ {
+   return 0;
+ }

