[PATCH][1/2] Merge call stmt cost handling from pretty-IPA

Richard Guenther rguenther@suse.de
Wed Apr 6 08:48:00 GMT 2011


This merges the call stmt cost handling from pretty IPA together
with dependent cleanups.

Bootstrapped and tested on x86_64-unknown-linux-gnu, applied to trunk.

Richard.

2011-04-05  Richard Guenther  <rguenther@suse.de>
 
	PR tree-optimization/47663
	* cgraph.h (struct cgraph_edge): Add call_stmt_size and
	call_stmt_time fields.
	(cgraph_edge_inlinable_p): Declare.
	(cgraph_edge_recursive_p): New inline function.
	* cgraph.c (cgraph_create_edge_1): Initialize call_stmt_size.
	(cgraph_clone_edge): Copy it.
	* ipa-inline.c (cgraph_estimate_edge_time): New function.
	Account for call stmt time.
	(cgraph_estimate_time_after_inlining): Take edge argument.
	(cgraph_estimate_edge_growth): Account for call stmt size.
	(cgraph_estimate_size_after_inlining): Take edge argument.
	(cgraph_mark_inline_edge): Adjust.
	(cgraph_check_inline_limits): Likewise.
	(cgraph_recursive_inlining_p): Remove.
	(cgraph_edge_badness): Use cgraph_edge_recursive_p.
	(cgraph_decide_recursive_inlining): Take edge argument and
	adjust.
	(cgraph_decide_inlining_of_small_functions): Do not avoid
	diags for recursive inlining here.
	(cgraph_flatten): Adjust.
	(cgraph_decide_inlining_incrementally): Likewise.
	(estimate_function_body_sizes): Remove call cost handling.
	(compute_inline_parameters): Initialize caller edge call costs.
	(cgraph_estimate_edge_growth): New function.
	(cgraph_estimate_growth): Use it.
	(cgraph_edge_badness): Likewise.
	(cgraph_check_inline_limits): Take an edge argument.
	(cgraph_decide_inlining_of_small_functions): Adjust.
	(cgraph_decide_inlining): Likewise.
	* tree-inline.c (estimate_num_insns): Only account for call
	return value if it is used.
	(expand_call_inline): Avoid diagnostics on recursive inline
	functions here.
	* lto-cgraph.c (lto_output_edge): Output edge call costs.
	(input_edge): Input edge call costs.

	* gcc.dg/tree-ssa/inline-8.c: New testcase.

Index: gcc/cgraph.c
===================================================================
*** gcc/cgraph.c.orig	2011-03-28 16:41:11.000000000 +0200
--- gcc/cgraph.c	2011-04-05 15:57:38.000000000 +0200
*************** cgraph_create_edge_1 (struct cgraph_node
*** 1032,1037 ****
--- 1032,1039 ----
    edge->loop_nest = nest;
  
    edge->call_stmt = call_stmt;
+   edge->call_stmt_size = 0;
+   edge->call_stmt_time = 0;
    push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
    edge->can_throw_external
      = call_stmt ? stmt_can_throw_external (call_stmt) : false;
*************** cgraph_clone_edge (struct cgraph_edge *e
*** 2141,2146 ****
--- 2143,2150 ----
  	}
      }
  
+   new_edge->call_stmt_size = e->call_stmt_size;
+   new_edge->call_stmt_time = e->call_stmt_time;
    new_edge->inline_failed = e->inline_failed;
    new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
    new_edge->lto_stmt_uid = stmt_uid;
Index: gcc/cgraph.h
===================================================================
*** gcc/cgraph.h.orig	2011-03-28 16:41:11.000000000 +0200
--- gcc/cgraph.h	2011-04-05 15:57:38.000000000 +0200
*************** struct GTY((chain_next ("%h.next_caller"
*** 431,436 ****
--- 431,439 ----
    int frequency;
    /* Unique id of the edge.  */
    int uid;
+   /* Estimated size and time of the call statement.  */
+   int call_stmt_size;
+   int call_stmt_time;
    /* Depth of loop nest, 1 means no loop nest.  */
    unsigned short int loop_nest;
    /* Whether this edge was made direct by indirect inlining.  */
*************** varpool_next_static_initializer (struct
*** 771,776 ****
--- 774,780 ----
  /* In ipa-inline.c  */
  void cgraph_clone_inlined_nodes (struct cgraph_edge *, bool, bool);
  void compute_inline_parameters (struct cgraph_node *);
+ cgraph_inline_failed_t cgraph_edge_inlinable_p (struct cgraph_edge *);
  
  
  /* Create a new static variable of type TYPE.  */
*************** varpool_all_refs_explicit_p (struct varp
*** 958,963 ****
--- 962,978 ----
  /* Constant pool accessor function.  */
  htab_t constant_pool_htab (void);
  
+ /* Return true when the edge E represents a direct recursion.  */
+ static inline bool
+ cgraph_edge_recursive_p (struct cgraph_edge *e)
+ {
+   if (e->caller->global.inlined_to)
+     return e->caller->global.inlined_to->decl == e->callee->decl;
+   else
+     return e->caller->decl == e->callee->decl;
+ }
+ 
+ 
  /* FIXME: inappropriate dependency of cgraph on IPA.  */
  #include "ipa-ref-inline.h"
  
Index: gcc/testsuite/gcc.dg/tree-ssa/inline-8.c
===================================================================
*** /dev/null	1970-01-01 00:00:00.000000000 +0000
--- gcc/testsuite/gcc.dg/tree-ssa/inline-8.c	2011-04-05 15:57:38.000000000 +0200
***************
*** 0 ****
--- 1,31 ----
+ /* { dg-do compile } */
+ /* { dg-options "-O -finline-small-functions --param early-inlining-insns=0 -fdump-tree-einline" } */
+ 
+ int foo0();
+ void bar0() { foo0(); }
+ void foobar0() { bar0(); }
+ 
+ void foo1();
+ void bar1() { foo1(); }
+ void foobar1() { bar1(); }
+ 
+ #if 0
+ int foo2();
+ int bar2() { return foo2(); }
+ /* The size estimate fails to see that inlining the call statement in bar2
+    will make its lhs dead.  */
+ void foobar2() { bar2(); }
+ #endif
+ 
+ int foo3();
+ int bar3() { return foo3(); }
+ int foobar3() { return bar3(); }
+ 
+ int bar4() { return 0; }
+ void foobar4() { bar4(); }
+ 
+ int bar5() { return 0; }
+ int foobar5() { return bar5(); }
+ 
+ /* { dg-final { scan-tree-dump-times "Inlining" 5 "einline" } } */
+ /* { dg-final { cleanup-tree-dump "einline" } } */
Index: gcc/lto-cgraph.c
===================================================================
*** gcc/lto-cgraph.c.orig	2011-04-05 12:52:46.000000000 +0200
--- gcc/lto-cgraph.c	2011-04-05 15:57:38.000000000 +0200
*************** lto_output_edge (struct lto_simple_outpu
*** 285,290 ****
--- 285,292 ----
    bp_pack_value (&bp, uid, HOST_BITS_PER_INT);
    bp_pack_value (&bp, edge->inline_failed, HOST_BITS_PER_INT);
    bp_pack_value (&bp, edge->frequency, HOST_BITS_PER_INT);
+   bp_pack_value (&bp, edge->call_stmt_size, HOST_BITS_PER_INT);
+   bp_pack_value (&bp, edge->call_stmt_time, HOST_BITS_PER_INT);
    bp_pack_value (&bp, edge->loop_nest, 30);
    bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
    bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
*************** input_edge (struct lto_input_block *ib,
*** 1215,1220 ****
--- 1217,1223 ----
    cgraph_inline_failed_t inline_failed;
    struct bitpack_d bp;
    int ecf_flags = 0;
+   int call_stmt_time, call_stmt_size;
  
    caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
    if (caller == NULL || caller->decl == NULL_TREE)
*************** input_edge (struct lto_input_block *ib,
*** 1236,1241 ****
--- 1239,1246 ----
    inline_failed = (cgraph_inline_failed_t) bp_unpack_value (&bp,
  							    HOST_BITS_PER_INT);
    freq = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
+   call_stmt_size = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
+   call_stmt_time = (int) bp_unpack_value (&bp, HOST_BITS_PER_INT);
    nest = (unsigned) bp_unpack_value (&bp, 30);
  
    if (indirect)
*************** input_edge (struct lto_input_block *ib,
*** 1248,1253 ****
--- 1253,1260 ----
    edge->inline_failed = inline_failed;
    edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
    edge->can_throw_external = bp_unpack_value (&bp, 1);
+   edge->call_stmt_size = call_stmt_size;
+   edge->call_stmt_time = call_stmt_time;
    if (indirect)
      {
        if (bp_unpack_value (&bp, 1))
Index: gcc/ipa-inline.c
===================================================================
*** gcc/ipa-inline.c.orig	2011-04-05 12:52:46.000000000 +0200
--- gcc/ipa-inline.c	2011-04-06 10:42:33.000000000 +0200
*************** inline_summary (struct cgraph_node *node
*** 164,179 ****
    return &node->local.inline_summary;
  }
  
! /* Estimate self time of the function after inlining WHAT into TO.  */
  
  static int
! cgraph_estimate_time_after_inlining (int frequency, struct cgraph_node *to,
! 				     struct cgraph_node *what)
  {
!   gcov_type time = (((gcov_type)what->global.time
! 		     - inline_summary (what)->time_inlining_benefit)
!   		    * frequency + CGRAPH_FREQ_BASE / 2) / CGRAPH_FREQ_BASE
! 		    + to->global.time;
    if (time < 0)
      time = 0;
    if (time > MAX_TIME)
--- 164,194 ----
    return &node->local.inline_summary;
  }
  
! /* Estimate the time cost for the caller when inlining EDGE.  */
! 
! static inline int
! cgraph_estimate_edge_time (struct cgraph_edge *edge)
! {
!   int call_stmt_time;
!   /* ???  We throw away cgraph edges all the time so the information
!      we store in edges doesn't persist for early inlining.  Ugh.  */
!   if (!edge->call_stmt)
!     call_stmt_time = edge->call_stmt_time;
!   else
!     call_stmt_time = estimate_num_insns (edge->call_stmt, &eni_time_weights);
!   return (((gcov_type)edge->callee->global.time
! 	   - inline_summary (edge->callee)->time_inlining_benefit
! 	   - call_stmt_time) * edge->frequency
! 	  + CGRAPH_FREQ_BASE / 2) / CGRAPH_FREQ_BASE;
! }
! 
! /* Estimate self time of the function NODE after inlining EDGE.  */
  
  static int
! cgraph_estimate_time_after_inlining (struct cgraph_node *node,
! 				     struct cgraph_edge *edge)
  {
!   gcov_type time = node->global.time + cgraph_estimate_edge_time (edge);
    if (time < 0)
      time = 0;
    if (time > MAX_TIME)
*************** cgraph_estimate_time_after_inlining (int
*** 181,194 ****
    return time;
  }
  
! /* Estimate self size of the function after inlining WHAT into TO.  */
  
  static inline int
! cgraph_estimate_size_after_inlining (struct cgraph_node *to,
! 				     struct cgraph_node *what)
  {
!   int size = ((what->global.size - inline_summary (what)->size_inlining_benefit)
! 	      + to->global.size);
    gcc_assert (size >= 0);
    return size;
  }
--- 196,226 ----
    return time;
  }
  
! /* Estimate the growth of the caller when inlining EDGE.  */
! 
! static inline int
! cgraph_estimate_edge_growth (struct cgraph_edge *edge)
! {
!   int call_stmt_size;
!   /* ???  We throw away cgraph edges all the time so the information
!      we store in edges doesn't persist for early inlining.  Ugh.  */
!   if (!edge->call_stmt)
!     call_stmt_size = edge->call_stmt_size;
!   else
!     call_stmt_size = estimate_num_insns (edge->call_stmt, &eni_size_weights);
!   return (edge->callee->global.size
! 	  - inline_summary (edge->callee)->size_inlining_benefit
! 	  - call_stmt_size);
! }
! 
! /* Estimate the size of NODE after inlining EDGE which should be an
!    edge to either NODE or a call inlined into NODE.  */
  
  static inline int
! cgraph_estimate_size_after_inlining (struct cgraph_node *node,
! 				     struct cgraph_edge *edge)
  {
!   int size = node->global.size + cgraph_estimate_edge_growth (edge);
    gcc_assert (size >= 0);
    return size;
  }
*************** cgraph_mark_inline_edge (struct cgraph_e
*** 301,309 ****
  			 VEC (cgraph_edge_p, heap) **new_edges)
  {
    int old_size = 0, new_size = 0;
!   struct cgraph_node *to = NULL, *what;
    struct cgraph_edge *curr = e;
-   int freq;
  
    /* Don't inline inlined edges.  */
    gcc_assert (e->inline_failed);
--- 333,340 ----
  			 VEC (cgraph_edge_p, heap) **new_edges)
  {
    int old_size = 0, new_size = 0;
!   struct cgraph_node *to = NULL;
    struct cgraph_edge *curr = e;
  
    /* Don't inline inlined edges.  */
    gcc_assert (e->inline_failed);
*************** cgraph_mark_inline_edge (struct cgraph_e
*** 315,333 ****
  
    cgraph_clone_inlined_nodes (e, true, update_original);
  
-   what = e->callee;
- 
-   freq = e->frequency;
    /* Now update size of caller and all functions caller is inlined into.  */
    for (;e && !e->inline_failed; e = e->caller->callers)
      {
        to = e->caller;
        old_size = e->caller->global.size;
!       new_size = cgraph_estimate_size_after_inlining (to, what);
        to->global.size = new_size;
!       to->global.time = cgraph_estimate_time_after_inlining (freq, to, what);
      }
!   gcc_assert (what->global.inlined_to == to);
    if (new_size > old_size)
      overall_size += new_size - old_size;
    ncalls_inlined++;
--- 346,361 ----
  
    cgraph_clone_inlined_nodes (e, true, update_original);
  
    /* Now update size of caller and all functions caller is inlined into.  */
    for (;e && !e->inline_failed; e = e->caller->callers)
      {
        to = e->caller;
        old_size = e->caller->global.size;
!       new_size = cgraph_estimate_size_after_inlining (to, e);
        to->global.size = new_size;
!       to->global.time = cgraph_estimate_time_after_inlining (to, e);
      }
!   gcc_assert (curr->callee->global.inlined_to == to);
    if (new_size > old_size)
      overall_size += new_size - old_size;
    ncalls_inlined++;
*************** cgraph_estimate_growth (struct cgraph_no
*** 357,364 ****
        if (e->caller == node)
          self_recursive = true;
        if (e->inline_failed)
! 	growth += (cgraph_estimate_size_after_inlining (e->caller, node)
! 		   - e->caller->global.size);
      }
  
    /* ??? Wrong for non-trivially self recursive functions or cases where
--- 385,391 ----
        if (e->caller == node)
          self_recursive = true;
        if (e->inline_failed)
! 	growth += cgraph_estimate_edge_growth (e);
      }
  
    /* ??? Wrong for non-trivially self recursive functions or cases where
*************** cgraph_estimate_growth (struct cgraph_no
*** 379,395 ****
    return growth;
  }
  
! /* Return false when inlining WHAT into TO is not good idea
!    as it would cause too large growth of function bodies.
!    When ONE_ONLY is true, assume that only one call site is going
!    to be inlined, otherwise figure out how many call sites in
!    TO calls WHAT and verify that all can be inlined.
!    */
  
  static bool
! cgraph_check_inline_limits (struct cgraph_node *to, struct cgraph_node *what,
  			    cgraph_inline_failed_t *reason)
  {
    int newsize;
    int limit;
    HOST_WIDE_INT stack_size_limit, inlined_stack;
--- 406,422 ----
    return growth;
  }
  
! /* Return false when inlining edge E is not good idea
!    as it would cause too large growth of the callers function body
!    or stack frame size.  *REASON if non-NULL is updated if the
!    inlining is not a good idea.  */
  
  static bool
! cgraph_check_inline_limits (struct cgraph_edge *e,
  			    cgraph_inline_failed_t *reason)
  {
+   struct cgraph_node *to = e->caller;
+   struct cgraph_node *what = e->callee;
    int newsize;
    int limit;
    HOST_WIDE_INT stack_size_limit, inlined_stack;
*************** cgraph_check_inline_limits (struct cgrap
*** 408,414 ****
  
    /* Check the size after inlining against the function limits.  But allow
       the function to shrink if it went over the limits by forced inlining.  */
!   newsize = cgraph_estimate_size_after_inlining (to, what);
    if (newsize >= to->global.size
        && newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
        && newsize > limit)
--- 435,441 ----
  
    /* Check the size after inlining against the function limits.  But allow
       the function to shrink if it went over the limits by forced inlining.  */
!   newsize = cgraph_estimate_size_after_inlining (to, e);
    if (newsize >= to->global.size
        && newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
        && newsize > limit)
*************** cgraph_default_inline_p (struct cgraph_n
*** 487,514 ****
    return true;
  }
  
- /* Return true when inlining WHAT would create recursive inlining.
-    We call recursive inlining all cases where same function appears more than
-    once in the single recursion nest path in the inline graph.  */
- 
- static inline bool
- cgraph_recursive_inlining_p (struct cgraph_node *to,
- 			     struct cgraph_node *what,
- 			     cgraph_inline_failed_t *reason)
- {
-   bool recursive;
-   if (to->global.inlined_to)
-     recursive = what->decl == to->global.inlined_to->decl;
-   else
-     recursive = what->decl == to->decl;
-   /* Marking recursive function inline has sane semantic and thus we should
-      not warn on it.  */
-   if (recursive && reason)
-     *reason = (what->local.disregard_inline_limits
- 	       ? CIF_RECURSIVE_INLINING : CIF_UNSPECIFIED);
-   return recursive;
- }
- 
  /* A cost model driving the inlining heuristics in a way so the edges with
     smallest badness are inlined first.  After each inlining is performed
     the costs of all caller edges of nodes affected are recomputed so the
--- 514,519 ----
*************** cgraph_edge_badness (struct cgraph_edge
*** 524,532 ****
    if (edge->callee->local.disregard_inline_limits)
      return INT_MIN;
  
!   growth =
!     (cgraph_estimate_size_after_inlining (edge->caller, edge->callee)
!      - edge->caller->global.size);
  
    if (dump)
      {
--- 529,535 ----
    if (edge->callee->local.disregard_inline_limits)
      return INT_MIN;
  
!   growth = cgraph_estimate_edge_growth (edge);
  
    if (dump)
      {
*************** cgraph_edge_badness (struct cgraph_edge
*** 536,544 ****
        fprintf (dump_file, "      growth %i, time %i-%i, size %i-%i\n",
  	       growth,
  	       edge->callee->global.time,
! 	       inline_summary (edge->callee)->time_inlining_benefit,
  	       edge->callee->global.size,
! 	       inline_summary (edge->callee)->size_inlining_benefit);
      }
  
    /* Always prefer inlining saving code size.  */
--- 539,549 ----
        fprintf (dump_file, "      growth %i, time %i-%i, size %i-%i\n",
  	       growth,
  	       edge->callee->global.time,
! 	       inline_summary (edge->callee)->time_inlining_benefit
! 	       + edge->call_stmt_time,
  	       edge->callee->global.size,
! 	       inline_summary (edge->callee)->size_inlining_benefit
! 	       + edge->call_stmt_size);
      }
  
    /* Always prefer inlining saving code size.  */
*************** cgraph_edge_badness (struct cgraph_edge
*** 557,563 ****
        badness =
  	((int)
  	 ((double) edge->count * INT_MIN / max_count / (max_benefit + 1)) *
! 	 (inline_summary (edge->callee)->time_inlining_benefit + 1)) / growth;
        if (dump)
  	{
  	  fprintf (dump_file,
--- 562,569 ----
        badness =
  	((int)
  	 ((double) edge->count * INT_MIN / max_count / (max_benefit + 1)) *
! 	 (inline_summary (edge->callee)->time_inlining_benefit
! 	  + edge->call_stmt_time + 1)) / growth;
        if (dump)
  	{
  	  fprintf (dump_file,
*************** cgraph_edge_badness (struct cgraph_edge
*** 566,572 ****
  		   (int) badness, (double) badness / INT_MIN,
  		   (double) edge->count / max_count,
  		   (double) (inline_summary (edge->callee)->
! 			     time_inlining_benefit + 1) / (max_benefit + 1));
  	}
      }
  
--- 572,579 ----
  		   (int) badness, (double) badness / INT_MIN,
  		   (double) edge->count / max_count,
  		   (double) (inline_summary (edge->callee)->
! 			     time_inlining_benefit
! 			     + edge->call_stmt_time + 1) / (max_benefit + 1));
  	}
      }
  
*************** cgraph_edge_badness (struct cgraph_edge
*** 586,593 ****
        int growth_for_all;
        badness = growth * 10000;
        benefitperc =
! 	100 * inline_summary (edge->callee)->time_inlining_benefit
! 	    / (edge->callee->global.time + 1) +1;
        benefitperc = MIN (benefitperc, 100);
        div *= benefitperc;
  
--- 593,601 ----
        int growth_for_all;
        badness = growth * 10000;
        benefitperc =
! 	100 * (inline_summary (edge->callee)->time_inlining_benefit
! 	       + edge->call_stmt_time)
! 	    / (edge->callee->global.time + 1) + 1;
        benefitperc = MIN (benefitperc, 100);
        div *= benefitperc;
  
*************** cgraph_edge_badness (struct cgraph_edge
*** 636,642 ****
    gcc_assert (badness >= INT_MIN);
    gcc_assert (badness <= INT_MAX - 1);
    /* Make recursive inlining happen always after other inlining is done.  */
!   if (cgraph_recursive_inlining_p (edge->caller, edge->callee, NULL))
      return badness + 1;
    else
      return badness;
--- 644,650 ----
    gcc_assert (badness >= INT_MIN);
    gcc_assert (badness <= INT_MAX - 1);
    /* Make recursive inlining happen always after other inlining is done.  */
!   if (cgraph_edge_recursive_p (edge))
      return badness + 1;
    else
      return badness;
*************** lookup_recursive_calls (struct cgraph_no
*** 822,839 ****
     is NULL.  */
  
  static bool
! cgraph_decide_recursive_inlining (struct cgraph_node *node,
  				  VEC (cgraph_edge_p, heap) **new_edges)
  {
    int limit = PARAM_VALUE (PARAM_MAX_INLINE_INSNS_RECURSIVE_AUTO);
    int max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH_AUTO);
    int probability = PARAM_VALUE (PARAM_MIN_INLINE_RECURSIVE_PROBABILITY);
    fibheap_t heap;
    struct cgraph_edge *e;
    struct cgraph_node *master_clone, *next;
    int depth = 0;
    int n = 0;
  
    /* It does not make sense to recursively inline always-inline functions
       as we are going to sorry() on the remaining calls anyway.  */
    if (node->local.disregard_inline_limits
--- 830,852 ----
     is NULL.  */
  
  static bool
! cgraph_decide_recursive_inlining (struct cgraph_edge *edge,
  				  VEC (cgraph_edge_p, heap) **new_edges)
  {
    int limit = PARAM_VALUE (PARAM_MAX_INLINE_INSNS_RECURSIVE_AUTO);
    int max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH_AUTO);
    int probability = PARAM_VALUE (PARAM_MIN_INLINE_RECURSIVE_PROBABILITY);
    fibheap_t heap;
+   struct cgraph_node *node;
    struct cgraph_edge *e;
    struct cgraph_node *master_clone, *next;
    int depth = 0;
    int n = 0;
  
+   node = edge->caller;
+   if (node->global.inlined_to)
+     node = node->global.inlined_to;
+ 
    /* It does not make sense to recursively inline always-inline functions
       as we are going to sorry() on the remaining calls anyway.  */
    if (node->local.disregard_inline_limits
*************** cgraph_decide_recursive_inlining (struct
*** 852,858 ****
  
    /* Make sure that function is small enough to be considered for inlining.  */
    if (!max_depth
!       || cgraph_estimate_size_after_inlining (node, node)  >= limit)
      return false;
    heap = fibheap_new ();
    lookup_recursive_calls (node, node, heap);
--- 865,871 ----
  
    /* Make sure that function is small enough to be considered for inlining.  */
    if (!max_depth
!       || cgraph_estimate_size_after_inlining (node, edge)  >= limit)
      return false;
    heap = fibheap_new ();
    lookup_recursive_calls (node, node, heap);
*************** cgraph_decide_recursive_inlining (struct
*** 876,889 ****
        cgraph_clone_inlined_nodes (e, true, false);
  
    /* Do the inlining and update list of recursive call during process.  */
!   while (!fibheap_empty (heap)
! 	 && (cgraph_estimate_size_after_inlining (node, master_clone)
! 	     <= limit))
      {
        struct cgraph_edge *curr
  	= (struct cgraph_edge *) fibheap_extract_min (heap);
        struct cgraph_node *cnode;
  
        depth = 1;
        for (cnode = curr->caller;
  	   cnode->global.inlined_to; cnode = cnode->callers->caller)
--- 889,903 ----
        cgraph_clone_inlined_nodes (e, true, false);
  
    /* Do the inlining and update list of recursive call during process.  */
!   while (!fibheap_empty (heap))
      {
        struct cgraph_edge *curr
  	= (struct cgraph_edge *) fibheap_extract_min (heap);
        struct cgraph_node *cnode;
  
+       if (cgraph_estimate_size_after_inlining (node, curr) > limit)
+ 	break;
+ 
        depth = 1;
        for (cnode = curr->caller;
  	   cnode->global.inlined_to; cnode = cnode->callers->caller)
*************** cgraph_decide_inlining_of_small_function
*** 1083,1092 ****
  	}
        
        callee = edge->callee;
! 
!       growth = (cgraph_estimate_size_after_inlining (edge->caller, edge->callee)
! 		- edge->caller->global.size);
! 
        if (dump_file)
  	{
  	  fprintf (dump_file,
--- 1097,1103 ----
  	}
        
        callee = edge->callee;
!       growth = cgraph_estimate_edge_growth (edge);
        if (dump_file)
  	{
  	  fprintf (dump_file,
*************** cgraph_decide_inlining_of_small_function
*** 1155,1179 ****
   	not_good = CIF_OPTIMIZING_FOR_SIZE;
        if (not_good && growth > 0 && cgraph_estimate_growth (edge->callee) > 0)
  	{
!           if (!cgraph_recursive_inlining_p (edge->caller, edge->callee,
! 				            &edge->inline_failed))
! 	    {
! 	      edge->inline_failed = not_good;
! 	      if (dump_file)
! 		fprintf (dump_file, " inline_failed:%s.\n",
! 			 cgraph_inline_failed_string (edge->inline_failed));
! 	    }
  	  continue;
  	}
        if (!cgraph_default_inline_p (edge->callee, &edge->inline_failed))
  	{
!           if (!cgraph_recursive_inlining_p (edge->caller, edge->callee,
! 				            &edge->inline_failed))
! 	    {
! 	      if (dump_file)
! 		fprintf (dump_file, " inline_failed:%s.\n",
! 			 cgraph_inline_failed_string (edge->inline_failed));
! 	    }
  	  continue;
  	}
        if (!tree_can_inline_p (edge)
--- 1166,1182 ----
   	not_good = CIF_OPTIMIZING_FOR_SIZE;
        if (not_good && growth > 0 && cgraph_estimate_growth (edge->callee) > 0)
  	{
! 	  edge->inline_failed = not_good;
! 	  if (dump_file)
! 	    fprintf (dump_file, " inline_failed:%s.\n",
! 		     cgraph_inline_failed_string (edge->inline_failed));
  	  continue;
  	}
        if (!cgraph_default_inline_p (edge->callee, &edge->inline_failed))
  	{
! 	  if (dump_file)
! 	    fprintf (dump_file, " inline_failed:%s.\n",
! 		     cgraph_inline_failed_string (edge->inline_failed));
  	  continue;
  	}
        if (!tree_can_inline_p (edge)
*************** cgraph_decide_inlining_of_small_function
*** 1184,1199 ****
  		     cgraph_inline_failed_string (edge->inline_failed));
  	  continue;
  	}
!       if (cgraph_recursive_inlining_p (edge->caller, edge->callee,
! 				       &edge->inline_failed))
  	{
  	  where = edge->caller;
  	  if (where->global.inlined_to)
  	    where = where->global.inlined_to;
! 	  if (!cgraph_decide_recursive_inlining (where,
  						 flag_indirect_inlining
  						 ? &new_indirect_edges : NULL))
! 	    continue;
  	  if (flag_indirect_inlining)
  	    add_new_edges_to_heap (heap, new_indirect_edges);
            update_all_callee_keys (heap, where, updated_nodes);
--- 1187,1204 ----
  		     cgraph_inline_failed_string (edge->inline_failed));
  	  continue;
  	}
!       if (cgraph_edge_recursive_p (edge))
  	{
  	  where = edge->caller;
  	  if (where->global.inlined_to)
  	    where = where->global.inlined_to;
! 	  if (!cgraph_decide_recursive_inlining (edge,
  						 flag_indirect_inlining
  						 ? &new_indirect_edges : NULL))
! 	    {
! 	      edge->inline_failed = CIF_RECURSIVE_INLINING;
! 	      continue;
! 	    }
  	  if (flag_indirect_inlining)
  	    add_new_edges_to_heap (heap, new_indirect_edges);
            update_all_callee_keys (heap, where, updated_nodes);
*************** cgraph_decide_inlining_of_small_function
*** 1201,1208 ****
        else
  	{
  	  struct cgraph_node *callee;
! 	  if (!cgraph_check_inline_limits (edge->caller, edge->callee,
! 				           &edge->inline_failed))
  	    {
  	      if (dump_file)
  		fprintf (dump_file, " Not inlining into %s:%s.\n",
--- 1206,1212 ----
        else
  	{
  	  struct cgraph_node *callee;
! 	  if (!cgraph_check_inline_limits (edge, &edge->inline_failed))
  	    {
  	      if (dump_file)
  		fprintf (dump_file, " Not inlining into %s:%s.\n",
*************** cgraph_decide_inlining_of_small_function
*** 1295,1303 ****
  	  if (dump_flags & TDF_DETAILS)
  	    cgraph_edge_badness (edge, true);
  	}
!       if (!edge->callee->local.disregard_inline_limits && edge->inline_failed
!           && !cgraph_recursive_inlining_p (edge->caller, edge->callee,
! 				           &edge->inline_failed))
  	edge->inline_failed = CIF_INLINE_UNIT_GROWTH_LIMIT;
      }
  
--- 1299,1305 ----
  	  if (dump_flags & TDF_DETAILS)
  	    cgraph_edge_badness (edge, true);
  	}
!       if (!edge->callee->local.disregard_inline_limits && edge->inline_failed)
  	edge->inline_failed = CIF_INLINE_UNIT_GROWTH_LIMIT;
      }
  
*************** cgraph_flatten (struct cgraph_node *node
*** 1359,1365 ****
  	  continue;
  	}
  
!       if (cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed))
  	{
  	  if (dump_file)
  	    fprintf (dump_file, "Not inlining: recursive call.\n");
--- 1361,1367 ----
  	  continue;
  	}
  
!       if (cgraph_edge_recursive_p (e))
  	{
  	  if (dump_file)
  	    fprintf (dump_file, "Not inlining: recursive call.\n");
*************** cgraph_decide_inlining (void)
*** 1431,1440 ****
  	if (!DECL_EXTERNAL (node->decl))
  	  initial_size += node->global.size;
  	for (e = node->callees; e; e = e->next_callee)
! 	  if (max_count < e->count)
! 	    max_count = e->count;
! 	if (max_benefit < inline_summary (node)->time_inlining_benefit)
! 	  max_benefit = inline_summary (node)->time_inlining_benefit;
        }
    gcc_assert (in_lto_p
  	      || !max_count
--- 1433,1446 ----
  	if (!DECL_EXTERNAL (node->decl))
  	  initial_size += node->global.size;
  	for (e = node->callees; e; e = e->next_callee)
! 	  {
! 	    int benefit = (inline_summary (node)->time_inlining_benefit
! 			   + e->call_stmt_time);
! 	    if (max_count < e->count)
! 	      max_count = e->count;
! 	    if (max_benefit < benefit)
! 	      max_benefit = benefit;
! 	  }
        }
    gcc_assert (in_lto_p
  	      || !max_count
*************** cgraph_decide_inlining (void)
*** 1516,1523 ****
  			   node->callers->caller->global.size);
  		}
  
! 	      if (cgraph_check_inline_limits (node->callers->caller, node,
! 					      &reason))
  		{
  		  struct cgraph_node *caller = node->callers->caller;
  		  cgraph_mark_inline_edge (node->callers, true, NULL);
--- 1522,1528 ----
  			   node->callers->caller->global.size);
  		}
  
! 	      if (cgraph_check_inline_limits (node->callers, &reason))
  		{
  		  struct cgraph_node *caller = node->callers->caller;
  		  cgraph_mark_inline_edge (node->callers, true, NULL);
*************** cgraph_decide_inlining_incrementally (st
*** 1602,1608 ****
  	  fprintf (dump_file,
  		   "Considering to always inline inline candidate %s.\n",
  		   cgraph_node_name (e->callee));
! 	if (cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed))
  	  {
  	    if (dump_file)
  	      fprintf (dump_file, "Not inlining: recursive call.\n");
--- 1607,1613 ----
  	  fprintf (dump_file,
  		   "Considering to always inline inline candidate %s.\n",
  		   cgraph_node_name (e->callee));
! 	if (cgraph_edge_recursive_p (e))
  	  {
  	    if (dump_file)
  	      fprintf (dump_file, "Not inlining: recursive call.\n");
*************** cgraph_decide_inlining_incrementally (st
*** 1656,1662 ****
  	  if (dump_file)
  	    fprintf (dump_file, "Considering inline candidate %s.\n",
  		     cgraph_node_name (e->callee));
! 	  if (cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed))
  	    {
  	      if (dump_file)
  		fprintf (dump_file, "Not inlining: recursive call.\n");
--- 1661,1667 ----
  	  if (dump_file)
  	    fprintf (dump_file, "Considering inline candidate %s.\n",
  		     cgraph_node_name (e->callee));
! 	  if (cgraph_edge_recursive_p (e))
  	    {
  	      if (dump_file)
  		fprintf (dump_file, "Not inlining: recursive call.\n");
*************** cgraph_decide_inlining_incrementally (st
*** 1681,1696 ****
  	  if (((mode == INLINE_SIZE || mode == INLINE_SIZE_NORECURSIVE)
  	       || (!flag_inline_functions
  		   && !DECL_DECLARED_INLINE_P (e->callee->decl)))
! 	      && (cgraph_estimate_size_after_inlining (e->caller, e->callee)
! 		  > e->caller->global.size + allowed_growth)
  	      && cgraph_estimate_growth (e->callee) > allowed_growth)
  	    {
  	      if (dump_file)
  		fprintf (dump_file,
  			 "Not inlining: code size would grow by %i.\n",
! 			 cgraph_estimate_size_after_inlining (e->caller,
! 							      e->callee)
! 			 - e->caller->global.size);
  	      continue;
  	    }
  	  if (e->call_stmt_cannot_inline_p
--- 1686,1698 ----
  	  if (((mode == INLINE_SIZE || mode == INLINE_SIZE_NORECURSIVE)
  	       || (!flag_inline_functions
  		   && !DECL_DECLARED_INLINE_P (e->callee->decl)))
! 	      && cgraph_estimate_edge_growth (e) > allowed_growth
  	      && cgraph_estimate_growth (e->callee) > allowed_growth)
  	    {
  	      if (dump_file)
  		fprintf (dump_file,
  			 "Not inlining: code size would grow by %i.\n",
! 			 cgraph_estimate_edge_growth (e));
  	      continue;
  	    }
  	  if (e->call_stmt_cannot_inline_p
*************** cgraph_decide_inlining_incrementally (st
*** 1708,1714 ****
  			 "Not inlining: Function body no longer available.\n");
  	      continue;
  	    }
! 	  if (!cgraph_check_inline_limits (node, e->callee, &e->inline_failed))
  	    {
  	      if (dump_file)
  		fprintf (dump_file, "Not inlining: %s.\n",
--- 1710,1716 ----
  			 "Not inlining: Function body no longer available.\n");
  	      continue;
  	    }
! 	  if (!cgraph_check_inline_limits (e, &e->inline_failed))
  	    {
  	      if (dump_file)
  		fprintf (dump_file, "Not inlining: %s.\n",
*************** estimate_function_body_sizes (struct cgr
*** 1901,1909 ****
    basic_block bb;
    gimple_stmt_iterator bsi;
    struct function *my_function = DECL_STRUCT_FUNCTION (node->decl);
-   tree arg;
    int freq;
-   tree funtype = TREE_TYPE (node->decl);
  
    if (dump_file)
      fprintf (dump_file, "Analyzing function body size: %s\n",
--- 1903,1909 ----
*************** estimate_function_body_sizes (struct cgr
*** 1944,1978 ****
    time_inlining_benefit = ((time_inlining_benefit + CGRAPH_FREQ_BASE)
    			   / (CGRAPH_FREQ_BASE * 2));
    size_inlining_benefit = (size_inlining_benefit + 1) / 2;
-   if (dump_file)
-     fprintf (dump_file, "Overall function body time: %i-%i size: %i-%i\n",
- 	     (int)time, (int)time_inlining_benefit,
- 	     size, size_inlining_benefit);
-   time_inlining_benefit += eni_time_weights.call_cost;
-   size_inlining_benefit += eni_size_weights.call_cost;
-   if (!VOID_TYPE_P (TREE_TYPE (funtype)))
-     {
-       int cost = estimate_move_cost (TREE_TYPE (funtype));
-       time_inlining_benefit += cost;
-       size_inlining_benefit += cost;
-     }
-   for (arg = DECL_ARGUMENTS (node->decl); arg; arg = DECL_CHAIN (arg))
-     if (!VOID_TYPE_P (TREE_TYPE (arg)))
-       {
-         int cost = estimate_move_cost (TREE_TYPE (arg));
-         time_inlining_benefit += cost;
-         size_inlining_benefit += cost;
-       }
    if (time_inlining_benefit > MAX_TIME)
      time_inlining_benefit = MAX_TIME;
    if (time > MAX_TIME)
      time = MAX_TIME;
-   inline_summary (node)->self_time = time;
-   inline_summary (node)->self_size = size;
    if (dump_file)
!     fprintf (dump_file, "With function call overhead time: %i-%i size: %i-%i\n",
  	     (int)time, (int)time_inlining_benefit,
  	     size, size_inlining_benefit);
    inline_summary (node)->time_inlining_benefit = time_inlining_benefit;
    inline_summary (node)->size_inlining_benefit = size_inlining_benefit;
  }
--- 1944,1959 ----
    time_inlining_benefit = ((time_inlining_benefit + CGRAPH_FREQ_BASE)
    			   / (CGRAPH_FREQ_BASE * 2));
    size_inlining_benefit = (size_inlining_benefit + 1) / 2;
    if (time_inlining_benefit > MAX_TIME)
      time_inlining_benefit = MAX_TIME;
    if (time > MAX_TIME)
      time = MAX_TIME;
    if (dump_file)
!     fprintf (dump_file, "Overall function body time: %i-%i size: %i-%i\n",
  	     (int)time, (int)time_inlining_benefit,
  	     size, size_inlining_benefit);
+   inline_summary (node)->self_time = time;
+   inline_summary (node)->self_size = size;
    inline_summary (node)->time_inlining_benefit = time_inlining_benefit;
    inline_summary (node)->size_inlining_benefit = size_inlining_benefit;
  }
*************** void
*** 1982,1987 ****
--- 1963,1969 ----
  compute_inline_parameters (struct cgraph_node *node)
  {
    HOST_WIDE_INT self_stack_size;
+   struct cgraph_edge *e;
  
    gcc_assert (!node->global.inlined_to);
  
*************** compute_inline_parameters (struct cgraph
*** 2001,2008 ****
      node->local.can_change_signature = true;
    else
      {
-       struct cgraph_edge *e;
- 
        /* Functions calling builtin_apply can not change signature.  */
        for (e = node->callees; e; e = e->next_callee)
  	if (DECL_BUILT_IN (e->callee->decl)
--- 1983,1988 ----
*************** compute_inline_parameters (struct cgraph
*** 2012,2017 ****
--- 1992,2008 ----
        node->local.can_change_signature = !e;
      }
    estimate_function_body_sizes (node);
+   /* Compute size of call statements.  We have to do this for callers here,
+      as those sizes need to be present for edges _to_ us as soon as
+      we are finished with early opts.  */
+   for (e = node->callers; e; e = e->next_caller)
+     if (e->call_stmt)
+       {
+ 	e->call_stmt_size
+ 	  = estimate_num_insns (e->call_stmt, &eni_size_weights);
+ 	e->call_stmt_time
+ 	  = estimate_num_insns (e->call_stmt, &eni_time_weights);
+       }
    /* Inlining characteristics are maintained by the cgraph_mark_inline.  */
    node->global.time = inline_summary (node)->self_time;
    node->global.size = inline_summary (node)->self_size;
Index: gcc/tree-inline.c
===================================================================
*** gcc/tree-inline.c.orig	2011-04-05 12:52:47.000000000 +0200
--- gcc/tree-inline.c	2011-04-06 10:43:00.000000000 +0200
*************** estimate_num_insns (gimple stmt, eni_wei
*** 3514,3520 ****
  	if (decl)
  	  funtype = TREE_TYPE (decl);
  
! 	if (!VOID_TYPE_P (TREE_TYPE (funtype)))
  	  cost += estimate_move_cost (TREE_TYPE (funtype));
  
  	if (funtype)
--- 3514,3521 ----
  	if (decl)
  	  funtype = TREE_TYPE (decl);
  
! 	if (!VOID_TYPE_P (TREE_TYPE (funtype))
! 	    && gimple_call_lhs (stmt))
  	  cost += estimate_move_cost (TREE_TYPE (funtype));
  
  	if (funtype)
*************** expand_call_inline (basic_block bb, gimp
*** 3812,3817 ****
--- 3813,3820 ----
  	       && !DECL_IN_SYSTEM_HEADER (fn)
  	       && reason != CIF_UNSPECIFIED
  	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
+ 	       /* Do not warn about not inlined recursive calls.  */
+ 	       && !cgraph_edge_recursive_p (cg_edge)
  	       /* Avoid warnings during early inline pass. */
  	       && cgraph_global_info_ready)
  	{



More information about the Gcc-patches mailing list