[PATCH][3/n] Minor pass-reorg


This removes redundant work done by the inliner (and fixes similar
errors where I spotted them).  It is a small step toward making the
post-IPA scalar cleanup pipeline the same as the early optimization
pipeline.

Bootstrapped and tested on x86_64-unknown-linux-gnu, applied.

Richard.

2010-09-22  Richard Guenther  <rguenther@suse.de>

	* tree-inline.c (optimize_inline_calls): Schedule cleanups
	only if we inlined something.  Block compaction and conditional
	folding are done by cfg cleanup.  Schedule update-address-taken.
	(tree_function_versioning): Remove redundant call to number_blocks.
	* tree-optimize.c (execute_cleanup_cfg_post_optimizing): Conditional
	folding is done by cfg cleanup.
	* passes.c (init_optimization_passes): Remove update-address-taken
	pass after IPA inlining.
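
To illustrate the first ChangeLog item outside of the GCC sources: the
patch makes gimple_expand_calls_inline report whether it inlined
anything, ORs the results into inlined_p, and returns no TODO flags at
all when nothing changed.  A minimal standalone sketch of that pattern
follows; every identifier and flag value below is a stand-in, not the
real GCC one.

#include <stdbool.h>
#include <stdio.h>

/* Stand-ins for the real TODO_* bits declared in tree-pass.h.  */
#define TODO_update_ssa  (1u << 0)
#define TODO_cleanup_cfg (1u << 1)

/* Stand-in for gimple_expand_calls_inline; pretend only even-numbered
   blocks contain a call we can inline.  */
static bool
expand_calls_in_block (int bb)
{
  return bb % 2 == 0;
}

/* The pattern from the patch: accumulate a changed flag with |= over
   all blocks and return 0 (schedule no cleanups) when nothing was
   inlined.  */
static unsigned
optimize_calls (int n_blocks)
{
  bool inlined_p = false;
  int bb;

  for (bb = 0; bb < n_blocks; bb++)
    inlined_p |= expand_calls_in_block (bb);

  if (!inlined_p)
    return 0;

  return TODO_update_ssa | TODO_cleanup_cfg;
}

int
main (void)
{
  printf ("todo flags: 0x%x\n", optimize_calls (4));
  return 0;
}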

Index: gcc/tree-inline.c
===================================================================
*** gcc/tree-inline.c	(revision 164521)
--- gcc/tree-inline.c	(working copy)
*************** optimize_inline_calls (tree fn)
*** 4162,4167 ****
--- 4162,4168 ----
    basic_block bb;
    int last = n_basic_blocks;
    struct gimplify_ctx gctx;
+   bool inlined_p = false;
  
    /* There is no point in performing inlining if errors have already
       occurred -- and we might crash if we try to inline invalid
*************** optimize_inline_calls (tree fn)
*** 4201,4207 ****
       follow it; we'll trudge through them, processing their CALL_EXPRs
       along the way.  */
    FOR_EACH_BB (bb)
!     gimple_expand_calls_inline (bb, &id);
  
    pop_gimplify_context (NULL);
  
--- 4202,4208 ----
       follow it; we'll trudge through them, processing their CALL_EXPRs
       along the way.  */
    FOR_EACH_BB (bb)
!     inlined_p |= gimple_expand_calls_inline (bb, &id);
  
    pop_gimplify_context (NULL);
  
*************** optimize_inline_calls (tree fn)
*** 4217,4234 ****
      }
  #endif
  
!   /* Fold the statements before compacting/renumbering the basic blocks.  */
    fold_marked_statements (last, id.statements_to_fold);
    pointer_set_destroy (id.statements_to_fold);
  
    gcc_assert (!id.debug_stmts);
  
!   /* Renumber the (code) basic_blocks consecutively.  */
!   compact_blocks ();
    /* Renumber the lexical scoping (non-code) blocks consecutively.  */
    number_blocks (fn);
  
-   fold_cond_expr_cond ();
    delete_unreachable_blocks_update_callgraph (&id);
  #ifdef ENABLE_CHECKING
    verify_cgraph_node (id.dst_node);
--- 4218,4236 ----
      }
  #endif
  
!   /* Fold queued statements.  */
    fold_marked_statements (last, id.statements_to_fold);
    pointer_set_destroy (id.statements_to_fold);
  
    gcc_assert (!id.debug_stmts);
  
!   /* If we didn't inline into the function there is nothing to do.  */
!   if (!inlined_p)
!     return 0;
! 
    /* Renumber the lexical scoping (non-code) blocks consecutively.  */
    number_blocks (fn);
  
    delete_unreachable_blocks_update_callgraph (&id);
  #ifdef ENABLE_CHECKING
    verify_cgraph_node (id.dst_node);
*************** optimize_inline_calls (tree fn)
*** 4241,4246 ****
--- 4243,4249 ----
    return (TODO_update_ssa
  	  | TODO_cleanup_cfg
  	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
+ 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
  	  | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
  }
  
*************** tree_function_versioning (tree old_decl,
*** 5118,5126 ****
  
    DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  
-   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
-   number_blocks (id.dst_fn);
- 
    declare_inline_vars (DECL_INITIAL (new_decl), vars);
  
    if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
--- 5121,5126 ----
Index: gcc/tree-optimize.c
===================================================================
*** gcc/tree-optimize.c	(revision 164522)
--- gcc/tree-optimize.c	(working copy)
*************** struct gimple_opt_pass pass_all_early_op
*** 158,164 ****
  static unsigned int
  execute_cleanup_cfg_post_optimizing (void)
  {
-   fold_cond_expr_cond ();
    cleanup_tree_cfg ();
    cleanup_dead_labels ();
    group_case_labels ();
--- 158,163 ----
Index: gcc/passes.c
===================================================================
*** gcc/passes.c	(revision 164522)
--- gcc/passes.c	(working copy)
*************** init_optimization_passes (void)
*** 836,842 ****
        /* Initial scalar cleanups before alias computation.
  	 They ensure memory accesses are not indirect wherever possible.  */
        NEXT_PASS (pass_strip_predict_hints);
-       NEXT_PASS (pass_update_address_taken);
        NEXT_PASS (pass_rename_ssa_copies);
        NEXT_PASS (pass_complete_unrolli);
        NEXT_PASS (pass_ccp);
--- 836,841 ----
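
The passes.c hunk works because cleanups can be driven by TODO bits
returned from a pass instead of by a fixed slot in the pipeline: the
update-address-taken pass after IPA inlining goes away, and
optimize_inline_calls asks for TODO_update_address_taken only when it
actually inlined something.  A toy sketch of that dispatch, where
execute_todo and the flag values are stand-ins for the real
pass-manager machinery:

#include <stdio.h>

/* Stand-ins for TODO_* bits; the real ones live in tree-pass.h.  */
#define TODO_cleanup_cfg          (1u << 0)
#define TODO_update_address_taken (1u << 1)

/* Toy version of the pass manager's TODO dispatch: only the work a
   pass asked for actually runs.  */
static void
execute_todo (unsigned flags)
{
  if (flags & TODO_cleanup_cfg)
    printf ("cleanup_cfg (this also folds COND_EXPR conditions)\n");
  if (flags & TODO_update_address_taken)
    printf ("update_address_taken\n");
}

int
main (void)
{
  /* After inlining something, the pass would return both bits; a pass
     that changed nothing returns 0 and triggers no work at all.  */
  execute_todo (TODO_cleanup_cfg | TODO_update_address_taken);
  return 0;
}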

