This is the mail archive of the
gcc-patches@gcc.gnu.org
mailing list for the GCC project.
Re: FW: [just for fun] patch to enable SSA inlining
- From: Jan Hubicka <jh at suse dot cz>
- To: Razya Ladelsky <RAZYA at il dot ibm dot com>
- Cc: jh at suse dot cz, gcc-patches at gcc dot gnu dot org
- Date: Sat, 30 Dec 2006 13:33:02 +0100
- Subject: Re: FW: [just for fun] patch to enable SSA inlining
- References: <BAY18-F92CFE75B8977CFBC58D4EB9C10@phx.gbl> <OFA1961C0A.D8A58CAF-ONC2257250.0033AAB5-C2257250.0035271F@il.ibm.com>
Hi,
here is current incarnation of patch after merges of aliasing patches
approved by Danny. Expected set of failures is:
g++.dg/opt/devirt1.C scan-assembler xyzzy
gcc.dg/ipa/ipa-1.c (test for excess errors)
gcc.dg/ipa/ipa-1.c scan-ipa-dump-times propagating const 2
gcc.dg/ipa/ipa-1.c scan-ipa-dump-times versioned function 2
gcc.dg/ipa/ipa-2.c (test for excess errors)
gcc.dg/ipa/ipa-3.c (test for excess errors)
gcc.dg/ipa/ipa-3.c scan-ipa-dump-times propagating const 3
gcc.dg/ipa/ipa-4.c (test for excess errors)
gcc.dg/ipa/ipa-5.c (test for excess errors)
gcc.dg/ipa/ipa-6.c (test for excess errors)
gcc.dg/pr16194.c (test for excess errors)
I will be looking into the devirtualization now, the ipa-* are all ipcp
being not updated to SSA yet. The patch is now basically about pass
queue reordering and one not yet approved change to fixup_cfg.
Honza
Index: cgraph.c
===================================================================
*** cgraph.c (revision 120286)
--- cgraph.c (working copy)
*************** The callgraph:
*** 83,88 ****
--- 83,89 ----
#include "intl.h"
#include "tree-gimple.h"
#include "tree-dump.h"
+ #include "tree-flow.h"
static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
*************** cgraph_add_new_function (tree fndecl, bo
*** 942,947 ****
--- 943,949 ----
break;
case CGRAPH_STATE_IPA:
+ case CGRAPH_STATE_IPA_SSA:
case CGRAPH_STATE_EXPANSION:
/* Bring the function into finalized state and enqueue for later
analyzing and compilation. */
*************** cgraph_add_new_function (tree fndecl, bo
*** 963,968 ****
--- 965,974 ----
tree_register_cfg_hooks ();
if (!lowered)
tree_lowering_passes (fndecl);
+ bitmap_obstack_initialize (NULL);
+ if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)) && optimize)
+ execute_pass_list (pass_early_local_passes.sub);
+ bitmap_obstack_release (NULL);
tree_rest_of_compilation (fndecl);
pop_cfun ();
current_function_decl = NULL;
Index: cgraph.h
===================================================================
*** cgraph.h (revision 120286)
--- cgraph.h (working copy)
*************** enum cgraph_state
*** 260,265 ****
--- 260,267 ----
CGRAPH_STATE_CONSTRUCTION,
/* Callgraph is built and IPA passes are being run. */
CGRAPH_STATE_IPA,
+ /* Callgraph is built and all functions are transformed to SSA form. */
+ CGRAPH_STATE_IPA_SSA,
/* Functions are now ordered and being passed to RTL expanders. */
CGRAPH_STATE_EXPANSION,
/* All cgraph expansion is done. */
Index: tree-pass.h
===================================================================
*** tree-pass.h (revision 120286)
--- tree-pass.h (working copy)
*************** extern struct tree_opt_pass pass_ipa_pur
*** 314,319 ****
--- 314,320 ----
extern struct tree_opt_pass pass_ipa_type_escape;
extern struct tree_opt_pass pass_ipa_pta;
extern struct tree_opt_pass pass_early_local_passes;
+ extern struct tree_opt_pass pass_all_early_optimizations;
extern struct tree_opt_pass pass_all_optimizations;
extern struct tree_opt_pass pass_cleanup_cfg_post_optimizing;
Index: bitmap.c
===================================================================
*** bitmap.c (revision 120286)
--- bitmap.c (working copy)
*************** bitmap_obstack_release (bitmap_obstack *
*** 329,334 ****
--- 329,337 ----
bit_obstack->elements = NULL;
bit_obstack->heads = NULL;
obstack_free (&bit_obstack->obstack, NULL);
+ #ifdef ENABLE_CHECKING
+ memset (&bit_obstack->obstack, 0xab, sizeof (*&bit_obstack->obstack));
+ #endif
}
/* Create a new bitmap on an obstack. If BIT_OBSTACK is NULL, create
Index: cgraphunit.c
===================================================================
*** cgraphunit.c (revision 120286)
--- cgraphunit.c (working copy)
*************** cgraph_process_new_functions (void)
*** 294,299 ****
--- 294,300 ----
break;
case CGRAPH_STATE_IPA:
+ case CGRAPH_STATE_IPA_SSA:
/* When IPA optimization already started, do all essential
transformations that has been already performed on the whole
cgraph but not on this function. */
*************** cgraph_process_new_functions (void)
*** 313,318 ****
--- 314,325 ----
initialize_inline_failed (node);
if (flag_really_no_inline && !node->local.disregard_inline_limits)
node->local.inlinable = 0;
+ if ((cgraph_state == CGRAPH_STATE_IPA_SSA
+ && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
+ /* When not optimizing, be sure we run early local passes anyway
+ to expand OMP. */
+ || !optimize)
+ execute_pass_list (pass_early_local_passes.sub);
free_dominance_info (CDI_POST_DOMINATORS);
free_dominance_info (CDI_DOMINATORS);
pop_cfun ();
*************** cgraph_analyze_function (struct cgraph_n
*** 877,882 ****
--- 884,898 ----
node->local.inlinable = 0;
/* Inlining characteristics are maintained by the cgraph_mark_inline. */
node->global.insns = node->local.self_insns;
+ if (!flag_unit_at_a_time)
+ {
+ bitmap_obstack_initialize (NULL);
+ tree_register_cfg_hooks ();
+ execute_pass_list (pass_early_local_passes.sub);
+ free_dominance_info (CDI_POST_DOMINATORS);
+ free_dominance_info (CDI_DOMINATORS);
+ bitmap_obstack_release (NULL);
+ }
node->analyzed = true;
pop_cfun ();
*************** cgraph_expand_all_functions (void)
*** 1220,1225 ****
--- 1236,1242 ----
gcc_assert (node->reachable);
node->output = 0;
cgraph_expand_function (node);
+ cgraph_process_new_functions ();
}
}
cgraph_process_new_functions ();
Index: tree-mudflap.c
===================================================================
*** tree-mudflap.c (revision 120286)
--- tree-mudflap.c (working copy)
*************** mf_decl_cache_locals (void)
*** 460,473 ****
globals into the cache variables. */
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (mf_cache_shift_decl_l),
mf_cache_shift_decl_l, mf_cache_shift_decl);
- add_referenced_var (mf_cache_shift_decl);
SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (current_function_decl));
gimplify_to_stmt_list (&t);
shift_init_stmts = t;
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (mf_cache_mask_decl_l),
mf_cache_mask_decl_l, mf_cache_mask_decl);
- add_referenced_var (mf_cache_mask_decl);
SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (current_function_decl));
gimplify_to_stmt_list (&t);
mask_init_stmts = t;
--- 460,471 ----
*************** mf_build_check_statement_for (tree base,
*** 573,589 ****
& __mf_mask]. */
t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
(flag_mudflap_threads ? mf_cache_shift_decl : mf_cache_shift_decl_l));
- add_referenced_var (mf_cache_shift_decl);
t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
(flag_mudflap_threads ? mf_cache_mask_decl : mf_cache_mask_decl_l));
- add_referenced_var (mf_cache_mask_decl);
t = build4 (ARRAY_REF,
TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
- add_referenced_var (mf_cache_array_decl);
t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
t = build2 (GIMPLE_MODIFY_STMT, void_type_node, mf_elem, t);
- add_referenced_var (mf_elem);
SET_EXPR_LOCUS (t, locus);
gimplify_to_stmt_list (&t);
tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
--- 571,583 ----
*************** mf_build_check_statement_for (tree base,
*** 607,613 ****
build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
t = build2 (GT_EXPR, boolean_type_node, t, mf_base);
- add_referenced_var (mf_base);
/* Construct '__mf_elem->high < __mf_limit'.
--- 601,606 ----
Index: tree-optimize.c
===================================================================
*** tree-optimize.c (revision 120286)
--- tree-optimize.c (working copy)
*************** static bool
*** 57,64 ****
gate_all_optimizations (void)
{
return (optimize >= 1
! /* Don't bother doing anything if the program has errors. */
! && !(errorcount || sorrycount));
}
struct tree_opt_pass pass_all_optimizations =
--- 57,65 ----
gate_all_optimizations (void)
{
return (optimize >= 1
! /* Don't bother doing anything if the program has errors.
! We have to pass down the queue if we already went into SSA. */
! && (!(errorcount || sorrycount) || gimple_in_ssa_p (cfun)));
}
struct tree_opt_pass pass_all_optimizations =
*************** struct tree_opt_pass pass_all_optimizati
*** 78,87 ****
0 /* letter */
};
struct tree_opt_pass pass_early_local_passes =
{
! NULL, /* name */
! gate_all_optimizations, /* gate */
NULL, /* execute */
NULL, /* sub */
NULL, /* next */
--- 79,97 ----
0 /* letter */
};
+ /* Gate: execute, or not, all of the early local passes. */
+
+ static bool
+ gate_all_early_local_passes (void)
+ {
+ /* Don't bother doing anything if the program has errors. */
+ return (!(errorcount || sorrycount));
+ }
+
struct tree_opt_pass pass_early_local_passes =
{
! "early_local_cleanups", /* name */
! gate_all_early_local_passes, /* gate */
NULL, /* execute */
NULL, /* sub */
NULL, /* next */
*************** struct tree_opt_pass pass_early_local_pa
*** 95,100 ****
--- 105,145 ----
0 /* letter */
};
+ static unsigned int
+ execute_early_local_optimizations (void)
+ {
+ if (flag_unit_at_a_time)
+ cgraph_state = CGRAPH_STATE_IPA_SSA;
+ return 0;
+ }
+
+ /* Gate: execute, or not, all of the non-trivial optimizations. */
+
+ static bool
+ gate_all_early_optimizations (void)
+ {
+ return (optimize >= 1
+ /* Don't bother doing anything if the program has errors. */
+ && !(errorcount || sorrycount));
+ }
+
+ struct tree_opt_pass pass_all_early_optimizations =
+ {
+ "early_optimizations", /* name */
+ gate_all_early_optimizations, /* gate */
+ execute_early_local_optimizations, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
+ 0 /* letter */
+ };
+
/* Pass: cleanup the CFG just before expanding trees to RTL.
This is just a round of label cleanups and case node grouping
because after the tree optimizers have run such cleanups may
*************** execute_cleanup_cfg_pre_ipa (void)
*** 107,116 ****
return 0;
}
struct tree_opt_pass pass_cleanup_cfg =
{
"cleanup_cfg", /* name */
! NULL, /* gate */
execute_cleanup_cfg_pre_ipa, /* execute */
NULL, /* sub */
NULL, /* next */
--- 152,169 ----
return 0;
}
+ /* Gate: execute, or not, the cfg cleanup before IPA. */
+
+ static bool
+ gate_cleanup_cfg_pre_ipa (void)
+ {
+ return true;
+ }
+
struct tree_opt_pass pass_cleanup_cfg =
{
"cleanup_cfg", /* name */
! gate_cleanup_cfg_pre_ipa, /* gate */
execute_cleanup_cfg_pre_ipa, /* execute */
NULL, /* sub */
NULL, /* next */
*************** execute_free_datastructures (void)
*** 170,176 ****
/* Remove the ssa structures. Do it here since this includes statement
annotations that need to be intact during disband_implicit_edges. */
! delete_tree_ssa ();
return 0;
}
--- 223,230 ----
/* Remove the ssa structures. Do it here since this includes statement
annotations that need to be intact during disband_implicit_edges. */
! if (cfun->gimple_df)
! delete_tree_ssa ();
return 0;
}
*************** execute_fixup_cfg (void)
*** 263,268 ****
--- 317,325 ----
{
basic_block bb;
block_stmt_iterator bsi;
+ int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
+
+ cfun->after_inlining = true;
if (cfun->eh)
FOR_EACH_BB (bb)
*************** execute_fixup_cfg (void)
*** 271,279 ****
{
tree stmt = bsi_stmt (bsi);
tree call = get_call_expr_in (stmt);
! if (call && call_expr_flags (call) & (ECF_CONST | ECF_PURE))
! TREE_SIDE_EFFECTS (call) = 0;
if (!tree_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
remove_stmt_from_eh_region (stmt);
}
--- 328,347 ----
{
tree stmt = bsi_stmt (bsi);
tree call = get_call_expr_in (stmt);
+ tree decl = call ? get_callee_fndecl (call) : NULL;
! if (decl && call_expr_flags (call) & (ECF_CONST | ECF_PURE)
! && TREE_SIDE_EFFECTS (call))
! {
! if (gimple_in_ssa_p (cfun))
! {
! todo |= TODO_update_ssa;
! update_stmt (stmt);
! }
! TREE_SIDE_EFFECTS (call) = 0;
! }
! if (decl && TREE_NOTHROW (decl))
! TREE_NOTHROW (call) = 1;
if (!tree_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
remove_stmt_from_eh_region (stmt);
}
*************** execute_fixup_cfg (void)
*** 281,316 ****
}
if (current_function_has_nonlocal_label)
! FOR_EACH_BB (bb)
! {
! for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
! {
! tree stmt = bsi_stmt (bsi);
! if (tree_can_make_abnormal_goto (stmt))
! {
! if (stmt == bsi_stmt (bsi_last (bb)))
! {
! if (!has_abnormal_outgoing_edge_p (bb))
make_abnormal_goto_edges (bb, true);
! }
! else
! {
! edge e = split_block (bb, stmt);
! bb = e->src;
! make_abnormal_goto_edges (bb, true);
! }
! break;
! }
! }
! }
!
! cleanup_tree_cfg ();
/* Dump a textual representation of the flowgraph. */
if (dump_file)
dump_tree_cfg (dump_file, dump_flags);
! return 0;
}
struct tree_opt_pass pass_fixup_cfg =
--- 349,405 ----
}
if (current_function_has_nonlocal_label)
! {
! FOR_EACH_BB (bb)
! {
! for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
! {
! tree stmt = bsi_stmt (bsi);
! if (tree_can_make_abnormal_goto (stmt))
! {
! if (stmt == bsi_stmt (bsi_last (bb)))
! {
! if (!has_abnormal_outgoing_edge_p (bb))
! make_abnormal_goto_edges (bb, true);
! }
! else
! {
! edge e = split_block (bb, stmt);
! bb = e->src;
make_abnormal_goto_edges (bb, true);
! }
! break;
! }
!
! /* Update PHIs on nonlocal goto receivers we (possibly)
! just created new edges into. */
! if (TREE_CODE (stmt) == LABEL_EXPR
! && gimple_in_ssa_p (cfun))
! {
! tree target = LABEL_EXPR_LABEL (stmt);
! if (DECL_NONLOCAL (target))
! {
! tree phi;
!
! for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
! {
! todo |= TODO_update_ssa;
! gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
! (PHI_RESULT (phi)));
! mark_sym_for_renaming
! (SSA_NAME_VAR (PHI_RESULT (phi)));
! }
! }
! }
! }
! }
! }
/* Dump a textual representation of the flowgraph. */
if (dump_file)
dump_tree_cfg (dump_file, dump_flags);
! return todo;
}
struct tree_opt_pass pass_fixup_cfg =
*************** struct tree_opt_pass pass_fixup_cfg =
*** 326,334 ****
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
! 0, /* todo_flags_finish */
! 0 /* letter */
! };
/* Do the actions required to initialize internal data structures used
in tree-ssa optimization passes. */
--- 415,424 ----
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
! TODO_cleanup_cfg | TODO_ggc_collect
! | TODO_dump_func | TODO_verify_flow
! | TODO_verify_stmts,/* todo_flags_finish */
! 0 /* letter */ };
/* Do the actions required to initialize internal data structures used
in tree-ssa optimization passes. */
*************** execute_init_datastructures (void)
*** 341,350 ****
return 0;
}
struct tree_opt_pass pass_init_datastructures =
{
NULL, /* name */
! NULL, /* gate */
execute_init_datastructures, /* execute */
NULL, /* sub */
NULL, /* next */
--- 431,448 ----
return 0;
}
+ /* Gate: initialize or not the SSA datastructures. */
+
+ static bool
+ gate_init_datastructures (void)
+ {
+ return (optimize >= 1);
+ }
+
struct tree_opt_pass pass_init_datastructures =
{
NULL, /* name */
! gate_init_datastructures, /* gate */
execute_init_datastructures, /* execute */
NULL, /* sub */
NULL, /* next */
*************** tree_lowering_passes (tree fn)
*** 368,374 ****
--- 466,475 ----
tree_register_cfg_hooks ();
bitmap_obstack_initialize (NULL);
execute_pass_list (all_lowering_passes);
+ if (optimize && cgraph_global_info_ready)
+ execute_pass_list (pass_early_local_passes.sub);
free_dominance_info (CDI_POST_DOMINATORS);
+ free_dominance_info (CDI_DOMINATORS);
compact_blocks ();
current_function_decl = saved_current_function_decl;
bitmap_obstack_release (NULL);
*************** tree_rest_of_compilation (tree fndecl)
*** 408,413 ****
--- 509,517 ----
node = cgraph_node (fndecl);
+ /* Initialize the default bitmap obstack. */
+ bitmap_obstack_initialize (NULL);
+
/* We might need the body of this function so that we can expand
it inline somewhere else. */
if (cgraph_preserve_function_body_p (fndecl))
*************** tree_rest_of_compilation (tree fndecl)
*** 424,430 ****
We haven't necessarily assigned RTL to all variables yet, so it's
not safe to try to expand expressions involving them. */
cfun->x_dont_save_pending_sizes_p = 1;
! cfun->after_inlining = true;
if (flag_inline_trees)
{
--- 528,535 ----
We haven't necessarily assigned RTL to all variables yet, so it's
not safe to try to expand expressions involving them. */
cfun->x_dont_save_pending_sizes_p = 1;
!
! tree_register_cfg_hooks ();
if (flag_inline_trees)
{
*************** tree_rest_of_compilation (tree fndecl)
*** 453,464 ****
Kill it so it won't confuse us. */
cgraph_node_remove_callees (node);
-
- /* Initialize the default bitmap obstack. */
- bitmap_obstack_initialize (NULL);
bitmap_obstack_initialize (®_obstack); /* FIXME, only at RTL generation*/
-
- tree_register_cfg_hooks ();
/* Perform all tree transforms and optimizations. */
execute_pass_list (all_passes);
--- 558,564 ----
Index: tree-profile.c
===================================================================
*** tree-profile.c (revision 120286)
--- tree-profile.c (working copy)
*************** do_tree_profiling (void)
*** 237,242 ****
--- 237,246 ----
static unsigned int
tree_profiling (void)
{
+ /* Don't profile functions produced at destruction time, particularly
+ the gcov datastructure initializer. */
+ if (cgraph_state == CGRAPH_STATE_FINISHED)
+ return 0;
branch_prob ();
if (flag_branch_probabilities
&& flag_profile_values
*************** struct tree_opt_pass pass_tree_profile =
*** 267,299 ****
0 /* letter */
};
- /* Return 1 if tree-based profiling is in effect, else 0.
- If it is, set up hooks for tree-based profiling.
- Gate for pass_tree_profile. */
-
- static bool
- do_early_tree_profiling (void)
- {
- return (do_tree_profiling () && (!flag_unit_at_a_time || !optimize));
- }
-
- struct tree_opt_pass pass_early_tree_profile =
- {
- "early_tree_profile", /* name */
- do_early_tree_profiling, /* gate */
- tree_profiling, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_BRANCH_PROB, /* tv_id */
- PROP_gimple_leh | PROP_cfg, /* properties_required */
- PROP_gimple_leh | PROP_cfg, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- TODO_verify_stmts, /* todo_flags_finish */
- 0 /* letter */
- };
-
struct profile_hooks tree_profile_hooks =
{
tree_init_edge_profiler, /* init_edge_profiler */
--- 271,276 ----
Index: tree-cfg.c
===================================================================
*** tree-cfg.c (revision 120286)
--- tree-cfg.c (working copy)
*************** bsi_for_stmt (tree stmt)
*** 2803,2808 ****
--- 2803,2810 ----
static inline void
update_modified_stmts (tree t)
{
+ if (!ssa_operands_active ())
+ return;
if (TREE_CODE (t) == STATEMENT_LIST)
{
tree_stmt_iterator i;
Index: passes.c
===================================================================
*** passes.c (revision 120286)
--- passes.c (working copy)
*************** init_optimization_passes (void)
*** 441,446 ****
--- 441,448 ----
p = &all_ipa_passes;
NEXT_PASS (pass_early_ipa_inline);
NEXT_PASS (pass_early_local_passes);
+ /* The process of merging updates of IPA passes to operate on SSA is not
+ complete yet. */
NEXT_PASS (pass_ipa_cp);
NEXT_PASS (pass_ipa_inline);
NEXT_PASS (pass_ipa_reference);
*************** init_optimization_passes (void)
*** 461,483 ****
NEXT_PASS (pass_lower_complex_O0);
NEXT_PASS (pass_lower_vector);
NEXT_PASS (pass_warn_function_return);
- NEXT_PASS (pass_early_tree_profile);
*p = NULL;
p = &pass_early_local_passes.sub;
NEXT_PASS (pass_tree_profile);
NEXT_PASS (pass_cleanup_cfg);
NEXT_PASS (pass_rebuild_cgraph_edges);
*p = NULL;
p = &all_passes;
NEXT_PASS (pass_fixup_cfg);
- NEXT_PASS (pass_init_datastructures);
- NEXT_PASS (pass_expand_omp);
NEXT_PASS (pass_all_optimizations);
NEXT_PASS (pass_warn_function_noreturn);
- NEXT_PASS (pass_mudflap_2);
NEXT_PASS (pass_free_datastructures);
NEXT_PASS (pass_free_cfg_annotations);
NEXT_PASS (pass_expand);
NEXT_PASS (pass_rest_of_compilation);
--- 463,494 ----
NEXT_PASS (pass_lower_complex_O0);
NEXT_PASS (pass_lower_vector);
NEXT_PASS (pass_warn_function_return);
*p = NULL;
p = &pass_early_local_passes.sub;
NEXT_PASS (pass_tree_profile);
NEXT_PASS (pass_cleanup_cfg);
+ NEXT_PASS (pass_init_datastructures);
+ NEXT_PASS (pass_expand_omp);
+ NEXT_PASS (pass_all_early_optimizations);
NEXT_PASS (pass_rebuild_cgraph_edges);
*p = NULL;
+ p = &pass_all_early_optimizations.sub;
+ NEXT_PASS (pass_referenced_vars);
+ NEXT_PASS (pass_reset_cc_flags);
+ NEXT_PASS (pass_build_ssa);
+ NEXT_PASS (pass_early_warn_uninitialized);
+ NEXT_PASS (pass_cleanup_cfg);
+
+ *p = NULL;
+
p = &all_passes;
NEXT_PASS (pass_fixup_cfg);
NEXT_PASS (pass_all_optimizations);
NEXT_PASS (pass_warn_function_noreturn);
NEXT_PASS (pass_free_datastructures);
+ NEXT_PASS (pass_mudflap_2);
NEXT_PASS (pass_free_cfg_annotations);
NEXT_PASS (pass_expand);
NEXT_PASS (pass_rest_of_compilation);
*************** init_optimization_passes (void)
*** 485,494 ****
*p = NULL;
p = &pass_all_optimizations.sub;
- NEXT_PASS (pass_referenced_vars);
- NEXT_PASS (pass_reset_cc_flags);
NEXT_PASS (pass_create_structure_vars);
- NEXT_PASS (pass_build_ssa);
NEXT_PASS (pass_may_alias);
NEXT_PASS (pass_return_slot);
NEXT_PASS (pass_rename_ssa_copies);
--- 496,502 ----