This is the mail archive of the
gcc-patches@gcc.gnu.org
mailing list for the GCC project.
new code committed to dataflow branch
- From: Kenneth Zadeck <zadeck at naturalbridge dot com>
- To: GCC Patches <gcc-patches at gcc dot gnu dot org>, "Berlin, Daniel" <dberlin at dberlin dot org>, "Zadeck, Kenneth" <zadeck at naturalbridge dot com>
- Date: Sat, 08 Apr 2006 21:23:41 -0400
- Subject: new code committed to dataflow branch
Made bt-load.c use its own instance of df and removed all uses of flow.
Stripped the calls to split_all_insns and
branch_target_load_optimize out of flow2
and into their own passes.
There is now no use of flow after branch_target_load_optimize2.
This patch was bootstrapped and regression tested on three platforms.
x86_64-unknown-linux-gnu
powerpc64-unknown-linux-gnu
i686-pc-linux-gnu
Kenny
2006-04-08 Kenneth Zadeck <zadeck@naturalbridge.com>
* tree-pass.h (pass_split_after_reload,
pass_branch_target_load_optimize1,
pass_branch_target_load_optimize2): Added.
(pass_branch_target_load_optimize): Deleted.
* flow.c (rest_of_handle_flow2): Split the calls to split_all_insns and
branch_target_load_optimize into their own passes.
* passes.c (init_optimization_passes): Ditto.
(init_optimization_passes): Moved clear_df pass to before
second branch_target_load_optimize pass.
* bt-load.c (compute_defs_uses_and_gen, build_btr_def_use_webs,
migrate_btr_defs): Threaded private copy of df into these functions.
(branch_target_load_optimize): Made private and added a local
instance of df. Removed all references to flow.
(rest_of_handle_branch_target_load_optimize1): New function.
(rest_of_handle_branch_target_load_optimize): Renamed to
rest_of_handle_branch_target_load_optimize2.
* rtl.h (branch_target_load_optimize): Removed.
Index: tree-pass.h
===================================================================
--- tree-pass.h (revision 112761)
+++ tree-pass.h (working copy)
@@ -375,13 +375,15 @@ extern struct tree_opt_pass pass_value_p
extern struct tree_opt_pass pass_remove_death_notes;
extern struct tree_opt_pass pass_postreload_cse;
extern struct tree_opt_pass pass_gcse2;
+extern struct tree_opt_pass pass_split_after_reload;
+extern struct tree_opt_pass pass_branch_target_load_optimize1;
extern struct tree_opt_pass pass_flow2;
extern struct tree_opt_pass pass_stack_adjustments;
extern struct tree_opt_pass pass_peephole2;
extern struct tree_opt_pass pass_if_after_reload;
extern struct tree_opt_pass pass_regrename;
extern struct tree_opt_pass pass_reorder_blocks;
-extern struct tree_opt_pass pass_branch_target_load_optimize;
+extern struct tree_opt_pass pass_branch_target_load_optimize2;
extern struct tree_opt_pass pass_leaf_regs;
extern struct tree_opt_pass pass_sched2;
extern struct tree_opt_pass pass_stack_regs;
Index: flow.c
===================================================================
--- flow.c (revision 112775)
+++ flow.c (working copy)
@@ -4168,15 +4168,6 @@ rest_of_handle_flow2 (void)
#if 0
int i;
#endif
- /* If optimizing, then go ahead and split insns now. */
-#ifndef STACK_REGS
- if (optimize > 0)
-#endif
- split_all_insns ();
-
- if (flag_branch_target_load_optimize)
- branch_target_load_optimize (epilogue_completed);
-
if (optimize)
cleanup_cfg (CLEANUP_EXPENSIVE);
#if 0
Index: passes.c
===================================================================
--- passes.c (revision 112785)
+++ passes.c (working copy)
@@ -673,6 +673,8 @@ init_optimization_passes (void)
NEXT_PASS (pass_reset_df_after_reload);
NEXT_PASS (pass_gcse2);
NEXT_PASS (pass_rtl_dse);
+ NEXT_PASS (pass_split_after_reload);
+ NEXT_PASS (pass_branch_target_load_optimize1);
NEXT_PASS (pass_flow2);
NEXT_PASS (pass_rtl_seqabstr);
NEXT_PASS (pass_stack_adjustments);
@@ -683,9 +685,9 @@ init_optimization_passes (void)
NEXT_PASS (pass_regrename);
NEXT_PASS (pass_rtl_dce);
NEXT_PASS (pass_reorder_blocks);
- NEXT_PASS (pass_branch_target_load_optimize);
- NEXT_PASS (pass_leaf_regs);
NEXT_PASS (pass_clear_df);
+ NEXT_PASS (pass_branch_target_load_optimize2);
+ NEXT_PASS (pass_leaf_regs);
NEXT_PASS (pass_sched2);
NEXT_PASS (pass_split_before_regstack);
NEXT_PASS (pass_stack_regs);
Index: bt-load.c
===================================================================
--- bt-load.c (revision 112761)
+++ bt-load.c (working copy)
@@ -122,12 +122,12 @@ static btr_user new_btr_user (basic_bloc
static void dump_hard_reg_set (HARD_REG_SET);
static void dump_btrs_live (int);
static void note_other_use_this_block (unsigned int, btr_user);
-static void compute_defs_uses_and_gen (fibheap_t, btr_def *,btr_user *,
+static void compute_defs_uses_and_gen (struct df *, fibheap_t, btr_def *,btr_user *,
sbitmap *, sbitmap *, HARD_REG_SET *);
static void compute_kill (sbitmap *, sbitmap *, HARD_REG_SET *);
static void compute_out (sbitmap *bb_out, sbitmap *, sbitmap *, int);
static void link_btr_uses (btr_def *, btr_user *, sbitmap *, sbitmap *, int);
-static void build_btr_def_use_webs (fibheap_t);
+static void build_btr_def_use_webs (struct df *, fibheap_t);
static int block_at_edge_of_live_range_p (int, btr_def);
static void clear_btr_from_live_range (btr_def def);
static void add_btr_to_live_range (btr_def, int);
@@ -138,7 +138,7 @@ static void combine_btr_defs (btr_def, H
static void btr_def_live_range (btr_def, HARD_REG_SET *);
static void move_btr_def (basic_block, int, btr_def, bitmap, HARD_REG_SET *);
static int migrate_btr_def (btr_def, int);
-static void migrate_btr_defs (enum reg_class, int);
+static void migrate_btr_defs (struct df *df, enum reg_class, int);
static int can_move_up (basic_block, rtx, int);
static void note_btr_set (rtx, rtx, void *);
@@ -446,7 +446,8 @@ note_btr_set (rtx dest, rtx set ATTRIBUT
}
static void
-compute_defs_uses_and_gen (fibheap_t all_btr_defs, btr_def *def_array,
+compute_defs_uses_and_gen (struct df *df,
+ fibheap_t all_btr_defs, btr_def *def_array,
btr_user *use_array, sbitmap *btr_defset,
sbitmap *bb_gen, HARD_REG_SET *btrs_written)
{
@@ -479,7 +480,7 @@ compute_defs_uses_and_gen (fibheap_t all
CLEAR_HARD_REG_SET (info.btrs_written_in_block);
for (reg = first_btr; reg <= last_btr; reg++)
if (TEST_HARD_REG_BIT (all_btrs, reg)
- && REGNO_REG_SET_P (DF_LIVE_IN (rtl_df, bb), reg))
+ && REGNO_REG_SET_P (DF_LIVE_IN (df, bb), reg))
SET_HARD_REG_BIT (info.btrs_live_in_block, reg);
for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
@@ -580,7 +581,7 @@ compute_defs_uses_and_gen (fibheap_t all
COPY_HARD_REG_SET (btrs_live[i], info.btrs_live_in_block);
COPY_HARD_REG_SET (btrs_written[i], info.btrs_written_in_block);
- REG_SET_TO_HARD_REG_SET (btrs_live_at_end[i], DF_LIVE_OUT (rtl_df, bb));
+ REG_SET_TO_HARD_REG_SET (btrs_live_at_end[i], DF_LIVE_OUT (df, bb));
/* If this block ends in a jump insn, add any uses or even clobbers
of branch target registers that it might have. */
for (insn = BB_END (bb); insn != BB_HEAD (bb) && ! INSN_P (insn); )
@@ -777,7 +778,7 @@ link_btr_uses (btr_def *def_array, btr_u
}
static void
-build_btr_def_use_webs (fibheap_t all_btr_defs)
+build_btr_def_use_webs (struct df *df, fibheap_t all_btr_defs)
{
const int max_uid = get_max_uid ();
btr_def *def_array = XCNEWVEC (btr_def, max_uid);
@@ -791,7 +792,7 @@ build_btr_def_use_webs (fibheap_t all_bt
sbitmap_vector_zero (btr_defset, (last_btr - first_btr) + 1);
- compute_defs_uses_and_gen (all_btr_defs, def_array, use_array, btr_defset,
+ compute_defs_uses_and_gen (df, all_btr_defs, def_array, use_array, btr_defset,
bb_gen, btrs_written);
bb_kill = sbitmap_vector_alloc (n_basic_blocks, max_uid);
@@ -1388,7 +1389,7 @@ migrate_btr_def (btr_def def, int min_co
/* Attempt to move instructions that set target registers earlier
in the flowgraph, away from their corresponding uses. */
static void
-migrate_btr_defs (enum reg_class btr_class, int allow_callee_save)
+migrate_btr_defs (struct df *df, enum reg_class btr_class, int allow_callee_save)
{
fibheap_t all_btr_defs = fibheap_new ();
int reg;
@@ -1423,7 +1424,7 @@ migrate_btr_defs (enum reg_class btr_cla
btrs_live = xcalloc (n_basic_blocks, sizeof (HARD_REG_SET));
btrs_live_at_end = xcalloc (n_basic_blocks, sizeof (HARD_REG_SET));
- build_btr_def_use_webs (all_btr_defs);
+ build_btr_def_use_webs (df, all_btr_defs);
while (!fibheap_empty (all_btr_defs))
{
@@ -1449,49 +1450,91 @@ migrate_btr_defs (enum reg_class btr_cla
fibheap_delete (all_btr_defs);
}
-void
+static void
branch_target_load_optimize (bool after_prologue_epilogue_gen)
{
enum reg_class class = targetm.branch_target_register_class ();
if (class != NO_REGS)
{
+ struct df * df = df_init (DF_HARD_REGS);
+ df_ur_add_problem (df, 0);
+
/* Initialize issue_rate. */
if (targetm.sched.issue_rate)
issue_rate = targetm.sched.issue_rate ();
else
issue_rate = 1;
- /* Build the CFG for migrate_btr_defs. */
+ if (!after_prologue_epilogue_gen)
+ {
+ /* Build the CFG for migrate_btr_defs. */
#if 1
- /* This may or may not be needed, depending on where we
- run this phase. */
- cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
+ /* This may or may not be needed, depending on where we
+ run this phase. */
+ cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
#endif
+ }
+ df_analyze (df);
- life_analysis (0);
/* Dominator info is also needed for migrate_btr_def. */
calculate_dominance_info (CDI_DOMINATORS);
- migrate_btr_defs (class,
+ migrate_btr_defs (df, class,
(targetm.branch_target_register_callee_saved
(after_prologue_epilogue_gen)));
free_dominance_info (CDI_DOMINATORS);
+ df_finish (df);
+
+ if (!after_prologue_epilogue_gen)
+ {
+ update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
+ PROP_DEATH_NOTES | PROP_REG_INFO);
+ }
- update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
- PROP_DEATH_NOTES | PROP_REG_INFO);
}
}
static bool
-gate_handle_branch_target_load_optimize (void)
+gate_handle_branch_target_load_optimize1 (void)
+{
+ return flag_branch_target_load_optimize;
+}
+
+
+static void
+rest_of_handle_branch_target_load_optimize1 (void)
+{
+ branch_target_load_optimize (epilogue_completed);
+}
+
+struct tree_opt_pass pass_branch_target_load_optimize1 =
+{
+ "btl1", /* name */
+ gate_handle_branch_target_load_optimize1, /* gate */
+ rest_of_handle_branch_target_load_optimize1, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func |
+ TODO_ggc_collect, /* todo_flags_finish */
+ 'd' /* letter */
+};
+
+static bool
+gate_handle_branch_target_load_optimize2 (void)
{
return (optimize > 0 && flag_branch_target_load_optimize2);
}
static void
-rest_of_handle_branch_target_load_optimize (void)
+rest_of_handle_branch_target_load_optimize2 (void)
{
static int warned = 0;
@@ -1511,11 +1554,11 @@ rest_of_handle_branch_target_load_optimi
branch_target_load_optimize (epilogue_completed);
}
-struct tree_opt_pass pass_branch_target_load_optimize =
+struct tree_opt_pass pass_branch_target_load_optimize2 =
{
- "btl", /* name */
- gate_handle_branch_target_load_optimize, /* gate */
- rest_of_handle_branch_target_load_optimize, /* execute */
+ "btl2", /* name */
+ gate_handle_branch_target_load_optimize2, /* gate */
+ rest_of_handle_branch_target_load_optimize2, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
Index: rtl.h
===================================================================
--- rtl.h (revision 112785)
+++ rtl.h (working copy)
@@ -2119,9 +2119,6 @@ extern void print_inline_rtx (FILE *, rt
/* In loop.c */
extern void init_loop (void);
-/* In bt-load.c */
-extern void branch_target_load_optimize (bool);
-
/* In function.c */
extern void reposition_prologue_and_epilogue_notes (rtx);
extern void thread_prologue_and_epilogue_insns (rtx);