Index: recog.c =================================================================== --- recog.c (revision 112761) +++ recog.c (working copy) @@ -2719,7 +2719,7 @@ split_insn (rtx insn) /* Split all insns in the function. If UPD_LIFE, update life info after. */ void -split_all_insns (int upd_life) +split_all_insns (void) { sbitmap blocks; bool changed; @@ -2787,18 +2787,7 @@ split_all_insns (int upd_life) } if (changed) - { - int old_last_basic_block = last_basic_block; - - find_many_sub_basic_blocks (blocks); - - if (old_last_basic_block != last_basic_block && upd_life) - blocks = sbitmap_resize (blocks, last_basic_block, 1); - } - - if (changed && upd_life) - update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES, - PROP_DEATH_NOTES); + find_many_sub_basic_blocks (blocks); #ifdef ENABLE_CHECKING verify_flow_info (); @@ -3470,7 +3459,8 @@ struct tree_opt_pass pass_peephole2 = static void rest_of_handle_split_all_insns (void) { - split_all_insns (1); + split_all_insns (); + update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES); } struct tree_opt_pass pass_split_all_insns = @@ -3490,27 +3480,25 @@ struct tree_opt_pass pass_split_all_insn 0 /* letter */ }; -/* The placement of the splitting that we do for shorten_branches - depends on whether regstack is used by the target or not. */ -static bool -gate_do_final_split (void) +static void +rest_of_handle_split_after_reload (void) { -#if defined (HAVE_ATTR_length) && !defined (STACK_REGS) - return 1; -#else - return 0; -#endif + /* If optimizing, then go ahead and split insns now. 
*/ +#ifndef STACK_REGS + if (optimize > 0) +#endif + split_all_insns (); } -struct tree_opt_pass pass_split_for_shorten_branches = +struct tree_opt_pass pass_split_after_reload = { - "split3", /* name */ - gate_do_final_split, /* gate */ - split_all_insns_noflow, /* execute */ + "split2", /* name */ + NULL, /* gate */ + rest_of_handle_split_after_reload, /* execute */ NULL, /* sub */ NULL, /* next */ 0, /* static_pass_number */ - TV_SHORTEN_BRANCH, /* tv_id */ + 0, /* tv_id */ 0, /* properties_required */ 0, /* properties_provided */ 0, /* properties_destroyed */ @@ -3519,7 +3507,6 @@ struct tree_opt_pass pass_split_for_shor 0 /* letter */ }; - static bool gate_handle_split_before_regstack (void) { @@ -3538,11 +3525,17 @@ gate_handle_split_before_regstack (void) #endif } +static void +rest_of_handle_split_before_regstack (void) +{ + split_all_insns (); +} + struct tree_opt_pass pass_split_before_regstack = { - "split2", /* name */ + "split3", /* name */ gate_handle_split_before_regstack, /* gate */ - rest_of_handle_split_all_insns, /* execute */ + rest_of_handle_split_before_regstack, /* execute */ NULL, /* sub */ NULL, /* next */ 0, /* static_pass_number */ @@ -3554,3 +3547,34 @@ struct tree_opt_pass pass_split_before_r TODO_dump_func, /* todo_flags_finish */ 0 /* letter */ }; + +/* The placement of the splitting that we do for shorten_branches + depends on whether regstack is used by the target or not. 
*/ +static bool +gate_do_final_split (void) +{ +#if defined (HAVE_ATTR_length) && !defined (STACK_REGS) + return 1; +#else + return 0; +#endif +} + +struct tree_opt_pass pass_split_for_shorten_branches = +{ + "split4", /* name */ + gate_do_final_split, /* gate */ + split_all_insns_noflow, /* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + TV_SHORTEN_BRANCH, /* tv_id */ + 0, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + 0, /* todo_flags_start */ + TODO_dump_func, /* todo_flags_finish */ + 0 /* letter */ +}; + + Index: rtl.h =================================================================== --- rtl.h (revision 112761) +++ rtl.h (working copy) @@ -1779,7 +1779,7 @@ extern const char *decode_asm_operands ( extern enum reg_class reg_preferred_class (int); extern enum reg_class reg_alternate_class (int); -extern void split_all_insns (int); +extern void split_all_insns (void); extern void split_all_insns_noflow (void); #define MAX_SAVED_CONST_INT 64 Index: bb-reorder.c =================================================================== --- bb-reorder.c (revision 112761) +++ bb-reorder.c (working copy) @@ -2200,13 +2200,9 @@ gate_handle_reorder_blocks (void) static void rest_of_handle_reorder_blocks (void) { - bool changed; - struct df * saved_df = rtl_df; - rtl_df = NULL; - /* Last attempt to optimize CFG, as scheduling, peepholing and insn splitting possibly introduced more crossjumping opportunities. 
*/ - changed = cleanup_cfg (CLEANUP_EXPENSIVE); + cleanup_cfg (CLEANUP_EXPENSIVE); if (flag_sched2_use_traces && flag_schedule_insns_after_reload) { @@ -2219,11 +2215,7 @@ rest_of_handle_reorder_blocks (void) reorder_basic_blocks (); if (flag_reorder_blocks || flag_reorder_blocks_and_partition || (flag_sched2_use_traces && flag_schedule_insns_after_reload)) - changed |= cleanup_cfg (CLEANUP_EXPENSIVE); - - rtl_df = saved_df; - update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES, - PROP_DEATH_NOTES); + cleanup_cfg (CLEANUP_EXPENSIVE); /* Add NOTE_INSN_SWITCH_TEXT_SECTIONS notes. */ insert_section_boundary_note (); Index: reg-stack.c =================================================================== --- reg-stack.c (revision 112761) +++ reg-stack.c (working copy) @@ -1327,9 +1327,12 @@ subst_stack_regs_pat (rtx insn, stack re emit_pop_insn (insn, regstack, *src, EMIT_AFTER); return control_flow_insn_deleted; } - /* ??? Uninitialized USE should not happen. */ - else - gcc_assert (get_hard_regnum (regstack, *src) != -1); + /* Uninitialized USE might happen for functions returning an uninitialized + value. We will properly initialize the USE on the edge to EXIT_BLOCK, + so it is safe to ignore the use here. This is consistent with the + behaviour of the dataflow analyzer, which ignores the USE too. (This + also implies that forcibly initializing the register to NaN here would + lead to an ICE later, since the REG_DEAD notes are not issued.) */ break; case CLOBBER: Index: sched-rgn.c =================================================================== --- sched-rgn.c (revision 112761) +++ sched-rgn.c (working copy) @@ -2571,7 +2571,7 @@ rest_of_handle_sched2 (void) /* Do control and data sched analysis again, and write some more of the results to dump file. 
*/ - split_all_insns (1); + split_all_insns (); if (flag_sched2_use_superblocks || flag_sched2_use_traces) { Index: flow.c =================================================================== --- flow.c (revision 112761) +++ flow.c (working copy) @@ -4172,7 +4172,7 @@ rest_of_handle_flow2 (void) #ifndef STACK_REGS if (optimize > 0) #endif - split_all_insns (0); + split_all_insns (); if (flag_branch_target_load_optimize) branch_target_load_optimize (epilogue_completed);