	* emit-rtl.c (active_insn_p): Make JUMP_TABLE_DATA insns non-active.
	(emit_pattern_after_setloc): Set location on any nondebug insn.
	(emit_pattern_before_setloc): Likewise.
	* final.c (reemit_insn_block_notes): Remove now-redundant test.
	* function.c (active_insn_between): Rename to ...
	(active_insns_in_bb_p): ... this.  Use FOR_BB_INSNS_REVERSE to walk
	the insns from last to first, on the assumption that an active insn
	is likely to be found sooner in that order.
	(thread_prologue_and_epilogue_insns): Adjust for the renaming.
	* reorg.c (dbr_next_active_insn, dbr_prev_active_insn): New functions.
	(first_active_target_insn, optimize_skip, try_merge_delay_insns,
	own_thread_p, fill_simple_delay_slots, follow_jumps,
	fill_slots_from_thread, fill_eager_delay_slots,
	label_before_next_insn, relax_delay_slots, dbr_schedule): Use
	dbr_next_active_insn instead of next_active_insn, or
	dbr_prev_active_insn instead of prev_active_insn.
	* config/ia64/ia64.c (emit_all_insn_group_barriers): Remove
	now-redundant test.
	(final_emit_insn_group_barriers): Likewise.

Index: emit-rtl.c
===================================================================
--- emit-rtl.c	(revision 205336)
+++ emit-rtl.c	(working copy)
@@ -3298,7 +3298,6 @@ int
 active_insn_p (const_rtx insn)
 {
   return (CALL_P (insn) || JUMP_P (insn)
-	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
 	  || (NONJUMP_INSN_P (insn)
 	      && (! reload_completed
 		  || (GET_CODE (PATTERN (insn)) != USE
@@ -4487,7 +4486,7 @@ emit_pattern_after_setloc (rtx pattern, rtx after,
     after = NEXT_INSN (after);
   while (1)
     {
-      if (active_insn_p (after) && !INSN_LOCATION (after))
+      if (NONDEBUG_INSN_P (after) && !INSN_LOCATION (after))
 	INSN_LOCATION (after) = loc;
       if (after == last)
 	break;
@@ -4596,7 +4595,7 @@ emit_pattern_before_setloc (rtx pattern, rtx befor
     first = NEXT_INSN (first);
   while (1)
     {
-      if (active_insn_p (first) && !INSN_LOCATION (first))
+      if (NONDEBUG_INSN_P (first) && !INSN_LOCATION (first))
 	INSN_LOCATION (first) = loc;
       if (first == last)
 	break;
Index: final.c
===================================================================
--- final.c	(revision 205336)
+++ final.c	(working copy)
@@ -1672,10 +1672,6 @@ reemit_insn_block_notes (void)
 	}
 
       if (!active_insn_p (insn))
-	continue;
-
-      /* Avoid putting scope notes between jump table and its label.  */
-      if (JUMP_TABLE_DATA_P (insn))
 	continue;
 
       this_block = insn_scope (insn);
Index: function.c
===================================================================
--- function.c	(revision 205336)
+++ function.c	(working copy)
@@ -5693,18 +5693,14 @@ dup_block_and_redirect (basic_block bb, basic_bloc
 #endif
 
 #if defined (HAVE_return) || defined (HAVE_simple_return)
-/* Return true if there are any active insns between HEAD and TAIL.  */
+/* Return true if there are any active insns in BB.  */
 static bool
-active_insn_between (rtx head, rtx tail)
+active_insns_in_bb_p (basic_block bb)
 {
-  while (tail)
-    {
-      if (active_insn_p (tail))
-	return true;
-      if (tail == head)
-	return false;
-      tail = PREV_INSN (tail);
-    }
+  rtx insn;
+  FOR_BB_INSNS_REVERSE (bb, insn)
+    if (active_insn_p (insn))
+      return true;
   return false;
 }
 
@@ -6354,7 +6350,7 @@ thread_prologue_and_epilogue_insns (void)
 	  e = EDGE_I (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds, i);
 	  if (LABEL_P (BB_HEAD (e->src))
 	      && !bitmap_bit_p (&bb_flags, e->src->index)
-	      && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
+	      && !active_insns_in_bb_p (e->src))
 	    unconverted_simple_returns
 	      = convert_jumps_to_returns (e->src, true,
 					  unconverted_simple_returns);
@@ -6384,7 +6380,7 @@ thread_prologue_and_epilogue_insns (void)
 	  basic_block last_bb = exit_fallthru_edge->src;
 
 	  if (LABEL_P (BB_HEAD (last_bb))
-	      && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
+	      && !active_insns_in_bb_p (last_bb))
 	    convert_jumps_to_returns (last_bb, false, vNULL);
 
 	  if (EDGE_COUNT (last_bb->preds) != 0
Index: reorg.c
===================================================================
--- reorg.c	(revision 205336)
+++ reorg.c	(working copy)
@@ -248,7 +248,41 @@ static void fill_eager_delay_slots (void);
 static void relax_delay_slots (rtx);
 static void make_return_insns (rtx);
 
-/* A wrapper around next_active_insn which takes care to return ret_rtx
+/* A wrapper around next_active_insn which takes care to stop at jump
+   table data, which used to be a JUMP_INSN and used to be treated as an
+   active insn.  A lot of code in reorg.c still expects next_active_insn
+   to return jump table data.  */
+
+static rtx
+dbr_next_active_insn (rtx insn)
+{
+  while (insn)
+    {
+      insn = NEXT_INSN (insn);
+      if (insn == 0 || active_insn_p (insn)
+	  || JUMP_TABLE_DATA_P (insn))
+	break;
+    }
+
+  return insn;
+}
+
+/* Likewise, but walk backwards.  */
+
+static rtx
+dbr_prev_active_insn (rtx insn)
+{
+  while (insn)
+    {
+      insn = PREV_INSN (insn);
+      if (insn == 0 || active_insn_p (insn)
+	  || JUMP_TABLE_DATA_P (insn))
+	break;
+    }
+  return insn;
+}
+
+/* A wrapper around dbr_next_active_insn which takes care to return ret_rtx
    unchanged.  */
 
 static rtx
@@ -256,8 +290,9 @@
 first_active_target_insn (rtx insn)
 {
   if (ANY_RETURN_P (insn))
     return insn;
-  return next_active_insn (insn);
+  return dbr_next_active_insn (insn);
 }
+
 
 /* Return true iff INSN is a simplejump, or any kind of return insn.  */
@@ -762,7 +797,7 @@ static rtx
 optimize_skip (rtx insn)
 {
   rtx trial = next_nonnote_insn (insn);
-  rtx next_trial = next_active_insn (trial);
+  rtx next_trial = dbr_next_active_insn (trial);
   rtx delay_list = 0;
   int flags;
 
@@ -783,7 +818,7 @@ optimize_skip (rtx insn)
      we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
-  if (next_trial == next_active_insn (JUMP_LABEL (insn))
+  if (next_trial == dbr_next_active_insn (JUMP_LABEL (insn))
       || (next_trial != 0
	   && simplejump_or_return_p (next_trial)
	   && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)))
@@ -797,7 +832,7 @@ optimize_skip (rtx insn)
 	}
 
       delay_list = add_to_delay_list (trial, NULL_RTX);
-      next_trial = next_active_insn (trial);
+      next_trial = dbr_next_active_insn (trial);
       update_block (trial, trial);
       delete_related_insns (trial);
@@ -1355,7 +1390,7 @@ try_merge_delay_insns (rtx insn, rtx thread)
 	{
 	  update_block (trial, thread);
 	  if (trial == thread)
-	    thread = next_active_insn (thread);
+	    thread = dbr_next_active_insn (thread);
 
 	  delete_related_insns (trial);
 	  INSN_FROM_TARGET_P (next_to_match) = 0;
@@ -1704,7 +1739,7 @@ own_thread_p (rtx thread, rtx label, int allow_fal
     return 0;
 
   /* Get the first active insn, or THREAD, if it is an active insn.  */
-  active_insn = next_active_insn (PREV_INSN (thread));
+  active_insn = dbr_next_active_insn (PREV_INSN (thread));
 
   for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
     if (LABEL_P (insn)
@@ -1973,7 +2008,7 @@ fill_simple_delay_slots (int non_jumps_p)
       else
 	flags = get_jump_flags (insn, NULL_RTX);
 
-      if ((trial = next_active_insn (insn))
+      if ((trial = dbr_next_active_insn (insn))
 	  && JUMP_P (trial)
 	  && simplejump_p (trial)
 	  && eligible_for_delay (insn, slots_filled, trial, flags)
@@ -2209,7 +2244,7 @@ fill_simple_delay_slots (int non_jumps_p)
 	      && trial
 	      && jump_to_label_p (trial)
 	      && simplejump_p (trial)
-	      && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
+	      && (next_trial = dbr_next_active_insn (JUMP_LABEL (trial))) != 0
 	      && ! (NONJUMP_INSN_P (next_trial)
 		    && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
 	      && !JUMP_P (next_trial)
@@ -2251,7 +2286,7 @@ fill_simple_delay_slots (int non_jumps_p)
 	  && slots_filled != slots_to_fill)
 	delay_list
 	  = fill_slots_from_thread (insn, const_true_rtx,
-				    next_active_insn (JUMP_LABEL (insn)),
+				    dbr_next_active_insn (JUMP_LABEL (insn)),
 				    NULL, 1, 1,
 				    own_thread_p (JUMP_LABEL (insn),
 						  JUMP_LABEL (insn), 0),
@@ -2292,7 +2327,7 @@ follow_jumps (rtx label, rtx jump, bool *crossing)
     return label;
   for (depth = 0;
        (depth < 10
-	&& (insn = next_active_insn (value)) != 0
+	&& (insn = dbr_next_active_insn (value)) != 0
 	&& JUMP_P (insn)
 	&& JUMP_LABEL (insn) != NULL_RTX
 	&& ((any_uncondjump_p (insn) && onlyjump_p (insn))
@@ -2442,7 +2477,7 @@ fill_slots_from_thread (rtx insn, rtx condition, r
 		  update_block (trial, thread);
 		  if (trial == thread)
 		    {
-		      thread = next_active_insn (thread);
+		      thread = dbr_next_active_insn (thread);
 		      if (new_thread == trial)
 			new_thread = thread;
 		    }
@@ -2452,7 +2487,7 @@ fill_slots_from_thread (rtx insn, rtx condition, r
 	      else
 		{
 		  update_reg_unused_notes (prior_insn, trial);
-		  new_thread = next_active_insn (trial);
+		  new_thread = dbr_next_active_insn (trial);
 		}
 
 	      continue;
@@ -2520,7 +2555,7 @@ fill_slots_from_thread (rtx insn, rtx condition, r
 		      update_block (trial, thread);
 		      if (trial == thread)
 			{
-			  thread = next_active_insn (thread);
+			  thread = dbr_next_active_insn (thread);
 			  if (new_thread == trial)
 			    new_thread = thread;
 			}
@@ -2565,7 +2600,7 @@ fill_slots_from_thread (rtx insn, rtx condition, r
 		    LABEL_NUSES (JUMP_LABEL (trial))--;
 		}
 	      else
-		new_thread = next_active_insn (trial);
+		new_thread = dbr_next_active_insn (trial);
 
 	      temp = own_thread ? trial : copy_delay_slot_insn (trial);
 	      if (thread_if_true)
@@ -2592,7 +2627,7 @@ fill_slots_from_thread (rtx insn, rtx condition, r
 		 to call update_block and delete_insn.  */
 	      fix_reg_dead_note (prior_insn, insn);
 	      update_reg_unused_notes (prior_insn, new_thread);
-	      new_thread = next_active_insn (new_thread);
+	      new_thread = dbr_next_active_insn (new_thread);
 	    }
 	  break;
 	}
@@ -2739,14 +2774,14 @@ fill_slots_from_thread (rtx insn, rtx condition, r
 	      update_block (trial, thread);
 	      if (trial == thread)
 		{
-		  thread = next_active_insn (thread);
+		  thread = dbr_next_active_insn (thread);
 		  if (new_thread == trial)
 		    new_thread = thread;
 		}
 	      delete_related_insns (trial);
 	    }
 	  else
-	    new_thread = next_active_insn (trial);
+	    new_thread = dbr_next_active_insn (trial);
 
 	  ninsn = own_thread ? trial : copy_delay_slot_insn (trial);
 	  if (thread_if_true)
@@ -2864,7 +2899,7 @@ fill_eager_delay_slots (void)
 	}
       else
 	{
-	  fallthrough_insn = next_active_insn (insn);
+	  fallthrough_insn = dbr_next_active_insn (insn);
 	  own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
 	  prediction = mostly_true_jump (insn);
 	}
@@ -2911,7 +2946,7 @@ fill_eager_delay_slots (void)
 	  if (delay_list == 0)
 	    delay_list
 	      = fill_slots_from_thread (insn, condition, insn_at_target,
-					next_active_insn (insn), 0, 1,
+					dbr_next_active_insn (insn), 0, 1,
 					own_target,
 					slots_to_fill, &slots_filled,
 					delay_list);
@@ -3107,7 +3142,7 @@ delete_jump (rtx insn)
 static rtx
 label_before_next_insn (rtx x, rtx scan_limit)
 {
-  rtx insn = next_active_insn (x);
+  rtx insn = dbr_next_active_insn (x);
   while (insn)
     {
       insn = PREV_INSN (insn);
@@ -3136,7 +3171,7 @@ relax_delay_slots (rtx first)
       rtx other;
       bool crossing;
 
-      next = next_active_insn (insn);
+      next = dbr_next_active_insn (insn);
 
       /* If this is a jump insn, see if it now jumps to a jump, jumps to
 	 the next insn, or jumps to a label that is not the last of a
@@ -3151,7 +3186,7 @@ relax_delay_slots (rtx first)
 	  if (ANY_RETURN_P (target_label))
 	    target_label = find_end_label (target_label);
 
-	  if (target_label && next_active_insn (target_label) == next
+	  if (target_label && dbr_next_active_insn (target_label) == next
 	      && ! condjump_in_parallel_p (insn))
 	    {
 	      delete_jump (insn);
@@ -3171,7 +3206,8 @@ relax_delay_slots (rtx first)
 	  if (next && simplejump_or_return_p (next)
 	      && any_condjump_p (insn)
 	      && target_label
-	      && next_active_insn (target_label) == next_active_insn (next)
+	      && dbr_next_active_insn (target_label)
+		 == dbr_next_active_insn (next)
 	      && no_labels_between_p (insn, next))
 	    {
 	      rtx label = JUMP_LABEL (next);
@@ -3212,7 +3248,7 @@ relax_delay_slots (rtx first)
 	 we would then be making the more common case longer.  */
 
       if (simplejump_or_return_p (insn)
-	  && (other = prev_active_insn (insn)) != 0
+	  && (other = dbr_prev_active_insn (insn)) != 0
 	  && any_condjump_p (other)
 	  && no_labels_between_p (other, insn)
 	  && 0 > mostly_true_jump (other))
@@ -3237,7 +3273,7 @@ relax_delay_slots (rtx first)
 	  if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
 	    {
 	      delete_from_delay_slot (XVECEXP (pat, 0, 1));
-	      next = prev_active_insn (next);
+	      next = dbr_prev_active_insn (next);
 	      continue;
 	    }
 
@@ -3311,7 +3347,7 @@ relax_delay_slots (rtx first)
 
       /* If the first insn at TARGET_LABEL is redundant with a previous
 	 insn, redirect the jump to the following insn and process again.
-	 We use next_real_insn instead of next_active_insn so we
+	 We use next_real_insn instead of dbr_next_active_insn so we
 	 don't skip USE-markers, or we'll end up with incorrect
 	 liveness info.  */
       trial = next_real_insn (target_label);
@@ -3321,7 +3357,7 @@ relax_delay_slots (rtx first)
 	{
 	  /* Figure out where to emit the special USE insn so we don't
 	     later incorrectly compute register live/death info.  */
-	  rtx tmp = next_active_insn (trial);
+	  rtx tmp = dbr_next_active_insn (trial);
 	  if (tmp == 0)
 	    tmp = find_end_label (simple_return_rtx);
 
@@ -3364,7 +3400,7 @@ relax_delay_slots (rtx first)
       /* See if we have a simple (conditional) jump that is useless.  */
       if (! INSN_ANNULLED_BRANCH_P (delay_insn)
 	  && ! condjump_in_parallel_p (delay_insn)
-	  && prev_active_insn (target_label) == insn
+	  && dbr_prev_active_insn (target_label) == insn
 	  && ! BARRIER_P (prev_nonnote_insn (target_label))
 #ifdef HAVE_cc0
 	  /* If the last insn in the delay slot sets CC0 for some insn,
@@ -3429,7 +3465,7 @@ relax_delay_slots (rtx first)
       if (! INSN_ANNULLED_BRANCH_P (delay_insn)
 	  && any_condjump_p (delay_insn)
 	  && next && simplejump_or_return_p (next)
-	  && next_active_insn (target_label) == next_active_insn (next)
+	  && dbr_next_active_insn (target_label) == dbr_next_active_insn (next)
 	  && no_labels_between_p (insn, next))
 	{
 	  rtx label = JUMP_LABEL (next);
@@ -3479,11 +3515,11 @@ relax_delay_slots (rtx first)
 	try_merge_delay_insns (insn, next);
       else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
 	       && own_thread_p (target_label, target_label, 0))
-	try_merge_delay_insns (insn, next_active_insn (target_label));
+	try_merge_delay_insns (insn, dbr_next_active_insn (target_label));
 
       /* If we get here, we haven't deleted INSN.  But we may have deleted
 	 NEXT, so recompute it.  */
-      next = next_active_insn (insn);
+      next = dbr_next_active_insn (insn);
     }
 }
 
@@ -3668,7 +3704,8 @@ dbr_schedule (rtx first)
       unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
     }
 
-  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
+  for (insn = dbr_next_active_insn (first);
+       insn; insn = dbr_next_active_insn (insn))
     {
       rtx target;
 
Index: config/ia64/ia64.c
===================================================================
--- config/ia64/ia64.c	(revision 205336)
+++ config/ia64/ia64.c	(working copy)
@@ -7022,8 +7022,6 @@ emit_all_insn_group_barriers (FILE *dump ATTRIBUTE
 	  if (! last)
 	    continue;
 
-	  if (JUMP_TABLE_DATA_P (last))
-	    last = prev_active_insn (last);
 	  if (recog_memoized (last) != CODE_FOR_insn_group_barrier)
 	    emit_insn_after (gen_insn_group_barrier (GEN_INT (3)), last);
 
@@ -9407,8 +9405,6 @@ final_emit_insn_group_barriers (FILE *dump ATTRIBU
 	  if (! last)
 	    continue;
 
-	  if (JUMP_TABLE_DATA_P (last))
-	    last = prev_active_insn (last);
 	  if (recog_memoized (last) != CODE_FOR_insn_group_barrier)
 	    emit_insn_after (gen_insn_group_barrier (GEN_INT (3)), last);
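
For anyone following the reorg.c part of the patch, the contract of the new
wrappers is simple to state: after this change next_active_insn no longer
stops at JUMP_TABLE_DATA (it is no longer an active insn), but reorg.c still
wants to see jump table data, so dbr_next_active_insn deliberately stops
there.  The standalone toy program below illustrates that difference outside
of GCC; struct toy_insn and all the toy_* names are invented stand-ins for
this sketch, not GCC internals.

#include <stdio.h>
#include <stddef.h>

/* Toy stand-in for a chain of RTL insns; not GCC code.  */
struct toy_insn
{
  const char *name;
  int active;           /* models active_insn_p ().  */
  int jump_table_data;  /* models JUMP_TABLE_DATA_P ().  */
  struct toy_insn *next;
};

/* Models the post-patch next_active_insn: jump table data is no
   longer active, so it is skipped like a note or a barrier.  */
static struct toy_insn *
toy_next_active_insn (struct toy_insn *insn)
{
  for (insn = insn->next; insn; insn = insn->next)
    if (insn->active)
      return insn;
  return NULL;
}

/* Models dbr_next_active_insn: additionally stop at jump table data,
   preserving the behavior the reorg.c callers still depend on.  */
static struct toy_insn *
toy_dbr_next_active_insn (struct toy_insn *insn)
{
  for (insn = insn->next; insn; insn = insn->next)
    if (insn->active || insn->jump_table_data)
      return insn;
  return NULL;
}

int
main (void)
{
  /* Chain: label -> jump table data -> real insn.  */
  struct toy_insn real = { "real insn", 1, 0, NULL };
  struct toy_insn table = { "jump table data", 0, 1, &real };
  struct toy_insn label = { "label", 0, 0, &table };

  printf ("next_active_insn:     %s\n", toy_next_active_insn (&label)->name);
  printf ("dbr_next_active_insn: %s\n",
	  toy_dbr_next_active_insn (&label)->name);
  return 0;
}

Compiled and run, this prints "real insn" for the plain walk and "jump table
data" for the dbr_ variant, which is exactly the distinction the rewritten
reorg.c call sites rely on.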