static void fill_eager_delay_slots PROTO((rtx));
static void relax_delay_slots PROTO((rtx));
static void make_return_insns PROTO((rtx));
+static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
+static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
\f
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
which resources are references by the insn. If INCLUDE_CALLED_ROUTINE
if (global_regs[i])
SET_HARD_REG_BIT (res->regs, i);
- /* Skip any labels between the CALL_INSN and possible USE insns. */
- while (GET_CODE (insn) == CODE_LABEL)
- insn = PREV_INSN (insn);
+ {
+ rtx link;
- for ( ; (insn && GET_CODE (insn) == INSN
- && GET_CODE (PATTERN (insn)) == USE);
- insn = PREV_INSN (insn))
- {
- for (i = 1; i < seq_size; i++)
+ for (link = CALL_INSN_FUNCTION_USAGE (x);
+ link;
+ link = XEXP (link, 1))
+ if (GET_CODE (XEXP (link, 0)) == USE)
{
- rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
- if (GET_CODE (slot_pat) == SET
- && rtx_equal_p (SET_DEST (slot_pat),
- XEXP (PATTERN (insn), 0)))
- break;
+ for (i = 1; i < seq_size; i++)
+ {
+ rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
+ if (GET_CODE (slot_pat) == SET
+ && rtx_equal_p (SET_DEST (slot_pat),
+ SET_DEST (XEXP (link, 0))))
+ break;
+ }
+ if (i >= seq_size)
+ mark_referenced_resources (SET_DEST (XEXP (link, 0)),
+ res, 0);
}
- if (i >= seq_size)
- mark_referenced_resources (XEXP (PATTERN (insn), 0), res, 0);
- }
+ }
}
/* ... fall through to other INSN processing ... */
{
rtx next = NEXT_INSN (x);
rtx prev = PREV_INSN (x);
+ rtx link;
res->cc = res->memory = 1;
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (NEXT_INSN (prev) != x)
next = NEXT_INSN (NEXT_INSN (prev));
- /* Skip any possible labels between the CALL_INSN and CLOBBERs. */
- while (GET_CODE (next) == CODE_LABEL)
- next = NEXT_INSN (next);
-
- for (; (next && GET_CODE (next) == INSN
- && GET_CODE (PATTERN (next)) == CLOBBER);
- next = NEXT_INSN (next))
- mark_set_resources (XEXP (PATTERN (next), 0), res, 1, 0);
+ for (link = CALL_INSN_FUNCTION_USAGE (x);
+ link; link = XEXP (link, 1))
+ if (GET_CODE (XEXP (link, 0)) == CLOBBER)
+ mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);
/* Check for a NOTE_INSN_SETJMP. If it exists, then we must
assume that this call can clobber any register. */
If LABEL is zero, then there is no way to determine the branch
direction. */
if (GET_CODE (insn) == JUMP_INSN
- && condjump_p (insn)
+ && (condjump_p (insn) || condjump_in_parallel_p (insn))
&& INSN_UID (insn) <= max_uid
&& label != 0
&& INSN_UID (label) <= max_uid)
Non conditional branches are predicted as very likely taken. */
if (GET_CODE (insn) == JUMP_INSN
- && condjump_p (insn))
+ && (condjump_p (insn) || condjump_in_parallel_p (insn)))
{
int prediction;
rtx pat = PATTERN (insn);
rtx src;
+ if (condjump_in_parallel_p (insn))
+ pat = XVECEXP (pat, 0, 0);
+
if (GET_CODE (pat) == RETURN)
return target == 0 ? const_true_rtx : 0;
return (i == XVECLEN (pat, 0));
}
+/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
+ any insns we wish to place in the delay slot of JUMP. */
+
+/* JUMP is the branch being retargeted, NEWLABEL its proposed new target,
+ and DELAY_LIST an INSN_LIST chain of candidate delay-slot insns.
+ Eligibility can depend on the branch direction/flags, which change when
+ the target changes, so each candidate must be re-validated. */
+static int
+redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
+ rtx jump, newlabel, delay_list;
+{
+ int flags, i;
+ rtx li;
+
+ /* Make sure all the insns in DELAY_LIST would still be
+ valid after threading the jump. If they are still
+ valid, then return non-zero. */
+
+ /* Flags (e.g. predicted direction) computed against the NEW target. */
+ flags = get_jump_flags (jump, newlabel);
+ /* I tracks the slot number; eligibility may differ per slot. */
+ for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
+ if (! (
+#ifdef ANNUL_IFFALSE_SLOTS
+ /* Annulled branch, insn taken from the branch target: must be
+ eligible for an annul-if-false slot. */
+ (INSN_ANNULLED_BRANCH_P (jump)
+ && INSN_FROM_TARGET_P (XEXP (li, 0)))
+ ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
+#endif
+#ifdef ANNUL_IFTRUE_SLOTS
+ /* Annulled branch, insn taken from the fall-through path: must be
+ eligible for an annul-if-true slot. */
+ (INSN_ANNULLED_BRANCH_P (jump)
+ && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
+ ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
+#endif
+ /* Otherwise, ordinary (non-annulled) delay-slot eligibility. */
+ eligible_for_delay (jump, i, XEXP (li, 0), flags)))
+ break;
+
+ /* LI is null only if the loop ran off the end, i.e. every insn
+ in DELAY_LIST remained eligible. */
+ return (li == NULL);
+}
+
\f
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
the condition tested by INSN is CONDITION and the resources shown in
&& ! insn_sets_resource_p (trial, &set, 1)
&& ! insn_sets_resource_p (trial, &needed, 1)
&& (trial = try_split (pat, trial, 0)) != 0
+ /* Update next_trial, in case try_split succeeded. */
+ && (next_trial = next_nonnote_insn (trial))
&& rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
/* Have to test this condition if annul condition is different
from (and less restrictive than) non-annulling one. */
&& eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
{
- next_trial = next_nonnote_insn (trial);
if (! annul_p)
{
&& ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
{
rtx pat = PATTERN (trial);
+ rtx filled_insn = XVECEXP (pat, 0, 0);
+
+ /* Account for resources set/needed by the filled insn. */
+ mark_set_resources (filled_insn, &set, 0, 1);
+ mark_referenced_resources (filled_insn, &needed, 1);
for (i = 1; i < XVECLEN (pat, 0); i++)
{
marked live, plus live pseudo regs that have been renumbered to
hard regs. */
- COPY_HARD_REG_SET (current_live_regs, *regs_live);
+#ifdef HARD_REG_SET
+ current_live_regs = *regs_live;
+#else
+ COPY_HARD_REG_SET (current_live_regs, regs_live);
+#endif
for (offset = 0, i = 0; offset < regset_size; offset++)
{
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
if (slots_filled != slots_to_fill
&& delay_list == 0
- && GET_CODE (insn) == JUMP_INSN && condjump_p (insn))
+ && GET_CODE (insn) == JUMP_INSN
+ && (condjump_p (insn) || condjump_in_parallel_p (insn)))
{
delay_list = optimize_skip (insn);
if (delay_list)
if (slots_filled != slots_to_fill
&& (GET_CODE (insn) != JUMP_INSN
- || (condjump_p (insn) && ! simplejump_p (insn)
+ || ((condjump_p (insn) || condjump_in_parallel_p (insn))
+ && ! simplejump_p (insn)
&& JUMP_LABEL (insn) != 0)))
{
rtx target = 0;
if (new_thread && GET_CODE (new_thread) == JUMP_INSN
&& (simplejump_p (new_thread)
- || GET_CODE (PATTERN (new_thread)) == RETURN))
+ || GET_CODE (PATTERN (new_thread)) == RETURN)
+ && redirect_with_delay_list_safe_p (insn,
+ JUMP_LABEL (new_thread),
+ delay_list))
new_thread = follow_jumps (JUMP_LABEL (new_thread));
if (new_thread == 0)
if (insn == 0
|| INSN_DELETED_P (insn)
|| GET_CODE (insn) != JUMP_INSN
- || ! condjump_p (insn))
+ || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
continue;
slots_to_fill = num_delay_slots (insn);
the next insn, or jumps to a label that is not the last of a
group of consecutive labels. */
if (GET_CODE (insn) == JUMP_INSN
- && condjump_p (insn)
+ && (condjump_p (insn) || condjump_in_parallel_p (insn))
&& (target_label = JUMP_LABEL (insn)) != 0)
{
target_label = follow_jumps (target_label);
if (target_label == 0)
target_label = find_end_label ();
- if (next_active_insn (target_label) == next)
+ if (next_active_insn (target_label) == next
+ && ! condjump_in_parallel_p (insn))
{
delete_jump (insn);
continue;
if (GET_CODE (insn) == JUMP_INSN
&& (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
&& (other = prev_active_insn (insn)) != 0
- && condjump_p (other)
+ && (condjump_p (other) || condjump_in_parallel_p (other))
&& no_labels_between_p (other, insn)
&& 0 < mostly_true_jump (other,
get_branch_condition (other,
/* Now look only at the cases where we have a filled JUMP_INSN. */
if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
- || ! condjump_p (XVECEXP (PATTERN (insn), 0, 0)))
+ || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
+ || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
continue;
target_label = JUMP_LABEL (delay_insn);
if (! INSN_ANNULLED_BRANCH_P (delay_insn)
&& prev_active_insn (target_label) == insn
+ && ! condjump_in_parallel_p (delay_insn)
#ifdef HAVE_cc0
/* If the last insn in the delay slot sets CC0 for some insn,
various code assumes that it is in a delay slot. We could
pat = PATTERN (insn);
jump_insn = XVECEXP (pat, 0, 0);
- /* If we can't make the jump into a RETURN, redirect it to the best
+ /* If we can't make the jump into a RETURN, try to redirect it to the best
RETURN and go on to the next insn. */
if (! reorg_redirect_jump (jump_insn, NULL_RTX))
{
- reorg_redirect_jump (jump_insn, real_return_label);
+ /* Make sure redirecting the jump will not invalidate the delay
+ slot insns. */
+ if (redirect_with_delay_slots_safe_p (jump_insn,
+ real_return_label,
+ insn))
+ reorg_redirect_jump (jump_insn, real_return_label);
continue;
}
obstack_ptr_grow (&unfilled_slots_obstack, insn);
/* Ensure all jumps go to the last of a set of consecutive labels. */
- if (GET_CODE (insn) == JUMP_INSN && condjump_p (insn)
+ if (GET_CODE (insn) == JUMP_INSN
+ && (condjump_p (insn) || condjump_in_parallel_p (insn))
&& JUMP_LABEL (insn) != 0
&& ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
!= JUMP_LABEL (insn)))