1 /* Perform instruction reorganizations for delay slot filling.
2 Copyright (C) 1992 Free Software Foundation, Inc.
3 Contributed by Richard Kenner (kenner@nyu.edu).
4 Hacked by Michael Tiemann (tiemann@cygnus.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
23 #include "insn-attr.h"
27 /* Instruction reorganization pass.
29 This pass runs after register allocation and final jump
30 optimization. It should be the last pass to run before peephole.
31 It serves primarily to fill delay slots of insns, typically branch
32 and call insns. Other insns typically involve more complicated
33 interactions of data dependencies and resource constraints, and
34 are better handled by scheduling before register allocation (by the
35 function `schedule_insns').
37 The Branch Penalty is the number of extra cycles that are needed to
38 execute a branch insn. On an ideal machine, branches take a single
39 cycle, and the Branch Penalty is 0. Several RISC machines approach
40 branch delays differently:
42 The MIPS and AMD 29000 have a single branch delay slot. Most insns
43 (except other branches) can be used to fill this slot. When the
44 slot is filled, two insns execute in two cycles, reducing the
45 branch penalty to zero.
47 The Motorola 88000 conditionally exposes its branch delay slot,
48 so code is shorter when it is turned off, but will run faster
49 when useful insns are scheduled there.
51 The IBM ROMP has two forms of branch and call insns, both with and
52 without a delay slot. Much like the 88k, insns not using the delay
53 slot can be shorter (2 bytes vs. 4 bytes), but will run slower.
55 The SPARC always has a branch delay slot, but its effects can be
56 annulled when the branch is not taken. This means that failing to
57 find other sources of insns, we can hoist an insn from the branch
58 target that would only be safe to execute knowing that the branch
59 is taken.
61 Three techniques for filling delay slots have been implemented so far:
63 (1) `fill_simple_delay_slots' is the simplest, most efficient way
64 to fill delay slots. This pass first looks for insns which come
65 from before the branch and which are safe to execute after the
66 branch. Then it searches after the insn requiring delay slots or,
67 in the case of a branch, for insns that are after the point at
68 which the branch merges into the fallthrough code, if such a point
69 exists. When such insns are found, the branch penalty decreases
70 and no code expansion takes place.
72 (2) `fill_eager_delay_slots' is more complicated: it is used for
73 scheduling conditional jumps, or for scheduling jumps which cannot
74 be filled using (1). A machine need not have annulled jumps to use
75 this strategy, but it helps (by keeping more options open).
76 `fill_eager_delay_slots' tries to guess the direction the branch
77 will go; if it guesses right 100% of the time, it can reduce the
78 branch penalty as much as `fill_simple_delay_slots' does. If it
79 guesses wrong 100% of the time, it might as well schedule nops (or
80 on the m88k, unexpose the branch slot). When
81 `fill_eager_delay_slots' takes insns from the fall-through path of
82 the jump, usually there is no code expansion; when it takes insns
83 from the branch target, there is code expansion if it is not the
84 only way to reach that target.
86 (3) `relax_delay_slots' uses a set of rules to simplify code that
87 has been reorganized by (1) and (2). It finds cases where
88 a conditional test can be eliminated, jumps can be threaded, extra
89 insns can be eliminated, etc. It is the job of (1) and (2) to do a
90 good job of scheduling locally; `relax_delay_slots' takes care of
91 making the various individual schedules work well together. It is
92 especially tuned to handle the control flow interactions of branch
93 insns. It does nothing for insns with delay slots that do not
94 branch.
96 On machines that use CC0, we are very conservative. We will not make
97 a copy of an insn involving CC0 since we want to maintain a 1-1
98 correspondence between the insn that sets and uses CC0. The insns are
99 allowed to be separated by placing an insn that sets CC0 (but not an insn
100 that uses CC0; we could do this, but it doesn't seem worthwhile) in a
101 delay slot. In that case, we point each insn at the other with REG_CC_USER
102 and REG_CC_SETTER notes. Note that these restrictions affect very few
103 machines because most RISC machines with delay slots will not use CC0
104 (the RT is the only known exception at this point).
108 The Acorn Risc Machine can conditionally execute most insns, so
109 it is profitable to move single insns into a position to execute
110 based on the condition code of the previous insn.
112 The HP-PA can conditionally nullify insns, providing a similar
113 effect to the ARM, differing mostly in which insn is "in charge". */
118 #include "insn-config.h"
119 #include "conditions.h"
120 #include "hard-reg-set.h"
121 #include "basic-block.h"
123 #include "insn-flags.h"
129 #define obstack_chunk_alloc xmalloc
130 #define obstack_chunk_free free
132 extern int xmalloc ();
135 #ifndef ANNUL_IFTRUE_SLOTS
136 #define eligible_for_annul_true(INSN, SLOTS, TRIAL) 0
137 #endif
138 #ifndef ANNUL_IFFALSE_SLOTS
139 #define eligible_for_annul_false(INSN, SLOTS, TRIAL) 0
140 #endif
142 /* Insns which have delay slots that have not yet been filled. */

144 static struct obstack unfilled_slots_obstack;
145 static rtx *unfilled_firstobj;
147 /* Define macros to refer to the first and last slot containing unfilled
148 insns. These are used because the list may move and its address
149 should be recomputed at each use. */
151 #define unfilled_slots_base \
152 ((rtx *) obstack_base (&unfilled_slots_obstack))
154 #define unfilled_slots_next \
155 ((rtx *) obstack_next_free (&unfilled_slots_obstack))
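/* Illustrative sketch (added for exposition, not original code): insns that
   want delay slots are pushed onto the obstack with obstack_ptr_grow, and the
   filling passes later walk the region between the two macros above, roughly:

	rtx *slot;

	obstack_ptr_grow (&unfilled_slots_obstack, insn);
	...
	for (slot = unfilled_slots_base; slot < unfilled_slots_next; slot++)
	  if (*slot != 0)
	    ... try to fill the delay slots of *slot ...

   The loop shape is an assumption based on the macro descriptions; the real
   uses are in the filling routines below.  */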
157 /* This structure is used to indicate which hardware resources are set or
158 needed by insns so far. */

160 struct resources
161 {
162   char memory;			/* Insn sets or needs a memory location. */
163   char volatil;			/* Insn sets or needs a volatile memory loc. */
164   char cc;			/* Insn sets or needs the condition codes. */
165   HARD_REG_SET regs;		/* Which registers are set or needed. */
166 };
168 /* Macro to clear all resources. */
169 #define CLEAR_RESOURCE(RES) \
170 do { (RES)->memory = (RES)->volatil = (RES)->cc = 0; \
171 CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
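/* Illustrative sketch (added for exposition): the idiom the filling routines
   below use to decide whether a candidate insn TRIAL conflicts with the insn
   INSN that needs the delay slot.  The calls all exist later in this file;
   the surrounding control flow is only a paraphrase:

	struct resources set, needed;

	CLEAR_RESOURCE (&set);
	CLEAR_RESOURCE (&needed);
	mark_set_resources (insn, &set, 1);
	mark_referenced_resources (insn, &needed, 1);

	if (! insn_references_resource_p (trial, &set, 0)
	    && ! insn_sets_resource_p (trial, &set, 0)
	    && ! insn_sets_resource_p (trial, &needed, 0))
	  ... TRIAL remains a candidate for the delay slot ...  */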
173 /* Indicates what resources are required at function end. */
174 static struct resources end_of_function_needs;

176 /* Points to the label before the end of the function. */
177 static rtx end_of_function_label;
179 /* This structure is used to record liveness information at the targets or
180 fallthrough insns of branches. We will most likely need the information
181 at targets again, so save them in a hash table rather than recomputing them
182 each time. */

184 struct target_info
185 {
186   int uid;			/* INSN_UID of target. */
187   struct target_info *next;	/* Next info for same hash bucket. */
188   HARD_REG_SET live_regs;	/* Registers live at target. */
189   int block;			/* Basic block number containing target. */
190   int bb_tick;			/* Generation count of basic block info. */
191 };

193 #define TARGET_HASH_PRIME 257

195 /* Define the hash table itself. */
196 static struct target_info **target_hash_table;
198 /* For each basic block, we maintain a generation number of its basic
199 block info, which is updated each time we move an insn from the
200 target of a jump. This is the generation number indexed by block
201 number. */

203 static int *bb_ticks;
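/* Illustrative sketch (added for exposition): how the hash table and the
   generation counts cooperate in mark_target_live_regs below.  A cached entry
   is trusted only while its bb_tick still matches the block's current tick;
   moving an insn out of a block bumps bb_ticks[block], invalidating the
   cached liveness:

	struct target_info *tinfo;

	for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	     tinfo; tinfo = tinfo->next)
	  if (tinfo->uid == INSN_UID (target))
	    break;

	if (tinfo && tinfo->block != -1
	    && tinfo->bb_tick == bb_ticks[tinfo->block])
	  ... the saved live_regs may be reused ...  */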
205 /* Mapping between INSN_UID's and position in the code since INSN_UID's do
206 not always monotonically increase. */
207 static int *uid_to_ruid;
209 /* Highest valid index in `uid_to_ruid'. */
210 static int max_uid;
212 /* Forward references: */
214 static int redundant_insn_p ();
215 static void update_block ();
217 /* Given X, some rtl, and RES, a pointer to a `struct resources', mark
218 which resources are referenced by the insn. If INCLUDE_CALLED_ROUTINE
219 is TRUE, resources used by the called routine will be included for
220 CALL_INSNs. */
222 static void
223 mark_referenced_resources (x, res, include_called_routine)
224      register rtx x;
225      register struct resources *res;
226      register int include_called_routine;
227 {
228   register enum rtx_code code = GET_CODE (x);
229   register int i, j;
230   register char *format_ptr;
232 /* Handle leaf items for which we set resource flags. Also, special-case
233 CALL, SET and CLOBBER operators. */
245 if (GET_CODE (SUBREG_REG (x
)) != REG
)
246 mark_referenced_resources (SUBREG_REG (x
), res
, 0);
249 int regno
= REGNO (SUBREG_REG (x
)) + SUBREG_WORD (x
);
250 int last_regno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (x
));
251 for (i
= regno
; i
< last_regno
; i
++)
252 SET_HARD_REG_BIT (res
->regs
, i
);
257 for (i
= 0; i
< HARD_REGNO_NREGS (REGNO (x
), GET_MODE (x
)); i
++)
258 SET_HARD_REG_BIT (res
->regs
, REGNO (x
) + i
);
262 /* If this memory shouldn't change, it really isn't referencing
264 if (! RTX_UNCHANGING_P (x
))
266 res
->volatil
= MEM_VOLATILE_P (x
);
268 /* Mark registers used to access memory. */
269 mark_referenced_resources (XEXP (x
, 0), res
, 0);
276 case UNSPEC_VOLATILE
:
278 /* Traditional asm's are always volatile. */
283 res
->volatil
= MEM_VOLATILE_P (x
);
285 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
286 We can not just fall through here since then we would be confused
287 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
288 traditional asms unlike their normal usage. */
290 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
291 mark_referenced_resources (ASM_OPERANDS_INPUT (x
, i
), res
, 0);
295 /* The first operand will be a (MEM (xxx)) but doesn't really reference
296 memory. The second operand may be referenced, though. */
297 mark_referenced_resources (XEXP (XEXP (x
, 0), 0), res
, 0);
298 mark_referenced_resources (XEXP (x
, 1), res
, 0);
302 /* Usually, the first operand of SET is set, not referenced. But
303 registers used to access memory are referenced. SET_DEST is
304 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
306 mark_referenced_resources (SET_SRC (x
), res
, 0);
309 if (GET_CODE (x
) == SIGN_EXTRACT
|| GET_CODE (x
) == ZERO_EXTRACT
)
310 mark_referenced_resources (x
, res
, 0);
311 else if (GET_CODE (x
) == SUBREG
)
313 if (GET_CODE (x
) == MEM
)
314 mark_referenced_resources (XEXP (x
, 0), res
, 0);
321 if (include_called_routine
)
323 /* A CALL references memory, the frame pointer if it exists, the
324 stack pointer, any global registers and any registers given in
325 USE insns immediately in front of the CALL.
327 However, we may have moved some of the parameter loading insns
328 into the delay slot of this CALL. If so, the USE's for them
329 don't count and should be skipped. */
330 rtx insn
= PREV_INSN (x
);
335 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
336 if (NEXT_INSN (insn
) != x
)
338 sequence
= PATTERN (NEXT_INSN (insn
));
339 seq_size
= XVECLEN (sequence
, 0);
340 if (GET_CODE (sequence
) != SEQUENCE
)
345 SET_HARD_REG_BIT (res
->regs
, STACK_POINTER_REGNUM
);
346 if (frame_pointer_needed
)
347 SET_HARD_REG_BIT (res
->regs
, FRAME_POINTER_REGNUM
);
349 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
351 SET_HARD_REG_BIT (res
->regs
, i
);
353 /* Skip any labels between the CALL_INSN and possible USE insns. */
354 while (GET_CODE (insn
) == CODE_LABEL
)
355 insn
= PREV_INSN (insn
);
357 for ( ; (insn
&& GET_CODE (insn
) == INSN
358 && GET_CODE (PATTERN (insn
)) == USE
);
359 insn
= PREV_INSN (insn
))
361 for (i
= 1; i
< seq_size
; i
++)
363 rtx slot_pat
= PATTERN (XVECEXP (sequence
, 0, i
));
364 if (GET_CODE (slot_pat
) == SET
365 && rtx_equal_p (SET_DEST (slot_pat
),
366 XEXP (PATTERN (insn
), 0)))
370 mark_referenced_resources (XEXP (PATTERN (insn
), 0), res
, 0);
374 /* ... fall through to other INSN processing ... */
378 /* No special processing, just speed up. */
379 mark_referenced_resources (PATTERN (x
), res
, include_called_routine
);
383 /* Process each sub-expression and flag what it needs. */
384 format_ptr
= GET_RTX_FORMAT (code
);
385 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
386 switch (*format_ptr
++)
389 mark_referenced_resources (XEXP (x
, i
), res
, include_called_routine
);
393 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
394 mark_referenced_resources (XVECEXP (x
, i
, j
), res
,
395 include_called_routine
);
400 /* Given an insn, INSN, and a pointer to a `struct resource', RES, indicate
401 which resources are modified by the insn. If INCLUDE_CALLED_ROUTINE
402 is TRUE, also mark resources potentially set by the called routine.
404 We never mark the insn as modifying the condition code unless it explicitly
405 SETs CC0 even though this is not totally correct. The reason for this is
406 that we require a SET of CC0 to immediately precede the reference to CC0.
407 So if some other insn sets CC0 as a side-effect, we know it cannot affect
408 our computation and thus may be placed in a delay slot. */
411 mark_set_resources (insn
, res
, include_called_routine
)
413 register struct resources
*res
;
414 int include_called_routine
;
418 switch (GET_CODE (insn
))
423 /* These don't set any resources. */
427 /* Called routine modifies the condition code, memory, any registers
428 that aren't saved across calls, global registers and anything
429 explicitly CLOBBERed immediately after the CALL_INSN. */
431 if (include_called_routine
)
433 rtx next
= NEXT_INSN (insn
);
435 res
->cc
= res
->memory
= 1;
436 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
437 if (call_used_regs
[i
] || global_regs
[i
])
438 SET_HARD_REG_BIT (res
->regs
, i
);
440 /* Skip any possible labels between the CALL_INSN and CLOBBERs. */
441 while (GET_CODE (next
) == CODE_LABEL
)
442 next
= NEXT_INSN (next
);
444 for (; (next
&& GET_CODE (next
) == INSN
445 && GET_CODE (PATTERN (next
)) == CLOBBER
);
446 next
= NEXT_INSN (next
))
447 mark_referenced_resources (XEXP (PATTERN (next
), 0), res
, 0);
450 /* ... and also what its RTL says it modifies, if anything. */
455 register rtx body
= PATTERN (insn
);
458 /* An insn consisting of just a CLOBBER (or USE) is
459 just for flow and doesn't actually do anything, so we don't check
462 If the source of a SET is a CALL, this is actually done by
463 the called routine. So only include it if we are to include the
464 effects of the called routine. */
466 if (GET_CODE (body
) == SET
467 && (include_called_routine
|| GET_CODE (SET_SRC (body
)) != CALL
))
468 mark_referenced_resources (SET_DEST (body
), res
, 0);
469 else if (GET_CODE (body
) == PARALLEL
)
471 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
472 if ((GET_CODE (XVECEXP (body
, 0, i
)) == SET
473 && (include_called_routine
474 || GET_CODE (SET_SRC (XVECEXP (body
, 0, i
))) != CALL
))
475 || GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
476 mark_referenced_resources (SET_DEST (XVECEXP (body
, 0, i
)),
479 else if (GET_CODE (body
) == SEQUENCE
)
480 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
481 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (body
, 0, 0))
482 && INSN_FROM_TARGET_P (XVECEXP (body
, 0, i
))))
483 mark_set_resources (XVECEXP (body
, 0, i
), res
,
484 include_called_routine
);
487 /* If any registers are incremented or decremented in an address,
488 they are set here. */
489 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
490 if (REG_NOTE_KIND (note
) == REG_INC
)
491 mark_referenced_resources (XEXP (note
, 0), res
, 0);
495 /* An insn that has a PRE_DEC on SP will not have a REG_INC note.
496 Until we fix this correctly, consider all insns as modifying
497 SP on such machines. So far, we don't have delay slot scheduling
498 on any machines with PUSH_ROUNDING. */
499 SET_HARD_REG_BIT (res
->regs
, STACK_POINTER_REGNUM
);
509 /* Return TRUE if this insn should stop the search for insns to fill delay
510 slots. LABELS_P indicates that labels should terminate the search.
511 In all cases, jumps terminate the search. */
514 stop_search_p (insn
, labels_p
)
521 switch (GET_CODE (insn
))
535 /* OK unless it contains a delay slot or is an `asm' insn of some type.
536 We don't know anything about these. */
537 return (GET_CODE (PATTERN (insn
)) == SEQUENCE
538 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
539 || asm_noperands (PATTERN (insn
)) >= 0);
546 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
547 resource set contains a volatile memory reference. Otherwise, return FALSE. */
550 resource_conflicts_p (res1
, res2
)
551 struct resources
*res1
, *res2
;
553 if ((res1
->cc
&& res2
->cc
) || (res1
->memory
&& res2
->memory
)
554 || res1
->volatil
|| res2
->volatil
)
558 return (res1
->regs
& res2
->regs
) != HARD_CONST (0);
563 for (i
= 0; i
< HARD_REG_SET_LONGS
; i
++)
564 if ((res1
->regs
[i
] & res2
->regs
[i
]) != 0)
571 /* Return TRUE if any resource marked in RES, a `struct resources', is
572 referenced by INSN. If INCLUDE_CALLED_ROUTINE is set, return if the called
573 routine is using those resources.
575 We compute this by computing all the resources referenced by INSN and
576 seeing if this conflicts with RES. It might be faster to directly check
577 ourselves, and this is the way it used to work, but it means duplicating
578 a large block of complex code. */
581 insn_references_resource_p (insn
, res
, include_called_routine
)
583 register struct resources
*res
;
584 int include_called_routine
;
586 struct resources insn_res
;
588 CLEAR_RESOURCE (&insn_res
);
589 mark_referenced_resources (insn
, &insn_res
, include_called_routine
);
590 return resource_conflicts_p (&insn_res
, res
);
593 /* Return TRUE if INSN modifies resources that are marked in RES.
594 INCLUDE_CALLED_ROUTINE is set if the actions of that routine should be
595 included. CC0 is only modified if it is explicitly set; see comments
596 in front of mark_set_resources for details. */
599 insn_sets_resource_p (insn
, res
, include_called_routine
)
601 register struct resources
*res
;
602 int include_called_routine
;
604 struct resources insn_sets
;
606 CLEAR_RESOURCE (&insn_sets
);
607 mark_set_resources (insn
, &insn_sets
, include_called_routine
);
608 return resource_conflicts_p (&insn_sets
, res
);
611 /* Find a label at the end of the function or before a RETURN. If there is
619 /* If we found one previously, return it. */
620 if (end_of_function_label
)
621 return end_of_function_label
;
623 /* Otherwise, see if there is a label at the end of the function. If there
624 is, it must be that RETURN insns aren't needed, so that is our return
625 label and we don't have to do anything else. */
627 insn
= get_last_insn ();
628 while (GET_CODE (insn
) == NOTE
629 || (GET_CODE (insn
) == INSN
630 && (GET_CODE (PATTERN (insn
)) == USE
631 || GET_CODE (PATTERN (insn
)) == CLOBBER
)))
632 insn
= PREV_INSN (insn
);
634 if (GET_CODE (insn
) == CODE_LABEL
)
635 end_of_function_label
= insn
;
638 /* Otherwise, make a new label and emit a RETURN and BARRIER,
640 end_of_function_label
= gen_label_rtx ();
641 LABEL_NUSES (end_of_function_label
) = 0;
642 emit_label (end_of_function_label
);
646 emit_jump_insn (gen_return ());
652 /* Show one additional use for this label so it won't go away until
653 we are done. */
654 ++LABEL_NUSES (end_of_function_label);
656 return end_of_function_label
;
659 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
660 the pattern of INSN with the SEQUENCE.
662 Chain the insns so that NEXT_INSN of each insn in the sequence points to
663 the next and NEXT_INSN of the last insn in the sequence points to
664 the first insn after the sequence. Similarly for PREV_INSN. This makes
665 it easier to scan all insns.
667 Returns the SEQUENCE that replaces INSN. */
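/* Illustrative sketch (added for exposition): the calling pattern used by the
   filling routines once a list of delay insns has been collected.  The calls
   match the signatures in this file; the surrounding context is paraphrased:

	rtx delay_list = 0;

	delay_list = add_to_delay_list (trial, delay_list);
	...
	insn = emit_delay_sequence (insn, delay_list, slots_filled, 0);

   where `slots_filled' counts the insns now in the list and the final
   argument corresponds to the AVAIL parameter below.  */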
670 emit_delay_sequence (insn
, list
, length
, avail
)
680 /* Allocate the rtvec to hold the insns and the SEQUENCE. */
681 rtvec seqv
= rtvec_alloc (length
+ 1);
682 rtx seq
= gen_rtx (SEQUENCE
, VOIDmode
, seqv
);
683 rtx seq_insn
= make_insn_raw (seq
);
684 rtx first
= get_insns ();
685 rtx last
= get_last_insn ();
687 /* Make a copy of the insn having delay slots. */
688 rtx delay_insn
= copy_rtx (insn
);
690 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
691 confuse further processing. Update LAST in case it was the last insn.
692 We will put the BARRIER back in later. */
693 if (NEXT_INSN (insn
) && GET_CODE (NEXT_INSN (insn
)) == BARRIER
)
695 delete_insn (NEXT_INSN (insn
));
696 last
= get_last_insn ();
700 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
701 NEXT_INSN (seq_insn
) = NEXT_INSN (insn
);
702 PREV_INSN (seq_insn
) = PREV_INSN (insn
);
705 set_new_first_and_last_insn (first
, seq_insn
);
707 PREV_INSN (NEXT_INSN (seq_insn
)) = seq_insn
;
710 set_new_first_and_last_insn (seq_insn
, last
);
712 NEXT_INSN (PREV_INSN (seq_insn
)) = seq_insn
;
714 /* Build our SEQUENCE and rebuild the insn chain. */
715 XVECEXP (seq
, 0, 0) = delay_insn
;
716 INSN_DELETED_P (delay_insn
) = 0;
717 PREV_INSN (delay_insn
) = PREV_INSN (seq_insn
);
719 for (li
= list
; li
; li
= XEXP (li
, 1), i
++)
721 rtx tem
= XEXP (li
, 0);
724 /* Show that this copy of the insn isn't deleted. */
725 INSN_DELETED_P (tem
) = 0;
727 XVECEXP (seq
, 0, i
) = tem
;
728 PREV_INSN (tem
) = XVECEXP (seq
, 0, i
- 1);
729 NEXT_INSN (XVECEXP (seq
, 0, i
- 1)) = tem
;
731 /* Remove any REG_DEAD notes because we can't rely on them now
732 that the insn has been moved. */
733 for (note
= REG_NOTES (tem
); note
; note
= XEXP (note
, 1))
734 if (REG_NOTE_KIND (note
) == REG_DEAD
)
735 XEXP (note
, 0) = const0_rtx
;
738 NEXT_INSN (XVECEXP (seq
, 0, length
)) = NEXT_INSN (seq_insn
);
740 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
741 last insn in that SEQUENCE to point to us. Similarly for the first
742 insn in the following insn if it is a SEQUENCE. */
744 if (PREV_INSN (seq_insn
) && GET_CODE (PREV_INSN (seq_insn
)) == INSN
745 && GET_CODE (PATTERN (PREV_INSN (seq_insn
))) == SEQUENCE
)
746 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn
)), 0,
747 XVECLEN (PATTERN (PREV_INSN (seq_insn
)), 0) - 1))
750 if (NEXT_INSN (seq_insn
) && GET_CODE (NEXT_INSN (seq_insn
)) == INSN
751 && GET_CODE (PATTERN (NEXT_INSN (seq_insn
))) == SEQUENCE
)
752 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn
)), 0, 0)) = seq_insn
;
754 /* If there used to be a BARRIER, put it back. */
756 emit_barrier_after (seq_insn
);
764 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
765 be in the order in which the insns are to be executed. */
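/* Illustrative note (added for exposition): the delay list is an ordinary
   INSN_LIST chain, so code elsewhere in this file walks it with the usual
   idiom

	rtx link;

	for (link = delay_list; link; link = XEXP (link, 1))
	  ... XEXP (link, 0) is the next insn to be executed ...

   which is also the shape emit_delay_sequence expects.  */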
768 add_to_delay_list (insn
, delay_list
)
772 /* If we have an empty list, just make a new list element. */
774 return gen_rtx (INSN_LIST
, VOIDmode
, insn
, 0);
776 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
777 list. */
778 XEXP (delay_list
, 1) = add_to_delay_list (insn
, XEXP (delay_list
, 1));
783 /* Delete INSN from the delay slot of the insn that it is in. This may
784 produce an insn without anything in its delay slots. */
787 delete_from_delay_slot (insn
)
790 rtx trial
, seq_insn
, seq
, prev
;
794 /* We first must find the insn containing the SEQUENCE with INSN in its
795 delay slot. Do this by finding an insn, TRIAL, where
796 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
799 PREV_INSN (NEXT_INSN (trial
)) == trial
;
800 trial
= NEXT_INSN (trial
))
803 seq_insn
= PREV_INSN (NEXT_INSN (trial
));
804 seq
= PATTERN (seq_insn
);
806 /* Create a delay list consisting of all the insns other than the one
807 we are deleting (unless we were the only one). */
808 if (XVECLEN (seq
, 0) > 2)
809 for (i
= 1; i
< XVECLEN (seq
, 0); i
++)
810 if (XVECEXP (seq
, 0, i
) != insn
)
811 delay_list
= add_to_delay_list (XVECEXP (seq
, 0, i
), delay_list
);
813 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
814 list, and rebuild the delay list if non-empty. */
815 prev
= PREV_INSN (seq_insn
);
816 trial
= XVECEXP (seq
, 0, 0);
817 delete_insn (seq_insn
);
818 add_insn_after (trial
, prev
);
820 if (GET_CODE (trial
) == JUMP_INSN
821 && (simplejump_p (trial
) || GET_CODE (PATTERN (trial
)) == RETURN
))
822 emit_barrier_after (trial
);
824 /* If there are any delay insns, re-emit them. Otherwise clear the
825 annul flag. */
827 trial
= emit_delay_sequence (trial
, delay_list
, XVECLEN (seq
, 0) - 2, 0);
829 INSN_ANNULLED_BRANCH_P (trial
) = 0;
831 INSN_FROM_TARGET_P (insn
) = 0;
833 /* Show we need to fill this insn again. */
834 obstack_ptr_grow (&unfilled_slots_obstack
, trial
);
837 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
838 the insn that sets CC0 for it and delete it too. */
841 delete_scheduled_jump (insn
)
844 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
845 delete the insn that sets the condition code, but it is hard to find it.
846 Since this case is rare anyway, don't bother trying; there would likely
847 be other insns that became dead anyway, which we wouldn't know to
848 delete. */
851 if (reg_mentioned_p (cc0_rtx
, insn
))
853 rtx note
= find_reg_note (insn
, REG_CC_SETTER
, 0);
855 /* If a reg-note was found, it points to an insn to set CC0. This
856 insn is in the delay list of some other insn. So delete it from
857 the delay list it was in. */
860 if (! FIND_REG_INC_NOTE (XEXP (note
, 0), 0)
861 && sets_cc0_p (PATTERN (XEXP (note
, 0))) == 1)
862 delete_from_delay_slot (XEXP (note
, 0));
866 /* The insn setting CC0 is our previous insn, but it may be in
867 a delay slot. It will be the last insn in the delay slot, if
869 rtx trial
= previous_insn (insn
);
870 if (GET_CODE (trial
) == NOTE
)
871 trial
= prev_nonnote_insn (trial
);
872 if (sets_cc0_p (PATTERN (trial
)) != 1
873 || FIND_REG_INC_NOTE (trial
, 0))
875 if (PREV_INSN (NEXT_INSN (trial
)) == trial
)
878 delete_from_delay_slot (trial
);
886 /* Counters for delay-slot filling. */
888 #define NUM_REORG_FUNCTIONS 2
889 #define MAX_DELAY_HISTOGRAM 3
890 #define MAX_REORG_PASSES 2
892 static int num_insns_needing_delays
[NUM_REORG_FUNCTIONS
][MAX_REORG_PASSES
];
894 static int num_filled_delays
[NUM_REORG_FUNCTIONS
][MAX_DELAY_HISTOGRAM
+1][MAX_REORG_PASSES
];
896 static int reorg_pass_number
;
899 note_delay_statistics (slots_filled
, index
)
900 int slots_filled
, index
;
902 num_insns_needing_delays
[index
][reorg_pass_number
]++;
903 if (slots_filled
> MAX_DELAY_HISTOGRAM
)
904 slots_filled
= MAX_DELAY_HISTOGRAM
;
905 num_filled_delays
[index
][slots_filled
][reorg_pass_number
]++;
908 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
910 /* Optimize the following cases:
912 1. When a conditional branch skips over only one instruction,
913 use an annulling branch and put that insn in the delay slot.
914 Use either a branch that annuls when the condition is true or
915 invert the test with a branch that annuls when the condition is
916 false. This saves insns, since otherwise we must copy an insn
919 (orig) (skip) (otherwise)
920 Bcc.n L1 Bcc',a L1 Bcc,a L1'
927 2. When a conditional branch skips over only one instruction,
928 and after that, it unconditionally branches somewhere else,
929 perform the similar optimization. This saves executing the
930 second branch in the case where the inverted condition is true.
939 This should be expanded to skip over N insns, where N is the number
940 of delay slots required. */
946 register rtx trial
= next_nonnote_insn (insn
);
947 rtx next_trial
= next_active_insn (trial
);
952 || GET_CODE (trial
) != INSN
953 || GET_CODE (PATTERN (trial
)) == SEQUENCE
954 || recog_memoized (trial
) < 0
955 || (! eligible_for_annul_false (insn
, 0, trial
)
956 && ! eligible_for_annul_true (insn
, 0, trial
)))
959 /* There are two cases where we are just executing one insn (we assume
960 here that a branch requires only one insn; this should be generalized
961 at some point): Where the branch goes around a single insn or where
962 we have one insn followed by a branch to the same label we branch to.
963 In both of these cases, inverting the jump and annulling the delay
964 slot give the same effect in fewer insns. */
965 if ((next_trial
== next_active_insn (JUMP_LABEL (insn
)))
967 && GET_CODE (next_trial
) == JUMP_INSN
968 && JUMP_LABEL (insn
) == JUMP_LABEL (next_trial
)
969 && (simplejump_p (next_trial
)
970 || GET_CODE (PATTERN (next_trial
)) == RETURN
)))
972 if (eligible_for_annul_false (insn
, 0, trial
))
974 if (invert_jump (insn
, JUMP_LABEL (insn
)))
975 INSN_FROM_TARGET_P (trial
) = 1;
976 else if (! eligible_for_annul_true (insn
, 0, trial
))
980 delay_list
= add_to_delay_list (trial
, 0);
981 next_trial
= next_active_insn (trial
);
982 update_block (trial
, trial
);
985 /* Also, if we are targeting an unconditional
986 branch, thread our jump to the target of that branch. Don't
987 change this into a RETURN here, because it may not accept what
988 we have in the delay slot. We'll fix this up later. */
989 if (next_trial
&& GET_CODE (next_trial
) == JUMP_INSN
990 && (simplejump_p (next_trial
)
991 || GET_CODE (PATTERN (next_trial
)) == RETURN
))
993 target_label
= JUMP_LABEL (next_trial
);
994 if (target_label
== 0)
995 target_label
= find_end_label ();
996 redirect_jump (insn
, target_label
);
999 INSN_ANNULLED_BRANCH_P (insn
) = 1;
1006 /* Return truth value of the statement that this branch
1007 is mostly taken. If we think that the branch is extremely likely
1008 to be taken, we return 2. If the branch is slightly more likely to be
1009 taken, return 1. Otherwise, return 0.
1011 CONDITION, if non-zero, is the condition that JUMP_INSN is testing. */
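/* Illustrative sketch (added for exposition; a plausible caller pattern, not
   a quotation of the real callers, which live in the filling routines):

	rtx condition = get_branch_condition (insn, JUMP_LABEL (insn));

	if (mostly_true_jump (insn, condition))
	  ... prefer filling from the branch target ...
	else
	  ... prefer filling from the fall-through path ...  */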
1014 mostly_true_jump (jump_insn
, condition
)
1015 rtx jump_insn
, condition
;
1017 rtx target_label
= JUMP_LABEL (jump_insn
);
1020 /* If this is a conditional return insn, assume it won't return. */
1021 if (target_label
== 0)
1024 /* If TARGET_LABEL has no jumps between it and the end of the function,
1025 this is essentially a conditional return, so predict it as false. */
1026 for (insn
= NEXT_INSN (target_label
);
1027 insn
&& GET_CODE (insn
) != JUMP_INSN
;
1028 insn
= NEXT_INSN (insn
))
1034 /* If this is the test of a loop, it is very likely true. We scan backwards
1035 from the target label. If we find a NOTE_INSN_LOOP_BEG before the next
1036 real insn, we assume the branch is to the top of the loop. */
1037 for (insn
= PREV_INSN (target_label
);
1038 insn
&& GET_CODE (insn
) == NOTE
;
1039 insn
= PREV_INSN (insn
))
1040 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
)
1043 /* If we couldn't figure out what this jump was, assume it won't be
1044 taken. This should be rare. */
1048 /* EQ tests are usually false and NE tests are usually true. Also,
1049 most quantities are positive, so we can make the appropriate guesses
1050 about signed comparisons against zero. */
1051 switch (GET_CODE (condition
))
1054 /* Unconditional branch. */
1062 if (XEXP (condition
, 1) == const0_rtx
)
1067 if (XEXP (condition
, 1) == const0_rtx
)
1072 /* Predict that backward branches are usually taken, forward branches usually not. If
1073 we don't know whether this is forward or backward, assume the branch
1074 will be taken, since most are. */
1075 return (INSN_UID (jump_insn) > max_uid || INSN_UID (target_label) > max_uid
1076 	  || (uid_to_ruid[INSN_UID (jump_insn)]
1077 	      > uid_to_ruid[INSN_UID (target_label)]));
1080 /* Return the condition under which INSN will branch to TARGET. If TARGET
1081 is zero, return the condition under which INSN will return. If INSN is
1082 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1083 type of jump, or it doesn't go to TARGET, return 0. */
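/* Illustrative note (added for exposition): the two conditional forms
   recognized below are, schematically,

	(set (pc) (if_then_else COND (label_ref TARGET) (pc)))
	(set (pc) (if_then_else COND (pc) (label_ref TARGET)))

   For the first, COND itself is returned; for the second, the reversed
   comparison is built with reverse_condition, as the code below shows.  */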
1086 get_branch_condition (insn
, target
)
1090 rtx pat
= PATTERN (insn
);
1093 if (GET_CODE (pat
) == RETURN
)
1094 return target
== 0 ? const_true_rtx
: 0;
1096 else if (GET_CODE (pat
) != SET
|| SET_DEST (pat
) != pc_rtx
)
1099 src
= SET_SRC (pat
);
1100 if (GET_CODE (src
) == LABEL_REF
&& XEXP (src
, 0) == target
)
1101 return const_true_rtx
;
1103 else if (GET_CODE (src
) == IF_THEN_ELSE
1104 && ((target
== 0 && GET_CODE (XEXP (src
, 1)) == RETURN
)
1105 || (GET_CODE (XEXP (src
, 1)) == LABEL_REF
1106 && XEXP (XEXP (src
, 1), 0) == target
))
1107 && XEXP (src
, 2) == pc_rtx
)
1108 return XEXP (src
, 0);
1110 else if (GET_CODE (src
) == IF_THEN_ELSE
1111 && ((target
== 0 && GET_CODE (XEXP (src
, 2)) == RETURN
)
1112 || (GET_CODE (XEXP (src
, 2)) == LABEL_REF
1113 && XEXP (XEXP (src
, 2), 0) == target
))
1114 && XEXP (src
, 1) == pc_rtx
)
1115 return gen_rtx (reverse_condition (GET_CODE (XEXP (src
, 0))),
1116 GET_MODE (XEXP (src
, 0)),
1117 XEXP (XEXP (src
, 0), 0), XEXP (XEXP (src
, 0), 1));
1120 /* Return non-zero if CONDITION is more strict than the condition of
1121 INSN, i.e., if INSN will always branch if CONDITION is true. */
1124 condition_dominates_p (condition
, insn
)
1128 rtx other_condition
= get_branch_condition (insn
, JUMP_LABEL (insn
));
1129 enum rtx_code code
= GET_CODE (condition
);
1130 enum rtx_code other_code
;
1132 if (rtx_equal_p (condition
, other_condition
)
1133 || other_condition
== const_true_rtx
)
1136 else if (condition
== const_true_rtx
|| other_condition
== 0)
1139 other_code
= GET_CODE (other_condition
);
1140 if (GET_RTX_LENGTH (code
) != 2 || GET_RTX_LENGTH (other_code
) != 2
1141 || ! rtx_equal_p (XEXP (condition
, 0), XEXP (other_condition
, 0))
1142 || ! rtx_equal_p (XEXP (condition
, 1), XEXP (other_condition
, 1)))
1145 return comparison_dominates_p (code
, other_code
);
1148 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1149 the condition tested by INSN is CONDITION and the resources shown in
1150 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1151 from SEQ's delay list, in addition to whatever insns it may execute
1152 (in DELAY_LIST). SETS and NEEDED denote resources already set and
1153 needed while searching for delay slot insns. Return the concatenated
1154 delay list if possible, otherwise, return 0.
1156 SLOTS_TO_FILL is the total number of slots required by INSN, and
1157 PSLOTS_FILLED points to the number filled so far (also the number of
1158 insns in DELAY_LIST). It is updated with the number that have been
1159 filled from the SEQUENCE, if any.
1161 PANNUL_P points to a non-zero value if we already know that we need
1162 to annul INSN. If this routine determines that annulling is needed,
1163 it may set that value non-zero.
1165 PNEW_THREAD points to a location that is to receive the place at which
1166 execution should continue. */
1169 steal_delay_list_from_target (insn
, condition
, seq
, delay_list
,
1170 sets
, needed
, other_needed
,
1171 slots_to_fill
, pslots_filled
, pannul_p
,
1173 rtx insn
, condition
;
1176 struct resources
*sets
, *needed
, *other_needed
;
1183 int slots_remaining
= slots_to_fill
- *pslots_filled
;
1184 int total_slots_filled
= *pslots_filled
;
1185 rtx new_delay_list
= 0;
1186 int must_annul
= *pannul_p
;
1189 /* We can't do anything if there are more delay slots in SEQ than we
1190 can handle, or if we don't know that it will be a taken branch.
1192 We know that it will be a taken branch if it is either an unconditional
1193 branch or a conditional branch with a stricter branch condition. */
1195 if (XVECLEN (seq
, 0) - 1 > slots_remaining
1196 || ! condition_dominates_p (condition
, XVECEXP (seq
, 0, 0)))
1199 for (i
= 1; i
< XVECLEN (seq
, 0); i
++)
1201 rtx trial
= XVECEXP (seq
, 0, i
);
1203 if (insn_references_resource_p (trial
, sets
, 0)
1204 || insn_sets_resource_p (trial
, needed
, 0)
1205 || insn_sets_resource_p (trial
, sets
, 0)
1207 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1209 || find_reg_note (trial
, REG_CC_USER
, 0)
1211 /* If TRIAL is from the fallthrough code of an annulled branch insn
1212 in SEQ, we cannot use it. */
1213 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq
, 0, 0))
1214 && ! INSN_FROM_TARGET_P (trial
)))
1217 /* If this insn was already done (usually in a previous delay slot),
1218 pretend we put it in our delay slot. */
1219 if (redundant_insn_p (trial
, insn
, new_delay_list
))
1223 && ((condition
== const_true_rtx
1224 || (! insn_sets_resource_p (trial
, other_needed
, 0)
1225 && ! may_trap_p (PATTERN (trial
)))))
1226 ? eligible_for_delay (insn
, total_slots_filled
, trial
)
1228 eligible_for_annul_false (insn
, total_slots_filled
, trial
)))
1230 temp
= copy_rtx (trial
);
1231 INSN_FROM_TARGET_P (temp
) = 1;
1232 new_delay_list
= add_to_delay_list (temp
, new_delay_list
);
1233 total_slots_filled
++;
1235 if (--slots_remaining
== 0)
1242 /* Show the place to which we will be branching. */
1243 *pnew_thread
= next_active_insn (JUMP_LABEL (XVECEXP (seq
, 0, 0)));
1245 /* Add any new insns to the delay list and update the count of the
1246 number of slots filled. */
1247 *pslots_filled
= total_slots_filled
;
1248 *pannul_p
= must_annul
;
1250 if (delay_list
== 0)
1251 return new_delay_list
;
1253 for (temp
= new_delay_list
; temp
; temp
= XEXP (temp
, 1))
1254 delay_list
= add_to_delay_list (XEXP (temp
, 0), delay_list
);
1259 /* Similar to steal_delay_list_from_target except that SEQ is on the
1260 fallthrough path of INSN. Here we only do something if the delay insn
1261 of SEQ is an unconditional branch. In that case we steal its delay slot
1262 for INSN since unconditional branches are much easier to fill. */
1265 steal_delay_list_from_fallthrough (insn
, condition
, seq
,
1266 delay_list
, sets
, needed
, other_needed
,
1267 slots_to_fill
, pslots_filled
, pannul_p
)
1268 rtx insn
, condition
;
1271 struct resources
*sets
, *needed
, *other_needed
;
1278 /* We can't do anything if SEQ's delay insn isn't an
1279 unconditional branch. */
1281 if (! simplejump_p (XVECEXP (seq
, 0, 0))
1282 && GET_CODE (PATTERN (XVECEXP (seq
, 0, 0))) != RETURN
)
1285 for (i
= 1; i
< XVECLEN (seq
, 0); i
++)
1287 rtx trial
= XVECEXP (seq
, 0, i
);
1289 /* If TRIAL sets CC0, stealing it will move it too far from the use
1291 if (insn_references_resource_p (trial
, sets
, 0)
1292 || insn_sets_resource_p (trial
, needed
, 0)
1293 || insn_sets_resource_p (trial
, sets
, 0)
1295 || sets_cc0_p (PATTERN (trial
))
1301 /* If this insn was already done, we don't need it. */
1302 if (redundant_insn_p (trial
, insn
, delay_list
))
1304 delete_from_delay_slot (trial
);
1309 && ((condition
== const_true_rtx
1310 || (! insn_sets_resource_p (trial
, other_needed
, 0)
1311 && ! may_trap_p (PATTERN (trial
)))))
1312 ? eligible_for_delay (insn
, *pslots_filled
, trial
)
1314 eligible_for_annul_true (insn
, *pslots_filled
, trial
)))
1316 delete_from_delay_slot (trial
);
1317 delay_list
= add_to_delay_list (trial
, delay_list
);
1319 if (++(*pslots_filled
) == slots_to_fill
)
1329 /* Try merging insns starting at THREAD which match exactly the insns in
1330 INSN's delay slots.
1332 If all insns were matched and the insn was previously annulling, the
1333 annul bit will be cleared.
1335 For each insn that is merged, if the branch is or will be non-annulling,
1336 we delete the merged insn. */
1339 try_merge_delay_insns (insn
, thread
)
1342 rtx trial
, next_trial
;
1343 rtx delay_insn
= XVECEXP (PATTERN (insn
), 0, 0);
1344 int annul_p
= INSN_ANNULLED_BRANCH_P (delay_insn
);
1345 int slot_number
= 1;
1346 int num_slots
= XVECLEN (PATTERN (insn
), 0);
1347 rtx next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1348 struct resources set
, needed
;
1349 rtx merged_insns
= 0;
1352 CLEAR_RESOURCE (&needed
);
1353 CLEAR_RESOURCE (&set
);
1355 /* If this is not an annulling branch, take into account anything needed in
1356 NEXT_TO_MATCH. This prevents two increments from being incorrectly
1357 folded into one. If we are annulling, this would be the correct
1358 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH
1359 will essentially disable this optimization. This method is somewhat of
1360 a kludge, but I don't see a better way.) */
1362 mark_referenced_resources (next_to_match
, &needed
, 1);
1364 for (trial
= thread
; !stop_search_p (trial
, 1); trial
= next_trial
)
1366 rtx pat
= PATTERN (trial
);
1368 next_trial
= next_nonnote_insn (trial
);
1370 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1371 if (GET_CODE (trial
) == INSN
1372 && (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
))
1375 if (GET_CODE (next_to_match
) == GET_CODE (trial
)
1377 /* We can't share an insn that sets cc0. */
1378 && ! sets_cc0_p (pat
)
1380 && ! insn_references_resource_p (trial
, &set
, 1)
1381 && ! insn_sets_resource_p (trial
, &set
, 1)
1382 && ! insn_sets_resource_p (trial
, &needed
, 1)
1383 && (trial
= try_split (pat
, trial
, 0)) != 0
1384 && rtx_equal_p (PATTERN (next_to_match
), PATTERN (trial
))
1385 /* Have to test this condition if annul condition is different
1386 from (and less restrictive than) non-annulling one. */
1387 && eligible_for_delay (delay_insn
, slot_number
- 1, trial
))
1389 next_trial
= next_nonnote_insn (trial
);
1393 update_block (trial
, thread
);
1394 delete_insn (trial
);
1395 INSN_FROM_TARGET_P (next_to_match
) = 0;
1398 merged_insns
= gen_rtx (INSN_LIST
, VOIDmode
, trial
, merged_insns
);
1400 if (++slot_number
== num_slots
)
1403 next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1405 mark_referenced_resources (next_to_match
, &needed
, 1);
1408 mark_set_resources (trial
, &set
, 1);
1409 mark_referenced_resources (trial
, &needed
, 1);
1412 /* See if we stopped on a filled insn. If we did, try to see if its
1413 delay slots match. */
1414 if (slot_number
!= num_slots
1415 && trial
&& GET_CODE (trial
) == INSN
1416 && GET_CODE (PATTERN (trial
)) == SEQUENCE
1417 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial
), 0, 0)))
1419 rtx pat
= PATTERN (trial
);
1421 for (i
= 1; i
< XVECLEN (pat
, 0); i
++)
1423 rtx dtrial
= XVECEXP (pat
, 0, i
);
1425 if (! insn_references_resource_p (dtrial
, &set
, 1)
1426 && ! insn_sets_resource_p (dtrial
, &set
, 1)
1427 && ! insn_sets_resource_p (dtrial
, &needed
, 1)
1429 && ! sets_cc0_p (PATTERN (dtrial
))
1431 && rtx_equal_p (PATTERN (next_to_match
), PATTERN (dtrial
))
1432 && eligible_for_delay (delay_insn
, slot_number
- 1, dtrial
))
1436 update_block (dtrial
, thread
);
1437 delete_from_delay_slot (dtrial
);
1438 INSN_FROM_TARGET_P (next_to_match
) = 0;
1441 merged_insns
= gen_rtx (INSN_LIST
, SImode
, dtrial
,
1444 if (++slot_number
== num_slots
)
1447 next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1452 /* If all insns in the delay slot have been matched and we were previously
1453 annulling the branch, we need not do so any more. In that case delete
1454 all the merged insns. Also clear the INSN_FROM_TARGET_P bit of each
1455 insn in the delay list so that we know that it isn't only being used
1456 at the target. */
1457 if (next_to_match
== 0 && annul_p
)
1459 for (; merged_insns
; merged_insns
= XEXP (merged_insns
, 1))
1461 if (GET_MODE (merged_insns
) == SImode
)
1463 update_block (XEXP (merged_insns
, 0), thread
);
1464 delete_from_delay_slot (XEXP (merged_insns
, 0));
1468 update_block (XEXP (merged_insns
, 0), thread
);
1469 delete_insn (XEXP (merged_insns
, 0));
1473 INSN_ANNULLED_BRANCH_P (delay_insn
) = 0;
1475 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
1476 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn
), 0, i
)) = 0;
1480 /* See if INSN is redundant with an insn in front of TARGET. Often this
1481 is called when INSN is a candidate for a delay slot of TARGET.
1482 DELAY_LIST are insns that will be placed in delay slots of TARGET in front
1483 of INSN. Often INSN will be redundant with an insn in a delay slot of
1484 some previous insn. This happens when we have a series of branches to the
1485 same label; in that case the first insn at the target might want to go
1486 into each of the delay slots.
1488 If we are not careful, this routine can take up a significant fraction
1489 of the total compilation time (4%), but only wins rarely. Hence we
1490 speed this routine up by making two passes. The first pass goes back
1491 until it hits a label and sees if it finds an insn with an identical
1492 pattern. Only in this (relatively rare) event does it check for
1495 We do not split insns we encounter. This could cause us not to find a
1496 redundant insn, but the cost of splitting seems greater than the possible
1497 gain in rare cases. */
1500 redundant_insn_p (insn
, target
, delay_list
)
1505 rtx target_main
= target
;
1506 rtx ipat
= PATTERN (insn
);
1508 struct resources needed
, set
;
1511 /* Scan backwards looking for a match. */
1512 for (trial
= PREV_INSN (target
); trial
; trial
= PREV_INSN (trial
))
1514 if (GET_CODE (trial
) == CODE_LABEL
)
1517 if (GET_CODE (trial
) != INSN
&& GET_CODE (trial
) != CALL_INSN
1518 && GET_CODE (trial
) != JUMP_INSN
)
1521 pat
= PATTERN (trial
);
1522 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
1525 if (GET_CODE (pat
) == SEQUENCE
)
1527 /* Stop for a CALL and its delay slots because it is difficult to track
1528 its resource needs correctly. */
1529 if (GET_CODE (XVECEXP (pat
, 0, 0)) == CALL_INSN
)
1532 for (i
= XVECLEN (pat
, 0) - 1; i
> 0; i
--)
1533 if (GET_CODE (XVECEXP (pat
, 0, i
)) == GET_CODE (insn
)
1534 && rtx_equal_p (PATTERN (XVECEXP (pat
, 0, i
)), ipat
))
1537 /* If found a match, exit this loop early. */
1542 else if (GET_CODE (trial
) == GET_CODE (insn
) && rtx_equal_p (pat
, ipat
))
1546 /* If we didn't find an insn that matches, return 0. */
1550 /* See what resources this insn sets and needs. If they overlap, or
1551 if this insn references CC0, it can't be redundant. */
1553 CLEAR_RESOURCE (&needed
);
1554 CLEAR_RESOURCE (&set
);
1555 mark_set_resources (insn
, &set
, 1);
1556 mark_referenced_resources (insn
, &needed
, 1);
1558 /* If TARGET is a SEQUENCE, get the main insn. */
1559 if (GET_CODE (target
) == INSN
&& GET_CODE (PATTERN (target
)) == SEQUENCE
)
1560 target_main
= XVECEXP (PATTERN (target
), 0, 0);
1562 if (resource_conflicts_p (&needed
, &set
)
1564 || reg_mentioned_p (cc0_rtx
, ipat
)
1566 /* The insn requiring the delay may not set anything needed or set by
1568 || insn_sets_resource_p (target_main
, &needed
, 1)
1569 || insn_sets_resource_p (target_main
, &set
, 1))
1572 /* Insns we pass may not set either NEEDED or SET, so merge them for
1573 simpler tests. */
1574 needed
.memory
|= set
.memory
;
1575 IOR_HARD_REG_SET (needed
.regs
, set
.regs
);
1577 /* This insn isn't redundant if it conflicts with an insn that either is
1578 or will be in a delay slot of TARGET. */
1582 if (insn_sets_resource_p (XEXP (delay_list
, 0), &needed
, 1))
1584 delay_list
= XEXP (delay_list
, 1);
1587 if (GET_CODE (target
) == INSN
&& GET_CODE (PATTERN (target
)) == SEQUENCE
)
1588 for (i
= 1; i
< XVECLEN (PATTERN (target
), 0); i
++)
1589 if (insn_sets_resource_p (XVECEXP (PATTERN (target
), 0, i
), &needed
, 1))
1592 /* Scan backwards until we reach a label or an insn that uses something
1593 INSN sets or sets something insn uses or sets. */
1595 for (trial
= PREV_INSN (target
);
1596 trial
&& GET_CODE (trial
) != CODE_LABEL
;
1597 trial
= PREV_INSN (trial
))
1599 if (GET_CODE (trial
) != INSN
&& GET_CODE (trial
) != CALL_INSN
1600 && GET_CODE (trial
) != JUMP_INSN
)
1603 pat
= PATTERN (trial
);
1604 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
1607 if (GET_CODE (pat
) == SEQUENCE
)
1609 /* If this is a CALL_INSN and its delay slots, it is hard to track
1610 the resource needs properly, so give up. */
1611 if (GET_CODE (XVECEXP (pat
, 0, 0)) == CALL_INSN
)
1614 /* See if any of the insns in the delay slot match, updating
1615 resource requirements as we go. */
1616 for (i
= XVECLEN (pat
, 0) - 1; i
> 0; i
--)
1618 rtx candidate
= XVECEXP (pat
, 0, i
);
1620 /* If an insn will be annulled if the branch is false, it isn't
1621 considered as a possible duplicate insn. */
1622 if (rtx_equal_p (PATTERN (candidate
), ipat
)
1623 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat
, 0, 0))
1624 && INSN_FROM_TARGET_P (candidate
)))
1626 /* Show that this insn will be used in the sequel. */
1627 INSN_FROM_TARGET_P (candidate
) = 0;
1631 /* Unless this is an annulled insn from the target of a branch,
1632 we must stop if it sets anything needed or set by INSN. */
1633 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat
, 0, 0))
1634 || ! INSN_FROM_TARGET_P (candidate
))
1635 && insn_sets_resource_p (candidate
, &needed
, 1))
1640 /* If the insn requiring the delay slot conflicts with INSN, we
1642 if (insn_sets_resource_p (XVECEXP (pat
, 0, 0), &needed
, 1))
1647 /* See if TRIAL is the same as INSN. */
1648 pat
= PATTERN (trial
);
1649 if (rtx_equal_p (pat
, ipat
))
1652 /* Can't go any further if TRIAL conflicts with INSN. */
1653 if (insn_sets_resource_p (trial
, &needed
, 1))
1661 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
1662 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
1663 is non-zero, we are allowed to fall into this thread; otherwise, we are
1664 not.

1666 If LABEL is used more than once or we pass a label other than LABEL before
1667 finding an active insn, we do not own this thread. */
1670 own_thread_p (thread
, label
, allow_fallthrough
)
1673 int allow_fallthrough
;
1678 /* We don't own the function end. */
1682 /* Get the first active insn, or THREAD, if it is an active insn. */
1683 active_insn
= next_active_insn (PREV_INSN (thread
));
1685 for (insn
= thread
; insn
!= active_insn
; insn
= NEXT_INSN (insn
))
1686 if (GET_CODE (insn
) == CODE_LABEL
1687 && (insn
!= label
|| LABEL_NUSES (insn
) != 1))
1690 if (allow_fallthrough
)
1693 /* Ensure that we reach a BARRIER before any insn or label. */
1694 for (insn
= prev_nonnote_insn (thread
);
1695 insn
== 0 || GET_CODE (insn
) != BARRIER
;
1696 insn
= prev_nonnote_insn (insn
))
1698 || GET_CODE (insn
) == CODE_LABEL
1699 || (GET_CODE (insn
) == INSN
1700 && GET_CODE (PATTERN (insn
)) != USE
1701 && GET_CODE (PATTERN (insn
)) != CLOBBER
))
1707 /* Find the number of the basic block that starts closest to INSN. Return -1
1708 if we couldn't find such a basic block. */
1711 find_basic_block (insn
)
1716 /* Scan backwards to the previous BARRIER. Then see if we can find a
1717 label that starts a basic block. Return the basic block number. */
1719 for (insn
= prev_nonnote_insn (insn
);
1720 insn
&& GET_CODE (insn
) != BARRIER
;
1721 insn
= prev_nonnote_insn (insn
))
1724 /* The start of the function is basic block zero. */
1728 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
1729 anything other than a CODE_LABEL or note, we can't find this code. */
1730 for (insn
= next_nonnote_insn (insn
);
1731 insn
&& GET_CODE (insn
) == CODE_LABEL
;
1732 insn
= next_nonnote_insn (insn
))
1734 for (i
= 0; i
< n_basic_blocks
; i
++)
1735 if (insn
== basic_block_head
[i
])
1742 /* Called when INSN is being moved from a location near the target of a jump.
1743 We leave a marker of the form (use (INSN)) immediately in front
1744 of WHERE for mark_target_live_regs. These markers will be deleted when
1745 reorg finishes.
1747 We used to try to update the live status of registers if WHERE is at
1748 the start of a basic block, but that can't work since we may remove a
1749 BARRIER in relax_delay_slots. */
1752 update_block (insn
, where
)
1758 /* Ignore if this was in a delay slot and it came from the target of
1760 if (INSN_FROM_TARGET_P (insn
))
1763 emit_insn_before (gen_rtx (USE
, VOIDmode
, insn
), where
);
1765 /* INSN might be making a value live in a block where it didn't use to
1766 be. So recompute liveness information for this block. */
1768 b
= find_basic_block (insn
);
1773 /* Marks registers possibly live at the current place being scanned by
1774 mark_target_live_regs. Used only by the next two functions. */
1776 static HARD_REG_SET current_live_regs
;
1778 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
1779 Also only used by the next two functions. */
1781 static HARD_REG_SET pending_dead_regs
;
1783 /* Utility function called from mark_target_live_regs via note_stores.
1784 It deadens any CLOBBERed registers and livens any SET registers. */
1787 update_live_status (dest
, x
)
1791 int first_regno
, last_regno
;
1794 if (GET_CODE (dest
) != REG
1795 && (GET_CODE (dest
) != SUBREG
|| GET_CODE (SUBREG_REG (dest
)) != REG
))
1798 if (GET_CODE (dest
) == SUBREG
)
1799 first_regno
= REGNO (SUBREG_REG (dest
)) + SUBREG_WORD (dest
);
1801 first_regno
= REGNO (dest
);
1803 last_regno
= first_regno
+ HARD_REGNO_NREGS (first_regno
, GET_MODE (dest
));
1805 if (GET_CODE (x
) == CLOBBER
)
1806 for (i
= first_regno
; i
< last_regno
; i
++)
1807 CLEAR_HARD_REG_BIT (current_live_regs
, i
);
1809 for (i
= first_regno
; i
< last_regno
; i
++)
1811 SET_HARD_REG_BIT (current_live_regs
, i
);
1812 CLEAR_HARD_REG_BIT (pending_dead_regs
, i
);
1816 /* Similar to next_insn, but ignores insns in the delay slots of
1817 an annulled branch. */
1820 next_insn_no_annul (insn
)
1825 /* If INSN is an annulled branch, skip any insns from the target
1827 if (INSN_ANNULLED_BRANCH_P (insn
)
1828 && NEXT_INSN (PREV_INSN (insn
)) != insn
)
1829 while (INSN_FROM_TARGET_P (NEXT_INSN (insn
)))
1830 insn
= NEXT_INSN (insn
);
1832 insn
= NEXT_INSN (insn
);
1833 if (insn
&& GET_CODE (insn
) == INSN
1834 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1835 insn
= XVECEXP (PATTERN (insn
), 0, 0);
1841 /* Set the resources that are live at TARGET.
1843 If TARGET is zero, we refer to the end of the current function and can
1844 return our precomputed value.
1846 Otherwise, we try to find out what is live by consulting the basic block
1847 information. This is tricky, because we must consider the actions of
1848 reload and jump optimization, which occur after the basic block information
1849 is computed.

1851 Accordingly, we proceed as follows:
1853 We find the previous BARRIER and look at all immediately following labels
1854 (with no intervening active insns) to see if any of them start a basic
1855 block. If we hit the start of the function first, we use block 0.
1857 Once we have found a basic block and a corresponding first insn, we can
1858 accurately compute the live status from basic_block_live_regs and
1859 reg_renumber. (By starting at a label following a BARRIER, we are immune
1860 to actions taken by reload and jump.) Then we scan all insns between
1861 that point and our target. For each CLOBBER (or for call-clobbered regs
1862 when we pass a CALL_INSN), mark the appropriate registers as dead. For
1863 a SET, mark them as live.
1865 We have to be careful when using REG_DEAD notes because they are not
1866 updated by such things as find_equiv_reg. So keep track of registers
1867 marked as dead that haven't been assigned to, and mark them dead at the
1868 next CODE_LABEL since reload and jump won't propagate values across labels.
1870 If we cannot find the start of a basic block (should be a very rare
1871 case, if it can happen at all), mark everything as potentially live.
1873 Next, scan forward from TARGET looking for things set or clobbered
1874 before they are used. These are not live.
1876 Because we can be called many times on the same target, save our results
1877 in a hash table indexed by INSN_UID. */
1880 mark_target_live_regs (target, res)
1882 struct resources *res;
1886 struct target_info *tinfo;
1889 HARD_REG_SET scratch;
1890 struct resources set, needed;
1893 /* Handle end of function. */
1896 *res = end_of_function_needs;
1900 /* We have to assume memory is needed, but the CC isn't. */
1905 /* See if we have computed this value already. */
1906 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
1907 tinfo; tinfo = tinfo->next)
1908 if (tinfo->uid == INSN_UID (target))
1911 /* Start by getting the basic block number. If we have saved information,
1912 we can get it from there unless the insn at the start of the basic block
1913 has been deleted. */
1914 if (tinfo && tinfo->block != -1
1915 && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
1919 b = find_basic_block (target);
1923 /* If the information is up-to-date, use it. Otherwise, we will
1925 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
1927 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
1933 /* Allocate a place to put our results and chain it into the
1935 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
1936 tinfo->uid = INSN_UID (target);
1938 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
1939 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
1942 CLEAR_HARD_REG_SET (pending_dead_regs);
1944 /* If we found a basic block, get the live registers from it and update
1945 them with anything set or killed between its start and the insn before
1946 TARGET. Otherwise, we must assume everything is live. */
1949 regset regs_live = basic_block_live_at_start[b];
1952 rtx start_insn, stop_insn;
1954 /* Compute hard regs live at start of block -- this is the real hard regs
1955 marked live, plus live pseudo regs that have been renumbered to
1959 current_live_regs = *regs_live;
1961 COPY_HARD_REG_SET (current_live_regs, regs_live);
1964 for (offset = 0, i = 0; offset < regset_size; offset++)
1966 if (regs_live[offset] == 0)
1967 i += HOST_BITS_PER_INT;
1969 for (bit = 1; bit && i < max_regno; bit <<= 1, i++)
1970 if ((regs_live[offset] & bit)
1971 && (regno = reg_renumber[i]) >= 0)
1973 j < regno + HARD_REGNO_NREGS (regno,
1974 PSEUDO_REGNO_MODE (i));
1976 SET_HARD_REG_BIT (current_live_regs, j);
1979 /* Get starting and ending insn, handling the case where each might
1981 start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
1984 if (GET_CODE (start_insn) == INSN
1985 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
1986 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
1988 if (GET_CODE (stop_insn) == INSN
1989 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
1990 stop_insn = next_insn (PREV_INSN (stop_insn));
1992 for (insn = start_insn; insn != stop_insn;
1993 insn = next_insn_no_annul (insn))
1996 rtx real_insn = insn;
1998 /* If this insn is from the target of a branch, it isn't going to
1999 be used in the sequel. If it is used in both cases, this
2000 test will not be true. */
2001 if (INSN_FROM_TARGET_P (insn))
2004 /* If this insn is a USE made by update_block, we care about the
2006 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
2007 && (GET_CODE (XEXP (PATTERN (insn), 0)) == INSN
2008 || GET_CODE (XEXP (PATTERN (insn), 0)) == CALL_INSN
2009 || GET_CODE (XEXP (PATTERN (insn), 0)) == JUMP_INSN))
2010 real_insn = XEXP (PATTERN (insn), 0);
2012 if (GET_CODE (real_insn) == CALL_INSN)
2014 /* CALL clobbers all call-used regs that aren't fixed except
2015 sp, ap, and fp. Do this before setting the result of the
2017 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2018 if (call_used_regs[i]
2019 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
2020 && i != ARG_POINTER_REGNUM
2021 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2022 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
2024 #ifdef PIC_OFFSET_TABLE_REGNUM
2025 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2028 CLEAR_HARD_REG_BIT (current_live_regs, i);
2030 /* A CALL_INSN sets any global register live, since it may
2031 have been modified by the call. */
2032 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2034 SET_HARD_REG_BIT (current_live_regs, i);
2037 /* Mark anything killed in an insn to be deadened at the next
2038 label. Ignore USE insns; the only REG_DEAD notes will be for
2039 parameters. But they might be early. A CALL_INSN will usually
2040 clobber registers used for parameters. It isn't worth bothering
2041 with the unlikely case when it won't. */
2042 if ((GET_CODE (real_insn) == INSN
2043 && GET_CODE (PATTERN (real_insn)) != USE)
2044 || GET_CODE (real_insn) == JUMP_INSN
2045 || GET_CODE (real_insn) == CALL_INSN)
2047 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2048 if (REG_NOTE_KIND (link) == REG_DEAD
2049 && GET_CODE (XEXP (link, 0)) == REG
2050 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2052 int first_regno = REGNO (XEXP (link, 0));
2055 + HARD_REGNO_NREGS (first_regno,
2056 GET_MODE (XEXP (link, 0))));
2058 for (i = first_regno; i < last_regno; i++)
2059 SET_HARD_REG_BIT (pending_dead_regs, i);
2062 note_stores (PATTERN (real_insn), update_live_status);
2064 /* If any registers were unused after this insn, kill them.
2065 These notes will always be accurate. */
2066 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2067 if (REG_NOTE_KIND (link) == REG_UNUSED
2068 && GET_CODE (XEXP (link, 0)) == REG
2069 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2071 int first_regno = REGNO (XEXP (link, 0));
2074 + HARD_REGNO_NREGS (first_regno,
2075 GET_MODE (XEXP (link, 0))));
2077 for (i = first_regno; i < last_regno; i++)
2078 CLEAR_HARD_REG_BIT (current_live_regs, i);
2082 if (GET_CODE (real_insn) == CODE_LABEL)
2084 /* A label clobbers the pending dead registers since neither
2085 reload nor jump will propagate a value across a label. */
2086 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
2087 CLEAR_HARD_REG_SET (pending_dead_regs);
2091 COPY_HARD_REG_SET (res->regs, current_live_regs);
2093 tinfo->bb_tick = bb_ticks[b];
2096 /* We didn't find the start of a basic block. Assume everything
2097 in use. This should happen only extremely rarely. */
2098 SET_HARD_REG_SET (res->regs);
2100 /* Now step forward from TARGET looking for registers that are set before
2101 they are used. These are dead. If we pass a label, any pending dead
2102 registers that weren't yet used can be made dead. Stop when we pass a
2103 conditional JUMP_INSN; follow the first few unconditional branches. */
2105 CLEAR_RESOURCE (&set);
2106 CLEAR_RESOURCE (&needed);
2108 for (insn = target; insn; insn = next)
2110 rtx main_insn = insn;
2112 next = NEXT_INSN (insn);
2113 switch (GET_CODE (insn))
2116 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
2117 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
2118 CLEAR_HARD_REG_SET (pending_dead_regs);
2126 if (GET_CODE (PATTERN (insn)) == USE
2127 || GET_CODE (PATTERN (insn)) == CLOBBER)
2129 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
2130 main_insn = XVECEXP (PATTERN (insn), 0, 0);
2133 if (GET_CODE (main_insn) == JUMP_INSN)
2135 if (jump_count++ < 10
2136 && (simplejump_p (main_insn)
2137 || GET_CODE (PATTERN (main_insn)) == RETURN))
2139 next = next_active_insn (JUMP_LABEL (main_insn));
2147 mark_referenced_resources (insn, &needed, 1);
2148 mark_set_resources (insn, &set, 1);
2150 COPY_HARD_REG_SET (scratch, set.regs);
2151 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2152 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2155 /* If we hit an unconditional branch, we have another way of finding out
2156 what is live: we can see what is live at the branch target and include
2157 anything used but not set before the branch. The only things that are
2158 live are those that are live using the above test and the test below.
2160 Don't try this if we expired our jump count above, since that would
2161 mean there may be an infinite loop in the function being compiled. */
2163 if (jump_insn && jump_count < 10)
2165 rtx jump_target = (GET_CODE (jump_insn) == INSN
2166 ? JUMP_LABEL (XVECEXP (PATTERN (jump_insn), 0, 0))
2167 : JUMP_LABEL (jump_insn));
2168 struct resources new_resources;
2169 rtx stop_insn = next_active_insn (jump_insn);
2171 mark_target_live_regs (next_active_insn (jump_target), &new_resources);
2172 CLEAR_RESOURCE (&set);
2173 CLEAR_RESOURCE (&needed);
2175 /* Include JUMP_INSN in the needed registers. */
2176 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
2178 mark_referenced_resources (insn, &needed, 1);
2180 COPY_HARD_REG_SET (scratch, needed.regs);
2181 AND_COMPL_HARD_REG_SET (scratch, set.regs);
2182 IOR_HARD_REG_SET (new_resources.regs, scratch);
2184 mark_set_resources (insn, &set, 1);
2187 AND_HARD_REG_SET (res->regs, new_resources.regs);
2190 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
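/* The forward scan above prunes registers that are set before they are
   used.  The sketch below restates that idea over a stream reduced to
   per-insn use/set bitmasks; it is illustrative only (never compiled)
   and every name in it is a hypothetical stand-in, not part of this
   pass.  */
#if 0
struct example_insn
{
  unsigned uses;	/* Bitmask of registers this insn reads.  */
  unsigned sets;	/* Bitmask of registers this insn writes.  */
};

/* Return LIVE with every register removed that is written by one of the
   N_INSNS insns following the target before any insn reads it.  */

static unsigned
example_prune_dead (live, insns, n_insns)
     unsigned live;
     struct example_insn *insns;
     int n_insns;
{
  unsigned used = 0, set_before_use = 0;
  int i;

  for (i = 0; i < n_insns; i++)
    {
      /* An insn reads its sources before writing its destination, so
	 record the uses first, then any set of a still-unused register.  */
      used = used | insns[i].uses;
      set_before_use = set_before_use | (insns[i].sets & ~used);
    }

  return live & ~set_before_use;
}
#endif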
2193 /* Scan a function looking for insns that need a delay slot and find insns to
2194 put into the delay slot.
2196 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
2197 as calls). We do these first since we don't want jump insns (that are
2198 easier to fill) to get the only insns that could be used for non-jump insns.
2199 When it is zero, only try to fill JUMP_INSNs.
2201 When slots are filled in this manner, the insns (including the
2202 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2203 it is possible to tell whether a delay slot has really been filled
2204 or not. `final' knows how to deal with this, by communicating
2205 through FINAL_SEQUENCE. */
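/* A filled delay slot is thus represented by wrapping the branch and its
   slot insns in one SEQUENCE, roughly (sequence [branch slot-1 ... slot-N]).
   The predicate below is an illustrative sketch only (never compiled, and
   not the test `final' itself uses); it shows how a later pass can tell a
   really-filled slot from an unfilled one.  */
#if 0
static int
example_slot_was_filled (insn)
     rtx insn;
{
  return (GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && XVECLEN (PATTERN (insn), 0) > 1);
}
#endif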
2208 fill_simple_delay_slots (first, non_jumps_p)
2211 register rtx insn, pat, trial, next_trial;
2213 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2214 struct resources needed, set;
2215 register int slots_to_fill, slots_filled;
2218 for (i = 0; i < num_unfilled_slots; i++)
2220 /* Get the next insn to fill. If it has already had any slots assigned,
2221 we can't do anything with it. Maybe we'll improve this later. */
2223 insn = unfilled_slots_base[i];
2225 || INSN_DELETED_P (insn)
2226 || (GET_CODE (insn) == INSN
2227 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2228 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2229 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2232 slots_to_fill = num_delay_slots (insn);
2233 if (slots_to_fill == 0)
2236 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2237 says how many. After initialization, scan backwards from the
2238 insn to search for a potential delay-slot candidate. Stop
2239 searching when a label or jump is hit.
2241 For each candidate, if it is to go into the delay slot (moved
2242 forward in execution sequence), it must not need or set any resources
2243 that were set by later insns and must not set any resources that
2244 are needed for those insns.
2246 The delay slot insn itself sets resources unless it is a call
2247 (in which case the called routine, not the insn itself, is doing
2252 CLEAR_RESOURCE (&needed);
2253 CLEAR_RESOURCE (&set);
2254 mark_set_resources (insn, &set, 0);
2255 mark_referenced_resources (insn, &needed, 0);
2257 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2260 next_trial = prev_nonnote_insn (trial);
2262 /* This must be an INSN or CALL_INSN. */
2263 pat = PATTERN (trial);
2265 /* USE and CLOBBER at this level are just for flow; ignore them. */
2266 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2269 /* Check for resource conflict first, to avoid unnecessary
2271 if (! insn_references_resource_p (trial, &set, 1)
2272 && ! insn_sets_resource_p (trial, &set, 1)
2273 && ! insn_sets_resource_p (trial, &needed, 1)
2275 /* Can't separate set of cc0 from its use. */
2276 && ! (reg_mentioned_p (cc0_rtx, pat)
2277 && ! sets_cc0_p (cc0_rtx, pat))
2281 trial = try_split (pat, trial, 1);
2282 next_trial = prev_nonnote_insn (trial);
2283 if (eligible_for_delay (insn, slots_filled, trial))
2285 /* In this case, we are searching backward, so if we
2286 find insns to put on the delay list, we want
2287 to put them at the head, rather than the
2288 tail, of the list. */
2290 delay_list = gen_rtx (INSN_LIST, VOIDmode,
2292 update_block (trial, trial);
2293 delete_insn (trial);
2294 if (slots_to_fill == ++slots_filled)
2300 mark_set_resources (trial, &set, 1);
2301 mark_referenced_resources (trial, &needed, 1);
2304 if (slots_filled == slots_to_fill)
2307 /* If all needed slots haven't been filled, we come here. */
2309 /* Try to optimize case of jumping around a single insn. */
2310 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2311 else if (delay_list == 0
2312 && GET_CODE (insn) == JUMP_INSN && condjump_p (insn))
2314 delay_list = optimize_skip (insn);
2320 /* @@ This would be a good place to optimize:
2323 nop add %o7,.-L1,%o7
2329 /* Try to get insns from beyond the insn needing the delay slot.
2330 These insns can neither set nor reference resources set in insns being
2331 skipped, cannot set resources in the insn being skipped, and, if this
2332 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2333 call might not return).
2335 If this is a conditional jump, see if it merges back to us early
2336 enough for us to pick up insns from the merge point. Don't do
2337 this if there is another branch to our label unless we pass all of
2340 Another similar merge is if we jump to the same place that a
2341 later unconditional jump branches to. In that case, we don't
2342 care about the number of uses of our label. */
2344 else if (GET_CODE (insn) != JUMP_INSN
2345 || (condjump_p (insn) && ! simplejump_p (insn)
2346 && JUMP_LABEL (insn) != 0))
2349 int maybe_never = 0;
2350 int passed_label = 0;
2352 struct resources needed_at_jump;
2354 CLEAR_RESOURCE (&needed);
2355 CLEAR_RESOURCE (&set);
2357 if (GET_CODE (insn) == CALL_INSN)
2359 mark_set_resources (insn, &set, 1);
2360 mark_referenced_resources (insn, &needed, 1);
2363 else if (GET_CODE (insn) == JUMP_INSN)
2365 /* Get our target and show how many more uses we want to
2366 see before we hit the label. */
2367 target = JUMP_LABEL (insn);
2368 target_uses = LABEL_NUSES (target) - 1;
2371 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2373 rtx pat, trial_delay;
2375 next_trial = next_nonnote_insn (trial);
2377 if (GET_CODE (trial) == CODE_LABEL)
2381 /* If this is our target, see if we have seen all its uses.
2382 If so, indicate we have passed our target and ignore it.
2383 All other labels cause us to stop our search. */
2384 if (trial == target && target_uses == 0)
2392 else if (GET_CODE (trial) == BARRIER)
2395 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2396 pat = PATTERN (trial);
2398 /* Stand-alone USE and CLOBBER are just for flow. */
2399 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2402 /* If this already has filled delay slots, get the insn needing
2404 if (GET_CODE (pat) == SEQUENCE)
2405 trial_delay = XVECEXP (pat, 0, 0);
2407 trial_delay = trial;
2409 /* If this is a jump insn to our target, indicate that we have
2410 seen another jump to it. If we aren't handling a conditional
2411 jump, stop our search. Otherwise, compute the needs at its
2412 target and add them to NEEDED. */
2413 if (GET_CODE (trial_delay) == JUMP_INSN)
2417 else if (JUMP_LABEL (trial_delay) == target)
2421 mark_target_live_regs
2422 (next_active_insn (JUMP_LABEL (trial_delay)),
2424 needed.memory |= needed_at_jump.memory;
2425 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
2429 /* See if we have a resource problem before we try to
2432 && GET_CODE (pat) != SEQUENCE
2433 && ! insn_references_resource_p (trial, &set, 1)
2434 && ! insn_sets_resource_p (trial, &set, 1)
2435 && ! insn_sets_resource_p (trial, &needed, 1)
2437 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2439 && ! (maybe_never && may_trap_p (pat))
2440 && (trial = try_split (pat, trial, 0))
2441 && eligible_for_delay (insn, slots_filled, trial))
2443 next_trial = next_nonnote_insn (trial);
2444 delay_list = add_to_delay_list (trial, delay_list);
2447 if (reg_mentioned_p (cc0_rtx, pat))
2448 link_cc0_insns (trial);
2452 update_block (trial, trial);
2453 delete_insn (trial);
2454 if (slots_to_fill == ++slots_filled)
2459 mark_set_resources (trial, &set, 1);
2460 mark_referenced_resources (trial, &needed, 1);
2462 /* Ensure we don't put insns between the setting of cc and the
2463 comparison by moving a setting of cc into an earlier delay
2464 slot since these insns could clobber the condition code. */
2467 /* If this is a call or jump, we might not get here. */
2468 if (GET_CODE (trial) == CALL_INSN
2469 || GET_CODE (trial) == JUMP_INSN)
2473 /* If there are slots left to fill and our search was stopped by an
2474 unconditional branch, try the insn at the branch target. We can
2475 redirect the branch if it works. */
2476 if (slots_to_fill != slots_filled
2478 && GET_CODE (trial) == JUMP_INSN
2479 && simplejump_p (trial)
2480 && (target == 0 || JUMP_LABEL (trial) == target)
2481 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2482 && ! (GET_CODE (next_trial) == INSN
2483 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2484 && ! insn_references_resource_p (next_trial, &set, 1)
2485 && ! insn_sets_resource_p (next_trial, &set, 1)
2486 && ! insn_sets_resource_p (next_trial, &needed, 1)
2488 && ! (reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2489 && ! sets_cc0_p (PATTERN (next_trial)))
2491 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
2492 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2493 && eligible_for_delay (insn, slots_filled, next_trial))
2495 rtx new_label = next_active_insn (next_trial);
2498 new_label = get_label_before (new_label);
2501 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2503 redirect_jump (trial, new_label);
2505 /* If we merged because we both jumped to the same place,
2506 redirect the original insn also. */
2508 redirect_jump (insn, new_label);
2513 unfilled_slots_base[i]
2514 = emit_delay_sequence (insn, delay_list,
2515 slots_filled, slots_to_fill);
2517 if (slots_to_fill == slots_filled)
2518 unfilled_slots_base[i] = 0;
2520 note_delay_statistics (slots_filled, 0);
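/* Both scans in fill_simple_delay_slots accept a candidate TRIAL only
   when moving it across the intervening insns is resource-safe.  With
   register resources reduced to plain bitmasks, the test is essentially
   the one below; this is an illustrative sketch only (never compiled)
   and the parameter names are hypothetical.  */
#if 0
static int
example_can_use_as_delay_insn (trial_uses, trial_sets, set_between, needed_between)
     unsigned trial_uses, trial_sets, set_between, needed_between;
{
  return ((trial_uses & set_between) == 0	  /* Reads nothing written in between.  */
	  && (trial_sets & set_between) == 0	  /* Writes nothing written in between.  */
	  && (trial_sets & needed_between) == 0); /* Writes nothing still needed.  */
}
#endif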
2523 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2524 /* See if the epilogue needs any delay slots. Try to fill them if so.
2525 The only thing we can do is scan backwards from the end of the
2526 function. If we did this in a previous pass, it is incorrect to do it
2528 if (current_function_epilogue_delay_list)
2531 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2532 if (slots_to_fill == 0)
2536 CLEAR_RESOURCE (&needed);
2537 CLEAR_RESOURCE (&set);
2539 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2540 trial = PREV_INSN (trial))
2542 if (GET_CODE (trial) == NOTE)
2544 pat = PATTERN (trial);
2545 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2548 if (! insn_references_resource_p (trial, &set, 1)
2549 && ! insn_sets_resource_p (trial, &needed, 1)
2551 /* Don't want to mess with cc0 here. */
2552 && ! reg_mentioned_p (cc0_rtx, pat)
2556 trial = try_split (pat, trial, 1);
2557 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2559 /* Here as well we are searching backward, so put the
2560 insns we find on the head of the list. */
2562 current_function_epilogue_delay_list
2563 = gen_rtx (INSN_LIST, VOIDmode, trial,
2564 current_function_epilogue_delay_list);
2565 mark_referenced_resources (trial, &end_of_function_needs, 1);
2566 update_block (trial, trial);
2567 delete_insn (trial);
2569 /* Clear deleted bit so final.c will output the insn. */
2570 INSN_DELETED_P (trial) = 0;
2572 if (slots_to_fill == ++slots_filled)
2578 mark_set_resources (trial, &set, 1);
2579 mark_referenced_resources (trial, &needed, 1);
2582 note_delay_statistics (slots_filled, 0);
2586 /* Try to find insns to place in delay slots.
2588 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2589 or is an unconditional branch if CONDITION is const_true_rtx.
2590 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2592 THREAD is a flow-of-control thread, either the insns to be executed if the
2593 branch is true or those executed if it is false; THREAD_IF_TRUE says which.
2595 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2596 to see if any potential delay slot insns set things needed there.
2598 LIKELY is non-zero if it is extremely likely that the branch will be
2599 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2600 end of a loop back up to the top.
2602 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
2603 thread. I.e., it is the fallthrough code of our jump or the target of the
2604 jump when we are the only jump going there.
2606 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2607 case, we can only take insns from the head of the thread for our delay
2608 slot. We then adjust the jump to point after the insns we have taken. */
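/* The acceptance test applied to each insn taken from THREAD, in
   simplified form: the insn is taken outright when it is harmless on
   the opposite thread (it sets nothing needed there and cannot trap);
   otherwise it can be taken only if the branch may annul the slot.
   Illustrative sketch only (never compiled); the names and the
   MUST_ANNUL protocol here are hypothetical simplifications.  */
#if 0
static int
example_accept_from_thread (sets_opposite_needed, can_trap, may_annul, must_annul)
     int sets_opposite_needed, can_trap, may_annul;
     int *must_annul;
{
  if (! sets_opposite_needed && ! can_trap)
    return 1;			/* Safe whether or not the branch is taken.  */

  if (may_annul)
    {
      *must_annul = 1;		/* Only safe if the slot is annulled.  */
      return 1;
    }

  return 0;
}
#endif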
2611 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
2612 thread_if_true, own_thread, own_opposite_thread,
2613 slots_to_fill, pslots_filled)
2616 rtx thread, opposite_thread;
2619 int own_thread, own_opposite_thread;
2620 int slots_to_fill, *pslots_filled;
2624 struct resources opposite_needed, set, needed;
2629 /* Validate our arguments. */
2630 if ((condition == const_true_rtx && ! thread_if_true)
2631 || (! own_thread && ! thread_if_true))
2634 /* If our thread is the end of subroutine, we can't get any delay
2639 /* If this is an unconditional branch, nothing is needed at the
2640 opposite thread. Otherwise, compute what is needed there. */
2641 if (condition == const_true_rtx)
2642 CLEAR_RESOURCE (&opposite_needed);
2644 mark_target_live_regs (opposite_thread, &opposite_needed);
2646 /* If the insn at THREAD can be split, do it here to avoid having to
2647 update THREAD and NEW_THREAD if it is done in the loop below. Also
2648 initialize NEW_THREAD. */
2650 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2652 /* Scan insns at THREAD. We are looking for an insn that can be removed
2653 from THREAD (it neither sets nor references resources that were set
2654 ahead of it and it doesn't set anything needed by the insns ahead of
2655 it) and that either can be placed in an annulling insn or isn't
2656 needed at OPPOSITE_THREAD. */
2658 CLEAR_RESOURCE (&needed);
2659 CLEAR_RESOURCE (&set);
2661 /* If we do not own this thread, we must stop as soon as we find
2662 something that we can't put in a delay slot, since all we can do
2663 is branch into THREAD at a later point. Therefore, labels stop
2664 the search if this is not the `true' thread. */
2666 for (trial = thread;
2667 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2668 trial = next_nonnote_insn (trial))
2672 /* If we have passed a label, we no longer own this thread. */
2673 if (GET_CODE (trial) == CODE_LABEL)
2679 pat = PATTERN (trial);
2680 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2683 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2684 don't separate or copy insns that set and use CC0. */
2685 if (! insn_references_resource_p (trial, &set, 1)
2686 && ! insn_sets_resource_p (trial, &set, 1)
2687 && ! insn_sets_resource_p (trial, &needed, 1)
2689 && ! (reg_mentioned_p (cc0_rtx, pat)
2690 && (! own_thread || ! sets_cc0_p (pat)))
2694 /* If TRIAL is redundant with some insn before INSN, we don't
2695 actually need to add it to the delay list; we can merely pretend
2697 if (redundant_insn_p (trial, insn, delay_list))
2701 update_block (trial, thread);
2702 delete_insn (trial);
2705 new_thread = next_active_insn (trial);
2710 /* There are two ways we can win: If TRIAL doesn't set anything
2711 needed at the opposite thread and can't trap, or if it can
2712 go into an annulled delay slot. */
2713 if (condition == const_true_rtx
2714 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2715 && ! may_trap_p (pat)))
2717 trial = try_split (pat, trial, 0);
2718 pat = PATTERN (trial);
2719 if (eligible_for_delay (insn, *pslots_filled, trial))
2723 #ifdef ANNUL_IFTRUE_SLOTS
2726 #ifdef ANNUL_IFFALSE_SLOTS
2731 trial = try_split (pat, trial, 0);
2732 pat = PATTERN (trial);
2734 ? eligible_for_annul_false (insn, *pslots_filled, trial)
2735 : eligible_for_annul_true (insn, *pslots_filled, trial)))
2743 if (reg_mentioned_p (cc0_rtx, pat))
2744 link_cc0_insns (trial);
2747 /* If we own this thread, delete the insn. If this is the
2748 destination of a branch, show that a basic block status
2749 may have been updated. In any case, mark the new
2750 starting point of this thread. */
2753 update_block (trial, thread);
2754 delete_insn (trial);
2757 new_thread = next_active_insn (trial);
2759 temp = own_thread ? trial : copy_rtx (trial);
2761 INSN_FROM_TARGET_P (temp) = 1;
2763 delay_list = add_to_delay_list (temp, delay_list);
2765 if (slots_to_fill == ++(*pslots_filled))
2767 /* Even though we have filled all the slots, we
2768 may be branching to a location that has a
2769 redundant insn. Skip any if so. */
2770 while (new_thread && ! own_thread
2771 && ! insn_sets_resource_p (new_thread, &set, 1)
2772 && ! insn_sets_resource_p (new_thread, &needed, 1)
2773 && ! insn_references_resource_p (new_thread,
2775 && redundant_insn_p (new_thread, insn,
2777 new_thread = next_active_insn (new_thread);
2786 /* This insn can't go into a delay slot. */
2788 mark_set_resources (trial, &set, 1);
2789 mark_referenced_resources (trial, &needed, 1);
2791 /* Ensure we don't put insns between the setting of cc and the comparison
2792 by moving a setting of cc into an earlier delay slot since these insns
2793 could clobber the condition code. */
2796 /* If this insn is a register-register copy and the next insn has
2797 a use of our destination, change it to use our source. That way,
2798 it will become a candidate for our delay slot the next time
2799 through this loop. This case occurs commonly in loops that
2802 We could check for more complex cases than those tested below,
2803 but it doesn't seem worth it. It might also be a good idea to try
2804 to swap the two insns. That might do better.
2806 We can't do this if the next insn modifies our source, because that
2807 would make the replacement into the insn invalid. This also
2808 prevents updating the contents of a PRE_INC. */
2810 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
2811 && GET_CODE (SET_SRC (pat)) == REG
2812 && GET_CODE (SET_DEST (pat)) == REG)
2814 rtx next = next_nonnote_insn (trial);
2816 if (next && GET_CODE (next) == INSN
2817 && GET_CODE (PATTERN (next)) != USE
2818 && ! reg_set_p (SET_DEST (pat), next)
2819 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
2820 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2824 /* If we stopped on a branch insn that has delay slots, see if we can
2825 steal some of the insns in those slots. */
2826 if (trial && GET_CODE (trial) == INSN
2827 && GET_CODE (PATTERN (trial)) == SEQUENCE
2828 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
2830 /* If this is the `true' thread, we will want to follow the jump,
2831 so we can only do this if we have taken everything up to here. */
2832 if (thread_if_true && trial == new_thread)
2834 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2835 delay_list, &set, &needed,
2836 &opposite_needed, slots_to_fill,
2837 pslots_filled, &must_annul,
2839 else if (! thread_if_true)
2841 = steal_delay_list_from_fallthrough (insn, condition,
2843 delay_list, &set, &needed,
2844 &opposite_needed, slots_to_fill,
2845 pslots_filled, &must_annul);
2848 /* If we haven't found anything for this delay slot and it is very
2849 likely that the branch will be taken, see if the insn at our target
2850 increments or decrements a register with an increment that does not
2851 depend on the destination register. If so, try to place the opposite
2852 arithmetic insn after the jump insn and put the arithmetic insn in the
2853 delay slot. If we can't do this, return. */
2854 if (delay_list == 0 && likely && new_thread && GET_CODE (new_thread) == INSN)
2856 rtx pat = PATTERN (new_thread);
2861 pat = PATTERN (trial);
2863 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
2864 || ! eligible_for_delay (insn, 0, trial))
2867 dest = SET_DEST (pat), src = SET_SRC (pat);
2868 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2869 && rtx_equal_p (XEXP (src, 0), dest)
2870 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
2872 rtx other = XEXP (src, 1);
2876 /* If this is a constant adjustment, use the same code with
2877 the negated constant. Otherwise, reverse the sense of the
2879 if (GET_CODE (other) == CONST_INT)
2880 new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
2881 negate_rtx (GET_MODE (src), other));
2883 new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
2884 GET_MODE (src), dest, other);
2886 ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
2889 if (recog_memoized (ninsn) < 0
2890 || (insn_extract (ninsn),
2891 ! constrain_operands (INSN_CODE (ninsn), 1)))
2893 delete_insn (ninsn);
2899 update_block (trial, thread);
2900 delete_insn (trial);
2903 new_thread = next_active_insn (trial);
2905 ninsn = own_thread ? trial : copy_rtx (trial);
2907 INSN_FROM_TARGET_P (ninsn) = 1;
2909 delay_list = add_to_delay_list (ninsn, 0);
2914 if (delay_list && must_annul)
2915 INSN_ANNULLED_BRANCH_P (insn) = 1;
2917 /* If we are to branch into the middle of this thread, find an appropriate
2918 label or make a new one if none, and redirect INSN to it. If we hit the
2919 end of the function, use the end-of-function label. */
2920 if (new_thread != thread)
2924 if (! thread_if_true)
2927 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
2928 && (simplejump_p (new_thread)
2929 || GET_CODE (PATTERN (new_thread)) == RETURN))
2930 new_thread = follow_jumps (JUMP_LABEL (new_thread), 1);
2932 if (new_thread == 0)
2933 label = find_end_label ();
2934 else if (GET_CODE (new_thread) == CODE_LABEL)
2937 label = get_label_before (new_thread);
2939 redirect_jump (insn, label);
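/* The increment/decrement trick above, shown on a concrete source-level
   example.  If the likely-taken target begins with "r = r + 4", that
   insn can ride in the delay slot, and the opposite insn "r = r - 4" is
   emitted after the branch so the unlikely fall-through path is
   unchanged.  Illustrative sketch only (never compiled); the real
   transformation is done on RTL and all names here are hypothetical.  */
#if 0
static int example_taken_work (r) int r; { return r * 2; }
static int example_fallthrough_work (r) int r; { return r + 1; }

/* Before: the increment sits at the branch target.  */

static int
example_before (cond, r)
     int cond, r;
{
  if (cond)
    goto taken;
  r = example_fallthrough_work (r);
  return r;
 taken:
  r = r + 4;
  return example_taken_work (r);
}

/* After: the increment fills the delay slot (it executes either way,
   once the condition has been evaluated), and the opposite adjustment
   undoes it on the fall-through path.  */

static int
example_after (cond, r)
     int cond, r;
{
  int take = cond;		/* Branch condition evaluated first.  */
  r = r + 4;			/* Delay-slot insn runs either way.  */
  if (take)
    goto past_increment;
  r = r - 4;			/* Opposite insn on the fall-through path.  */
  r = example_fallthrough_work (r);
  return r;
 past_increment:
  return example_taken_work (r);
}
#endif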
2945 /* Make another attempt to find insns to place in delay slots.
2947 We previously looked for insns located in front of the delay insn
2948 and, for non-jump delay insns, located behind the delay insn.
2950 Here only try to schedule jump insns and try to move insns from either
2951 the target or the following insns into the delay slot. If annulling is
2952 supported, we will be likely to do this. Otherwise, we can do this only
2956 fill_eager_delay_slots (first)
2961 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2963 for (i = 0; i < num_unfilled_slots; i++)
2966 rtx target_label, insn_at_target, fallthrough_insn;
2969 int own_fallthrough;
2970 int prediction, slots_to_fill, slots_filled;
2972 insn = unfilled_slots_base[i];
2974 || INSN_DELETED_P (insn)
2975 || GET_CODE (insn) != JUMP_INSN
2976 || ! condjump_p (insn))
2979 slots_to_fill = num_delay_slots (insn);
2980 if (slots_to_fill == 0)
2984 target_label = JUMP_LABEL (insn);
2985 condition = get_branch_condition (insn, target_label);
2990 /* Get the next active fallthrough and target insns and see if we own
2991 them. Then see whether the branch is likely true. We don't need
2992 to do a lot of this for unconditional branches. */
2994 insn_at_target = next_active_insn (target_label);
2995 own_target = own_thread_p (target_label, target_label, 0);
2997 if (condition == const_true_rtx)
2999 own_fallthrough = 0;
3000 fallthrough_insn = 0;
3005 fallthrough_insn = next_active_insn (insn);
3006 own_fallthrough = own_thread_p (NEXT_INSN (insn), 0, 1);
3007 prediction = mostly_true_jump (insn, condition);
3010 /* If this insn is expected to branch, first try to get insns from our
3011 target, then our fallthrough insns. If it is not expected to branch,
3012 try the other order. */
3017 = fill_slots_from_thread (insn, condition, insn_at_target,
3018 fallthrough_insn, prediction == 2, 1,
3019 own_target, own_fallthrough,
3020 slots_to_fill, &slots_filled);
3022 if (delay_list == 0 && own_fallthrough)
3024 /* Even though we didn't find anything for delay slots,
3025 we might have found a redundant insn which we deleted
3026 from the thread that was filled. So we have to recompute
3027 the next insn at the target. */
3028 target_label = JUMP_LABEL (insn);
3029 insn_at_target = next_active_insn (target_label);
3032 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3033 insn_at_target, 0, 0,
3034 own_fallthrough, own_target,
3035 slots_to_fill, &slots_filled);
3040 if (own_fallthrough)
3042 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3043 insn_at_target, 0, 0,
3044 own_fallthrough, own_target,
3045 slots_to_fill, &slots_filled);
3047 if (delay_list == 0)
3049 = fill_slots_from_thread (insn, condition, insn_at_target,
3050 next_active_insn (insn), 0, 1,
3051 own_target, own_fallthrough,
3052 slots_to_fill, &slots_filled);
3056 unfilled_slots_base[i]
3057 = emit_delay_sequence (insn, delay_list,
3058 slots_filled, slots_to_fill);
3060 if (slots_to_fill == slots_filled)
3061 unfilled_slots_base[i] = 0;
3063 note_delay_statistics (slots_filled, 1);
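/* The skeleton of the loop above: a branch predicted taken looks for
   delay insns in its target thread first and only then in its
   fall-through thread, and a branch predicted not taken tries the two
   in the opposite order.  Illustrative sketch only (never compiled);
   example_try_thread stands in for the calls to fill_slots_from_thread
   and does not exist in this file.  */
#if 0
static rtx
example_fill_order (predicted_taken, target_thread, fallthrough_thread)
     int predicted_taken;
     rtx target_thread, fallthrough_thread;
{
  rtx first_try = predicted_taken ? target_thread : fallthrough_thread;
  rtx second_try = predicted_taken ? fallthrough_thread : target_thread;
  rtx delay_list = example_try_thread (first_try);

  if (delay_list == 0)
    delay_list = example_try_thread (second_try);

  return delay_list;
}
#endif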
3067 /* Once we have tried two ways to fill a delay slot, make a pass over the
3068 code to try to improve the results and to do such things as more jump
3072 relax_delay_slots (first)
3075 register rtx insn, next, pat;
3076 register rtx trial, delay_insn, target_label;
3078 /* Look at every JUMP_INSN and see if we can improve it. */
3079 for (insn = first; insn; insn = next)
3083 next = next_active_insn (insn);
3085 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3086 the next insn, or jumps to a label that is not the last of a
3087 group of consecutive labels. */
3088 if (GET_CODE (insn) == JUMP_INSN
3089 && (target_label = JUMP_LABEL (insn)) != 0)
3091 target_label = follow_jumps (target_label, 1);
3092 target_label = prev_label (next_active_insn (target_label));
3094 if (target_label == 0)
3095 target_label = find_end_label ();
3097 if (next_active_insn (target_label) == next)
3103 if (target_label != JUMP_LABEL (insn))
3104 redirect_jump (insn, target_label);
3106 /* See if this jump branches around an unconditional jump.
3107 If so, invert this jump and point it to the target of the
3109 if (next && GET_CODE (next) == JUMP_INSN
3110 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3111 && next_active_insn (target_label) == next_active_insn (next)
3112 && no_labels_between_p (insn, next))
3114 rtx label = JUMP_LABEL (next);
3116 /* Be careful how we do this to avoid deleting code or
3117 labels that are momentarily dead. See similar optimization
3120 We also need to ensure we properly handle the case when
3121 invert_jump fails. */
3123 ++LABEL_NUSES (target_label);
3125 ++LABEL_NUSES (label);
3127 if (invert_jump (insn, label))
3134 --LABEL_NUSES (label);
3136 if (--LABEL_NUSES (target_label) == 0)
3137 delete_insn (target_label);
3143 /* If this is an unconditional jump and the previous insn is a
3144 conditional jump, try reversing the condition of the previous
3145 insn and swapping our targets. The next pass might be able to
3148 Don't do this if we expect the conditional branch to be true, because
3149 we would then be making the more common case longer. */
3151 if (GET_CODE (insn) == JUMP_INSN
3152 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3153 && (other = prev_active_insn (insn)) != 0
3154 && condjump_p (other)
3155 && no_labels_between_p (other, insn)
3156 && ! mostly_true_jump (other,
3157 get_branch_condition (other,
3158 JUMP_LABEL (other))))
3160 rtx other_target = JUMP_LABEL (other);
3162 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
3163 as we move the label. */
3165 ++LABEL_NUSES (other_target);
3167 if (invert_jump (other, target_label))
3168 redirect_jump (insn, other_target);
3171 --LABEL_NUSES (other_target);
3174 /* Now look only at cases where we have filled a delay slot. */
3175 if (GET_CODE (insn) != INSN
3176 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3179 pat = PATTERN (insn);
3180 delay_insn = XVECEXP (pat, 0, 0);
3182 /* See if the first insn in the delay slot is redundant with some
3183 previous insn. Remove it from the delay slot if so; then set up
3184 to reprocess this insn. */
3185 if (redundant_insn_p (XVECEXP (pat, 0, 1), delay_insn, 0))
3187 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3188 next = prev_active_insn (next);
3192 /* Now look only at the cases where we have a filled JUMP_INSN. */
3193 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3194 || ! condjump_p (XVECEXP (PATTERN (insn), 0, 0)))
3197 target_label = JUMP_LABEL (delay_insn);
3201 /* If this jump goes to another unconditional jump, thread it, but
3202 don't convert a jump into a RETURN here. */
3203 trial = follow_jumps (target_label, 1);
3204 trial = prev_label (next_active_insn (trial));
3205 if (trial == 0 && target_label != 0)
3206 trial = find_end_label ();
3208 if (trial != target_label)
3210 redirect_jump (delay_insn, trial);
3211 target_label = trial;
3214 /* If the first insn at TARGET_LABEL is redundant with a previous
3215 insn, redirect the jump to the following insn and process again. */
3216 trial = next_active_insn (target_label);
3217 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3218 && redundant_insn_p (trial, insn, 0))
3220 trial = next_active_insn (trial);
3222 target_label = find_end_label ();
3224 target_label = get_label_before (trial);
3225 redirect_jump (delay_insn, target_label);
3230 /* Similarly, if it is an unconditional jump with one insn in its
3231 delay list and that insn is redundant, thread the jump. */
3232 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3233 && XVECLEN (PATTERN (trial), 0) == 2
3234 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3235 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3236 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3237 && redundant_insn_p (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3239 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3240 if (target_label == 0)
3241 target_label = find_end_label ();
3242 redirect_jump (delay_insn, target_label);
3248 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3249 && prev_active_insn (target_label) == insn
3251 /* If the last insn in the delay slot sets CC0 for some insn,
3252 various code assumes that it is in a delay slot. We could
3253 put it back where it belonged and delete the register notes,
3254 but it doesn't seem worthwhile in this uncommon case. */
3255 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3262 /* All this insn does is execute its delay list and jump to the
3263 following insn. So delete the jump and just execute the delay
3266 We do this by deleting the INSN containing the SEQUENCE, then
3267 re-emitting the insns separately, and then deleting the jump.
3268 This allows the count of the jump target to be properly
3271 /* Clear the from target bit, since these insns are no longer
3273 for (i = 0; i < XVECLEN (pat, 0); i++)
3274 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3276 trial = PREV_INSN (insn);
3278 emit_insn_after (pat, trial);
3279 delete_scheduled_jump (delay_insn);
3283 /* See if this jump (with its delay slots) branches around another
3284 jump (without delay slots). If so, invert this jump and point
3285 it to the target of the second jump. We cannot do this for
3286 annulled jumps, though. Again, don't convert a jump to a RETURN
3288 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3289 && next && GET_CODE (next) == JUMP_INSN
3290 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3291 && next_active_insn (target_label) == next_active_insn (next)
3292 && no_labels_between_p (insn, next))
3294 rtx label = JUMP_LABEL (next);
3295 rtx old_label = JUMP_LABEL (delay_insn);
3298 label = find_end_label ();
3300 /* Be careful how we do this to avoid deleting code or labels
3301 that are momentarily dead. See similar optimization in jump.c */
3303 ++LABEL_NUSES (old_label);
3305 if (invert_jump (delay_insn, label))
3311 if (old_label && --LABEL_NUSES (old_label) == 0)
3312 delete_insn (old_label);
3316 /* If we own the thread opposite the way this insn branches, see if we
3317 can merge its delay slots with following insns. */
3318 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3319 && own_thread_p (NEXT_INSN (insn), 0, 1))
3320 try_merge_delay_insns (insn, next);
3321 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3322 && own_thread_p (target_label, target_label, 0))
3323 try_merge_delay_insns (insn, next_active_insn (target_label));
3325 /* If we get here, we haven't deleted INSN. But we may have deleted
3326 NEXT, so recompute it. */
3327 next = next_active_insn (insn);
3333 /* Look for filled jumps to the end of function label. We can try to convert
3334 them into RETURN insns if the insns in the delay slot are valid for the
3338 make_return_insns (first)
3341 rtx insn, jump_insn, pat;
3342 rtx real_return_label = end_of_function_label;
3345 /* See if there is a RETURN insn in the function other than the one we
3346 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3347 into a RETURN to jump to it. */
3348 for (insn = first; insn; insn = NEXT_INSN (insn))
3349 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
3351 real_return_label = get_label_before (insn);
3355 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3356 was equal to END_OF_FUNCTION_LABEL. */
3357 LABEL_NUSES (real_return_label)++;
3359 /* Clear the list of insns to fill so we can use it. */
3360 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3362 for (insn = first; insn; insn = NEXT_INSN (insn))
3364 /* Only look at filled JUMP_INSNs that go to the end of function
3366 if (GET_CODE (insn) != INSN
3367 || GET_CODE (PATTERN (insn)) != SEQUENCE
3368 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3369 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3372 pat = PATTERN (insn);
3373 jump_insn = XVECEXP (pat, 0, 0);
3375 /* If we can't make the jump into a RETURN, redirect it to the best
3376 RETURN and go on to the next insn. */
3377 if (! redirect_jump (jump_insn, 0))
3379 redirect_jump (jump_insn, real_return_label);
3383 /* See if this RETURN can accept the insns currently in its delay slot.
3384 It can if it has more or an equal number of slots and the contents
3385 of each is valid. */
3387 slots = num_delay_slots (jump_insn);
3388 if (slots >= XVECLEN (pat, 0) - 1)
3390 for (i = 1; i < XVECLEN (pat, 0); i++)
3392 #ifdef ANNUL_IFFALSE_SLOTS
3393 (INSN_ANNULLED_BRANCH_P (jump_insn)
3394 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3395 ? eligible_for_annul_false (jump_insn, i - 1,
3396 XVECEXP (pat, 0, i)) :
3398 #ifdef ANNUL_IFTRUE_SLOTS
3399 (INSN_ANNULLED_BRANCH_P (jump_insn)
3400 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3401 ? eligible_for_annul_true (jump_insn, i - 1,
3402 XVECEXP (pat, 0, i)) :
3404 eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i))))
3410 if (i == XVECLEN (pat, 0))
3413 /* We have to do something with this insn. If it is an unconditional
3414 RETURN, delete the SEQUENCE and output the individual insns,
3415 followed by the RETURN. Then set things up so we try to find
3416 insns for its delay slots, if it needs some. */
3417 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
3419 rtx prev = PREV_INSN (insn);
3422 for (i = 1; i < XVECLEN (pat, 0); i++)
3423 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3425 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3426 emit_barrier_after (insn);
3429 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3432 /* It is probably more efficient to keep this with its current
3433 delay slot as a branch to a RETURN. */
3434 redirect_jump (jump_insn, real_return_label);
3437 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3438 new delay slots we have created. */
3439 if (--LABEL_NUSES (real_return_label) == 0)
3440 delete_insn (real_return_label);
3442 fill_simple_delay_slots (first, 1);
3443 fill_simple_delay_slots (first, 0);
3447 /* Try to find insns to place in delay slots. */
3450 dbr_schedule (first, file)
3457 int old_flag_no_peephole = flag_no_peephole;
3459 /* Execute `final' once in prescan mode to delete any insns that won't be
3460 used. Don't let final try to do any peephole optimization--it will
3461 ruin dataflow information for this pass. */
3463 flag_no_peephole = 1;
3464 final (first, 0, NO_DEBUG, 1, 1);
3465 flag_no_peephole = old_flag_no_peephole;
3468 /* Find the highest INSN_UID and allocate and initialize our map from
3469 INSN_UID's to position in code. */
3470 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3471 if (INSN_UID (insn) > max_uid)
3472 max_uid = INSN_UID (insn);
3474 uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int *));
3475 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3476 uid_to_ruid[INSN_UID (insn)] = i;
3478 /* Initialize the list of insns that need filling. */
3479 if (unfilled_firstobj == 0)
3481 gcc_obstack_init (&unfilled_slots_obstack);
3482 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
3485 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3489 INSN_ANNULLED_BRANCH_P (insn) = 0;
3490 INSN_FROM_TARGET_P (insn) = 0;
3492 /* Skip vector tables. We can't get attributes for them. */
3493 if (GET_CODE (insn) == JUMP_INSN
3494 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
3495 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
3498 if (num_delay_slots (insn) > 0)
3499 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3501 /* Ensure all jumps go to the last of a set of consecutive labels. */
3502 if (GET_CODE (insn) == JUMP_INSN && condjump_p (insn)
3503 && JUMP_LABEL (insn) != 0
3504 && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
3505 != JUMP_LABEL (insn)))
3506 redirect_jump (insn, target);
3509 /* Indicate what resources are required to be valid at the end of the current
3510 function. The condition code never is and memory always is. If the
3511 frame pointer is needed, it is and so is the stack pointer unless
3512 EXIT_IGNORE_STACK is non-zero. If the frame pointer is not needed, the
3513 stack pointer is. Registers used to return the function value are
3514 needed. Registers holding global variables are needed. */
3516 end_of_function_needs.cc = 0;
3517 end_of_function_needs.memory = 1;
3518 CLEAR_HARD_REG_SET (end_of_function_needs.regs);
3520 if (frame_pointer_needed)
3522 SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
3523 #ifdef EXIT_IGNORE_STACK
3524 if (! EXIT_IGNORE_STACK)
3526 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
3529 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
3531 if (current_function_return_rtx != 0
3532 && GET_CODE (current_function_return_rtx) == REG)
3533 mark_referenced_resources (current_function_return_rtx,
3534 &end_of_function_needs, 0);
3536 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3538 SET_HARD_REG_BIT (end_of_function_needs.regs, i);
3540 /* Show we haven't computed an end-of-function label yet. */
3541 end_of_function_label = 0;
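/* The end-of-function requirements established above, condensed into one
   place: memory is always needed, the condition code never is, either the
   frame pointer or the stack pointer is needed, and so is a register
   return value.  Illustrative sketch only (never compiled); it restates
   the assignments above rather than adding anything new, and it omits the
   EXIT_IGNORE_STACK and global-register cases.  */
#if 0
static void
example_end_of_function_needs (res)
     struct resources *res;
{
  CLEAR_RESOURCE (res);
  res->memory = 1;
  res->cc = 0;

  if (frame_pointer_needed)
    SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
  else
    SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);

  if (current_function_return_rtx != 0
      && GET_CODE (current_function_return_rtx) == REG)
    mark_referenced_resources (current_function_return_rtx, res, 0);
}
#endif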
3543 /* Allocate and initialize the tables used by mark_target_live_regs. */
3545 = (struct target_info **) alloca ((TARGET_HASH_PRIME
3546 * sizeof (struct target_info *)));
3547 bzero (target_hash_table, TARGET_HASH_PRIME * sizeof (struct target_info *));
3549 bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
3550 bzero (bb_ticks, n_basic_blocks * sizeof (int));
3552 /* Initialize the statistics for this function. */
3553 bzero (num_insns_needing_delays, sizeof num_insns_needing_delays);
3554 bzero (num_filled_delays, sizeof num_filled_delays);
3556 /* Now do the delay slot filling. Try everything twice in case earlier
3557 changes make more slots fillable. */
3559 for (reorg_pass_number = 0;
3560 reorg_pass_number < MAX_REORG_PASSES;
3561 reorg_pass_number++)
3563 fill_simple_delay_slots (first, 1);
3564 fill_simple_delay_slots (first, 0);
3565 fill_eager_delay_slots (first);
3566 relax_delay_slots (first);
3569 /* Delete any USE insns made by update_block; subsequent passes don't need
3570 them or know how to deal with them. */
3571 for (insn = first; insn; insn = next)
3573 next = NEXT_INSN (insn);
3575 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
3576 && (GET_CODE (XEXP (PATTERN (insn), 0)) == INSN
3577 || GET_CODE (XEXP (PATTERN (insn), 0)) == JUMP_INSN
3578 || GET_CODE (XEXP (PATTERN (insn), 0)) == CALL_INSN))
3579 next = delete_insn (insn);
3582 /* If we made an end of function label, indicate that it is now
3583 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
3584 If it is now unused, delete it. */
3585 if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
3586 delete_insn (end_of_function_label);
3589 if (HAVE_return && end_of_function_label != 0)
3590 make_return_insns (first);
3593 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3595 /* It is not clear why the line below is needed, but it does seem to be. */
3596 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
3600 register int i, j, need_comma;
3602 for (reorg_pass_number = 0;
3603 reorg_pass_number < MAX_REORG_PASSES;
3604 reorg_pass_number++)
3606 fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
3607 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
3610 fprintf (file, ";; Reorg function #%d\n", i);
3612 fprintf (file, ";; %d insns needing delay slots\n;; ",
3613 num_insns_needing_delays[i][reorg_pass_number]);
3615 for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
3616 if (num_filled_delays[i][j][reorg_pass_number])
3619 fprintf (file, ", ");
3621 fprintf (file, "%d got %d delays",
3622 num_filled_delays[i][j][reorg_pass_number], j);
3624 fprintf (file, "\n");
3629 #endif /* DELAY_SLOTS */