/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.
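
   As an illustration (a hypothetical MIPS-style sequence, shown only to
   make the idea concrete), filling moves an insn from before the branch
   into the slot that would otherwise hold a nop:

	(unfilled)			(filled)
	add	$4,$4,1			beq	$2,$0,L5
	beq	$2,$0,L5		add	$4,$4,1		; delay slot
	nop				...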
   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   will be taken.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
113 #include "insn-config.h"
114 #include "conditions.h"
115 #include "hard-reg-set.h"
116 #include "basic-block.h"
118 #include "insn-flags.h"
123 #include "insn-attr.h"
127 #define obstack_chunk_alloc xmalloc
128 #define obstack_chunk_free free
130 #ifndef ANNUL_IFTRUE_SLOTS
131 #define eligible_for_annul_true(INSN, SLOTS, TRIAL) 0
133 #ifndef ANNUL_IFFALSE_SLOTS
134 #define eligible_for_annul_false(INSN, SLOTS, TRIAL) 0
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
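
/* A sketch of how these are used (illustrative only; the actual code is in
   the filling routines described above): an insn wanting delay slots is
   recorded with

	obstack_ptr_grow (&unfilled_slots_obstack, insn);

   and the pending insns are later walked with

	rtx *pi;

	for (pi = unfilled_slots_base; pi < unfilled_slots_next; pi++)
	  if (*pi != 0)
	    ... try to fill the slots of *pi ...

   recomputing the base and end through the macros on each use, since the
   obstack may have been reallocated in the meantime.  */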
/* This structure is used to indicate which hardware resources are set or
   needed by insns so far.  */

struct resources
{
  char memory;			/* Insn sets or needs a memory location.  */
  char volatil;			/* Insn sets or needs a volatile memory loc.  */
  char cc;			/* Insn sets or needs the condition codes.  */
  HARD_REG_SET regs;		/* Which registers are set or needed.  */
};

/* Macro to clear all resources.  */
#define CLEAR_RESOURCE(RES)	\
 do { (RES)->memory = (RES)->volatil = (RES)->cc = 0;	\
      CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
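
/* The typical usage pattern, essentially what insn_references_resource_p
   and insn_sets_resource_p below do: clear a resources record, mark what
   one insn touches, then test it against another record:

	struct resources insn_res;

	CLEAR_RESOURCE (&insn_res);
	mark_referenced_resources (insn, &insn_res, include_called_routine);
	if (resource_conflicts_p (&insn_res, res))
	  ... the insns interact and cannot be reordered freely ...
*/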
/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257

/* Define the hash table itself.  */
static struct target_info **target_hash_table;
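
/* Lookup is by INSN_UID modulo TARGET_HASH_PRIME; mark_target_live_regs
   below does essentially this (shown here only as a sketch):

	struct target_info *tinfo;

	for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	     tinfo; tinfo = tinfo->next)
	  if (tinfo->uid == INSN_UID (target))
	    break;
*/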
/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;

/* Forward references: */

static int redundant_insn_p ();
static void update_block ();
/* Given X, some rtl, and RES, a pointer to a `struct resources', mark
   which resources are referenced by the insn.  If INCLUDE_CALLED_ROUTINE
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

mark_referenced_resources (x, res, include_called_routine)
     register rtx x;
     register struct resources *res;
     register int include_called_routine;

  register enum rtx_code code = GET_CODE (x);
  register char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
      if (GET_CODE (SUBREG_REG (x)) != REG)
	mark_referenced_resources (SUBREG_REG (x), res, 0);

	  int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	  int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	  for (i = regno; i < last_regno; i++)
	    SET_HARD_REG_BIT (res->regs, i);

      for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      /* If this memory shouldn't change, it really isn't referencing
	 anything.  */
      if (! RTX_UNCHANGING_P (x))
	res->memory = 1;
      res->volatil = MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);
274 case UNSPEC_VOLATILE
:
276 /* Traditional asm's are always volatile. */
281 res
->volatil
= MEM_VOLATILE_P (x
);
283 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
284 We can not just fall through here since then we would be confused
285 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
286 traditional asms unlike their normal usage. */
288 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
289 mark_referenced_resources (ASM_OPERANDS_INPUT (x
, i
), res
, 0);
293 /* The first operand will be a (MEM (xxx)) but doesn't really reference
294 memory. The second operand may be referenced, though. */
295 mark_referenced_resources (XEXP (XEXP (x
, 0), 0), res
, 0);
296 mark_referenced_resources (XEXP (x
, 1), res
, 0);
300 /* Usually, the first operand of SET is set, not referenced. But
301 registers used to access memory are referenced. SET_DEST is
302 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
304 mark_referenced_resources (SET_SRC (x
), res
, 0);
307 if (GET_CODE (x
) == SIGN_EXTRACT
|| GET_CODE (x
) == ZERO_EXTRACT
)
308 mark_referenced_resources (x
, res
, 0);
309 else if (GET_CODE (x
) == SUBREG
)
311 if (GET_CODE (x
) == MEM
)
312 mark_referenced_resources (XEXP (x
, 0), res
, 0);
319 if (include_called_routine
)
321 /* A CALL references memory, the frame pointer if it exists, the
322 stack pointer, any global registers and any registers given in
323 USE insns immediately in front of the CALL.
325 However, we may have moved some of the parameter loading insns
326 into the delay slot of this CALL. If so, the USE's for them
327 don't count and should be skipped. */
328 rtx insn
= PREV_INSN (x
);
333 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
334 if (NEXT_INSN (insn
) != x
)
336 sequence
= PATTERN (NEXT_INSN (insn
));
337 seq_size
= XVECLEN (sequence
, 0);
338 if (GET_CODE (sequence
) != SEQUENCE
)
343 SET_HARD_REG_BIT (res
->regs
, STACK_POINTER_REGNUM
);
344 if (frame_pointer_needed
)
345 SET_HARD_REG_BIT (res
->regs
, FRAME_POINTER_REGNUM
);
347 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
349 SET_HARD_REG_BIT (res
->regs
, i
);
351 /* Skip any labels between the CALL_INSN and possible USE insns. */
352 while (GET_CODE (insn
) == CODE_LABEL
)
353 insn
= PREV_INSN (insn
);
355 for ( ; (insn
&& GET_CODE (insn
) == INSN
356 && GET_CODE (PATTERN (insn
)) == USE
);
357 insn
= PREV_INSN (insn
))
359 for (i
= 1; i
< seq_size
; i
++)
361 rtx slot_pat
= PATTERN (XVECEXP (sequence
, 0, i
));
362 if (GET_CODE (slot_pat
) == SET
363 && rtx_equal_p (SET_DEST (slot_pat
),
364 XEXP (PATTERN (insn
), 0)))
368 mark_referenced_resources (XEXP (PATTERN (insn
), 0), res
, 0);
372 /* ... fall through to other INSN processing ... */
376 /* No special processing, just speed up. */
377 mark_referenced_resources (PATTERN (x
), res
, include_called_routine
);
381 /* Process each sub-expression and flag what it needs. */
382 format_ptr
= GET_RTX_FORMAT (code
);
383 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
384 switch (*format_ptr
++)
387 mark_referenced_resources (XEXP (x
, i
), res
, include_called_routine
);
391 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
392 mark_referenced_resources (XVECEXP (x
, i
, j
), res
,
393 include_called_routine
);
/* Given X, a part of an insn, and a pointer to a `struct resources', RES,
   indicate which resources are modified by the insn.  If INCLUDE_CALLED_ROUTINE
   is nonzero, also mark resources potentially set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */
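
/* For example (a sketch, not taken from any particular machine
   description): an insn whose pattern is

	(set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 1)))

   may clobber the condition codes as a hardware side effect, but since it
   contains no explicit (set (cc0) ...), mark_set_resources does not mark CC
   as set, so under the convention above the insn remains a candidate for a
   delay slot.  */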
412 mark_set_resources (x
, res
, in_dest
, include_called_routine
)
414 register struct resources
*res
;
416 int include_called_routine
;
418 register enum rtx_code code
;
420 register char *format_ptr
;
438 /* These don't set any resources. */
447 /* Called routine modifies the condition code, memory, any registers
448 that aren't saved across calls, global registers and anything
449 explicitly CLOBBERed immediately after the CALL_INSN. */
451 if (include_called_routine
)
453 rtx next
= NEXT_INSN (x
);
455 res
->cc
= res
->memory
= 1;
456 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
457 if (call_used_regs
[i
] || global_regs
[i
])
458 SET_HARD_REG_BIT (res
->regs
, i
);
460 /* Skip any possible labels between the CALL_INSN and CLOBBERs. */
461 while (GET_CODE (next
) == CODE_LABEL
)
462 next
= NEXT_INSN (next
);
464 for (; (next
&& GET_CODE (next
) == INSN
465 && GET_CODE (PATTERN (next
)) == CLOBBER
);
466 next
= NEXT_INSN (next
))
467 mark_set_resources (XEXP (PATTERN (next
), 0), res
, 1, 0);
  /* ... and also what its RTL says it modifies, if anything.  */
475 /* An insn consisting of just a CLOBBER (or USE) is just for flow
476 and doesn't actually do anything, so we ignore it. */
479 if (GET_CODE (x
) != USE
&& GET_CODE (x
) != CLOBBER
)
484 /* If the source of a SET is a CALL, this is actually done by
485 the called routine. So only include it if we are to include the
486 effects of the calling routine. */
488 mark_set_resources (SET_DEST (x
), res
,
489 (include_called_routine
490 || GET_CODE (SET_SRC (x
)) != CALL
),
493 mark_set_resources (SET_SRC (x
), res
, 0, 0);
497 mark_set_resources (XEXP (x
, 0), res
, 1, 0);
501 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
502 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x
, 0, 0))
503 && INSN_FROM_TARGET_P (XVECEXP (x
, 0, i
))))
504 mark_set_resources (XVECEXP (x
, 0, i
), res
, 0,
505 include_called_routine
);
512 mark_set_resources (XEXP (x
, 0), res
, 1, 0);
516 mark_set_resources (XEXP (x
, 0), res
, in_dest
, 0);
517 mark_set_resources (XEXP (x
, 1), res
, 0, 0);
518 mark_set_resources (XEXP (x
, 2), res
, 0, 0);
525 res
->volatil
= MEM_VOLATILE_P (x
);
528 mark_set_resources (XEXP (x
, 0), res
, 0, 0);
533 for (i
= 0; i
< HARD_REGNO_NREGS (REGNO (x
), GET_MODE (x
)); i
++)
534 SET_HARD_REG_BIT (res
->regs
, REGNO (x
) + i
);
538 /* Process each sub-expression and flag what it needs. */
539 format_ptr
= GET_RTX_FORMAT (code
);
540 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
541 switch (*format_ptr
++)
544 mark_set_resources (XEXP (x
, i
), res
, in_dest
, include_called_routine
);
548 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
549 mark_set_resources (XVECEXP (x
, i
, j
), res
, in_dest
,
550 include_called_routine
);
/* Return TRUE if this insn should stop the search for insns to fill delay
556 slots. LABELS_P indicates that labels should terminate the search.
557 In all cases, jumps terminate the search. */
560 stop_search_p (insn
, labels_p
)
567 switch (GET_CODE (insn
))
581 /* OK unless it contains a delay slot or is an `asm' insn of some type.
582 We don't know anything about these. */
583 return (GET_CODE (PATTERN (insn
)) == SEQUENCE
584 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
585 || asm_noperands (PATTERN (insn
)) >= 0);
592 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
593 resource set contains a volatile memory reference. Otherwise, return FALSE. */
596 resource_conflicts_p (res1
, res2
)
597 struct resources
*res1
, *res2
;
599 if ((res1
->cc
&& res2
->cc
) || (res1
->memory
&& res2
->memory
)
600 || res1
->volatil
|| res2
->volatil
)
604 return (res1
->regs
& res2
->regs
) != HARD_CONST (0);
609 for (i
= 0; i
< HARD_REG_SET_LONGS
; i
++)
610 if ((res1
->regs
[i
] & res2
->regs
[i
]) != 0)
617 /* Return TRUE if any resource marked in RES, a `struct resources', is
618 referenced by INSN. If INCLUDE_CALLED_ROUTINE is set, return if the called
619 routine is using those resources.
621 We compute this by computing all the resources referenced by INSN and
622 seeing if this conflicts with RES. It might be faster to directly check
623 ourselves, and this is the way it used to work, but it means duplicating
624 a large block of complex code. */
627 insn_references_resource_p (insn
, res
, include_called_routine
)
629 register struct resources
*res
;
630 int include_called_routine
;
632 struct resources insn_res
;
634 CLEAR_RESOURCE (&insn_res
);
635 mark_referenced_resources (insn
, &insn_res
, include_called_routine
);
636 return resource_conflicts_p (&insn_res
, res
);
639 /* Return TRUE if INSN modifies resources that are marked in RES.
640 INCLUDE_CALLED_ROUTINE is set if the actions of that routine should be
641 included. CC0 is only modified if it is explicitly set; see comments
642 in front of mark_set_resources for details. */
645 insn_sets_resource_p (insn
, res
, include_called_routine
)
647 register struct resources
*res
;
648 int include_called_routine
;
650 struct resources insn_sets
;
652 CLEAR_RESOURCE (&insn_sets
);
653 mark_set_resources (insn
, &insn_sets
, 0, include_called_routine
);
654 return resource_conflicts_p (&insn_sets
, res
);
/* Find a label at the end of the function or before a RETURN.  If there is
   none, make one.  */
665 /* If we found one previously, return it. */
666 if (end_of_function_label
)
667 return end_of_function_label
;
669 /* Otherwise, see if there is a label at the end of the function. If there
670 is, it must be that RETURN insns aren't needed, so that is our return
671 label and we don't have to do anything else. */
673 insn
= get_last_insn ();
674 while (GET_CODE (insn
) == NOTE
675 || (GET_CODE (insn
) == INSN
676 && (GET_CODE (PATTERN (insn
)) == USE
677 || GET_CODE (PATTERN (insn
)) == CLOBBER
)))
678 insn
= PREV_INSN (insn
);
680 if (GET_CODE (insn
) == CODE_LABEL
)
681 end_of_function_label
= insn
;
684 /* Otherwise, make a new label and emit a RETURN and BARRIER,
686 end_of_function_label
= gen_label_rtx ();
687 LABEL_NUSES (end_of_function_label
) = 0;
688 emit_label (end_of_function_label
);
692 emit_jump_insn (gen_return ());
698 /* Show one additional use for this label so it won't go away until
700 ++LABEL_NUSES (end_of_function_label
);
702 return end_of_function_label
;
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
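
/* Schematically (illustrative only), a branch with two filled slots becomes
   a single insn whose pattern is a SEQUENCE:

	(insn (sequence [(jump_insn ...)	; the insn needing delay slots
			 (insn ...)		; delay slot 1
			 (insn ...)]))		; delay slot 2

   with the inner insns chained through PREV_INSN/NEXT_INSN as described
   above.  */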
716 emit_delay_sequence (insn
, list
, length
, avail
)
  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx (SEQUENCE, VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);
736 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
737 confuse further processing. Update LAST in case it was the last insn.
738 We will put the BARRIER back in later. */
739 if (NEXT_INSN (insn
) && GET_CODE (NEXT_INSN (insn
)) == BARRIER
)
741 delete_insn (NEXT_INSN (insn
));
742 last
= get_last_insn ();
746 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
747 NEXT_INSN (seq_insn
) = NEXT_INSN (insn
);
748 PREV_INSN (seq_insn
) = PREV_INSN (insn
);
751 set_new_first_and_last_insn (first
, seq_insn
);
753 PREV_INSN (NEXT_INSN (seq_insn
)) = seq_insn
;
756 set_new_first_and_last_insn (seq_insn
, last
);
758 NEXT_INSN (PREV_INSN (seq_insn
)) = seq_insn
;
760 /* Build our SEQUENCE and rebuild the insn chain. */
761 XVECEXP (seq
, 0, 0) = delay_insn
;
762 INSN_DELETED_P (delay_insn
) = 0;
763 PREV_INSN (delay_insn
) = PREV_INSN (seq_insn
);
765 for (li
= list
; li
; li
= XEXP (li
, 1), i
++)
767 rtx tem
= XEXP (li
, 0);
770 /* Show that this copy of the insn isn't deleted. */
771 INSN_DELETED_P (tem
) = 0;
773 XVECEXP (seq
, 0, i
) = tem
;
774 PREV_INSN (tem
) = XVECEXP (seq
, 0, i
- 1);
775 NEXT_INSN (XVECEXP (seq
, 0, i
- 1)) = tem
;
777 /* Remove any REG_DEAD notes because we can't rely on them now
778 that the insn has been moved. */
779 for (note
= REG_NOTES (tem
); note
; note
= XEXP (note
, 1))
780 if (REG_NOTE_KIND (note
) == REG_DEAD
)
781 XEXP (note
, 0) = const0_rtx
;
784 NEXT_INSN (XVECEXP (seq
, 0, length
)) = NEXT_INSN (seq_insn
);
786 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
787 last insn in that SEQUENCE to point to us. Similarly for the first
788 insn in the following insn if it is a SEQUENCE. */
790 if (PREV_INSN (seq_insn
) && GET_CODE (PREV_INSN (seq_insn
)) == INSN
791 && GET_CODE (PATTERN (PREV_INSN (seq_insn
))) == SEQUENCE
)
792 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn
)), 0,
793 XVECLEN (PATTERN (PREV_INSN (seq_insn
)), 0) - 1))
796 if (NEXT_INSN (seq_insn
) && GET_CODE (NEXT_INSN (seq_insn
)) == INSN
797 && GET_CODE (PATTERN (NEXT_INSN (seq_insn
))) == SEQUENCE
)
798 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn
)), 0, 0)) = seq_insn
;
800 /* If there used to be a BARRIER, put it back. */
802 emit_barrier_after (seq_insn
);
810 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
811 be in the order in which the insns are to be executed. */
814 add_to_delay_list (insn
, delay_list
)
818 /* If we have an empty list, just make a new list element. */
820 return gen_rtx (INSN_LIST
, VOIDmode
, insn
, NULL_RTX
);
822 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
824 XEXP (delay_list
, 1) = add_to_delay_list (insn
, XEXP (delay_list
, 1));
/* Delete INSN from the delay slot of the insn that it is in.  This may
830 produce an insn without anything in its delay slots. */
833 delete_from_delay_slot (insn
)
836 rtx trial
, seq_insn
, seq
, prev
;
840 /* We first must find the insn containing the SEQUENCE with INSN in its
841 delay slot. Do this by finding an insn, TRIAL, where
842 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
845 PREV_INSN (NEXT_INSN (trial
)) == trial
;
846 trial
= NEXT_INSN (trial
))
849 seq_insn
= PREV_INSN (NEXT_INSN (trial
));
850 seq
= PATTERN (seq_insn
);
852 /* Create a delay list consisting of all the insns other than the one
853 we are deleting (unless we were the only one). */
854 if (XVECLEN (seq
, 0) > 2)
855 for (i
= 1; i
< XVECLEN (seq
, 0); i
++)
856 if (XVECEXP (seq
, 0, i
) != insn
)
857 delay_list
= add_to_delay_list (XVECEXP (seq
, 0, i
), delay_list
);
859 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
860 list, and rebuild the delay list if non-empty. */
861 prev
= PREV_INSN (seq_insn
);
862 trial
= XVECEXP (seq
, 0, 0);
863 delete_insn (seq_insn
);
864 add_insn_after (trial
, prev
);
866 if (GET_CODE (trial
) == JUMP_INSN
867 && (simplejump_p (trial
) || GET_CODE (PATTERN (trial
)) == RETURN
))
868 emit_barrier_after (trial
);
  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
873 trial
= emit_delay_sequence (trial
, delay_list
, XVECLEN (seq
, 0) - 2, 0);
875 INSN_ANNULLED_BRANCH_P (trial
) = 0;
877 INSN_FROM_TARGET_P (insn
) = 0;
879 /* Show we need to fill this insn again. */
880 obstack_ptr_grow (&unfilled_slots_obstack
, trial
);
883 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
884 the insn that sets CC0 for it and delete it too. */
887 delete_scheduled_jump (insn
)
890 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
891 delete the insn that sets the condition code, but it is hard to find it.
892 Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */
897 if (reg_mentioned_p (cc0_rtx
, insn
))
899 rtx note
= find_reg_note (insn
, REG_CC_SETTER
, NULL_RTX
);
901 /* If a reg-note was found, it points to an insn to set CC0. This
902 insn is in the delay list of some other insn. So delete it from
903 the delay list it was in. */
906 if (! FIND_REG_INC_NOTE (XEXP (note
, 0), NULL_RTX
)
907 && sets_cc0_p (PATTERN (XEXP (note
, 0))) == 1)
908 delete_from_delay_slot (XEXP (note
, 0));
912 /* The insn setting CC0 is our previous insn, but it may be in
	 a delay slot.  It will be the last insn in the delay slot, if
	 it is.  */
915 rtx trial
= previous_insn (insn
);
916 if (GET_CODE (trial
) == NOTE
)
917 trial
= prev_nonnote_insn (trial
);
918 if (sets_cc0_p (PATTERN (trial
)) != 1
919 || FIND_REG_INC_NOTE (trial
, 0))
921 if (PREV_INSN (NEXT_INSN (trial
)) == trial
)
924 delete_from_delay_slot (trial
);
932 /* Counters for delay-slot filling. */
934 #define NUM_REORG_FUNCTIONS 2
935 #define MAX_DELAY_HISTOGRAM 3
936 #define MAX_REORG_PASSES 2
938 static int num_insns_needing_delays
[NUM_REORG_FUNCTIONS
][MAX_REORG_PASSES
];
940 static int num_filled_delays
[NUM_REORG_FUNCTIONS
][MAX_DELAY_HISTOGRAM
+1][MAX_REORG_PASSES
];
942 static int reorg_pass_number
;
945 note_delay_statistics (slots_filled
, index
)
946 int slots_filled
, index
;
948 num_insns_needing_delays
[index
][reorg_pass_number
]++;
949 if (slots_filled
> MAX_DELAY_HISTOGRAM
)
950 slots_filled
= MAX_DELAY_HISTOGRAM
;
951 num_filled_delays
[index
][slots_filled
][reorg_pass_number
]++;
954 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
956 /* Optimize the following cases:
958 1. When a conditional branch skips over only one instruction,
959 use an annulling branch and put that insn in the delay slot.
   Use either a branch that annuls when the condition is true or
961 invert the test with a branch that annuls when the condition is
962 false. This saves insns, since otherwise we must copy an insn
965 (orig) (skip) (otherwise)
966 Bcc.n L1 Bcc',a L1 Bcc,a L1'
973 2. When a conditional branch skips over only one instruction,
974 and after that, it unconditionally branches somewhere else,
975 perform the similar optimization. This saves executing the
976 second branch in the case where the inverted condition is true.
985 This should be expanded to skip over N insns, where N is the number
986 of delay slots required. */
992 register rtx trial
= next_nonnote_insn (insn
);
993 rtx next_trial
= next_active_insn (trial
);
998 || GET_CODE (trial
) != INSN
999 || GET_CODE (PATTERN (trial
)) == SEQUENCE
1000 || recog_memoized (trial
) < 0
1001 || (! eligible_for_annul_false (insn
, 0, trial
)
1002 && ! eligible_for_annul_true (insn
, 0, trial
)))
1005 /* There are two cases where we are just executing one insn (we assume
1006 here that a branch requires only one insn; this should be generalized
1007 at some point): Where the branch goes around a single insn or where
1008 we have one insn followed by a branch to the same label we branch to.
1009 In both of these cases, inverting the jump and annulling the delay
1010 slot give the same effect in fewer insns. */
1011 if ((next_trial
== next_active_insn (JUMP_LABEL (insn
)))
1013 && GET_CODE (next_trial
) == JUMP_INSN
1014 && JUMP_LABEL (insn
) == JUMP_LABEL (next_trial
)
1015 && (simplejump_p (next_trial
)
1016 || GET_CODE (PATTERN (next_trial
)) == RETURN
)))
1018 if (eligible_for_annul_false (insn
, 0, trial
))
1020 if (invert_jump (insn
, JUMP_LABEL (insn
)))
1021 INSN_FROM_TARGET_P (trial
) = 1;
1022 else if (! eligible_for_annul_true (insn
, 0, trial
))
1026 delay_list
= add_to_delay_list (trial
, NULL_RTX
);
1027 next_trial
= next_active_insn (trial
);
1028 update_block (trial
, trial
);
1029 delete_insn (trial
);
1031 /* Also, if we are targeting an unconditional
1032 branch, thread our jump to the target of that branch. Don't
1033 change this into a RETURN here, because it may not accept what
1034 we have in the delay slot. We'll fix this up later. */
1035 if (next_trial
&& GET_CODE (next_trial
) == JUMP_INSN
1036 && (simplejump_p (next_trial
)
1037 || GET_CODE (PATTERN (next_trial
)) == RETURN
))
1039 target_label
= JUMP_LABEL (next_trial
);
1040 if (target_label
== 0)
1041 target_label
= find_end_label ();
1042 redirect_jump (insn
, target_label
);
1045 INSN_ANNULLED_BRANCH_P (insn
) = 1;
1052 /* Return truth value of the statement that this branch
1053 is mostly taken. If we think that the branch is extremely likely
1054 to be taken, we return 2. If the branch is slightly more likely to be
1055 taken, return 1. Otherwise, return 0.
1057 CONDITION, if non-zero, is the condition that JUMP_INSN is testing. */
1060 mostly_true_jump (jump_insn
, condition
)
1061 rtx jump_insn
, condition
;
1063 rtx target_label
= JUMP_LABEL (jump_insn
);
1066 /* If this is a conditional return insn, assume it won't return. */
1067 if (target_label
== 0)
1070 /* If TARGET_LABEL has no jumps between it and the end of the function,
1071 this is essentially a conditional return, so predict it as false. */
1072 for (insn
= NEXT_INSN (target_label
); insn
; insn
= NEXT_INSN (insn
))
1074 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1075 insn
= XVECEXP (PATTERN (insn
), 0, 0);
1076 if (GET_CODE (insn
) == JUMP_INSN
)
1083 /* If this is the test of a loop, it is very likely true. We scan backwards
1084 from the target label. If we find a NOTE_INSN_LOOP_BEG before the next
1085 real insn, we assume the branch is to the top of the loop. */
1086 for (insn
= PREV_INSN (target_label
);
1087 insn
&& GET_CODE (insn
) == NOTE
;
1088 insn
= PREV_INSN (insn
))
1089 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
)
1092 /* If this is a jump to the test of a loop, it is likely true. We scan
1093 forwards from the target label. If we find a NOTE_INSN_LOOP_VTOP
1094 before the next real insn, we assume the branch is to the loop branch
1096 for (insn
= NEXT_INSN (target_label
);
1097 insn
&& GET_CODE (insn
) == NOTE
;
1098 insn
= PREV_INSN (insn
))
1099 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_VTOP
)
1102 /* If we couldn't figure out what this jump was, assume it won't be
1103 taken. This should be rare. */
1107 /* EQ tests are usually false and NE tests are usually true. Also,
1108 most quantities are positive, so we can make the appropriate guesses
1109 about signed comparisons against zero. */
1110 switch (GET_CODE (condition
))
1113 /* Unconditional branch. */
1121 if (XEXP (condition
, 1) == const0_rtx
)
1126 if (XEXP (condition
, 1) == const0_rtx
)
  /* Predict backward branches as usually taken, forward branches as usually not.  If
1132 we don't know whether this is forward or backward, assume the branch
1133 will be taken, since most are. */
1134 return (INSN_UID (jump_insn
) > max_uid
|| INSN_UID (target_label
) > max_uid
1135 || (uid_to_ruid
[INSN_UID (jump_insn
)]
1136 > uid_to_ruid
[INSN_UID (target_label
)]));;
1139 /* Return the condition under which INSN will branch to TARGET. If TARGET
1140 is zero, return the condition under which INSN will return. If INSN is
1141 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1142 type of jump, or it doesn't go to TARGET, return 0. */
1145 get_branch_condition (insn
, target
)
1149 rtx pat
= PATTERN (insn
);
1152 if (GET_CODE (pat
) == RETURN
)
1153 return target
== 0 ? const_true_rtx
: 0;
1155 else if (GET_CODE (pat
) != SET
|| SET_DEST (pat
) != pc_rtx
)
1158 src
= SET_SRC (pat
);
1159 if (GET_CODE (src
) == LABEL_REF
&& XEXP (src
, 0) == target
)
1160 return const_true_rtx
;
1162 else if (GET_CODE (src
) == IF_THEN_ELSE
1163 && ((target
== 0 && GET_CODE (XEXP (src
, 1)) == RETURN
)
1164 || (GET_CODE (XEXP (src
, 1)) == LABEL_REF
1165 && XEXP (XEXP (src
, 1), 0) == target
))
1166 && XEXP (src
, 2) == pc_rtx
)
1167 return XEXP (src
, 0);
1169 else if (GET_CODE (src
) == IF_THEN_ELSE
1170 && ((target
== 0 && GET_CODE (XEXP (src
, 2)) == RETURN
)
1171 || (GET_CODE (XEXP (src
, 2)) == LABEL_REF
1172 && XEXP (XEXP (src
, 2), 0) == target
))
1173 && XEXP (src
, 1) == pc_rtx
)
1174 return gen_rtx (reverse_condition (GET_CODE (XEXP (src
, 0))),
1175 GET_MODE (XEXP (src
, 0)),
1176 XEXP (XEXP (src
, 0), 0), XEXP (XEXP (src
, 0), 1));
1181 /* Return non-zero if CONDITION is more strict than the condition of
1182 INSN, i.e., if INSN will always branch if CONDITION is true. */
1185 condition_dominates_p (condition
, insn
)
1189 rtx other_condition
= get_branch_condition (insn
, JUMP_LABEL (insn
));
1190 enum rtx_code code
= GET_CODE (condition
);
1191 enum rtx_code other_code
;
1193 if (rtx_equal_p (condition
, other_condition
)
1194 || other_condition
== const_true_rtx
)
1197 else if (condition
== const_true_rtx
|| other_condition
== 0)
1200 other_code
= GET_CODE (other_condition
);
1201 if (GET_RTX_LENGTH (code
) != 2 || GET_RTX_LENGTH (other_code
) != 2
1202 || ! rtx_equal_p (XEXP (condition
, 0), XEXP (other_condition
, 0))
1203 || ! rtx_equal_p (XEXP (condition
, 1), XEXP (other_condition
, 1)))
1206 return comparison_dominates_p (code
, other_code
);
1209 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1210 the condition tested by INSN is CONDITION and the resources shown in
1211 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1212 from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
1214 needed while searching for delay slot insns. Return the concatenated
1215 delay list if possible, otherwise, return 0.
1217 SLOTS_TO_FILL is the total number of slots required by INSN, and
1218 PSLOTS_FILLED points to the number filled so far (also the number of
1219 insns in DELAY_LIST). It is updated with the number that have been
1220 filled from the SEQUENCE, if any.
1222 PANNUL_P points to a non-zero value if we already know that we need
1223 to annul INSN. If this routine determines that annulling is needed,
1224 it may set that value non-zero.
1226 PNEW_THREAD points to a location that is to receive the place at which
1227 execution should continue. */
1230 steal_delay_list_from_target (insn
, condition
, seq
, delay_list
,
1231 sets
, needed
, other_needed
,
1232 slots_to_fill
, pslots_filled
, pannul_p
,
1234 rtx insn
, condition
;
1237 struct resources
*sets
, *needed
, *other_needed
;
1244 int slots_remaining
= slots_to_fill
- *pslots_filled
;
1245 int total_slots_filled
= *pslots_filled
;
1246 rtx new_delay_list
= 0;
1247 int must_annul
= *pannul_p
;
1250 /* We can't do anything if there are more delay slots in SEQ than we
1251 can handle, or if we don't know that it will be a taken branch.
1253 We know that it will be a taken branch if it is either an unconditional
1254 branch or a conditional branch with a stricter branch condition. */
1256 if (XVECLEN (seq
, 0) - 1 > slots_remaining
1257 || ! condition_dominates_p (condition
, XVECEXP (seq
, 0, 0)))
1260 for (i
= 1; i
< XVECLEN (seq
, 0); i
++)
1262 rtx trial
= XVECEXP (seq
, 0, i
);
1264 if (insn_references_resource_p (trial
, sets
, 0)
1265 || insn_sets_resource_p (trial
, needed
, 0)
1266 || insn_sets_resource_p (trial
, sets
, 0)
1268 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1270 || find_reg_note (trial
, REG_CC_USER
, NULL_RTX
)
1272 /* If TRIAL is from the fallthrough code of an annulled branch insn
1273 in SEQ, we cannot use it. */
1274 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq
, 0, 0))
1275 && ! INSN_FROM_TARGET_P (trial
)))
1278 /* If this insn was already done (usually in a previous delay slot),
1279 pretend we put it in our delay slot. */
1280 if (redundant_insn_p (trial
, insn
, new_delay_list
))
1284 && ((condition
== const_true_rtx
1285 || (! insn_sets_resource_p (trial
, other_needed
, 0)
1286 && ! may_trap_p (PATTERN (trial
)))))
1287 ? eligible_for_delay (insn
, total_slots_filled
, trial
)
1289 eligible_for_annul_false (insn
, total_slots_filled
, trial
)))
1291 temp
= copy_rtx (trial
);
1292 INSN_FROM_TARGET_P (temp
) = 1;
1293 new_delay_list
= add_to_delay_list (temp
, new_delay_list
);
1294 total_slots_filled
++;
1296 if (--slots_remaining
== 0)
1303 /* Show the place to which we will be branching. */
1304 *pnew_thread
= next_active_insn (JUMP_LABEL (XVECEXP (seq
, 0, 0)));
1306 /* Add any new insns to the delay list and update the count of the
1307 number of slots filled. */
1308 *pslots_filled
= total_slots_filled
;
1309 *pannul_p
= must_annul
;
1311 if (delay_list
== 0)
1312 return new_delay_list
;
1314 for (temp
= new_delay_list
; temp
; temp
= XEXP (temp
, 1))
1315 delay_list
= add_to_delay_list (XEXP (temp
, 0), delay_list
);
1320 /* Similar to steal_delay_list_from_target except that SEQ is on the
1321 fallthrough path of INSN. Here we only do something if the delay insn
1322 of SEQ is an unconditional branch. In that case we steal its delay slot
1323 for INSN since unconditional branches are much easier to fill. */
1326 steal_delay_list_from_fallthrough (insn
, condition
, seq
,
1327 delay_list
, sets
, needed
, other_needed
,
1328 slots_to_fill
, pslots_filled
, pannul_p
)
1329 rtx insn
, condition
;
1332 struct resources
*sets
, *needed
, *other_needed
;
1339 /* We can't do anything if SEQ's delay insn isn't an
1340 unconditional branch. */
1342 if (! simplejump_p (XVECEXP (seq
, 0, 0))
1343 && GET_CODE (PATTERN (XVECEXP (seq
, 0, 0))) != RETURN
)
1346 for (i
= 1; i
< XVECLEN (seq
, 0); i
++)
1348 rtx trial
= XVECEXP (seq
, 0, i
);
      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
1352 if (insn_references_resource_p (trial
, sets
, 0)
1353 || insn_sets_resource_p (trial
, needed
, 0)
1354 || insn_sets_resource_p (trial
, sets
, 0)
1356 || sets_cc0_p (PATTERN (trial
))
1362 /* If this insn was already done, we don't need it. */
1363 if (redundant_insn_p (trial
, insn
, delay_list
))
1365 delete_from_delay_slot (trial
);
1370 && ((condition
== const_true_rtx
1371 || (! insn_sets_resource_p (trial
, other_needed
, 0)
1372 && ! may_trap_p (PATTERN (trial
)))))
1373 ? eligible_for_delay (insn
, *pslots_filled
, trial
)
1375 eligible_for_annul_true (insn
, *pslots_filled
, trial
)))
1377 delete_from_delay_slot (trial
);
1378 delay_list
= add_to_delay_list (trial
, delay_list
);
1380 if (++(*pslots_filled
) == slots_to_fill
)
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */
1400 try_merge_delay_insns (insn
, thread
)
1403 rtx trial
, next_trial
;
1404 rtx delay_insn
= XVECEXP (PATTERN (insn
), 0, 0);
1405 int annul_p
= INSN_ANNULLED_BRANCH_P (delay_insn
);
1406 int slot_number
= 1;
1407 int num_slots
= XVECLEN (PATTERN (insn
), 0);
1408 rtx next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1409 struct resources set
, needed
;
1410 rtx merged_insns
= 0;
1413 CLEAR_RESOURCE (&needed
);
1414 CLEAR_RESOURCE (&set
);
1416 /* If this is not an annulling branch, take into account anything needed in
1417 NEXT_TO_MATCH. This prevents two increments from being incorrectly
1418 folded into one. If we are annulling, this would be the correct
1419 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH
1420 will essentially disable this optimization. This method is somewhat of
1421 a kludge, but I don't see a better way.) */
1423 mark_referenced_resources (next_to_match
, &needed
, 1);
1425 for (trial
= thread
; !stop_search_p (trial
, 1); trial
= next_trial
)
1427 rtx pat
= PATTERN (trial
);
1429 next_trial
= next_nonnote_insn (trial
);
1431 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1432 if (GET_CODE (trial
) == INSN
1433 && (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
))
1436 if (GET_CODE (next_to_match
) == GET_CODE (trial
)
1438 /* We can't share an insn that sets cc0. */
1439 && ! sets_cc0_p (pat
)
1441 && ! insn_references_resource_p (trial
, &set
, 1)
1442 && ! insn_sets_resource_p (trial
, &set
, 1)
1443 && ! insn_sets_resource_p (trial
, &needed
, 1)
1444 && (trial
= try_split (pat
, trial
, 0)) != 0
1445 && rtx_equal_p (PATTERN (next_to_match
), PATTERN (trial
))
1446 /* Have to test this condition if annul condition is different
1447 from (and less restrictive than) non-annulling one. */
1448 && eligible_for_delay (delay_insn
, slot_number
- 1, trial
))
1450 next_trial
= next_nonnote_insn (trial
);
1454 update_block (trial
, thread
);
1455 delete_insn (trial
);
1456 INSN_FROM_TARGET_P (next_to_match
) = 0;
1459 merged_insns
= gen_rtx (INSN_LIST
, VOIDmode
, trial
, merged_insns
);
1461 if (++slot_number
== num_slots
)
1464 next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1466 mark_referenced_resources (next_to_match
, &needed
, 1);
1469 mark_set_resources (trial
, &set
, 0, 1);
1470 mark_referenced_resources (trial
, &needed
, 1);
1473 /* See if we stopped on a filled insn. If we did, try to see if its
1474 delay slots match. */
1475 if (slot_number
!= num_slots
1476 && trial
&& GET_CODE (trial
) == INSN
1477 && GET_CODE (PATTERN (trial
)) == SEQUENCE
1478 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial
), 0, 0)))
1480 rtx pat
= PATTERN (trial
);
1482 for (i
= 1; i
< XVECLEN (pat
, 0); i
++)
1484 rtx dtrial
= XVECEXP (pat
, 0, i
);
1486 if (! insn_references_resource_p (dtrial
, &set
, 1)
1487 && ! insn_sets_resource_p (dtrial
, &set
, 1)
1488 && ! insn_sets_resource_p (dtrial
, &needed
, 1)
1490 && ! sets_cc0_p (PATTERN (dtrial
))
1492 && rtx_equal_p (PATTERN (next_to_match
), PATTERN (dtrial
))
1493 && eligible_for_delay (delay_insn
, slot_number
- 1, dtrial
))
1497 update_block (dtrial
, thread
);
1498 delete_from_delay_slot (dtrial
);
1499 INSN_FROM_TARGET_P (next_to_match
) = 0;
1502 merged_insns
= gen_rtx (INSN_LIST
, SImode
, dtrial
,
1505 if (++slot_number
== num_slots
)
1508 next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1513 /* If all insns in the delay slot have been matched and we were previously
1514 annulling the branch, we need not any more. In that case delete all the
     merged insns.  Also clear the INSN_FROM_TARGET_P bit of each insn in
     the delay list so that we know that it isn't only being used at the
     target.  */
1518 if (next_to_match
== 0 && annul_p
)
1520 for (; merged_insns
; merged_insns
= XEXP (merged_insns
, 1))
1522 if (GET_MODE (merged_insns
) == SImode
)
1524 update_block (XEXP (merged_insns
, 0), thread
);
1525 delete_from_delay_slot (XEXP (merged_insns
, 0));
1529 update_block (XEXP (merged_insns
, 0), thread
);
1530 delete_insn (XEXP (merged_insns
, 0));
1534 INSN_ANNULLED_BRANCH_P (delay_insn
) = 0;
1536 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
1537 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn
), 0, i
)) = 0;
1541 /* See if INSN is redundant with an insn in front of TARGET. Often this
1542 is called when INSN is a candidate for a delay slot of TARGET.
1543 DELAY_LIST are insns that will be placed in delay slots of TARGET in front
1544 of INSN. Often INSN will be redundant with an insn in a delay slot of
1545 some previous insn. This happens when we have a series of branches to the
1546 same label; in that case the first insn at the target might want to go
1547 into each of the delay slots.
1549 If we are not careful, this routine can take up a significant fraction
1550 of the total compilation time (4%), but only wins rarely. Hence we
1551 speed this routine up by making two passes. The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
1553 pattern. Only in this (relatively rare) event does it check for
1556 We do not split insns we encounter. This could cause us not to find a
1557 redundant insn, but the cost of splitting seems greater than the possible
1558 gain in rare cases. */
1561 redundant_insn_p (insn
, target
, delay_list
)
1566 rtx target_main
= target
;
1567 rtx ipat
= PATTERN (insn
);
1569 struct resources needed
, set
;
1572 /* Scan backwards looking for a match. */
1573 for (trial
= PREV_INSN (target
); trial
; trial
= PREV_INSN (trial
))
1575 if (GET_CODE (trial
) == CODE_LABEL
)
      if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
	  && GET_CODE (trial) != JUMP_INSN)
1582 pat
= PATTERN (trial
);
1583 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
1586 if (GET_CODE (pat
) == SEQUENCE
)
	  /* Stop for a CALL and its delay slots because it is difficult to track
1589 its resource needs correctly. */
1590 if (GET_CODE (XVECEXP (pat
, 0, 0)) == CALL_INSN
)
1593 for (i
= XVECLEN (pat
, 0) - 1; i
> 0; i
--)
1594 if (GET_CODE (XVECEXP (pat
, 0, i
)) == GET_CODE (insn
)
1595 && rtx_equal_p (PATTERN (XVECEXP (pat
, 0, i
)), ipat
))
1598 /* If found a match, exit this loop early. */
1603 else if (GET_CODE (trial
) == GET_CODE (insn
) && rtx_equal_p (pat
, ipat
))
1607 /* If we didn't find an insn that matches, return 0. */
1611 /* See what resources this insn sets and needs. If they overlap, or
1612 if this insn references CC0, it can't be redundant. */
1614 CLEAR_RESOURCE (&needed
);
1615 CLEAR_RESOURCE (&set
);
1616 mark_set_resources (insn
, &set
, 0, 1);
1617 mark_referenced_resources (insn
, &needed
, 1);
1619 /* If TARGET is a SEQUENCE, get the main insn. */
1620 if (GET_CODE (target
) == INSN
&& GET_CODE (PATTERN (target
)) == SEQUENCE
)
1621 target_main
= XVECEXP (PATTERN (target
), 0, 0);
1623 if (resource_conflicts_p (&needed
, &set
)
1625 || reg_mentioned_p (cc0_rtx
, ipat
)
1627 /* The insn requiring the delay may not set anything needed or set by
1629 || insn_sets_resource_p (target_main
, &needed
, 1)
1630 || insn_sets_resource_p (target_main
, &set
, 1))
1633 /* Insns we pass may not set either NEEDED or SET, so merge them for
1635 needed
.memory
|= set
.memory
;
1636 IOR_HARD_REG_SET (needed
.regs
, set
.regs
);
1638 /* This insn isn't redundant if it conflicts with an insn that either is
1639 or will be in a delay slot of TARGET. */
1643 if (insn_sets_resource_p (XEXP (delay_list
, 0), &needed
, 1))
1645 delay_list
= XEXP (delay_list
, 1);
1648 if (GET_CODE (target
) == INSN
&& GET_CODE (PATTERN (target
)) == SEQUENCE
)
1649 for (i
= 1; i
< XVECLEN (PATTERN (target
), 0); i
++)
1650 if (insn_sets_resource_p (XVECEXP (PATTERN (target
), 0, i
), &needed
, 1))
1653 /* Scan backwards until we reach a label or an insn that uses something
     INSN sets or sets something INSN uses or sets.  */
1656 for (trial
= PREV_INSN (target
);
1657 trial
&& GET_CODE (trial
) != CODE_LABEL
;
1658 trial
= PREV_INSN (trial
))
1660 if (GET_CODE (trial
) != INSN
&& GET_CODE (trial
) != CALL_INSN
1661 && GET_CODE (trial
) != JUMP_INSN
)
1664 pat
= PATTERN (trial
);
1665 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
1668 if (GET_CODE (pat
) == SEQUENCE
)
1670 /* If this is a CALL_INSN and its delay slots, it is hard to track
1671 the resource needs properly, so give up. */
1672 if (GET_CODE (XVECEXP (pat
, 0, 0)) == CALL_INSN
)
1675 /* See if any of the insns in the delay slot match, updating
1676 resource requirements as we go. */
1677 for (i
= XVECLEN (pat
, 0) - 1; i
> 0; i
--)
1679 rtx candidate
= XVECEXP (pat
, 0, i
);
1681 /* If an insn will be annulled if the branch is false, it isn't
1682 considered as a possible duplicate insn. */
1683 if (rtx_equal_p (PATTERN (candidate
), ipat
)
1684 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat
, 0, 0))
1685 && INSN_FROM_TARGET_P (candidate
)))
1687 /* Show that this insn will be used in the sequel. */
1688 INSN_FROM_TARGET_P (candidate
) = 0;
1692 /* Unless this is an annulled insn from the target of a branch,
1693 we must stop if it sets anything needed or set by INSN. */
1694 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat
, 0, 0))
1695 || ! INSN_FROM_TARGET_P (candidate
))
1696 && insn_sets_resource_p (candidate
, &needed
, 1))
1701 /* If the insn requiring the delay slot conflicts with INSN, we
1703 if (insn_sets_resource_p (XVECEXP (pat
, 0, 0), &needed
, 1))
1708 /* See if TRIAL is the same as INSN. */
1709 pat
= PATTERN (trial
);
1710 if (rtx_equal_p (pat
, ipat
))
1713 /* Can't go any further if TRIAL conflicts with INSN. */
1714 if (insn_sets_resource_p (trial
, &needed
, 1))
1722 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
1723 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
   is non-zero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL before
1728 finding an active insn, we do not own this thread. */
1731 own_thread_p (thread
, label
, allow_fallthrough
)
1734 int allow_fallthrough
;
1739 /* We don't own the function end. */
1743 /* Get the first active insn, or THREAD, if it is an active insn. */
1744 active_insn
= next_active_insn (PREV_INSN (thread
));
1746 for (insn
= thread
; insn
!= active_insn
; insn
= NEXT_INSN (insn
))
1747 if (GET_CODE (insn
) == CODE_LABEL
1748 && (insn
!= label
|| LABEL_NUSES (insn
) != 1))
1751 if (allow_fallthrough
)
1754 /* Ensure that we reach a BARRIER before any insn or label. */
1755 for (insn
= prev_nonnote_insn (thread
);
1756 insn
== 0 || GET_CODE (insn
) != BARRIER
;
1757 insn
= prev_nonnote_insn (insn
))
1759 || GET_CODE (insn
) == CODE_LABEL
1760 || (GET_CODE (insn
) == INSN
1761 && GET_CODE (PATTERN (insn
)) != USE
1762 && GET_CODE (PATTERN (insn
)) != CLOBBER
))
1768 /* Find the number of the basic block that starts closest to INSN. Return -1
1769 if we couldn't find such a basic block. */
1772 find_basic_block (insn
)
1777 /* Scan backwards to the previous BARRIER. Then see if we can find a
1778 label that starts a basic block. Return the basic block number. */
1780 for (insn
= prev_nonnote_insn (insn
);
1781 insn
&& GET_CODE (insn
) != BARRIER
;
1782 insn
= prev_nonnote_insn (insn
))
1785 /* The start of the function is basic block zero. */
1789 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
1790 anything other than a CODE_LABEL or note, we can't find this code. */
1791 for (insn
= next_nonnote_insn (insn
);
1792 insn
&& GET_CODE (insn
) == CODE_LABEL
;
1793 insn
= next_nonnote_insn (insn
))
1795 for (i
= 0; i
< n_basic_blocks
; i
++)
1796 if (insn
== basic_block_head
[i
])
1803 /* Called when INSN is being moved from a location near the target of a jump.
1804 We leave a marker of the form (use (INSN)) immediately in front
1805 of WHERE for mark_target_live_regs. These markers will be deleted when
1808 We used to try to update the live status of registers if WHERE is at
1809 the start of a basic block, but that can't work since we may remove a
1810 BARRIER in relax_delay_slots. */
1813 update_block (insn
, where
)
1819 /* Ignore if this was in a delay slot and it came from the target of
1821 if (INSN_FROM_TARGET_P (insn
))
1824 emit_insn_before (gen_rtx (USE
, VOIDmode
, insn
), where
);
1826 /* INSN might be making a value live in a block where it didn't use to
1827 be. So recompute liveness information for this block. */
1829 b
= find_basic_block (insn
);
1834 /* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Used only by the next two functions.  */
1837 static HARD_REG_SET current_live_regs
;
1839 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
1840 Also only used by the next two functions. */
1842 static HARD_REG_SET pending_dead_regs
;
1844 /* Utility function called from mark_target_live_regs via note_stores.
1845 It deadens any CLOBBERed registers and livens any SET registers. */
1848 update_live_status (dest
, x
)
1852 int first_regno
, last_regno
;
1855 if (GET_CODE (dest
) != REG
1856 && (GET_CODE (dest
) != SUBREG
|| GET_CODE (SUBREG_REG (dest
)) != REG
))
1859 if (GET_CODE (dest
) == SUBREG
)
1860 first_regno
= REGNO (SUBREG_REG (dest
)) + SUBREG_WORD (dest
);
1862 first_regno
= REGNO (dest
);
1864 last_regno
= first_regno
+ HARD_REGNO_NREGS (first_regno
, GET_MODE (dest
));
1866 if (GET_CODE (x
) == CLOBBER
)
1867 for (i
= first_regno
; i
< last_regno
; i
++)
1868 CLEAR_HARD_REG_BIT (current_live_regs
, i
);
1870 for (i
= first_regno
; i
< last_regno
; i
++)
1872 SET_HARD_REG_BIT (current_live_regs
, i
);
1873 CLEAR_HARD_REG_BIT (pending_dead_regs
, i
);
1877 /* Similar to next_insn, but ignores insns in the delay slots of
1878 an annulled branch. */
1881 next_insn_no_annul (insn
)
1886 /* If INSN is an annulled branch, skip any insns from the target
1888 if (INSN_ANNULLED_BRANCH_P (insn
)
1889 && NEXT_INSN (PREV_INSN (insn
)) != insn
)
1890 while (INSN_FROM_TARGET_P (NEXT_INSN (insn
)))
1891 insn
= NEXT_INSN (insn
);
1893 insn
= NEXT_INSN (insn
);
1894 if (insn
&& GET_CODE (insn
) == INSN
1895 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1896 insn
= XVECEXP (PATTERN (insn
), 0, 0);
1902 /* Set the resources that are live at TARGET.
1904 If TARGET is zero, we refer to the end of the current function and can
1905 return our precomputed value.
1907 Otherwise, we try to find out what is live by consulting the basic block
1908 information. This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   is computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status from basic_block_live_regs and
   reg_renumber.  (By starting at a label following a BARRIER, we are immune
   to actions taken by reload and jump.)  Then we scan all insns between
   that point and our target.  For each CLOBBER (or for call-clobbered regs
   when we pass a CALL_INSN), mark the appropriate registers as dead.  For
   a SET, mark them as live.
1926 We have to be careful when using REG_DEAD notes because they are not
1927 updated by such things as find_equiv_reg. So keep track of registers
1928 marked as dead that haven't been assigned to, and mark them dead at the
1929 next CODE_LABEL since reload and jump won't propagate values across labels.
1931 If we cannot find the start of a basic block (should be a very rare
1932 case, if it can happen at all), mark everything as potentially live.
1934 Next, scan forward from TARGET looking for things set or clobbered
1935 before they are used. These are not live.
1937 Because we can be called many times on the same target, save our results
1938 in a hash table indexed by INSN_UID. */
1941 mark_target_live_regs (target, res)
1943 struct resources *res;
1947 struct target_info *tinfo;
1951 HARD_REG_SET scratch;
1952 struct resources set, needed;
1955 /* Handle end of function. */
1958 *res = end_of_function_needs;
1962 /* We have to assume memory is needed, but the CC isn't. */
1967 /* See if we have computed this value already. */
1968 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
1969 tinfo; tinfo = tinfo->next)
1970 if (tinfo->uid == INSN_UID (target))
1973 /* Start by getting the basic block number. If we have saved information,
1974 we can get it from there unless the insn at the start of the basic block
1975 has been deleted. */
1976 if (tinfo && tinfo->block != -1
1977 && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
1981 b = find_basic_block (target);
1985 /* If the information is up-to-date, use it. Otherwise, we will
1987 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
1989 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
1995 /* Allocate a place to put our results and chain it into the
1997 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
1998 tinfo->uid = INSN_UID (target);
2000 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2001 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
2004 CLEAR_HARD_REG_SET (pending_dead_regs);
2006 /* If we found a basic block, get the live registers from it and update
2007 them with anything set or killed between its start and the insn before
2008 TARGET. Otherwise, we must assume everything is live. */
2011 regset regs_live = basic_block_live_at_start[b];
2013 REGSET_ELT_TYPE bit;
2015 rtx start_insn, stop_insn;
2017 /* Compute hard regs live at start of block -- this is the real hard regs
2018 marked live, plus live pseudo regs that have been renumbered to
2022 current_live_regs = *regs_live;
2024 COPY_HARD_REG_SET (current_live_regs, regs_live);
2027 for (offset = 0, i = 0; offset < regset_size; offset++)
2029 if (regs_live[offset] == 0)
2030 i += REGSET_ELT_BITS;
2032 for (bit = 1; bit && i < max_regno; bit <<= 1, i++)
2033 if ((regs_live[offset] & bit)
2034 && (regno = reg_renumber[i]) >= 0)
2036 j < regno + HARD_REGNO_NREGS (regno,
2037 PSEUDO_REGNO_MODE (i));
2039 SET_HARD_REG_BIT (current_live_regs, j);
2042 /* Get starting and ending insn, handling the case where each might
2044 start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
2047 if (GET_CODE (start_insn) == INSN
2048 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
2049 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
2051 if (GET_CODE (stop_insn) == INSN
2052 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
2053 stop_insn = next_insn (PREV_INSN (stop_insn));
2055 for (insn = start_insn; insn != stop_insn;
2056 insn = next_insn_no_annul (insn))
2059 rtx real_insn = insn;
2061 /* If this insn is from the target of a branch, it isn't going to
2062 be used in the sequel. If it is used in both cases, this
2063 test will not be true. */
2064 if (INSN_FROM_TARGET_P (insn))
2067 /* If this insn is a USE made by update_block, we care about the
2069 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
2070 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2071 real_insn = XEXP (PATTERN (insn), 0);
2073 if (GET_CODE (real_insn) == CALL_INSN)
2075 /* CALL clobbers all call-used regs that aren't fixed except
2076 sp, ap, and fp. Do this before setting the result of the
2078 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2079 if (call_used_regs[i]
2080 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
2081 && i != ARG_POINTER_REGNUM
2082 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2083 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
2085 #ifdef PIC_OFFSET_TABLE_REGNUM
2086 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2089 CLEAR_HARD_REG_BIT (current_live_regs, i);
2091 /* A CALL_INSN sets any global register live, since it may
2092 have been modified by the call. */
2093 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2095 SET_HARD_REG_BIT (current_live_regs, i);
2098 /* Mark anything killed in an insn to be deadened at the next
2099 label. Ignore USE insns; the only REG_DEAD notes will be for
2100 parameters. But they might be early. A CALL_INSN will usually
2101 clobber registers used for parameters. It isn't worth bothering
2102 with the unlikely case when it won't. */
2103 if ((GET_CODE (real_insn) == INSN
2104 && GET_CODE (PATTERN (real_insn)) != USE)
2105 || GET_CODE (real_insn) == JUMP_INSN
2106 || GET_CODE (real_insn) == CALL_INSN)
2108 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2109 if (REG_NOTE_KIND (link) == REG_DEAD
2110 && GET_CODE (XEXP (link, 0)) == REG
2111 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2113 int first_regno = REGNO (XEXP (link, 0));
2116 + HARD_REGNO_NREGS (first_regno,
2117 GET_MODE (XEXP (link, 0)));
2119 for (i = first_regno; i < last_regno; i++)
2120 SET_HARD_REG_BIT (pending_dead_regs, i);
2123 note_stores (PATTERN (real_insn), update_live_status);
2125 /* If any registers were unused after this insn, kill them.
2126 These notes will always be accurate. */
2127 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2128 if (REG_NOTE_KIND (link) == REG_UNUSED
2129 && GET_CODE (XEXP (link, 0)) == REG
2130 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2132 int first_regno = REGNO (XEXP (link, 0));
2135 + HARD_REGNO_NREGS (first_regno,
2136 GET_MODE (XEXP (link, 0)));
2138 for (i = first_regno; i < last_regno; i++)
2139 CLEAR_HARD_REG_BIT (current_live_regs, i);
2143 else if (GET_CODE (real_insn) == CODE_LABEL)
2145 /* A label clobbers the pending dead registers since neither
2146 reload nor jump will propagate a value across a label. */
2147 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
2148 CLEAR_HARD_REG_SET (pending_dead_regs);
2151 /* The beginning of the epilogue corresponds to the end of the
2152 RTL chain when there are no epilogue insns. Certain resources
2153 are implicitly required at that point. */
2154 else if (GET_CODE (real_insn) == NOTE
2155 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
2156 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
2159 COPY_HARD_REG_SET (res->regs, current_live_regs);
2161 tinfo->bb_tick = bb_ticks[b];
2164 /* We didn't find the start of a basic block. Assume everything
2165 in use. This should happen only extremely rarely. */
2166 SET_HARD_REG_SET (res->regs);
2168 /* Now step forward from TARGET looking for registers that are set before
2169 they are used. These are dead. If we pass a label, any pending dead
2170 registers that weren't yet used can be made dead. Stop when we pass a
2171 conditional JUMP_INSN; follow the first few unconditional branches. */
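/* A minimal sketch of the forward pruning described above, on a toy
   instruction stream (not part of this pass; demo_insn and the bit masks
   are hypothetical).  A register written before it is read cannot have
   been live on entry to the target, so it is removed from LIVE, which is
   what the scratch/set/needed computation below does with hard reg sets.  */
#if 0
struct demo_insn
{
  unsigned long reads;		/* Registers the insn uses.  */
  unsigned long writes;		/* Registers the insn sets.  */
};

static unsigned long
demo_prune_live (live, seq, len)
     unsigned long live;
     struct demo_insn *seq;
     int len;
{
  unsigned long needed = 0;	/* Read before being set.  */
  unsigned long set = 0;	/* Set so far.  */
  int i;

  for (i = 0; i < len; i++)
    {
      needed |= seq[i].reads & ~set;
      set |= seq[i].writes;
    }

  /* Whatever was set before it was needed is dead at the target.  */
  return live & ~(set & ~needed);
}
#endif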
2173 CLEAR_RESOURCE (&set);
2174 CLEAR_RESOURCE (&needed);
2176 for (insn = target; insn; insn = next)
2178 rtx this_jump_insn = insn;
2180 next = NEXT_INSN (insn);
2181 switch (GET_CODE (insn))
2184 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
2185 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
2186 CLEAR_HARD_REG_SET (pending_dead_regs);
2194 if (GET_CODE (PATTERN (insn)) == USE)
2196 /* If INSN is a USE made by update_block, we care about the
2197 underlying insn. Any registers set by the underlying insn
2198 are live since the insn is being done somewhere else. */
2199 if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2200 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
2202 /* All other USE insns are to be ignored. */
2205 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
2207 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
2209 /* An unconditional jump can be used to fill the delay slot
2210 of a call, so search for a JUMP_INSN in any position. */
2211 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
2213 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
2214 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2220 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2222 if (jump_count++ < 10
2223 && (simplejump_p (this_jump_insn)
2224 || GET_CODE (PATTERN (this_jump_insn)) == RETURN))
2226 next = next_active_insn (JUMP_LABEL (this_jump_insn));
2230 jump_target = JUMP_LABEL (this_jump_insn);
2237 mark_referenced_resources (insn, &needed, 1);
2238 mark_set_resources (insn, &set, 0, 1);
2240 COPY_HARD_REG_SET (scratch, set.regs);
2241 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2242 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2245 /* If we hit an unconditional branch, we have another way of finding out
2246 what is live: we can see what is live at the branch target and include
2247 anything used but not set before the branch. The only things that are
2248 live are those that are live using the above test and the test below.
2250 Don't try this if we expired our jump count above, since that would
2251 mean there may be an infinite loop in the function being compiled. */
2253 if (jump_insn && jump_count < 10)
2255 struct resources new_resources;
2256 rtx stop_insn = next_active_insn (jump_insn);
2258 mark_target_live_regs (next_active_insn (jump_target), &new_resources);
2259 CLEAR_RESOURCE (&set);
2260 CLEAR_RESOURCE (&needed);
2262 /* Include JUMP_INSN in the needed registers. */
2263 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
2265 mark_referenced_resources (insn, &needed, 1);
2267 COPY_HARD_REG_SET (scratch, needed.regs);
2268 AND_COMPL_HARD_REG_SET (scratch, set.regs);
2269 IOR_HARD_REG_SET (new_resources.regs, scratch);
2271 mark_set_resources (insn, &set, 0, 1);
2274 AND_HARD_REG_SET (res->regs, new_resources.regs);
2277 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
2280 /* Scan a function looking for insns that need a delay slot and find insns to
2281 put into the delay slot.
2283 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
2284 as calls). We do these first since we don't want jump insns (that are
2285 easier to fill) to get the only insns that could be used for non-jump insns.
2286 When it is zero, only try to fill JUMP_INSNs.
2288 When slots are filled in this manner, the insns (including the
2289 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2290 it is possible to tell whether a delay slot has really been filled
2291 or not. `final' knows how to deal with this, by communicating
2292 through FINAL_SEQUENCE. */
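/* Typical driver order, shown here only as a usage sketch (the real calls
   appear in dbr_schedule below): non-jump insns are handled first so that
   calls get first pick of the easy candidates, then jump insns.  */
#if 0
fill_simple_delay_slots (first, 1);	/* CALL_INSNs and other non-jumps.  */
fill_simple_delay_slots (first, 0);	/* JUMP_INSNs.  */
#endif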
2295 fill_simple_delay_slots (first, non_jumps_p)
2298 register rtx insn, pat, trial, next_trial;
2300 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2301 struct resources needed, set;
2302 register int slots_to_fill, slots_filled;
2305 for (i = 0; i < num_unfilled_slots; i++)
2307 /* Get the next insn to fill. If it has already had any slots assigned,
2308 we can't do anything with it. Maybe we'll improve this later. */
2310 insn = unfilled_slots_base[i];
2312 || INSN_DELETED_P (insn)
2313 || (GET_CODE (insn) == INSN
2314 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2315 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2316 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2319 slots_to_fill = num_delay_slots (insn);
2320 if (slots_to_fill == 0)
2323 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2324 says how many. After initialization, first try optimizing
2326 call _foo		call _foo
2327 nop			add %o7,.-L1,%o7
2328 b,a L1
2329 nop
2331 If this case applies, the delay slot of the call is filled with
2332 the unconditional jump. This is done first to avoid having the
2333 delay slot of the call filled in the backward scan. Also, since
2334 the unconditional jump is likely to also have a delay slot, that
2335 insn must exist when it is subsequently scanned. */
2340 if (GET_CODE (insn) == CALL_INSN
2341 && (trial = next_active_insn (insn))
2342 && GET_CODE (trial) == JUMP_INSN
2343 && simplejump_p (trial)
2344 && eligible_for_delay (insn, slots_filled, trial)
2345 && no_labels_between_p (insn, trial))
2348 delay_list = add_to_delay_list (trial, delay_list);
2349 /* Remove the unconditional jump from consideration for delay slot
2350 filling and unthread it. */
2351 if (unfilled_slots_base[i + 1] == trial)
2352 unfilled_slots_base[i + 1] = 0;
2354 rtx next = NEXT_INSN (trial);
2355 rtx prev = PREV_INSN (trial);
2357 NEXT_INSN (prev) = next;
2359 PREV_INSN (next) = prev;
2363 /* Now, scan backwards from the insn to search for a potential
2364 delay-slot candidate. Stop searching when a label or jump is hit.
2366 For each candidate, if it is to go into the delay slot (moved
2367 forward in execution sequence), it must not need or set any resources
2368 that were set by later insns and must not set any resources that
2369 are needed for those insns.
2371 The delay slot insn itself sets resources unless it is a call
2372 (in which case the called routine, not the insn itself, is doing
2375 if (slots_filled < slots_to_fill)
2377 CLEAR_RESOURCE (&needed);
2378 CLEAR_RESOURCE (&set);
2379 mark_set_resources (insn, &set, 0, 0);
2380 mark_referenced_resources (insn, &needed, 0);
2382 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2385 next_trial = prev_nonnote_insn (trial);
2387 /* This must be an INSN or CALL_INSN. */
2388 pat = PATTERN (trial);
2390 /* USE and CLOBBER at this level was just for flow; ignore it. */
2391 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2394 /* Check for resource conflict first, to avoid unnecessary
2396 if (! insn_references_resource_p (trial, &set, 1)
2397 && ! insn_sets_resource_p (trial, &set, 1)
2398 && ! insn_sets_resource_p (trial, &needed, 1)
2400 /* Can't separate set of cc0 from its use. */
2401 && ! (reg_mentioned_p (cc0_rtx, pat)
2402 && ! sets_cc0_p (cc0_rtx, pat))
2406 trial = try_split (pat, trial, 1);
2407 next_trial = prev_nonnote_insn (trial);
2408 if (eligible_for_delay (insn, slots_filled, trial))
2410 /* In this case, we are searching backward, so if we
2411 find insns to put on the delay list, we want
2412 to put them at the head, rather than the
2413 tail, of the list. */
2415 delay_list = gen_rtx (INSN_LIST, VOIDmode,
2417 update_block (trial, trial);
2418 delete_insn (trial);
2419 if (slots_to_fill == ++slots_filled)
2425 mark_set_resources (trial, &set, 0, 1);
2426 mark_referenced_resources (trial, &needed, 1);
2430 /* If all needed slots haven't been filled, we come here. */
2432 /* Try to optimize case of jumping around a single insn. */
2433 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2434 if (slots_filled != slots_to_fill
2436 && GET_CODE (insn) == JUMP_INSN && condjump_p (insn))
2438 delay_list = optimize_skip (insn);
2444 /* Try to get insns from beyond the insn needing the delay slot.
2445 These insns can neither set nor reference resources set in insns being
2446 skipped, cannot set resources in the insn being skipped, and, if this
2447 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2448 call might not return).
2450 If this is a conditional jump, see if it merges back to us early
2451 enough for us to pick up insns from the merge point. Don't do
2452 this if there is another branch to our label unless we pass all of
2455 Another similar merge is if we jump to the same place that a
2456 later unconditional jump branches to. In that case, we don't
2457 care about the number of uses of our label. */
2459 if (slots_filled != slots_to_fill
2460 && (GET_CODE (insn) != JUMP_INSN
2461 || (condjump_p (insn) && ! simplejump_p (insn)
2462 && JUMP_LABEL (insn) != 0)))
2465 int maybe_never = 0;
2466 int passed_label = 0;
2468 struct resources needed_at_jump;
2470 CLEAR_RESOURCE (&needed);
2471 CLEAR_RESOURCE (&set);
2473 if (GET_CODE (insn) == CALL_INSN)
2475 mark_set_resources (insn, &set, 0, 1);
2476 mark_referenced_resources (insn, &needed, 1);
2481 mark_set_resources (insn, &set, 0, 0);
2482 mark_referenced_resources (insn, &needed, 0);
2483 if (GET_CODE (insn) == JUMP_INSN)
2485 /* Get our target and show how many more uses we want to
2486 see before we hit the label. */
2487 target = JUMP_LABEL (insn);
2488 target_uses = LABEL_NUSES (target) - 1;
2493 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2495 rtx pat, trial_delay;
2497 next_trial = next_nonnote_insn (trial);
2499 if (GET_CODE (trial) == CODE_LABEL)
2503 /* If this is our target, see if we have seen all its uses.
2504 If so, indicate we have passed our target and ignore it.
2505 All other labels cause us to stop our search. */
2506 if (trial == target && target_uses == 0)
2514 else if (GET_CODE (trial) == BARRIER)
2517 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2518 pat = PATTERN (trial);
2520 /* Stand-alone USE and CLOBBER are just for flow. */
2521 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2524 /* If this already has filled delay slots, get the insn needing
2526 if (GET_CODE (pat) == SEQUENCE)
2527 trial_delay = XVECEXP (pat, 0, 0);
2529 trial_delay = trial;
2531 /* If this is a jump insn to our target, indicate that we have
2532 seen another jump to it. If we aren't handling a conditional
2533 jump, stop our search. Otherwise, compute the needs at its
2534 target and add them to NEEDED. */
2535 if (GET_CODE (trial_delay) == JUMP_INSN)
2539 else if (JUMP_LABEL (trial_delay) == target)
2543 mark_target_live_regs
2544 (next_active_insn (JUMP_LABEL (trial_delay)),
2546 needed.memory |= needed_at_jump.memory;
2547 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
2551 /* See if we have a resource problem before we try to
2554 && GET_CODE (pat) != SEQUENCE
2555 && ! insn_references_resource_p (trial, &set, 1)
2556 && ! insn_sets_resource_p (trial, &set, 1)
2557 && ! insn_sets_resource_p (trial, &needed, 1)
2559 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2561 && ! (maybe_never && may_trap_p (pat))
2562 && (trial = try_split (pat, trial, 0))
2563 && eligible_for_delay (insn, slots_filled, trial))
2565 next_trial = next_nonnote_insn (trial);
2566 delay_list = add_to_delay_list (trial, delay_list);
2569 if (reg_mentioned_p (cc0_rtx, pat))
2570 link_cc0_insns (trial);
2574 update_block (trial, trial);
2575 delete_insn (trial);
2576 if (slots_to_fill == ++slots_filled)
2581 mark_set_resources (trial, &set, 0, 1);
2582 mark_referenced_resources (trial, &needed, 1);
2584 /* Ensure we don't put insns between the setting of cc and the
2585 comparison by moving a setting of cc into an earlier delay
2586 slot since these insns could clobber the condition code. */
2589 /* If this is a call or jump, we might not get here. */
2590 if (GET_CODE (trial) == CALL_INSN
2591 || GET_CODE (trial) == JUMP_INSN)
2595 /* If there are slots left to fill and our search was stopped by an
2596 unconditional branch, try the insn at the branch target. We can
2597 redirect the branch if it works. */
2598 if (slots_to_fill != slots_filled
2600 && GET_CODE (trial) == JUMP_INSN
2601 && simplejump_p (trial)
2602 && (target == 0 || JUMP_LABEL (trial) == target)
2603 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2604 && ! (GET_CODE (next_trial) == INSN
2605 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2606 && ! insn_references_resource_p (next_trial, &set, 1)
2607 && ! insn_sets_resource_p (next_trial, &set, 1)
2608 && ! insn_sets_resource_p (next_trial, &needed, 1)
2610 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2612 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
2613 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2614 && eligible_for_delay (insn, slots_filled, next_trial))
2616 rtx new_label = next_active_insn (next_trial);
2619 new_label = get_label_before (new_label);
2622 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2624 redirect_jump (trial, new_label);
2626 /* If we merged because we both jumped to the same place,
2627 redirect the original insn also. */
2629 redirect_jump (insn, new_label);
2634 unfilled_slots_base[i]
2635 = emit_delay_sequence (insn, delay_list,
2636 slots_filled, slots_to_fill);
2638 if (slots_to_fill == slots_filled)
2639 unfilled_slots_base[i] = 0;
2641 note_delay_statistics (slots_filled, 0);
2644 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2645 /* See if the epilogue needs any delay slots. Try to fill them if so.
2646 The only thing we can do is scan backwards from the end of the
2647 function. If we did this in a previous pass, it is incorrect to do it
2649 if (current_function_epilogue_delay_list)
2652 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2653 if (slots_to_fill == 0)
2657 CLEAR_RESOURCE (&needed);
2658 CLEAR_RESOURCE (&set);
2660 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2661 trial = PREV_INSN (trial))
2663 if (GET_CODE (trial) == NOTE)
2665 pat = PATTERN (trial);
2666 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2669 if (! insn_references_resource_p (trial, &set, 1)
2670 && ! insn_sets_resource_p (trial, &needed, 1)
2672 /* Don't want to mess with cc0 here. */
2673 && ! reg_mentioned_p (cc0_rtx, pat)
2677 trial = try_split (pat, trial, 1);
2678 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2680 /* Here as well we are searching backward, so put the
2681 insns we find on the head of the list. */
2683 current_function_epilogue_delay_list
2684 = gen_rtx (INSN_LIST, VOIDmode, trial,
2685 current_function_epilogue_delay_list);
2686 mark_referenced_resources (trial, &end_of_function_needs, 1);
2687 update_block (trial, trial);
2688 delete_insn (trial);
2690 /* Clear deleted bit so final.c will output the insn. */
2691 INSN_DELETED_P (trial) = 0;
2693 if (slots_to_fill == ++slots_filled)
2699 mark_set_resources (trial, &set, 0, 1);
2700 mark_referenced_resources (trial, &needed, 1);
2703 note_delay_statistics (slots_filled, 0);
2707 /* Try to find insns to place in delay slots.
2709 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2710 or is an unconditional branch if CONDITION is const_true_rtx.
2711 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2713 THREAD is a flow of control: either the insns to be executed if the
2714 branch is true or those if it is false; THREAD_IF_TRUE says which.
2716 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2717 to see if any potential delay slot insns set things needed there.
2719 LIKELY is non-zero if it is extremely likely that the branch will be
2720 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2721 end of a loop back up to the top.
2723 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
2724 thread. I.e., it is the fallthrough code of our jump or the target of the
2725 jump when we are the only jump going there.
2727 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2728 case, we can only take insns from the head of the thread for our delay
2729 slot. We then adjust the jump to point after the insns we have taken. */
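/* A condensed usage sketch of how a caller chooses the two threads (the
   real calls are in fill_eager_delay_slots below; the variables are the
   ones used there, and the `prediction > 0' test is only an assumed
   shorthand for "expected to branch").  When the branch is predicted
   taken, the target is the primary thread and the fallthrough code is the
   opposite thread; otherwise the roles are swapped.  */
#if 0
if (prediction > 0)
  delay_list = fill_slots_from_thread (insn, condition,
				       insn_at_target, fallthrough_insn,
				       prediction == 2, 1,
				       own_target, own_fallthrough,
				       slots_to_fill, &slots_filled);
else
  delay_list = fill_slots_from_thread (insn, condition,
				       fallthrough_insn, insn_at_target,
				       0, 0,
				       own_fallthrough, own_target,
				       slots_to_fill, &slots_filled);
#endif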
2732 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
2733 thread_if_true, own_thread, own_opposite_thread,
2734 slots_to_fill, pslots_filled)
2737 rtx thread, opposite_thread;
2740 int own_thread, own_opposite_thread;
2741 int slots_to_fill, *pslots_filled;
2745 struct resources opposite_needed, set, needed;
2750 /* Validate our arguments. */
2751 if ((condition == const_true_rtx && ! thread_if_true)
2752 || (! own_thread && ! thread_if_true))
2755 /* If our thread is the end of subroutine, we can't get any delay
2760 /* If this is an unconditional branch, nothing is needed at the
2761 opposite thread. Otherwise, compute what is needed there. */
2762 if (condition == const_true_rtx)
2763 CLEAR_RESOURCE (&opposite_needed);
2765 mark_target_live_regs (opposite_thread, &opposite_needed);
2767 /* If the insn at THREAD can be split, do it here to avoid having to
2768 update THREAD and NEW_THREAD if it is done in the loop below. Also
2769 initialize NEW_THREAD. */
2771 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2773 /* Scan insns at THREAD. We are looking for an insn that can be removed
2774 from THREAD (it neither sets nor references resources that were set
2775 ahead of it and it doesn't set anything needed by the insns ahead of
2776 it) and that either can be placed in an annulling insn or isn't
2777 needed at OPPOSITE_THREAD. */
2779 CLEAR_RESOURCE (&needed);
2780 CLEAR_RESOURCE (&set);
2782 /* If we do not own this thread, we must stop as soon as we find
2783 something that we can't put in a delay slot, since all we can do
2784 is branch into THREAD at a later point. Therefore, labels stop
2785 the search if this is not the `true' thread. */
2787 for (trial = thread;
2788 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2789 trial = next_nonnote_insn (trial))
2793 /* If we have passed a label, we no longer own this thread. */
2794 if (GET_CODE (trial) == CODE_LABEL)
2800 pat = PATTERN (trial);
2801 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2804 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2805 don't separate or copy insns that set and use CC0. */
2806 if (! insn_references_resource_p (trial, &set, 1)
2807 && ! insn_sets_resource_p (trial, &set, 1)
2808 && ! insn_sets_resource_p (trial, &needed, 1)
2810 && ! (reg_mentioned_p (cc0_rtx, pat)
2811 && (! own_thread || ! sets_cc0_p (pat)))
2815 /* If TRIAL is redundant with some insn before INSN, we don't
2816 actually need to add it to the delay list; we can merely pretend
2818 if (redundant_insn_p (trial, insn, delay_list))
2822 update_block (trial, thread);
2823 delete_insn (trial);
2826 new_thread = next_active_insn (trial);
2831 /* There are two ways we can win: If TRIAL doesn't set anything
2832 needed at the opposite thread and can't trap, or if it can
2833 go into an annulled delay slot. */
2834 if (condition == const_true_rtx
2835 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2836 && ! may_trap_p (pat)))
2838 trial = try_split (pat, trial, 0);
2839 pat = PATTERN (trial);
2840 if (eligible_for_delay (insn, *pslots_filled, trial))
2844 #ifdef ANNUL_IFTRUE_SLOTS
2847 #ifdef ANNUL_IFFALSE_SLOTS
2852 trial = try_split (pat, trial, 0);
2853 pat = PATTERN (trial);
2855 ? eligible_for_annul_false (insn, *pslots_filled, trial)
2856 : eligible_for_annul_true (insn, *pslots_filled, trial)))
2864 if (reg_mentioned_p (cc0_rtx, pat))
2865 link_cc0_insns (trial);
2868 /* If we own this thread, delete the insn. If this is the
2869 destination of a branch, show that a basic block status
2870 may have been updated. In any case, mark the new
2871 starting point of this thread. */
2874 update_block (trial, thread);
2875 delete_insn (trial);
2878 new_thread = next_active_insn (trial);
2880 temp = own_thread ? trial : copy_rtx (trial);
2882 INSN_FROM_TARGET_P (temp) = 1;
2884 delay_list = add_to_delay_list (temp, delay_list);
2886 if (slots_to_fill == ++(*pslots_filled))
2888 /* Even though we have filled all the slots, we
2889 may be branching to a location that has a
2890 redundant insn. Skip any if so. */
2891 while (new_thread && ! own_thread
2892 && ! insn_sets_resource_p (new_thread, &set, 1)
2893 && ! insn_sets_resource_p (new_thread, &needed, 1)
2894 && ! insn_references_resource_p (new_thread,
2896 && redundant_insn_p (new_thread, insn,
2898 new_thread = next_active_insn (new_thread);
2907 /* This insn can't go into a delay slot. */
2909 mark_set_resources (trial, &set, 0, 1);
2910 mark_referenced_resources (trial, &needed, 1);
2912 /* Ensure we don't put insns between the setting of cc and the comparison
2913 by moving a setting of cc into an earlier delay slot since these insns
2914 could clobber the condition code. */
2917 /* If this insn is a register-register copy and the next insn has
2918 a use of our destination, change it to use our source. That way,
2919 it will become a candidate for our delay slot the next time
2920 through this loop. This case occurs commonly in loops that
2923 We could check for more complex cases than those tested below,
2924 but it doesn't seem worth it. It might also be a good idea to try
2925 to swap the two insns. That might do better.
2927 We can't do this if the next insn modifies our source, because that
2928 would make the replacement into the insn invalid. This also
2929 prevents updating the contents of a PRE_INC. */
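/* Source-level analogy of the substitution described above; r1, r2 and r3
   are hypothetical stand-ins for hard registers and this block is not part
   of the pass.  */
#if 0
static int
demo_before (r1)
     int r1;
{
  int r2, r3;
  r2 = r1;			/* The register-register copy (TRIAL).  */
  r3 = r2 + 4;			/* The next insn reads the copy's destination,
				   so it cannot move ahead of the copy.  */
  return r2 + r3;
}

static int
demo_after (r1)
     int r1;
{
  int r2, r3;
  r2 = r1;			/* The copy itself is unchanged.  */
  r3 = r1 + 4;			/* After validate_replace_rtx substitutes the
				   source for the destination, this insn no
				   longer depends on the copy and may be taken
				   for the delay slot on the next pass through
				   the loop.  */
  return r2 + r3;
}
#endif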
2931 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
2932 && GET_CODE (SET_SRC (pat)) == REG
2933 && GET_CODE (SET_DEST (pat)) == REG)
2935 rtx next = next_nonnote_insn (trial);
2937 if (next && GET_CODE (next) == INSN
2938 && GET_CODE (PATTERN (next)) != USE
2939 && ! reg_set_p (SET_DEST (pat), next)
2940 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
2941 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2945 /* If we stopped on a branch insn that has delay slots, see if we can
2946 steal some of the insns in those slots. */
2947 if (trial && GET_CODE (trial) == INSN
2948 && GET_CODE (PATTERN (trial)) == SEQUENCE
2949 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
2951 /* If this is the `true' thread, we will want to follow the jump,
2952 so we can only do this if we have taken everything up to here. */
2953 if (thread_if_true && trial == new_thread)
2955 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2956 delay_list, &set, &needed,
2957 &opposite_needed, slots_to_fill,
2958 pslots_filled, &must_annul,
2960 else if (! thread_if_true)
2962 = steal_delay_list_from_fallthrough (insn, condition,
2964 delay_list, &set, &needed,
2965 &opposite_needed, slots_to_fill,
2966 pslots_filled, &must_annul);
2969 /* If we haven't found anything for this delay slot and it is very
2970 likely that the branch will be taken, see if the insn at our target
2971 increments or decrements a register with an increment that does not
2972 depend on the destination register. If so, try to place the opposite
2973 arithmetic insn after the jump insn and put the arithmetic insn in the
2974 delay slot. If we can't do this, return. */
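/* A sketch of the transformation described above, in assembly-like
   pseudocode (i, cond and the labels are hypothetical; this is only an
   illustration, not part of the pass).  Before, the target of a branch
   that is almost always taken starts with an increment, and the branch's
   delay slot holds a nop:

       top:  i = i + 1
             ...
             if (cond) goto top     ; delay slot: nop

   Afterwards the increment sits in the delay slot, so it executes on the
   common, taken path; the branch is redirected past the original
   increment; and the opposite arithmetic is emitted after the branch so
   the rarely taken fallthrough path still sees the old value:

             ...
             if (cond) goto body    ; delay slot: i = i + 1
             i = i - 1              ; compensate on the fallthrough path
       body: ...                                                         */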
2975 if (delay_list == 0 && likely && new_thread && GET_CODE (new_thread) == INSN)
2977 rtx pat = PATTERN (new_thread);
2982 pat = PATTERN (trial);
2984 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
2985 || ! eligible_for_delay (insn, 0, trial))
2988 dest = SET_DEST (pat), src = SET_SRC (pat);
2989 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2990 && rtx_equal_p (XEXP (src, 0), dest)
2991 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
2993 rtx other = XEXP (src, 1);
2997 /* If this is a constant adjustment, use the same code with
2998 the negated constant. Otherwise, reverse the sense of the
3000 if (GET_CODE (other) == CONST_INT)
3001 new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
3002 negate_rtx (GET_MODE (src), other));
3004 new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
3005 GET_MODE (src), dest, other);
3007 ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
3010 if (recog_memoized (ninsn) < 0
3011 || (insn_extract (ninsn),
3012 ! constrain_operands (INSN_CODE (ninsn), 1)))
3014 delete_insn (ninsn);
3020 update_block (trial, thread);
3021 delete_insn (trial);
3024 new_thread = next_active_insn (trial);
3026 ninsn = own_thread ? trial : copy_rtx (trial);
3028 INSN_FROM_TARGET_P (ninsn) = 1;
3030 delay_list = add_to_delay_list (ninsn, NULL_RTX);
3035 if (delay_list && must_annul)
3036 INSN_ANNULLED_BRANCH_P (insn) = 1;
3038 /* If we are to branch into the middle of this thread, find an appropriate
3039 label or make a new one if none, and redirect INSN to it. If we hit the
3040 end of the function, use the end-of-function label. */
3041 if (new_thread != thread)
3045 if (! thread_if_true)
3048 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
3049 && (simplejump_p (new_thread)
3050 || GET_CODE (PATTERN (new_thread)) == RETURN))
3051 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3053 if (new_thread == 0)
3054 label = find_end_label ();
3055 else if (GET_CODE (new_thread) == CODE_LABEL)
3058 label = get_label_before (new_thread);
3060 redirect_jump (insn, label);
3066 /* Make another attempt to find insns to place in delay slots.
3068 We previously looked for insns located in front of the delay insn
3069 and, for non-jump delay insns, located behind the delay insn.
3071 Here only try to schedule jump insns and try to move insns from either
3072 the target or the following insns into the delay slot. If annulling is
3073 supported, we will be likely to do this. Otherwise, we can do this only
3077 fill_eager_delay_slots (first)
3082 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3084 for (i = 0; i < num_unfilled_slots; i++)
3087 rtx target_label, insn_at_target, fallthrough_insn;
3090 int own_fallthrough;
3091 int prediction, slots_to_fill, slots_filled;
3093 insn = unfilled_slots_base[i];
3095 || INSN_DELETED_P (insn)
3096 || GET_CODE (insn) != JUMP_INSN
3097 || ! condjump_p (insn))
3100 slots_to_fill = num_delay_slots (insn);
3101 if (slots_to_fill == 0)
3105 target_label = JUMP_LABEL (insn);
3106 condition = get_branch_condition (insn, target_label);
3111 /* Get the next active fallthrough and target insns and see if we own
3112 them. Then see whether the branch is likely true. We don't need
3113 to do a lot of this for unconditional branches. */
3115 insn_at_target = next_active_insn (target_label);
3116 own_target = own_thread_p (target_label, target_label, 0);
3118 if (condition == const_true_rtx)
3120 own_fallthrough = 0;
3121 fallthrough_insn = 0;
3126 fallthrough_insn = next_active_insn (insn);
3127 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3128 prediction = mostly_true_jump (insn, condition);
3131 /* If this insn is expected to branch, first try to get insns from our
3132 target, then our fallthrough insns. If it is not expected to branch,
3133 try the other order. */
3138 = fill_slots_from_thread (insn, condition, insn_at_target,
3139 fallthrough_insn, prediction == 2, 1,
3140 own_target, own_fallthrough,
3141 slots_to_fill, &slots_filled);
3143 if (delay_list == 0 && own_fallthrough)
3145 /* Even though we didn't find anything for delay slots,
3146 we might have found a redundant insn which we deleted
3147 from the thread that was filled. So we have to recompute
3148 the next insn at the target. */
3149 target_label = JUMP_LABEL (insn);
3150 insn_at_target = next_active_insn (target_label);
3153 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3154 insn_at_target, 0, 0,
3155 own_fallthrough, own_target,
3156 slots_to_fill, &slots_filled);
3161 if (own_fallthrough)
3163 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3164 insn_at_target, 0, 0,
3165 own_fallthrough, own_target,
3166 slots_to_fill, &slots_filled);
3168 if (delay_list == 0)
3170 = fill_slots_from_thread (insn, condition, insn_at_target,
3171 next_active_insn (insn), 0, 1,
3172 own_target, own_fallthrough,
3173 slots_to_fill, &slots_filled);
3177 unfilled_slots_base[i]
3178 = emit_delay_sequence (insn, delay_list,
3179 slots_filled, slots_to_fill);
3181 if (slots_to_fill == slots_filled)
3182 unfilled_slots_base[i] = 0;
3184 note_delay_statistics (slots_filled, 1);
3188 /* Once we have tried two ways to fill a delay slot, make a pass over the
3189 code to try to improve the results and to do such things as more jump
3193 relax_delay_slots (first)
3196 register rtx insn, next, pat;
3197 register rtx trial, delay_insn, target_label;
3199 /* Look at every JUMP_INSN and see if we can improve it. */
3200 for (insn = first; insn; insn = next)
3204 next = next_active_insn (insn);
3206 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3207 the next insn, or jumps to a label that is not the last of a
3208 group of consecutive labels. */
3209 if (GET_CODE (insn) == JUMP_INSN
3210 && condjump_p (insn)
3211 && (target_label = JUMP_LABEL (insn)) != 0)
3213 target_label = follow_jumps (target_label);
3214 target_label = prev_label (next_active_insn (target_label));
3216 if (target_label == 0)
3217 target_label = find_end_label ();
3219 if (next_active_insn (target_label) == next)
3225 if (target_label != JUMP_LABEL (insn))
3226 redirect_jump (insn, target_label);
3228 /* See if this jump branches around an unconditional jump.
3229 If so, invert this jump and point it to the target of the
3231 if (next && GET_CODE (next) == JUMP_INSN
3232 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3233 && next_active_insn (target_label) == next_active_insn (next)
3234 && no_labels_between_p (insn, next))
3236 rtx label = JUMP_LABEL (next);
3238 /* Be careful how we do this to avoid deleting code or
3239 labels that are momentarily dead. See similar optimization
3242 We also need to ensure we properly handle the case when
3243 invert_jump fails. */
3245 ++LABEL_NUSES (target_label);
3247 ++LABEL_NUSES (label);
3249 if (invert_jump (insn, label))
3256 --LABEL_NUSES (label);
3258 if (--LABEL_NUSES (target_label) == 0)
3259 delete_insn (target_label);
3265 /* If this is an unconditional jump and the previous insn is a
3266 conditional jump, try reversing the condition of the previous
3267 insn and swapping our targets. The next pass might be able to
3270 Don't do this if we expect the conditional branch to be true, because
3271 we would then be making the more common case longer. */
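/* Source-level analogy of the swap described above; cond, A and B are
   hypothetical labels and this block is not part of the pass.  */
#if 0
/* Before: a conditional jump is immediately followed by an unconditional
   one, each with its own delay slot to fill.  */
static void
demo_before (cond)
     int cond;
{
  if (cond)
    goto A;
  goto B;
 A:
  return;
 B:
  return;
}

/* After reversing the condition and exchanging the two targets, the code
   computes the same thing; the point is that the rearranged branches may
   be easier for the next reorg pass to fill.  */
static void
demo_after (cond)
     int cond;
{
  if (!cond)
    goto B;
  goto A;
 A:
  return;
 B:
  return;
}
#endif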
3273 if (GET_CODE (insn) == JUMP_INSN
3274 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3275 && (other = prev_active_insn (insn)) != 0
3276 && condjump_p (other)
3277 && no_labels_between_p (other, insn)
3278 && ! mostly_true_jump (other,
3279 get_branch_condition (other,
3280 JUMP_LABEL (other))))
3282 rtx other_target = JUMP_LABEL (other);
3284 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
3285 as we move the label. */
3287 ++LABEL_NUSES (other_target);
3289 if (invert_jump (other, target_label))
3290 redirect_jump (insn, other_target);
3293 --LABEL_NUSES (other_target);
3296 /* Now look only at cases where we have filled a delay slot. */
3297 if (GET_CODE (insn) != INSN
3298 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3301 pat = PATTERN (insn);
3302 delay_insn = XVECEXP (pat, 0, 0);
3304 /* See if the first insn in the delay slot is redundant with some
3305 previous insn. Remove it from the delay slot if so; then set up
3306 to reprocess this insn. */
3307 if (redundant_insn_p (XVECEXP (pat, 0, 1), delay_insn, 0))
3309 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3310 next = prev_active_insn (next);
3314 /* Now look only at the cases where we have a filled JUMP_INSN. */
3315 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3316 || ! condjump_p (XVECEXP (PATTERN (insn), 0, 0)))
3319 target_label = JUMP_LABEL (delay_insn);
3323 /* If this jump goes to another unconditional jump, thread it, but
3324 don't convert a jump into a RETURN here. */
3325 trial = follow_jumps (target_label);
3326 trial = prev_label (next_active_insn (trial));
3327 if (trial == 0 && target_label != 0)
3328 trial = find_end_label ();
3330 if (trial != target_label)
3332 redirect_jump (delay_insn, trial);
3333 target_label = trial;
3336 /* If the first insn at TARGET_LABEL is redundant with a previous
3337 insn, redirect the jump to the following insn and process it again. */
3338 trial = next_active_insn (target_label);
3339 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3340 && redundant_insn_p (trial, insn, 0))
3342 trial = next_active_insn (trial);
3344 target_label = find_end_label ();
3346 target_label = get_label_before (trial);
3347 redirect_jump (delay_insn, target_label);
3352 /* Similarly, if it is an unconditional jump with one insn in its
3353 delay list and that insn is redundant, thread the jump. */
3354 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3355 && XVECLEN (PATTERN (trial), 0) == 2
3356 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3357 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3358 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3359 && redundant_insn_p (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3361 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3362 if (target_label == 0)
3363 target_label = find_end_label ();
3364 redirect_jump (delay_insn, target_label);
3370 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3371 && prev_active_insn (target_label) == insn
3373 /* If the last insn in the delay slot sets CC0 for some insn,
3374 various code assumes that it is in a delay slot. We could
3375 put it back where it belonged and delete the register notes,
3376 but it doesn't seem worthwhile in this uncommon case. */
3377 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3378 REG_CC_USER, NULL_RTX)
3384 /* All this insn does is execute its delay list and jump to the
3385 following insn. So delete the jump and just execute the delay
3388 We do this by deleting the INSN containing the SEQUENCE, then
3389 re-emitting the insns separately, and then deleting the jump.
3390 This allows the count of the jump target to be properly
3393 /* Clear the from target bit, since these insns are no longer
3395 for (i = 0; i < XVECLEN (pat, 0); i++)
3396 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3398 trial = PREV_INSN (insn);
3400 emit_insn_after (pat, trial);
3401 delete_scheduled_jump (delay_insn);
3405 /* See if this jump (with its delay slots) branches around another
3406 jump (without delay slots). If so, invert this jump and point
3407 it to the target of the second jump. We cannot do this for
3408 annulled jumps, though. Again, don't convert a jump to a RETURN
3410 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3411 && next && GET_CODE (next) == JUMP_INSN
3412 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3413 && next_active_insn (target_label) == next_active_insn (next)
3414 && no_labels_between_p (insn, next))
3416 rtx label = JUMP_LABEL (next);
3417 rtx old_label = JUMP_LABEL (delay_insn);
3420 label = find_end_label ();
3422 /* Be careful how we do this to avoid deleting code or labels
3423 that are momentarily dead. See similar optimization in jump.c */
3425 ++LABEL_NUSES (old_label);
3427 if (invert_jump (delay_insn, label))
3433 if (old_label && --LABEL_NUSES (old_label) == 0)
3434 delete_insn (old_label);
3438 /* If we own the thread opposite the way this insn branches, see if we
3439 can merge its delay slots with following insns. */
3440 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3441 && own_thread_p (NEXT_INSN (insn), 0, 1))
3442 try_merge_delay_insns (insn, next);
3443 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3444 && own_thread_p (target_label, target_label, 0))
3445 try_merge_delay_insns (insn, next_active_insn (target_label));
3447 /* If we get here, we haven't deleted INSN. But we may have deleted
3448 NEXT, so recompute it. */
3449 next = next_active_insn (insn);
3455 /* Look for filled jumps to the end of function label. We can try to convert
3456 them into RETURN insns if the insns in the delay slot are valid for the
3460 make_return_insns (first)
3463 rtx insn, jump_insn, pat;
3464 rtx real_return_label = end_of_function_label;
3467 /* See if there is a RETURN insn in the function other than the one we
3468 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3469 into a RETURN to jump to it. */
3470 for (insn = first; insn; insn = NEXT_INSN (insn))
3471 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
3473 real_return_label = get_label_before (insn);
3477 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3478 was equal to END_OF_FUNCTION_LABEL. */
3479 LABEL_NUSES (real_return_label)++;
3481 /* Clear the list of insns to fill so we can use it. */
3482 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3484 for (insn = first; insn; insn = NEXT_INSN (insn))
3486 /* Only look at filled JUMP_INSNs that go to the end of function
3488 if (GET_CODE (insn) != INSN
3489 || GET_CODE (PATTERN (insn)) != SEQUENCE
3490 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3491 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3494 pat = PATTERN (insn);
3495 jump_insn = XVECEXP (pat, 0, 0);
3497 /* If we can't make the jump into a RETURN, redirect it to the best
3498 RETURN and go on to the next insn. */
3499 if (! redirect_jump (jump_insn, NULL_RTX))
3501 redirect_jump (jump_insn, real_return_label);
3505 /* See if this RETURN can accept the insns currently in its delay slot.
3506 It can if it has more or an equal number of slots and the contents
3507 of each is valid. */
3509 slots = num_delay_slots (jump_insn);
3510 if (slots >= XVECLEN (pat, 0) - 1)
3512 for (i = 1; i < XVECLEN (pat, 0); i++)
3514 #ifdef ANNUL_IFFALSE_SLOTS
3515 (INSN_ANNULLED_BRANCH_P (jump_insn)
3516 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3517 ? eligible_for_annul_false (jump_insn, i - 1,
3518 XVECEXP (pat, 0, i)) :
3520 #ifdef ANNUL_IFTRUE_SLOTS
3521 (INSN_ANNULLED_BRANCH_P (jump_insn)
3522 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3523 ? eligible_for_annul_true (jump_insn, i - 1,
3524 XVECEXP (pat, 0, i)) :
3526 eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i))))
3532 if (i == XVECLEN (pat, 0))
3535 /* We have to do something with this insn. If it is an unconditional
3536 RETURN, delete the SEQUENCE and output the individual insns,
3537 followed by the RETURN. Then set things up so we try to find
3538 insns for its delay slots, if it needs some. */
3539 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
3541 rtx prev = PREV_INSN (insn);
3544 for (i = 1; i < XVECLEN (pat, 0); i++)
3545 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3547 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3548 emit_barrier_after (insn);
3551 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3554 /* It is probably more efficient to keep this with its current
3555 delay slot as a branch to a RETURN. */
3556 redirect_jump (jump_insn, real_return_label);
3559 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3560 new delay slots we have created. */
3561 if (--LABEL_NUSES (real_return_label) == 0)
3562 delete_insn (real_return_label);
3564 fill_simple_delay_slots (first, 1);
3565 fill_simple_delay_slots (first, 0);
3569 /* Try to find insns to place in delay slots. */
3572 dbr_schedule (first, file)
3576 rtx insn, next, epilogue_insn = 0;
3579 int old_flag_no_peephole = flag_no_peephole;
3581 /* Execute `final' once in prescan mode to delete any insns that won't be
3582 used. Don't let final try to do any peephole optimization--it will
3583 ruin dataflow information for this pass. */
3585 flag_no_peephole = 1;
3586 final (first, 0, NO_DEBUG, 1, 1);
3587 flag_no_peephole = old_flag_no_peephole;
3590 /* Find the highest INSN_UID and allocate and initialize our map from
3591 INSN_UID's to position in code. */
3592 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3594 if (INSN_UID (insn) > max_uid)
3595 max_uid = INSN_UID (insn);
3596 if (GET_CODE (insn) == NOTE
3597 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
3598 epilogue_insn = insn;
3601 uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int *));
3602 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3603 uid_to_ruid[INSN_UID (insn)] = i;
3605 /* Initialize the list of insns that need filling. */
3606 if (unfilled_firstobj == 0)
3608 gcc_obstack_init (&unfilled_slots_obstack);
3609 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
3612 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3616 INSN_ANNULLED_BRANCH_P (insn) = 0;
3617 INSN_FROM_TARGET_P (insn) = 0;
3619 /* Skip vector tables. We can't get attributes for them. */
3620 if (GET_CODE (insn) == JUMP_INSN
3621 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
3622 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
3625 if (num_delay_slots (insn) > 0)
3626 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3628 /* Ensure all jumps go to the last of a set of consecutive labels. */
3629 if (GET_CODE (insn) == JUMP_INSN && condjump_p (insn)
3630 && JUMP_LABEL (insn) != 0
3631 && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
3632 != JUMP_LABEL (insn)))
3633 redirect_jump (insn, target);
3636 /* Indicate what resources are required to be valid at the end of the current
3637 function. The condition code never is and memory always is. If the
3638 frame pointer is needed, it is and so is the stack pointer unless
3639 EXIT_IGNORE_STACK is non-zero. If the frame pointer is not needed, the
3640 stack pointer is. Registers used to return the function value are
3641 needed. Registers holding global variables are needed. */
3643 end_of_function_needs.cc = 0;
3644 end_of_function_needs.memory = 1;
3645 CLEAR_HARD_REG_SET (end_of_function_needs.regs);
3647 if (frame_pointer_needed)
3649 SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
3650 #ifdef EXIT_IGNORE_STACK
3651 if (! EXIT_IGNORE_STACK)
3653 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
3656 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
3658 if (current_function_return_rtx != 0
3659 && GET_CODE (current_function_return_rtx) == REG)
3660 mark_referenced_resources (current_function_return_rtx,
3661 &end_of_function_needs, 0);
3663 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3665 SET_HARD_REG_BIT (end_of_function_needs.regs, i);
3667 /* The registers required to be live at the end of the function are
3668 represented in the flow information as being dead just prior to
3669 reaching the end of the function. For example, the return of a value
3670 might be represented by a USE of the return register immediately
3671 followed by an unconditional jump to the return label where the
3672 return label is the end of the RTL chain. The end of the RTL chain
3673 is then taken to mean that the return register is live.
3675 This sequence is no longer maintained when epilogue instructions are
3676 added to the RTL chain. To reconstruct the original meaning, the
3677 start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
3678 point where these registers become live (start_of_epilogue_needs).
3679 If epilogue instructions are present, the registers set by those
3680 instructions won't have been processed by flow. Thus, those
3681 registers are additionally required at the end of the RTL chain
3682 (end_of_function_needs). */
3684 start_of_epilogue_needs = end_of_function_needs;
3686 while (epilogue_insn = next_nonnote_insn (epilogue_insn))
3687 mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 0);
3689 /* Show we haven't computed an end-of-function label yet. */
3690 end_of_function_label = 0;
3692 /* Allocate and initialize the tables used by mark_target_live_regs. */
3694 = (struct target_info **) alloca ((TARGET_HASH_PRIME
3695 * sizeof (struct target_info *)));
3696 bzero (target_hash_table, TARGET_HASH_PRIME * sizeof (struct target_info *));
3698 bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
3699 bzero (bb_ticks, n_basic_blocks * sizeof (int));
3701 /* Initialize the statistics for this function. */
3702 bzero (num_insns_needing_delays, sizeof num_insns_needing_delays);
3703 bzero (num_filled_delays, sizeof num_filled_delays);
3705 /* Now do the delay slot filling. Try everything twice in case earlier
3706 changes make more slots fillable. */
3708 for (reorg_pass_number = 0;
3709 reorg_pass_number < MAX_REORG_PASSES;
3710 reorg_pass_number++)
3712 fill_simple_delay_slots (first, 1);
3713 fill_simple_delay_slots (first, 0);
3714 fill_eager_delay_slots (first);
3715 relax_delay_slots (first);
3718 /* Delete any USE insns made by update_block; subsequent passes don't need
3719 them or know how to deal with them. */
3720 for (insn = first; insn; insn = next)
3722 next = NEXT_INSN (insn);
3724 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
3725 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
3726 next = delete_insn (insn);
3729 /* If we made an end of function label, indicate that it is now
3730 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
3731 If it is now unused, delete it. */
3732 if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
3733 delete_insn (end_of_function_label);
3736 if (HAVE_return && end_of_function_label != 0)
3737 make_return_insns (first);
3740 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3742 /* It is not clear why the line below is needed, but it does seem to be. */
3743 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
3745 /* Reposition the prologue and epilogue notes in case we moved the
3746 prologue/epilogue insns. */
3747 reposition_prologue_and_epilogue_notes (first);
3751 register int i, j, need_comma;
3753 for (reorg_pass_number = 0;
3754 reorg_pass_number < MAX_REORG_PASSES;
3755 reorg_pass_number++)
3757 fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
3758 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
3761 fprintf (file, ";; Reorg function #%d\n", i);
3763 fprintf (file, ";; %d insns needing delay slots\n;; ",
3764 num_insns_needing_delays[i][reorg_pass_number]);
3766 for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
3767 if (num_filled_delays[i][j][reorg_pass_number])
3770 fprintf (file, ", ");
3772 fprintf (file, "%d got %d delays",
3773 num_filled_delays[i][j][reorg_pass_number], j);
3775 fprintf (file, "\n");
3780 #endif /* DELAY_SLOTS */