/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */

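/* As a concrete illustration of the problem this pass solves, consider a
   hypothetical single-delay-slot target (the MIPS-like mnemonics below are
   only illustrative and are not taken from any machine description):

	slot left unfilled:		slot filled by this pass:

		add  $4,$4,1			beq  $5,$6,L2
		beq  $5,$6,L2			add  $4,$4,1
		nop				...
		...

   The add neither feeds the comparison nor does any harm if the branch is
   taken, so it can be moved into the delay slot and no nop is needed.  */
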
#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "expr.h"
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "insn-flags.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
#include "resource.h"


#ifdef DELAY_SLOTS

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;

static int stop_search_p			PROTO((rtx, int));
static int resource_conflicts_p			PROTO((struct resources *,
						       struct resources *));
static int insn_references_resource_p		PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p			PROTO((rtx, struct resources *, int));
static rtx find_end_label			PROTO((void));
static rtx emit_delay_sequence			PROTO((rtx, rtx, int));
static rtx add_to_delay_list			PROTO((rtx, rtx));
static rtx delete_from_delay_slot		PROTO((rtx));
static void delete_scheduled_jump		PROTO((rtx));
static void note_delay_statistics		PROTO((int, int));
static rtx optimize_skip			PROTO((rtx));
static int get_jump_flags			PROTO((rtx, rtx));
static int rare_destination			PROTO((rtx));
static int mostly_true_jump			PROTO((rtx, rtx));
static rtx get_branch_condition			PROTO((rtx, rtx));
static int condition_dominates_p		PROTO((rtx, rtx));
static int redirect_with_delay_slots_safe_p	PROTO ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p	PROTO ((rtx, rtx, rtx));
static int check_annul_list_true_false		PROTO ((int, rtx));
static rtx steal_delay_list_from_target	PROTO((rtx, rtx, rtx, rtx,
					       struct resources *,
					       struct resources *,
					       struct resources *,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough	PROTO((rtx, rtx, rtx, rtx,
						       struct resources *,
						       struct resources *,
						       struct resources *,
						       int, int *, int *));
static void try_merge_delay_insns		PROTO((rtx, rtx));
static rtx redundant_insn			PROTO((rtx, rtx, rtx));
static int own_thread_p				PROTO((rtx, rtx, int));
static void update_block			PROTO((rtx, rtx));
static int reorg_redirect_jump			PROTO((rtx, rtx));
static void update_reg_dead_notes		PROTO((rtx, rtx));
static void fix_reg_dead_note			PROTO((rtx, rtx));
static void update_reg_unused_notes		PROTO((rtx, rtx));
static void fill_simple_delay_slots		PROTO((int));
static rtx fill_slots_from_thread		PROTO((rtx, rtx, rtx, rtx, int, int,
						       int, int, int *, rtx));
static void fill_eager_delay_slots		PROTO((void));
static void relax_delay_slots			PROTO((rtx));
static void make_return_insns			PROTO((rtx));
\f
/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (insn, labels_p)
     rtx insn;
     int labels_p;
{
  if (insn == 0)
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      abort ();
    }
}
\f
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return
   FALSE.  */

static int
resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}

/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, also return TRUE
   if the routine called by INSN uses those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}

/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of a routine called by
   INSN should be included.  CC0 is only modified if it is explicitly set;
   see comments in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}
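
/* A typical use of the two predicates above appears in the fill loops
   later in this file: a candidate TRIAL may be moved across the insns
   already summarized in SET and NEEDED only when, for instance,

	if (! insn_references_resource_p (trial, &set, 1)
	    && ! insn_sets_resource_p (trial, &set, 1)
	    && ! insn_sets_resource_p (trial, &needed, 1))
	  ...

   (compare the checks in try_merge_delay_insns below).  */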
\f
/* Find a label at the end of the function or before a RETURN.  If there is
   none, make one.  */

static rtx
find_end_label ()
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the
	 RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }

  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;
  else
    {
      /* Otherwise, make a new label and emit a RETURN and BARRIER,
	 if needed.  */
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      emit_label (end_of_function_label);
#ifdef HAVE_return
      if (HAVE_return)
	{
	  /* The return we make may have delay slots too.  */
	  rtx insn = gen_return ();
	  insn = emit_jump_insn (insn);
	  emit_barrier ();
	  if (num_delay_slots (insn) > 0)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
#endif
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}
\f
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */

static rtx
emit_delay_sequence (insn, list, length)
     rtx insn;
     rtx list;
     int length;
{
  register int i = 1;
  register rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
    {
      delete_insn (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (insn != last)
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (insn != first)
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* Remove any REG_DEAD notes because we can't rely on them now
	 that the insn has been moved.  */
      for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD)
	  XEXP (note, 0) = const0_rtx;
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  if (i != length + 1)
    abort ();

  return seq_insn;
}

/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (insn, delay_list)
     rtx insn;
     rtx delay_list;
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
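
/* For example, adding A, then B, then C (in execution order) to an empty
   list yields the INSN_LIST chain

	(insn_list A (insn_list B (insn_list C nil)))

   since each recursive call appends at the tail, preserving the order in
   which the delay slots will execute.  */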
\f
/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (insn)
     rtx insn;
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}
\f
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (insn)
     rtx insn;
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, 0))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_insn (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_insn (insn);
}
\f
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (slots_filled, index)
     int slots_filled, index;
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
\f
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
   use an annulling branch and put that insn in the delay slot.
   Use either a branch that annuls when the condition is true or
   invert the test with a branch that annuls when the condition is
   false.  This saves insns, since otherwise we must copy an insn
   from the L1 target.

	(orig)		(skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2		insn2
	insn3		insn3	      L1':
					insn3

   2.  When a conditional branch skips over only one instruction,
   and after that, it unconditionally branches somewhere else,
   perform a similar optimization.  This saves executing the
   second branch in the case where the inverted condition is true.

	Bcc.n L1	Bcc',a L2
	insn		insn
      L1:	      L1:
	Bra L2		Bra L2

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */

static rtx
optimize_skip (insn)
     register rtx insn;
{
  register rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  rtx target_label;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags)))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn))
       && ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
      || (next_trial != 0
	  && GET_CODE (next_trial) == JUMP_INSN
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn)))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_insn (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	{
	  target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* Recompute the flags based on TARGET_LABEL since threading
	     the jump to TARGET_LABEL may change the direction of the
	     jump (which may change the circumstances in which the
	     delay slot is nullified).  */
	  flags = get_jump_flags (insn, target_label);
	  if (eligible_for_annul_true (insn, 0, trial, flags))
	    reorg_redirect_jump (insn, target_label);
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
\f

/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Non-conditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (insn, label)
     rtx insn, label;
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
     direction.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && INSN_UID (insn) <= max_uid
      && label != 0
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If insn is a conditional branch, call mostly_true_jump to determine
     the branch prediction.

     Non-conditional branches are predicted as very likely taken.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	case 2:
	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  break;
	case 1:
	  flags |= ATTR_FLAG_likely;
	  break;
	case 0:
	  flags |= ATTR_FLAG_unlikely;
	  break;
	case -1:
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	  break;

	default:
	  abort ();
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}
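
/* For example, a conditional branch to a label earlier in the function
   that looks very likely to be taken would come back as

	ATTR_FLAG_backward | ATTR_FLAG_very_likely | ATTR_FLAG_likely

   which the eligibility tests generated from the machine description's
   `define_delay' patterns can then examine.  */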

/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if INSN will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (insn)
     rtx insn;
{
  int jump_count = 0;
  rtx next;

  for (; insn; insn = next)
    {
      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  return 0;
	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;
	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    return 1;
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;

	default:
	  break;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}

/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx insn;
  int rare_dest = rare_destination (target_label);
  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  if (flag_branch_probabilities)
    {
      rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
      if (note)
	{
	  int prob = XINT (note, 0);

	  if (prob >= REG_BR_PROB_BASE * 9 / 10)
	    return 2;
	  else if (prob >= REG_BR_PROB_BASE / 2)
	    return 1;
	  else if (prob >= REG_BR_PROB_BASE / 10)
	    return 0;
	  else
	    return -1;
	}
    }

  /* If this is a branch outside a loop, it is highly unlikely.  */
  if (GET_CODE (PATTERN (jump_insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
    return -1;

  if (target_label)
    {
      /* If this is the test of a loop, it is very likely true.  We scan
	 backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
	 before the next real insn, we assume the branch is to the top of
	 the loop.  */
      for (insn = PREV_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = PREV_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  return 2;

      /* If this is a jump to the test of a loop, it is likely true.  We scan
	 forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
	 before the next real insn, we assume the branch is to the loop branch
	 test.  */
      for (insn = NEXT_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = NEXT_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
	  return 1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      return 1;
    case EQ:
      return 0;
    case NE:
      return 1;
    case LE:
    case LT:
      if (XEXP (condition, 1) == const0_rtx)
	return 0;
      break;
    case GE:
    case GT:
      if (XEXP (condition, 1) == const0_rtx)
	return 1;
      break;

    default:
      break;
    }

  /* Predict that backward branches are usually taken and forward branches
     usually are not.  If we don't know whether this is forward or backward,
     assume the branch will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}
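
/* Worked example for the branch-probability case above: REG_BR_PROB_BASE
   is 10000, so a REG_BR_PROB note of 9500 yields 2 (very likely taken),
   6000 yields 1, 2000 yields 0, and 500 yields -1 (very unlikely).  */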

/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (insn, target)
     rtx insn;
     rtx target;
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    return gen_rtx_fmt_ee (reverse_condition (GET_CODE (XEXP (src, 0))),
			   GET_MODE (XEXP (src, 0)),
			   XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));

  return 0;
}
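
/* The two IF_THEN_ELSE shapes handled above correspond to conditional
   jump RTL such as

	(set (pc) (if_then_else (eq (reg:SI 1) (const_int 0))
				(label_ref TARGET)
				(pc)))

   and its mirror image with the (pc) arm first, in which case the
   returned condition is reversed with reverse_condition.  */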

/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (condition, insn)
     rtx condition;
     rtx insn;
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}

/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (jump, newlabel, seq)
     rtx jump, newlabel, seq;
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}

/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
     rtx jump, newlabel, delay_list;
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}

/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (annul_true_p, delay_list)
     int annul_true_p;
     rtx delay_list;
{
  rtx temp;

  if (delay_list)
    {
      for (temp = delay_list; temp; temp = XEXP (temp, 1))
	{
	  rtx trial = XEXP (temp, 0);

	  if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	      || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
	    return 0;
	}
    }

  return 1;
}

\f
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */

static rtx
steal_delay_list_from_target (insn, condition, seq, delay_list,
			      sets, needed, other_needed,
			      slots_to_fill, pslots_filled, pannul_p,
			      pnew_thread)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
     rtx *pnew_thread;
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We cannot steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We cannot do this because we cannot
     change the direction of the jump because the condition codes
     will affect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, 1);
      if (insn_references_resource_p (XVECEXP (seq, 0, 0), &cc_set, 0))
	return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay list.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))
	     && (must_annul = 1,
		 check_annul_list_true_false (0, delay_list)
		 && check_annul_list_true_false (0, new_delay_list)
		 && eligible_for_annul_false (insn, total_slots_filled,
					      trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
\f
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (insn, condition, seq,
				   delay_list, sets, needed, other_needed,
				   slots_to_fill, pslots_filled, pannul_p)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )
	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (must_annul || delay_list == NULL) && (must_annul = 1,
	     check_annul_list_true_false (1, delay_list)
	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}

\f
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (insn, thread)
     rtx insn, thread;
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
	mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (GET_CODE (trial) == INSN
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_insn (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	}

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, 1);
      mark_referenced_resources (filled_insn, &needed, 1);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &needed, 1)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  rtx new;

		  update_block (dtrial, thread);
		  new = delete_from_delay_slot (dtrial);
		  if (INSN_DELETED_P (thread))
		    thread = new;
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
						  merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	  else
	    {
	      /* Keep track of the set/referenced resources for the delay
		 slots of any trial insns we encounter.  */
	      mark_set_resources (dtrial, &set, 0, 1);
	      mark_referenced_resources (dtrial, &needed, 1);
	    }
	}
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we no longer need to annul it.  In that case delete
     all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each insn
     in the delay list so that we know that it isn't only being used at the
     target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      rtx new;

	      update_block (XEXP (merged_insns, 0), thread);
	      new = delete_from_delay_slot (XEXP (merged_insns, 0));
	      if (INSN_DELETED_P (thread))
		thread = new;
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_insn (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
1586\f
1587/* See if INSN is redundant with an insn in front of TARGET. Often this
1588 is called when INSN is a candidate for a delay slot of TARGET.
1589 DELAY_LIST are insns that will be placed in delay slots of TARGET in front
1590 of INSN. Often INSN will be redundant with an insn in a delay slot of
1591 some previous insn. This happens when we have a series of branches to the
1592 same label; in that case the first insn at the target might want to go
1593 into each of the delay slots.
1594
1595 If we are not careful, this routine can take up a significant fraction
1596 of the total compilation time (4%), but only wins rarely. Hence we
1597 speed this routine up by making two passes. The first pass goes back
1598 until it hits a label and sees if it can find an insn with an identical
1599 pattern. Only in this (relatively rare) event does it check for
1600 data conflicts.
1601
1602 We do not split insns we encounter. This could cause us not to find a
1603 redundant insn, but the cost of splitting seems greater than the possible
1604 gain in rare cases. */
1605
5317d2f8 1606static rtx
f898abd7 1607redundant_insn (insn, target, delay_list)
1608 rtx insn;
1609 rtx target;
1610 rtx delay_list;
1611{
1612 rtx target_main = target;
1613 rtx ipat = PATTERN (insn);
1614 rtx trial, pat;
1615 struct resources needed, set;
1616 int i;
1617
1618 /* If INSN has any REG_UNUSED notes, it can't match anything since we
1619 are allowed to not actually assign to such a register. */
1620 if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
1621 return 0;
1622
1623 /* Scan backwards looking for a match. */
1624 for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
1625 {
1626 if (GET_CODE (trial) == CODE_LABEL)
1627 return 0;
1628
009f6146 1629 if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
1630 continue;
1631
1632 pat = PATTERN (trial);
1633 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1634 continue;
1635
1636 if (GET_CODE (pat) == SEQUENCE)
1637 {
1638 /* Stop for a CALL and its delay slots because it is difficult to
1639 track its resource needs correctly. */
1640 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1641 return 0;
1642
1643 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1644 slots because it is difficult to track its resource needs
1645 correctly. */
1646
1647#ifdef INSN_SETS_ARE_DELAYED
1648 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1649 return 0;
1650#endif
1651
1652#ifdef INSN_REFERENCES_ARE_DELAYED
1653 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1654 return 0;
1655#endif
1656
1657 /* See if any of the insns in the delay slot match, updating
1658 resource requirements as we go. */
1659 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1660 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
1661 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
1662 && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
1663 break;
1664
1665 /* If we found a match, exit this loop early. */
1666 if (i > 0)
1667 break;
1668 }
1669
1670 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
1671 && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
1672 break;
1673 }
1674
1675 /* If we didn't find an insn that matches, return 0. */
1676 if (trial == 0)
1677 return 0;
1678
1679 /* See what resources this insn sets and needs. If they overlap, or
1680 if this insn references CC0, it can't be redundant. */
1681
1682 CLEAR_RESOURCE (&needed);
1683 CLEAR_RESOURCE (&set);
26d970a5 1684 mark_set_resources (insn, &set, 0, 1);
1685 mark_referenced_resources (insn, &needed, 1);
1686
1687 /* If TARGET is a SEQUENCE, get the main insn. */
1688 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1689 target_main = XVECEXP (PATTERN (target), 0, 0);
1690
1691 if (resource_conflicts_p (&needed, &set)
1692#ifdef HAVE_cc0
1693 || reg_mentioned_p (cc0_rtx, ipat)
1694#endif
1695 /* The insn requiring the delay may not set anything needed or set by
1696 INSN. */
1697 || insn_sets_resource_p (target_main, &needed, 1)
1698 || insn_sets_resource_p (target_main, &set, 1))
1699 return 0;
1700
1701 /* Insns we pass may not set either NEEDED or SET, so merge them for
1702 simpler tests. */
1703 needed.memory |= set.memory;
8eae5ed6 1704 needed.unch_memory |= set.unch_memory;
1705 IOR_HARD_REG_SET (needed.regs, set.regs);
1706
1707 /* This insn isn't redundant if it conflicts with an insn that either is
1708 or will be in a delay slot of TARGET. */
1709
1710 while (delay_list)
1711 {
1712 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
1713 return 0;
1714 delay_list = XEXP (delay_list, 1);
1715 }
1716
1717 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1718 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
1719 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
1720 return 0;
1721
1722 /* Scan backwards until we reach a label or an insn that uses something
1723 INSN sets or sets something INSN uses or sets. */
1724
1725 for (trial = PREV_INSN (target);
1726 trial && GET_CODE (trial) != CODE_LABEL;
1727 trial = PREV_INSN (trial))
1728 {
1729 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
1730 && GET_CODE (trial) != JUMP_INSN)
1731 continue;
1732
1733 pat = PATTERN (trial);
1734 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1735 continue;
1736
1737 if (GET_CODE (pat) == SEQUENCE)
1738 {
1739 /* If this is a CALL_INSN and its delay slots, it is hard to track
1740 the resource needs properly, so give up. */
1741 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1742 return 0;
1743
38e01259 1744 /* If this is an INSN or JUMP_INSN with delayed effects, it
1745 is hard to track the resource needs properly, so give up. */
1746
1747#ifdef INSN_SETS_ARE_DELAYED
1748 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1749 return 0;
1750#endif
1751
1752#ifdef INSN_REFERENCES_ARE_DELAYED
1753 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1754 return 0;
1755#endif
1756
1757 /* See if any of the insns in the delay slot match, updating
1758 resource requirements as we go. */
1759 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1760 {
1761 rtx candidate = XVECEXP (pat, 0, i);
1762
1763 /* If an insn will be annulled if the branch is false, it isn't
1764 considered as a possible duplicate insn. */
1765 if (rtx_equal_p (PATTERN (candidate), ipat)
1766 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1767 && INSN_FROM_TARGET_P (candidate)))
1768 {
1769 /* Show that this insn will be used in the sequel. */
1770 INSN_FROM_TARGET_P (candidate) = 0;
5317d2f8 1771 return candidate;
1772 }
1773
1774 /* Unless this is an annulled insn from the target of a branch,
1775 we must stop if it sets anything needed or set by INSN. */
1776 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1777 || ! INSN_FROM_TARGET_P (candidate))
1778 && insn_sets_resource_p (candidate, &needed, 1))
1779 return 0;
1780 }
1781
1782
1783 /* If the insn requiring the delay slot conflicts with INSN, we
1784 must stop. */
1785 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
1786 return 0;
1787 }
1788 else
1789 {
1790 /* See if TRIAL is the same as INSN. */
1791 pat = PATTERN (trial);
1792 if (rtx_equal_p (pat, ipat))
5317d2f8 1793 return trial;
1794
1795 /* Can't go any further if TRIAL conflicts with INSN. */
1796 if (insn_sets_resource_p (trial, &needed, 1))
1797 return 0;
1798 }
1799 }
1800
1801 return 0;
1802}
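/* The two-pass structure described above can be restated schematically
   as follows (illustrative only; the conflict tests of the real second
   pass are elided, and the helper name is hypothetical).  */
#if 0
static rtx
redundant_insn_sketch (insn, target)
     rtx insn, target;
{
  rtx trial;

  /* Pass 1: cheap backward scan for an identical pattern, giving up
     at the first label.  */
  for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) == CODE_LABEL)
	return 0;
      if (GET_RTX_CLASS (GET_CODE (trial)) == 'i'
	  && rtx_equal_p (PATTERN (trial), PATTERN (insn)))
	break;
    }
  if (trial == 0)
    return 0;

  /* Pass 2 (rare): re-walk the same span, verifying that no intervening
     insn sets anything INSN needs or sets, as the code above does.  */
  return trial;
}
#endif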
1803\f
1804/* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
1805 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
1806 is non-zero, we are allowed to fall into this thread; otherwise, we are
1807 not.
1808
1809 If LABEL is used more than once or we pass a label other than LABEL before
1810 finding an active insn, we do not own this thread. */
1811
1812static int
1813own_thread_p (thread, label, allow_fallthrough)
1814 rtx thread;
1815 rtx label;
1816 int allow_fallthrough;
1817{
1818 rtx active_insn;
1819 rtx insn;
1820
1821 /* We don't own the function end. */
1822 if (thread == 0)
1823 return 0;
1824
1825 /* Get the first active insn, or THREAD, if it is an active insn. */
1826 active_insn = next_active_insn (PREV_INSN (thread));
1827
1828 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
1829 if (GET_CODE (insn) == CODE_LABEL
1830 && (insn != label || LABEL_NUSES (insn) != 1))
1831 return 0;
1832
1833 if (allow_fallthrough)
1834 return 1;
1835
1836 /* Ensure that we reach a BARRIER before any insn or label. */
1837 for (insn = prev_nonnote_insn (thread);
1838 insn == 0 || GET_CODE (insn) != BARRIER;
1839 insn = prev_nonnote_insn (insn))
1840 if (insn == 0
1841 || GET_CODE (insn) == CODE_LABEL
1842 || (GET_CODE (insn) == INSN
1843 && GET_CODE (PATTERN (insn)) != USE
1844 && GET_CODE (PATTERN (insn)) != CLOBBER))
1845 return 0;
1846
1847 return 1;
1848}
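/* A typical use of own_thread_p, mirroring the call made for
   unconditional jumps in fill_simple_delay_slots below: THREAD and LABEL
   are both the jump target and fall-through is not allowed, so the
   result is non-zero only when the target is reached solely through
   this jump (illustrative fragment).  */
#if 0
own_thread_p (JUMP_LABEL (insn), JUMP_LABEL (insn), 0);
#endif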
1849\f
9c7e2978 1850/* Called when INSN is being moved from a location near the target of a jump.
aa2c50d6 1851 We leave a marker of the form (use (INSN)) immediately in front
9c7e2978 1852 of WHERE for mark_target_live_regs. These markers will be deleted when
1853 reorg finishes.
1854
1855 We used to try to update the live status of registers if WHERE is at
1856 the start of a basic block, but that can't work since we may remove a
1857 BARRIER in relax_delay_slots. */
1858
1859static void
1860update_block (insn, where)
1861 rtx insn;
1862 rtx where;
1863{
1864 /* Ignore if this was in a delay slot and it came from the target of
1865 a branch. */
1866 if (INSN_FROM_TARGET_P (insn))
1867 return;
1868
38a448ca 1869 emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
1870
1871 /* INSN might be making a value live in a block where it didn't use to
1872 be. So recompute liveness information for this block. */
aa2c50d6 1873
ca545bb5 1874 incr_ticks_for_insn (insn);
9c7e2978 1875}
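/* After update_block runs, a (use (insn ...)) marker sits at the old
   location, so mark_target_live_regs still sees the moved insn's effects
   there until reorg finishes.  Callers below pair it with deletion of
   the original, roughly as in this illustrative fragment.  */
#if 0
update_block (trial, thread);	/* Leave the marker where TRIAL lived.  */
delete_insn (trial);		/* Then remove the original copy.  */
#endif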
28c9500b 1876
1877/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
1878 the basic block containing the jump. */
1879
1880static int
1881reorg_redirect_jump (jump, nlabel)
1882 rtx jump;
1883 rtx nlabel;
1884{
ca545bb5 1885 incr_ticks_for_insn (jump);
1886 return redirect_jump (jump, nlabel);
1887}
1888
1889/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1890 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1891 that reference values used in INSN. If we find one, then we move the
1892 REG_DEAD note to INSN.
1893
1894 This is needed to handle the case where a later insn (after INSN) has a
1895 REG_DEAD note for a register used by INSN, and this later insn subsequently
1896 gets moved before a CODE_LABEL because it is a redundant insn. In this
1897 case, mark_target_live_regs may be confused into thinking the register
1898 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
1899
1900static void
1901update_reg_dead_notes (insn, delayed_insn)
1902 rtx insn, delayed_insn;
1903{
1904 rtx p, link, next;
1905
1906 for (p = next_nonnote_insn (insn); p != delayed_insn;
1907 p = next_nonnote_insn (p))
1908 for (link = REG_NOTES (p); link; link = next)
1909 {
1910 next = XEXP (link, 1);
1911
1912 if (REG_NOTE_KIND (link) != REG_DEAD
1913 || GET_CODE (XEXP (link, 0)) != REG)
1914 continue;
1915
1916 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1917 {
1918 /* Move the REG_DEAD note from P to INSN. */
1919 remove_note (p, link);
1920 XEXP (link, 1) = REG_NOTES (insn);
1921 REG_NOTES (insn) = link;
1922 }
1923 }
1924}
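/* A worked scenario for update_reg_dead_notes (illustrative; the register
   number is made up).  INSN uses r4 and moves into the delay slot of
   DELAYED_INSN; an insn P between them carries (REG_DEAD r4).  If P were
   later deleted as redundant just before a CODE_LABEL, the stranded note
   would fool mark_target_live_regs into believing r4 is dead at the
   label.  Moving the note onto INSN first, as the loop above does, keeps
   the death information attached to the insn that actually uses the
   register.  */
#if 0
update_reg_dead_notes (insn, delayed_insn);
#endif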
5317d2f8 1925
1926/* Called when an insn redundant with start_insn is deleted. If there
1927 is a REG_DEAD note for the target of start_insn between start_insn
1928 and stop_insn, then the REG_DEAD note needs to be deleted since the
1929 value no longer dies there.
1930
1931 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1932 confused into thinking the register is dead. */
1933
1934static void
1935fix_reg_dead_note (start_insn, stop_insn)
1936 rtx start_insn, stop_insn;
1937{
1938 rtx p, link, next;
1939
1940 for (p = next_nonnote_insn (start_insn); p != stop_insn;
1941 p = next_nonnote_insn (p))
1942 for (link = REG_NOTES (p); link; link = next)
1943 {
1944 next = XEXP (link, 1);
1945
1946 if (REG_NOTE_KIND (link) != REG_DEAD
1947 || GET_CODE (XEXP (link, 0)) != REG)
1948 continue;
1949
1950 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
1951 {
1952 remove_note (p, link);
1953 return;
1954 }
1955 }
1956}
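/* fix_reg_dead_note handles the mirror-image case (illustrative; the
   register number is made up).  START_INSN sets r5 but is deleted because
   an earlier insn already computes the value; a later insn carried
   (REG_DEAD r5) on the assumption that START_INSN's result died there.
   Since the earlier value now flows through, the note must be removed, as
   in this call from fill_slots_from_thread below.  */
#if 0
fix_reg_dead_note (prior_insn, insn);
#endif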
1957
1958/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1959
1960 This handles the case of udivmodXi4 instructions which optimize their
1961 output depending on whether any REG_UNUSED notes are present.
1962 We must make sure that INSN calculates as many results as REDUNDANT_INSN
1963 does. */
1964
1965static void
1966update_reg_unused_notes (insn, redundant_insn)
1967 rtx insn, redundant_insn;
1968{
91a51951 1969 rtx link, next;
1970
1971 for (link = REG_NOTES (insn); link; link = next)
1972 {
1973 next = XEXP (link, 1);
1974
1975 if (REG_NOTE_KIND (link) != REG_UNUSED
1976 || GET_CODE (XEXP (link, 0)) != REG)
1977 continue;
1978
1979 if (! find_regno_note (redundant_insn, REG_UNUSED,
1980 REGNO (XEXP (link, 0))))
1981 remove_note (insn, link);
1982 }
1983}
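/* An illustrative scenario for update_reg_unused_notes.  A divmod-style
   pattern computes both quotient and remainder, and a REG_UNUSED note on
   one result lets the port emit a cheaper form.  When INSN stands in for
   REDUNDANT_INSN, any REG_UNUSED note on INSN that REDUNDANT_INSN lacks
   is stale -- INSN must keep computing every result the replaced insn
   did -- so the loop above drops it.  Typical call, as made from
   fill_slots_from_thread below.  */
#if 0
update_reg_unused_notes (prior_insn, trial);
#endif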
9c7e2978 1984\f
1985/* Scan a function looking for insns that need a delay slot and find insns to
1986 put into the delay slot.
1987
1988 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
1989 as calls). We do these first since we don't want jump insns (that are
1990 easier to fill) to get the only insns that could be used for non-jump insns.
1991 When it is zero, only try to fill JUMP_INSNs.
1992
1993 When slots are filled in this manner, the insns (including the
1994 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
1995 it is possible to tell whether a delay slot has really been filled
1996 or not. `final' knows how to deal with this, by communicating
1997 through FINAL_SEQUENCE. */
1998
1999static void
91a51951 2000fill_simple_delay_slots (non_jumps_p)
d8e8f346 2001 int non_jumps_p;
2002{
2003 register rtx insn, pat, trial, next_trial;
91a51951 2004 register int i;
2005 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2006 struct resources needed, set;
126db1fa 2007 int slots_to_fill, slots_filled;
2008 rtx delay_list;
2009
2010 for (i = 0; i < num_unfilled_slots; i++)
2011 {
35523fce 2012 int flags;
2013 /* Get the next insn to fill. If it has already had any slots assigned,
2014 we can't do anything with it. Maybe we'll improve this later. */
2015
2016 insn = unfilled_slots_base[i];
2017 if (insn == 0
2018 || INSN_DELETED_P (insn)
2019 || (GET_CODE (insn) == INSN
2020 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2021 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2022 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2023 continue;
35523fce 2024
2025 /* It may have been that this insn used to need delay slots, but
2026 now doesn't; ignore in that case. This can happen, for example,
2027 on the HP PA RISC, where the number of delay slots depends on
2028 what insns are nearby. */
9c7e2978 2029 slots_to_fill = num_delay_slots (insn);
2030
2031 /* Some machine descriptions have defined instructions to have
2032 delay slots only in certain circumstances which may depend on
2033 nearby insns (which change due to reorg's actions).
2034
2035 For example, the PA port normally has delay slots for unconditional
2036 jumps.
2037
2038 However, the PA port claims such jumps do not have a delay slot
2039 if they are immediate successors of certain CALL_INSNs. This
2040 allows the port to favor filling the delay slot of the call with
2041 the unconditional jump. */
9c7e2978 2042 if (slots_to_fill == 0)
c3a3b536 2043 continue;
2044
2045 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2046 says how many. After initialization, first try optimizing
2047
2048 call _foo call _foo
2049 nop add %o7,.-L1,%o7
2050 b,a L1
2051 nop
2052
2053 If this case applies, the delay slot of the call is filled with
2054 the unconditional jump. This is done first to avoid having the
2055 delay slot of the call filled in the backward scan. Also, since
2056 the unconditional jump is likely to also have a delay slot, that
2057 insn must exist when it is subsequently scanned.
2058
2059 This is tried on each insn with delay slots as some machines
2060 have insns which perform calls, but are not represented as
2061 CALL_INSNs. */
2062
2063 slots_filled = 0;
2064 delay_list = 0;
2065
2066 if (GET_CODE (insn) == JUMP_INSN)
2067 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2068 else
2069 flags = get_jump_flags (insn, NULL_RTX);
2070
733fa7ef 2071 if ((trial = next_active_insn (insn))
2072 && GET_CODE (trial) == JUMP_INSN
2073 && simplejump_p (trial)
35523fce 2074 && eligible_for_delay (insn, slots_filled, trial, flags)
464b453d
TW
2075 && no_labels_between_p (insn, trial))
2076 {
45d9a5c6 2077 rtx *tmp;
2078 slots_filled++;
2079 delay_list = add_to_delay_list (trial, delay_list);
2080
2081 /* TRIAL may have had its delay slot filled, then unfilled. When
2082 the delay slot is unfilled, TRIAL is placed back on the unfilled
2083 slots obstack. Unfortunately, it is placed on the end of the
2084 obstack, not in its original location. Therefore, we must search
2085 from entry i + 1 to the end of the unfilled slots obstack to
2086 try and find TRIAL. */
2087 tmp = &unfilled_slots_base[i + 1];
2088 while (*tmp != trial && tmp != unfilled_slots_next)
2089 tmp++;
2090
464b453d 2091 /* Remove the unconditional jump from consideration for delay slot
2092 filling and unthread it. */
2093 if (*tmp == trial)
2094 *tmp = 0;
2095 {
2096 rtx next = NEXT_INSN (trial);
2097 rtx prev = PREV_INSN (trial);
2098 if (prev)
2099 NEXT_INSN (prev) = next;
2100 if (next)
2101 PREV_INSN (next) = prev;
2102 }
2103 }
2104
2105 /* Now, scan backwards from the insn to search for a potential
2106 delay-slot candidate. Stop searching when a label or jump is hit.
2107
2108 For each candidate, if it is to go into the delay slot (moved
2109 forward in execution sequence), it must not need or set any resources
2110 that were set by later insns and must not set any resources that
2111 are needed for those insns.
2112
2113 The delay slot insn itself sets resources unless it is a call
2114 (in which case the called routine, not the insn itself, is doing
2115 the setting). */
2116
464b453d 2117 if (slots_filled < slots_to_fill)
9c7e2978 2118 {
2119 CLEAR_RESOURCE (&needed);
2120 CLEAR_RESOURCE (&set);
2121 mark_set_resources (insn, &set, 0, 0);
2122 mark_referenced_resources (insn, &needed, 0);
9c7e2978 2123
2124 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2125 trial = next_trial)
2126 {
2127 next_trial = prev_nonnote_insn (trial);
9c7e2978 2128
2129 /* This must be an INSN or CALL_INSN. */
2130 pat = PATTERN (trial);
2131
2132 /* USE and CLOBBER at this level are just for flow; ignore them. */
2133 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2134 continue;
9c7e2978 2135
2136 /* Check for resource conflict first, to avoid unnecessary
2137 splitting. */
2138 if (! insn_references_resource_p (trial, &set, 1)
2139 && ! insn_sets_resource_p (trial, &set, 1)
2140 && ! insn_sets_resource_p (trial, &needed, 1)
9c7e2978 2141#ifdef HAVE_cc0
464b453d 2142 /* Can't separate set of cc0 from its use. */
c5c76735 2143 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
9c7e2978 2144#endif
464b453d 2145 )
9c7e2978 2146 {
2147 trial = try_split (pat, trial, 1);
2148 next_trial = prev_nonnote_insn (trial);
35523fce 2149 if (eligible_for_delay (insn, slots_filled, trial, flags))
2150 {
2151 /* In this case, we are searching backward, so if we
2152 find insns to put on the delay list, we want
2153 to put them at the head, rather than the
2154 tail, of the list. */
2155
28c9500b 2156 update_reg_dead_notes (trial, insn);
2157 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2158 trial, delay_list);
2159 update_block (trial, trial);
2160 delete_insn (trial);
2161 if (slots_to_fill == ++slots_filled)
2162 break;
2163 continue;
2164 }
9c7e2978 2165 }
9c7e2978 2166
2167 mark_set_resources (trial, &set, 0, 1);
2168 mark_referenced_resources (trial, &needed, 1);
2169 }
2170 }
2171
2172 /* If all needed slots haven't been filled, we come here. */
2173
2174 /* Try to optimize case of jumping around a single insn. */
2175#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2176 if (slots_filled != slots_to_fill
2177 && delay_list == 0
2178 && GET_CODE (insn) == JUMP_INSN
2179 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2180 {
2181 delay_list = optimize_skip (insn);
2182 if (delay_list)
2183 slots_filled += 1;
2184 }
2185#endif
2186
2187 /* Try to get insns from beyond the insn needing the delay slot.
2188 These insns can neither set nor reference resources set in insns being
2189 skipped, cannot set resources in the insn being skipped, and, if this
2190 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2191 call might not return).
2192
2193 There used to be code which continued past the target label if
2194 we saw all uses of the target label. This code did not work,
2195 because it failed to account for some instructions which were
2196 both annulled and marked as from the target. This can happen as a
2197 result of optimize_skip. Since this code was redundant with
2198 fill_eager_delay_slots anyways, it was just deleted. */
9c7e2978 2199
2200 if (slots_filled != slots_to_fill
2201 && (GET_CODE (insn) != JUMP_INSN
2202 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2203 && ! simplejump_p (insn)
2f9ba5a9 2204 && JUMP_LABEL (insn) != 0)))
2205 {
2206 rtx target = 0;
2207 int maybe_never = 0;
2208 struct resources needed_at_jump;
2209
2210 CLEAR_RESOURCE (&needed);
2211 CLEAR_RESOURCE (&set);
2212
2213 if (GET_CODE (insn) == CALL_INSN)
2214 {
26d970a5 2215 mark_set_resources (insn, &set, 0, 1);
2216 mark_referenced_resources (insn, &needed, 1);
2217 maybe_never = 1;
2218 }
2f9ba5a9 2219 else
9c7e2978 2220 {
2221 mark_set_resources (insn, &set, 0, 1);
2222 mark_referenced_resources (insn, &needed, 1);
2f9ba5a9 2223 if (GET_CODE (insn) == JUMP_INSN)
f03e51d4 2224 target = JUMP_LABEL (insn);
2225 }
2226
2227 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2228 {
2229 rtx pat, trial_delay;
2230
2231 next_trial = next_nonnote_insn (trial);
2232
2233 if (GET_CODE (trial) == CODE_LABEL
2234 || GET_CODE (trial) == BARRIER)
2235 break;
2236
2237 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2238 pat = PATTERN (trial);
2239
2240 /* Stand-alone USE and CLOBBER are just for flow. */
2241 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2242 continue;
2243
2244 /* If this already has filled delay slots, get the insn needing
2245 the delay slots. */
2246 if (GET_CODE (pat) == SEQUENCE)
2247 trial_delay = XVECEXP (pat, 0, 0);
2248 else
2249 trial_delay = trial;
2250
2251 /* If this is a jump insn to our target, indicate that we have
2252 seen another jump to it. If we aren't handling a conditional
2253 jump, stop our search. Otherwise, compute the needs at its
2254 target and add them to NEEDED. */
2255 if (GET_CODE (trial_delay) == JUMP_INSN)
2256 {
2257 if (target == 0)
2258 break;
f03e51d4 2259 else if (JUMP_LABEL (trial_delay) != target)
9c7e2978 2260 {
2261 rtx ninsn =
2262 next_active_insn (JUMP_LABEL (trial_delay));
2263
2264 mark_target_live_regs (get_insns (), ninsn,
2265 &needed_at_jump);
9c7e2978 2266 needed.memory |= needed_at_jump.memory;
8eae5ed6 2267 needed.unch_memory |= needed_at_jump.unch_memory;
2268 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
2269 }
2270 }
2271
2272 /* See if we have a resource problem before we try to
2273 split. */
2274 if (target == 0
2275 && GET_CODE (pat) != SEQUENCE
2276 && ! insn_references_resource_p (trial, &set, 1)
2277 && ! insn_sets_resource_p (trial, &set, 1)
2278 && ! insn_sets_resource_p (trial, &needed, 1)
2279#ifdef HAVE_cc0
2280 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2281#endif
2282 && ! (maybe_never && may_trap_p (pat))
2283 && (trial = try_split (pat, trial, 0))
35523fce 2284 && eligible_for_delay (insn, slots_filled, trial, flags))
2285 {
2286 next_trial = next_nonnote_insn (trial);
2287 delay_list = add_to_delay_list (trial, delay_list);
2288
2289#ifdef HAVE_cc0
2290 if (reg_mentioned_p (cc0_rtx, pat))
2291 link_cc0_insns (trial);
2292#endif
2293
2294 delete_insn (trial);
2295 if (slots_to_fill == ++slots_filled)
2296 break;
2297 continue;
2298 }
2299
26d970a5 2300 mark_set_resources (trial, &set, 0, 1);
2301 mark_referenced_resources (trial, &needed, 1);
2302
2303 /* Ensure we don't put insns between the setting of cc and the
2304 comparison by moving a setting of cc into an earlier delay
2305 slot since these insns could clobber the condition code. */
2306 set.cc = 1;
2307
2308 /* If this is a call or jump, we might not get here. */
2309 if (GET_CODE (trial_delay) == CALL_INSN
2310 || GET_CODE (trial_delay) == JUMP_INSN)
2311 maybe_never = 1;
2312 }
2313
2314 /* If there are slots left to fill and our search was stopped by an
2315 unconditional branch, try the insn at the branch target. We can
2316 redirect the branch if it works.
2317
2318 Don't do this if the insn at the branch target is a branch. */
2319 if (slots_to_fill != slots_filled
2320 && trial
2321 && GET_CODE (trial) == JUMP_INSN
2322 && simplejump_p (trial)
2323 && (target == 0 || JUMP_LABEL (trial) == target)
2324 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2325 && ! (GET_CODE (next_trial) == INSN
2326 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
6f7775d5 2327 && GET_CODE (next_trial) != JUMP_INSN
2328 && ! insn_references_resource_p (next_trial, &set, 1)
2329 && ! insn_sets_resource_p (next_trial, &set, 1)
2330 && ! insn_sets_resource_p (next_trial, &needed, 1)
2331#ifdef HAVE_cc0
d6749dec 2332 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2333#endif
2334 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
2335 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
35523fce 2336 && eligible_for_delay (insn, slots_filled, next_trial, flags))
2337 {
2338 rtx new_label = next_active_insn (next_trial);
2339
2340 if (new_label != 0)
2341 new_label = get_label_before (new_label);
2342 else
2343 new_label = find_end_label ();
2344
2345 delay_list
2346 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2347 slots_filled++;
326f06f7 2348 reorg_redirect_jump (trial, new_label);
2349
2350 /* If we merged because we both jumped to the same place,
2351 redirect the original insn also. */
2352 if (target)
326f06f7 2353 reorg_redirect_jump (insn, new_label);
2354 }
2355 }
2356
2357 /* If this is an unconditional jump, then try to get insns from the
2358 target of the jump. */
2359 if (GET_CODE (insn) == JUMP_INSN
2360 && simplejump_p (insn)
2361 && slots_filled != slots_to_fill)
2362 delay_list
2363 = fill_slots_from_thread (insn, const_true_rtx,
2364 next_active_insn (JUMP_LABEL (insn)),
2365 NULL, 1, 1,
2366 own_thread_p (JUMP_LABEL (insn),
2367 JUMP_LABEL (insn), 0),
2368 slots_to_fill, &slots_filled,
2369 delay_list);
126db1fa 2370
2371 if (delay_list)
2372 unfilled_slots_base[i]
91a51951 2373 = emit_delay_sequence (insn, delay_list, slots_filled);
2374
2375 if (slots_to_fill == slots_filled)
2376 unfilled_slots_base[i] = 0;
2377
2378 note_delay_statistics (slots_filled, 0);
2379 }
2380
2381#ifdef DELAY_SLOTS_FOR_EPILOGUE
2382 /* See if the epilogue needs any delay slots. Try to fill them if so.
2383 The only thing we can do is scan backwards from the end of the
2384 function. If we did this in a previous pass, it is incorrect to do it
2385 again. */
2386 if (current_function_epilogue_delay_list)
2387 return;
2388
2389 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2390 if (slots_to_fill == 0)
2391 return;
2392
2393 slots_filled = 0;
2394 CLEAR_RESOURCE (&set);
2395
2396 /* The frame pointer and stack pointer are needed at the beginning of
2397 the epilogue, so instructions setting them can not be put in the
2398 epilogue delay slot. However, everything else needed at function
2399 end is safe, so we don't want to use end_of_function_needs here. */
2400 CLEAR_RESOURCE (&needed);
2401 if (frame_pointer_needed)
2402 {
2403 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2404#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2405 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2406#endif
2407#ifdef EXIT_IGNORE_STACK
2408 if (! EXIT_IGNORE_STACK
2409 || current_function_sp_is_unchanging)
2410#endif
2411 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2412 }
2413 else
2414 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2415
2416#ifdef EPILOGUE_USES
2417 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2418 {
2419 if (EPILOGUE_USES (i))
2420 SET_HARD_REG_BIT (needed.regs, i);
2421 }
2422#endif
2423
2424 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2425 trial = PREV_INSN (trial))
2426 {
2427 if (GET_CODE (trial) == NOTE)
2428 continue;
2429 pat = PATTERN (trial);
2430 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2431 continue;
2432
2433 if (! insn_references_resource_p (trial, &set, 1)
2434 && ! insn_sets_resource_p (trial, &needed, 1)
8c2977e2 2435 && ! insn_sets_resource_p (trial, &set, 1)
2436#ifdef HAVE_cc0
2437 /* Don't want to mess with cc0 here. */
2438 && ! reg_mentioned_p (cc0_rtx, pat)
2439#endif
2440 )
2441 {
2442 trial = try_split (pat, trial, 1);
2443 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2444 {
2445 /* Here as well we are searching backward, so put the
2446 insns we find on the head of the list. */
2447
9c7e2978 2448 current_function_epilogue_delay_list
2449 = gen_rtx_INSN_LIST (VOIDmode, trial,
2450 current_function_epilogue_delay_list);
ca545bb5 2451 mark_end_of_function_resources (trial, 1);
2452 update_block (trial, trial);
2453 delete_insn (trial);
2454
2455 /* Clear deleted bit so final.c will output the insn. */
2456 INSN_DELETED_P (trial) = 0;
2457
2458 if (slots_to_fill == ++slots_filled)
2459 break;
2460 continue;
2461 }
2462 }
2463
26d970a5 2464 mark_set_resources (trial, &set, 0, 1);
2465 mark_referenced_resources (trial, &needed, 1);
2466 }
2467
2468 note_delay_statistics (slots_filled, 0);
2469#endif
2470}
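/* A skeleton of the backward scan at the heart of fill_simple_delay_slots
   above (illustrative only).  The ordering is the essential point: each
   candidate is tested against the accumulated SET and NEEDED resources
   first, and only insns that stay behind contribute to those sets for
   trials found even earlier in the stream.  */
#if 0
CLEAR_RESOURCE (&needed);
CLEAR_RESOURCE (&set);
mark_set_resources (insn, &set, 0, 0);
mark_referenced_resources (insn, &needed, 0);

for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
     trial = next_trial)
  {
    next_trial = prev_nonnote_insn (trial);

    if (! insn_references_resource_p (trial, &set, 1)
	&& ! insn_sets_resource_p (trial, &set, 1)
	&& ! insn_sets_resource_p (trial, &needed, 1)
	&& eligible_for_delay (insn, slots_filled, trial, flags))
      /* TRIAL moves into the slot; once deleted from the stream it no
	 longer constrains earlier candidates.  */
      continue;

    /* TRIAL stays behind; its sets and uses now constrain every earlier
       candidate examined after this point.  */
    mark_set_resources (trial, &set, 0, 1);
    mark_referenced_resources (trial, &needed, 1);
  }
#endif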
2471\f
2472/* Try to find insns to place in delay slots.
2473
2474 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2475 or is an unconditional branch if CONDITION is const_true_rtx.
2476 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2477
2478 THREAD is a flow of control: either the insns to be executed if the
2479 branch is true or those if it is false; THREAD_IF_TRUE says which.
2480
2481 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2482 to see if any potential delay slot insns set things needed there.
2483
2484 LIKELY is non-zero if it is extremely likely that the branch will be
2485 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2486 end of a loop back up to the top.
2487
2488 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
2489 thread. I.e., it is the fallthrough code of our jump or the target of the
2490 jump when we are the only jump going there.
2491
2492 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2493 case, we can only take insns from the head of the thread for our delay
2494 slot. We then adjust the jump to point after the insns we have taken. */
2495
2496static rtx
2497fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
91a51951 2498 thread_if_true, own_thread,
ab63953e 2499 slots_to_fill, pslots_filled, delay_list)
2500 rtx insn;
2501 rtx condition;
2502 rtx thread, opposite_thread;
2503 int likely;
2504 int thread_if_true;
91a51951 2505 int own_thread;
9c7e2978 2506 int slots_to_fill, *pslots_filled;
ab63953e 2507 rtx delay_list;
9c7e2978 2508{
d674b9e3 2509 rtx new_thread;
2510 struct resources opposite_needed, set, needed;
2511 rtx trial;
2512 int lose = 0;
2513 int must_annul = 0;
35523fce 2514 int flags;
2515
2516 /* Validate our arguments. */
2517 if ((condition == const_true_rtx && ! thread_if_true)
2518 || (! own_thread && ! thread_if_true))
2519 abort ();
2520
2521 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2522
2523 /* If our thread is the end of subroutine, we can't get any delay
2524 insns from that. */
2525 if (thread == 0)
ab63953e 2526 return delay_list;
2527
2528 /* If this is an unconditional branch, nothing is needed at the
2529 opposite thread. Otherwise, compute what is needed there. */
2530 if (condition == const_true_rtx)
2531 CLEAR_RESOURCE (&opposite_needed);
2532 else
ca545bb5 2533 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
9c7e2978 2534
2535 /* If the insn at THREAD can be split, do it here to avoid having to
2536 update THREAD and NEW_THREAD if it is done in the loop below. Also
2537 initialize NEW_THREAD. */
2538
d45cf215 2539 new_thread = thread = try_split (PATTERN (thread), thread, 0);
d674b9e3 2540
2541 /* Scan insns at THREAD. We are looking for an insn that can be removed
2542 from THREAD (it neither sets nor references resources that were set
2543 ahead of it and it doesn't set anything needed by the insns ahead of
2544 it) and that either can be placed in an annulling insn or isn't
2545 needed at OPPOSITE_THREAD. */
2546
2547 CLEAR_RESOURCE (&needed);
2548 CLEAR_RESOURCE (&set);
2549
2550 /* If we do not own this thread, we must stop as soon as we find
2551 something that we can't put in a delay slot, since all we can do
2552 is branch into THREAD at a later point. Therefore, labels stop
2553 the search if this is not the `true' thread. */
2554
2555 for (trial = thread;
2556 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2557 trial = next_nonnote_insn (trial))
2558 {
70011923 2559 rtx pat, old_trial;
2560
2561 /* If we have passed a label, we no longer own this thread. */
2562 if (GET_CODE (trial) == CODE_LABEL)
2563 {
2564 own_thread = 0;
2565 continue;
2566 }
2567
2568 pat = PATTERN (trial);
2569 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2570 continue;
2571
2572 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2573 don't separate or copy insns that set and use CC0. */
2574 if (! insn_references_resource_p (trial, &set, 1)
2575 && ! insn_sets_resource_p (trial, &set, 1)
2576 && ! insn_sets_resource_p (trial, &needed, 1)
2577#ifdef HAVE_cc0
2578 && ! (reg_mentioned_p (cc0_rtx, pat)
2579 && (! own_thread || ! sets_cc0_p (pat)))
2580#endif
2581 )
2582 {
2583 rtx prior_insn;
2584
2585 /* If TRIAL is redundant with some insn before INSN, we don't
2586 actually need to add it to the delay list; we can merely pretend
2587 we did. */
91a51951 2588 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
9c7e2978 2589 {
c170c8c2 2590 fix_reg_dead_note (prior_insn, insn);
2591 if (own_thread)
2592 {
f1f9081a 2593 update_block (trial, thread);
9bfe7965 2594 if (trial == thread)
2595 {
2596 thread = next_active_insn (thread);
2597 if (new_thread == trial)
2598 new_thread = thread;
2599 }
9bfe7965 2600
2601 delete_insn (trial);
2602 }
2603 else
2604 {
2605 update_reg_unused_notes (prior_insn, trial);
088131ee 2606 new_thread = next_active_insn (trial);
5317d2f8 2607 }
2608
2609 continue;
2610 }
2611
2612 /* There are two ways we can win: If TRIAL doesn't set anything
2613 needed at the opposite thread and can't trap, or if it can
2614 go into an annulled delay slot. */
2615 if (!must_annul
2616 && (condition == const_true_rtx
2617 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2618 && ! may_trap_p (pat))))
9c7e2978 2619 {
70011923 2620 old_trial = trial;
9c7e2978 2621 trial = try_split (pat, trial, 0);
2622 if (new_thread == old_trial)
2623 new_thread = trial;
2624 if (thread == old_trial)
2625 thread = trial;
9c7e2978 2626 pat = PATTERN (trial);
35523fce 2627 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2628 goto winner;
2629 }
2630 else if (0
2631#ifdef ANNUL_IFTRUE_SLOTS
2632 || ! thread_if_true
2633#endif
2634#ifdef ANNUL_IFFALSE_SLOTS
2635 || thread_if_true
2636#endif
2637 )
2638 {
70011923 2639 old_trial = trial;
9c7e2978 2640 trial = try_split (pat, trial, 0);
2641 if (new_thread == old_trial)
2642 new_thread = trial;
2643 if (thread == old_trial)
2644 thread = trial;
9c7e2978 2645 pat = PATTERN (trial);
2646 if ((must_annul || delay_list == NULL) && (thread_if_true
2647 ? check_annul_list_true_false (0, delay_list)
2648 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2649 : check_annul_list_true_false (1, delay_list)
2650 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2651 {
2652 rtx temp;
2653
2654 must_annul = 1;
2655 winner:
2656
2657#ifdef HAVE_cc0
2658 if (reg_mentioned_p (cc0_rtx, pat))
2659 link_cc0_insns (trial);
2660#endif
2661
2662 /* If we own this thread, delete the insn. If this is the
2663 destination of a branch, show that a basic block status
2664 may have been updated. In any case, mark the new
2665 starting point of this thread. */
2666 if (own_thread)
2667 {
f1f9081a 2668 update_block (trial, thread);
2669 if (trial == thread)
2670 {
2671 thread = next_active_insn (thread);
2672 if (new_thread == trial)
2673 new_thread = thread;
2674 }
2675 delete_insn (trial);
2676 }
2677 else
2678 new_thread = next_active_insn (trial);
2679
2680 temp = own_thread ? trial : copy_rtx (trial);
2681 if (thread_if_true)
2682 INSN_FROM_TARGET_P (temp) = 1;
2683
2684 delay_list = add_to_delay_list (temp, delay_list);
2685
2686 if (slots_to_fill == ++(*pslots_filled))
2687 {
2688 /* Even though we have filled all the slots, we
2689 may be branching to a location that has a
2690 redundant insn. Skip any if so. */
2691 while (new_thread && ! own_thread
2692 && ! insn_sets_resource_p (new_thread, &set, 1)
2693 && ! insn_sets_resource_p (new_thread, &needed, 1)
2694 && ! insn_references_resource_p (new_thread,
2695 &set, 1)
2696 && (prior_insn
2697 = redundant_insn (new_thread, insn,
2698 delay_list)))
2699 {
2700 /* We know we do not own the thread, so no need
2701 to call update_block and delete_insn. */
2702 fix_reg_dead_note (prior_insn, insn);
2703 update_reg_unused_notes (prior_insn, new_thread);
2704 new_thread = next_active_insn (new_thread);
2705 }
2706 break;
2707 }
2708
2709 continue;
2710 }
2711 }
2712 }
2713
2714 /* This insn can't go into a delay slot. */
2715 lose = 1;
26d970a5 2716 mark_set_resources (trial, &set, 0, 1);
2717 mark_referenced_resources (trial, &needed, 1);
2718
2719 /* Ensure we don't put insns between the setting of cc and the comparison
2720 by moving a setting of cc into an earlier delay slot since these insns
2721 could clobber the condition code. */
2722 set.cc = 1;
2723
2724 /* If this insn is a register-register copy and the next insn has
2725 a use of our destination, change it to use our source. That way,
2726 it will become a candidate for our delay slot the next time
2727 through this loop. This case occurs commonly in loops that
2728 scan a list.
2729
2730 We could check for more complex cases than those tested below,
2731 but it doesn't seem worth it. It might also be a good idea to try
2732 to swap the two insns. That might do better.
2733
2734 We can't do this if the next insn modifies our destination, because
2735 that would make the replacement into the insn invalid. We also can't
2736 do this if it modifies our source, because it might be an earlyclobber
2737 operand. This latter test also prevents updating the contents of
2738 a PRE_INC. */
2739
2740 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
2741 && GET_CODE (SET_SRC (pat)) == REG
2742 && GET_CODE (SET_DEST (pat)) == REG)
2743 {
2744 rtx next = next_nonnote_insn (trial);
2745
2746 if (next && GET_CODE (next) == INSN
2747 && GET_CODE (PATTERN (next)) != USE
2748 && ! reg_set_p (SET_DEST (pat), next)
963d6142 2749 && ! reg_set_p (SET_SRC (pat), next)
9b72fab4 2750 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
2751 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2752 }
2753 }
2754
2755 /* If we stopped on a branch insn that has delay slots, see if we can
2756 steal some of the insns in those slots. */
2757 if (trial && GET_CODE (trial) == INSN
2758 && GET_CODE (PATTERN (trial)) == SEQUENCE
2759 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
2760 {
2761 /* If this is the `true' thread, we will want to follow the jump,
2762 so we can only do this if we have taken everything up to here. */
18e765cb 2763 if (thread_if_true && trial == new_thread)
2764 delay_list
2765 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2766 delay_list, &set, &needed,
2767 &opposite_needed, slots_to_fill,
2768 pslots_filled, &must_annul,
2769 &new_thread);
2770 else if (! thread_if_true)
2771 delay_list
2772 = steal_delay_list_from_fallthrough (insn, condition,
2773 PATTERN (trial),
2774 delay_list, &set, &needed,
2775 &opposite_needed, slots_to_fill,
2776 pslots_filled, &must_annul);
2777 }
2778
2779 /* If we haven't found anything for this delay slot and it is very
2780 likely that the branch will be taken, see if the insn at our target
2781 increments or decrements a register with an increment that does not
2782 depend on the destination register. If so, try to place the opposite
2783 arithmetic insn after the jump insn and put the arithmetic insn in the
2784 delay slot. If we can't do this, return. */
2785 if (delay_list == 0 && likely && new_thread
2786 && GET_CODE (new_thread) == INSN
2787 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2788 && asm_noperands (PATTERN (new_thread)) < 0)
2789 {
2790 rtx pat = PATTERN (new_thread);
2791 rtx dest;
2792 rtx src;
2793
d674b9e3 2794 trial = new_thread;
2795 pat = PATTERN (trial);
2796
2797 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
35523fce 2798 || ! eligible_for_delay (insn, 0, trial, flags))
2799 return 0;
2800
2801 dest = SET_DEST (pat), src = SET_SRC (pat);
2802 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2803 && rtx_equal_p (XEXP (src, 0), dest)
2804 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
2805 {
2806 rtx other = XEXP (src, 1);
2807 rtx new_arith;
2808 rtx ninsn;
2809
2810 /* If this is a constant adjustment, use the same code with
2811 the negated constant. Otherwise, reverse the sense of the
2812 arithmetic. */
2813 if (GET_CODE (other) == CONST_INT)
2814 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2815 negate_rtx (GET_MODE (src), other));
9c7e2978 2816 else
2817 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2818 GET_MODE (src), dest, other);
9c7e2978 2819
38a448ca 2820 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2821 insn);
2822
2823 if (recog_memoized (ninsn) < 0
0eadeb15 2824 || (extract_insn (ninsn), ! constrain_operands (1)))
2825 {
2826 delete_insn (ninsn);
2827 return 0;
2828 }
2829
2830 if (own_thread)
2831 {
f1f9081a 2832 update_block (trial, thread);
2833 if (trial == thread)
2834 {
2835 thread = next_active_insn (thread);
2836 if (new_thread == trial)
2837 new_thread = thread;
2838 }
2839 delete_insn (trial);
2840 }
2841 else
2842 new_thread = next_active_insn (trial);
2843
2844 ninsn = own_thread ? trial : copy_rtx (trial);
2845 if (thread_if_true)
2846 INSN_FROM_TARGET_P (ninsn) = 1;
2847
fb3821f7 2848 delay_list = add_to_delay_list (ninsn, NULL_RTX);
2849 (*pslots_filled)++;
2850 }
2851 }
2852
2853 if (delay_list && must_annul)
2854 INSN_ANNULLED_BRANCH_P (insn) = 1;
2855
2856 /* If we are to branch into the middle of this thread, find an appropriate
2857 label or make a new one if none, and redirect INSN to it. If we hit the
2858 end of the function, use the end-of-function label. */
2859 if (new_thread != thread)
2860 {
2861 rtx label;
2862
2863 if (! thread_if_true)
2864 abort ();
2865
2866 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
2867 && (simplejump_p (new_thread)
2868 || GET_CODE (PATTERN (new_thread)) == RETURN)
2869 && redirect_with_delay_list_safe_p (insn,
2870 JUMP_LABEL (new_thread),
2871 delay_list))
22422dbf 2872 new_thread = follow_jumps (JUMP_LABEL (new_thread));
2873
2874 if (new_thread == 0)
2875 label = find_end_label ();
2876 else if (GET_CODE (new_thread) == CODE_LABEL)
2877 label = new_thread;
2878 else
2879 label = get_label_before (new_thread);
2880
326f06f7 2881 reorg_redirect_jump (insn, label);
2882 }
2883
2884 return delay_list;
2885}
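/* A worked picture of the arithmetic-reversal trick above (illustrative;
   the register numbers and loop shape are made up).  Given a likely-taken
   loop branch with an empty slot whose target begins with an increment,

       L: r2 = r2 + 4	       branch L'	; redirected past the add
	  ...		  =>   r2 = r2 + 4	; slot, runs on both paths
	  branch L	       r2 = r2 - 4	; fall-through compensation

   the increment executes in the slot on every path, and the rarely taken
   fall-through path gets a compensating insn built from the negated
   constant exactly as in the code above:

       new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
				   negate_rtx (GET_MODE (src), other));  */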
2886\f
2887/* Make another attempt to find insns to place in delay slots.
2888
2889 We previously looked for insns located in front of the delay insn
2890 and, for non-jump delay insns, located behind the delay insn.
2891
2892 Here we only try to schedule jump insns, moving insns from either
2893 the target or the following insns into the delay slot. If annulling is
2894 supported, we will be likely to do this. Otherwise, we can do this only
2895 if safe. */
2896
2897static void
91a51951 2898fill_eager_delay_slots ()
2899{
2900 register rtx insn;
2901 register int i;
2902 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2903
2904 for (i = 0; i < num_unfilled_slots; i++)
2905 {
2906 rtx condition;
2907 rtx target_label, insn_at_target, fallthrough_insn;
2908 rtx delay_list = 0;
2909 int own_target;
2910 int own_fallthrough;
2911 int prediction, slots_to_fill, slots_filled;
2912
2913 insn = unfilled_slots_base[i];
2914 if (insn == 0
2915 || INSN_DELETED_P (insn)
2916 || GET_CODE (insn) != JUMP_INSN
3480bb98 2917 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
2918 continue;
2919
2920 slots_to_fill = num_delay_slots (insn);
2921 /* Some machine descriptions have defined instructions to have
2922 delay slots only in certain circumstances which may depend on
2923 nearby insns (which change due to reorg's actions).
2924
2925 For example, the PA port normally has delay slots for unconditional
2926 jumps.
2927
2928 However, the PA port claims such jumps do not have a delay slot
2929 if they are immediate successors of certain CALL_INSNs. This
2930 allows the port to favor filling the delay slot of the call with
2931 the unconditional jump. */
9c7e2978 2932 if (slots_to_fill == 0)
c5c76735 2933 continue;
2934
2935 slots_filled = 0;
2936 target_label = JUMP_LABEL (insn);
2937 condition = get_branch_condition (insn, target_label);
2938
2939 if (condition == 0)
2940 continue;
2941
abc95ed3 2942 /* Get the next active fallthrough and target insns and see if we own
2943 them. Then see whether the branch is likely true. We don't need
2944 to do a lot of this for unconditional branches. */
2945
2946 insn_at_target = next_active_insn (target_label);
2947 own_target = own_thread_p (target_label, target_label, 0);
2948
2949 if (condition == const_true_rtx)
2950 {
2951 own_fallthrough = 0;
2952 fallthrough_insn = 0;
2953 prediction = 2;
2954 }
2955 else
2956 {
2957 fallthrough_insn = next_active_insn (insn);
fb3821f7 2958 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
2959 prediction = mostly_true_jump (insn, condition);
2960 }
2961
2962 /* If this insn is expected to branch, first try to get insns from our
2963 target, then our fallthrough insns. If it is not expected to branch,
2964 try the other order. */
2965
0275a51b 2966 if (prediction > 0)
2967 {
2968 delay_list
2969 = fill_slots_from_thread (insn, condition, insn_at_target,
2970 fallthrough_insn, prediction == 2, 1,
91a51951 2971 own_target,
ab63953e 2972 slots_to_fill, &slots_filled, delay_list);
2973
2974 if (delay_list == 0 && own_fallthrough)
2975 {
2976 /* Even though we didn't find anything for delay slots,
2977 we might have found a redundant insn which we deleted
2978 from the thread that was filled. So we have to recompute
2979 the next insn at the target. */
2980 target_label = JUMP_LABEL (insn);
2981 insn_at_target = next_active_insn (target_label);
2982
2983 delay_list
2984 = fill_slots_from_thread (insn, condition, fallthrough_insn,
2985 insn_at_target, 0, 0,
91a51951 2986 own_fallthrough,
2987 slots_to_fill, &slots_filled,
2988 delay_list);
2989 }
2990 }
2991 else
2992 {
2993 if (own_fallthrough)
2994 delay_list
2995 = fill_slots_from_thread (insn, condition, fallthrough_insn,
2996 insn_at_target, 0, 0,
91a51951 2997 own_fallthrough,
2998 slots_to_fill, &slots_filled,
2999 delay_list);
3000
3001 if (delay_list == 0)
3002 delay_list
3003 = fill_slots_from_thread (insn, condition, insn_at_target,
3004 next_active_insn (insn), 0, 1,
91a51951 3005 own_target,
3006 slots_to_fill, &slots_filled,
3007 delay_list);
3008 }
3009
3010 if (delay_list)
3011 unfilled_slots_base[i]
91a51951 3012 = emit_delay_sequence (insn, delay_list, slots_filled);
3013
3014 if (slots_to_fill == slots_filled)
3015 unfilled_slots_base[i] = 0;
3016
3017 note_delay_statistics (slots_filled, 1);
3018 }
3019}
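/* A sketch of what eager filling buys (illustrative; the machine and
   registers are made up).  When the branch is judged mostly true, the
   first insn at the target can be hoisted into an annulled slot so that
   it takes effect only when the prediction holds:

       before:			       after:
       L:  r3 = mem[r2]		       L2: ...
	   ...			           branch,a L2	 ; annul if not taken
	   cond branch L	           r3 = mem[r2]	 ; slot, from target

   This is the INSN_FROM_TARGET_P / eligible_for_annul_* machinery driven
   by fill_slots_from_thread above; PREDICTION == 2 marks an effectively
   unconditional branch, positive values try the target thread first, and
   other values try the fall-through thread first.  */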
3020\f
3021/* Once we have tried two ways to fill a delay slot, make a pass over the
3022 code to try to improve the results and to do such things as more jump
3023 threading. */
3024
3025static void
3026relax_delay_slots (first)
3027 rtx first;
3028{
3029 register rtx insn, next, pat;
3030 register rtx trial, delay_insn, target_label;
3031
3032 /* Look at every JUMP_INSN and see if we can improve it. */
3033 for (insn = first; insn; insn = next)
3034 {
3035 rtx other;
3036
3037 next = next_active_insn (insn);
3038
3039 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3040 the next insn, or jumps to a label that is not the last of a
3041 group of consecutive labels. */
3042 if (GET_CODE (insn) == JUMP_INSN
3480bb98 3043 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3044 && (target_label = JUMP_LABEL (insn)) != 0)
3045 {
22422dbf 3046 target_label = follow_jumps (target_label);
3047 target_label = prev_label (next_active_insn (target_label));
3048
3049 if (target_label == 0)
3050 target_label = find_end_label ();
3051
3052 if (next_active_insn (target_label) == next
3053 && ! condjump_in_parallel_p (insn))
3054 {
3055 delete_jump (insn);
3056 continue;
3057 }
3058
3059 if (target_label != JUMP_LABEL (insn))
326f06f7 3060 reorg_redirect_jump (insn, target_label);
3061
3062 /* See if this jump branches around an unconditional jump.
3063 If so, invert this jump and point it to the target of the
3064 second jump. */
3065 if (next && GET_CODE (next) == JUMP_INSN
3066 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3067 && next_active_insn (target_label) == next_active_insn (next)
3068 && no_labels_between_p (insn, next))
3069 {
3070 rtx label = JUMP_LABEL (next);
3071
3072 /* Be careful how we do this to avoid deleting code or
3073 labels that are momentarily dead. See similar optimization
3074 in jump.c.
3075
3076 We also need to ensure we properly handle the case when
3077 invert_jump fails. */
3078
3079 ++LABEL_NUSES (target_label);
3080 if (label)
3081 ++LABEL_NUSES (label);
3082
3083 if (invert_jump (insn, label))
3084 {
3085 delete_insn (next);
3086 next = insn;
3087 }
3088
3089 if (label)
3090 --LABEL_NUSES (label);
3091
3092 if (--LABEL_NUSES (target_label) == 0)
3093 delete_insn (target_label);
3094
3095 continue;
3096 }
3097 }
3098
3099 /* If this is an unconditional jump and the previous insn is a
3100 conditional jump, try reversing the condition of the previous
3101 insn and swapping our targets. The next pass might be able to
3102 fill the slots.
3103
3104 Don't do this if we expect the conditional branch to be true, because
3105 we would then be making the more common case longer. */
3106
3107 if (GET_CODE (insn) == JUMP_INSN
3108 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3109 && (other = prev_active_insn (insn)) != 0
3480bb98 3110 && (condjump_p (other) || condjump_in_parallel_p (other))
9c7e2978 3111 && no_labels_between_p (other, insn)
6f20793d 3112 && 0 > mostly_true_jump (other,
3113 get_branch_condition (other,
3114 JUMP_LABEL (other))))
3115 {
3116 rtx other_target = JUMP_LABEL (other);
7ca4e06e 3117 target_label = JUMP_LABEL (insn);
3118
3119 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
3120 as we move the label. */
3121 if (other_target)
3122 ++LABEL_NUSES (other_target);
3123
3124 if (invert_jump (other, target_label))
326f06f7 3125 reorg_redirect_jump (insn, other_target);
3126
3127 if (other_target)
3128 --LABEL_NUSES (other_target);
3129 }
3130
3131 /* Now look only at cases where we have filled a delay slot. */
3132 if (GET_CODE (insn) != INSN
3133 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3134 continue;
3135
3136 pat = PATTERN (insn);
3137 delay_insn = XVECEXP (pat, 0, 0);
3138
3139 /* See if the first insn in the delay slot is redundant with some
3140 previous insn. Remove it from the delay slot if so; then set up
3141 to reprocess this insn. */
f898abd7 3142 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3143 {
3144 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3145 next = prev_active_insn (next);
3146 continue;
3147 }
3148
3149 /* See if we have a RETURN insn with a filled delay slot followed
3150 by a RETURN insn with an unfilled delay slot. If so, we can delete
3151 the first RETURN (but not its delay insn). This gives the same
3152 effect in fewer instructions.
3153
3154 Only do so if optimizing for size since this results in slower, but
3155 smaller code. */
3156 if (optimize_size
3157 && GET_CODE (PATTERN (delay_insn)) == RETURN
3158 && next
3159 && GET_CODE (next) == JUMP_INSN
3160 && GET_CODE (PATTERN (next)) == RETURN)
3161 {
3162 int i;
3163
3164 /* Delete the RETURN and just execute the delay list insns.
3165
3166 We do this by deleting the INSN containing the SEQUENCE, then
3167 re-emitting the insns separately, and then deleting the RETURN.
3168 This allows the count of the jump target to be properly
3169 decremented. */
3170
3171 /* Clear the from target bit, since these insns are no longer
3172 in delay slots. */
3173 for (i = 0; i < XVECLEN (pat, 0); i++)
3174 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3175
3176 trial = PREV_INSN (insn);
3177 delete_insn (insn);
3178 emit_insn_after (pat, trial);
3179 delete_scheduled_jump (delay_insn);
3180 continue;
3181 }
3182
3183 /* Now look only at the cases where we have a filled JUMP_INSN. */
3184 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3185 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3186 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3187 continue;
3188
      target_label = JUMP_LABEL (delay_insn);

      if (target_label)
	{
	  /* If this jump goes to another unconditional jump, thread it, but
	     don't convert a jump into a RETURN here.  */
	  trial = follow_jumps (target_label);
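	  /* (A loose gloss: follow_jumps chases a chain such as
	     "L1: goto L2 ... L2: goto L3" down to its final destination;
	     see its definition for the exact rules.)  */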
	  /* We use next_real_insn instead of next_active_insn, so that
	     the special USE insns emitted by reorg won't be ignored.
	     If they are ignored, then they will get deleted if target_label
	     is now unreachable, and that would cause mark_target_live_regs
	     to fail.  */
	  trial = prev_label (next_real_insn (trial));
	  if (trial == 0 && target_label != 0)
	    trial = find_end_label ();

	  if (trial != target_label
	      && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
	    {
	      reorg_redirect_jump (delay_insn, trial);
	      target_label = trial;
	    }

	  /* If the first insn at TARGET_LABEL is redundant with a previous
	     insn, redirect the jump to the following insn and process
	     again.  */
	  trial = next_active_insn (target_label);
	  if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
	      && redundant_insn (trial, insn, 0))
	    {
	      rtx tmp;

	      /* Figure out where to emit the special USE insn so we don't
		 later incorrectly compute register live/death info.  */
	      tmp = next_active_insn (trial);
	      if (tmp == 0)
		tmp = find_end_label ();

	      /* Insert the special USE insn and update dataflow info.  */
	      update_block (trial, tmp);

	      /* Now emit a label before the special USE insn, and
		 redirect our jump to the new label.  */
	      target_label = get_label_before (PREV_INSN (tmp));
	      reorg_redirect_jump (delay_insn, target_label);
	      next = insn;
	      continue;
	    }

	  /* Similarly, if it is an unconditional jump with one insn in its
	     delay list and that insn is redundant, thread the jump.  */
	  if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
	      && XVECLEN (PATTERN (trial), 0) == 2
	      && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
	      && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
		  || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
	      && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
	    {
	      target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
	      if (target_label == 0)
		target_label = find_end_label ();

	      if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
						    insn))
		{
		  reorg_redirect_jump (delay_insn, target_label);
		  next = insn;
		  continue;
		}
	    }
	}

      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && prev_active_insn (target_label) == insn
	  && ! condjump_in_parallel_p (delay_insn)
#ifdef HAVE_cc0
	  /* If the last insn in the delay slot sets CC0 for some insn,
	     various code assumes that it is in a delay slot.  We could
	     put it back where it belonged and delete the register notes,
	     but it doesn't seem worthwhile in this uncommon case.  */
	  && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
			      REG_CC_USER, NULL_RTX)
#endif
	  )
	{
	  int i;

	  /* All this insn does is execute its delay list and jump to the
	     following insn.  So delete the jump and just execute the delay
	     list insns.

	     We do this by deleting the INSN containing the SEQUENCE, then
	     re-emitting the insns separately, and then deleting the jump.
	     This allows the count of the jump target to be properly
	     decremented.  */

	  /* Clear the from target bit, since these insns are no longer
	     in delay slots.  */
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

	  trial = PREV_INSN (insn);
	  delete_insn (insn);
	  emit_insn_after (pat, trial);
	  delete_scheduled_jump (delay_insn);
	  continue;
	}
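
      /* That is (illustrative only):

	     branch L1  [delay: insn A]          insn A
	  L1: ...                          =>    L1: ...

	 The branch targets its own fall-through point, so only the delay
	 list needs to survive.  */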

      /* See if this is an unconditional jump around a single insn which is
	 identical to the one in its delay slot.  In this case, we can just
	 delete the branch and the insn in its delay slot.  */
      if (next && GET_CODE (next) == INSN
	  && prev_label (next_active_insn (next)) == target_label
	  && simplejump_p (insn)
	  && XVECLEN (pat, 0) == 2
	  && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
	{
	  delete_insn (insn);
	  continue;
	}
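
      /* That is (illustrative only):

	     goto L1  [delay: insn A]
	     insn A                       =>   insn A
	  L1: ...                              L1: ...

	 Either way exactly one copy of insn A executes before L1, so the
	 branch and its slot are redundant.  */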

      /* See if this jump (with its delay slots) branches around another
	 jump (without delay slots).  If so, invert this jump and point
	 it to the target of the second jump.  We cannot do this for
	 annulled jumps, though.  Again, don't convert a jump to a RETURN
	 here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && next && GET_CODE (next) == JUMP_INSN
	  && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	  && next_active_insn (target_label) == next_active_insn (next)
	  && no_labels_between_p (insn, next))
	{
	  rtx label = JUMP_LABEL (next);
	  rtx old_label = JUMP_LABEL (delay_insn);

	  if (label == 0)
	    label = find_end_label ();

	  if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
	    {
	      /* Be careful how we do this to avoid deleting code or labels
		 that are momentarily dead.  See similar optimization in
		 jump.c.  */
	      if (old_label)
		++LABEL_NUSES (old_label);

	      if (invert_jump (delay_insn, label))
		{
		  int i;

		  /* Must update the INSN_FROM_TARGET_P bits now that
		     the branch is reversed, so that mark_target_live_regs
		     will handle the delay slot insn correctly.  */
		  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
		    {
		      rtx slot = XVECEXP (PATTERN (insn), 0, i);
		      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
		    }

		  delete_insn (next);
		  next = insn;
		}

	      if (old_label && --LABEL_NUSES (old_label) == 0)
		delete_insn (old_label);
	      continue;
	    }
	}
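
      /* That is (illustrative only):

	     if (cond) goto L1  [delay]         if (!cond) goto L2  [delay]
	     goto L2                       =>
	  L1: ...                              L1: ...

	 The unconditional jump disappears, and the INSN_FROM_TARGET_P
	 bits above are flipped to match the new branch sense.  */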

      /* If we own the thread opposite the way this insn branches, see if we
	 can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	  && own_thread_p (NEXT_INSN (insn), 0, 1))
	try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	       && own_thread_p (target_label, target_label, 0))
	try_merge_delay_insns (insn, next_active_insn (target_label));
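      /* (To "own" a thread is, roughly, for this branch to be the only
	 way its insns can be reached, so they may be modified freely;
	 own_thread_p has the precise test.)  */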

      /* If we get here, we haven't deleted INSN.  But we may have deleted
	 NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}

#ifdef HAVE_return

/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */

static void
make_return_insns (first)
     rtx first;
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;
  int slots, i;

  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
      {
	real_return_label = get_label_before (insn);
	break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;

      /* Only look at filled JUMP_INSNs that go to the end of function
	 label.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE
	  || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
	continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);

      /* If we can't make the jump into a RETURN, try to redirect it to
	 the best RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
	{
	  /* Make sure redirecting the jump will not invalidate the delay
	     slot insns.  */
	  if (redirect_with_delay_slots_safe_p (jump_insn,
						real_return_label,
						insn))
	    reorg_redirect_jump (jump_insn, real_return_label);
	  continue;
	}

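      /* Reaching here means the redirect to NULL_RTX succeeded, which
	 turns JUMP_INSN itself into a RETURN; what follows checks whether
	 its delay insns are still allowed there.  */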
      /* See if this RETURN can accept the insns currently in its delay
	 slot.  It can if it has at least as many slots and the contents
	 of each is valid.  */

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
	{
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_false (jump_insn, i - 1,
					       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_true (jump_insn, i - 1,
					      XVECEXP (pat, 0, i), flags) :
#endif
		   eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
	      break;
	}
      else
	i = 0;

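      /* If the loop above ran off the end of the vector, every delay insn
	 is still valid for the RETURN, so there is nothing left to fix.  */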
      if (i == XVECLEN (pat, 0))
	continue;

      /* We have to do something with this insn.  If it is an unconditional
	 RETURN, delete the SEQUENCE and output the individual insns,
	 followed by the RETURN.  Then set things up so we try to find
	 insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
	{
	  rtx prev = PREV_INSN (insn);

	  delete_insn (insn);
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

	  insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
	  emit_barrier_after (insn);

	  if (slots)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
      else
	/* It is probably more efficient to keep this with its current
	   delay slot as a branch to a RETURN.  */
	reorg_redirect_jump (jump_insn, real_return_label);
    }

  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_insn (real_return_label);

  fill_simple_delay_slots (1);
  fill_simple_delay_slots (0);
}
#endif

/* Try to find insns to place in delay slots.  */

void
dbr_schedule (first, file)
     rtx first;
     FILE *file;
{
  rtx insn, next, epilogue_insn = 0;
  int i;
#if 0
  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;
#endif

  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
	max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
	epilogue_insn = insn;
    }

  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
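  /* An insn's "ruid" is simply its ordinal position in the insn stream,
     so two insns can be ordered by comparing their uid_to_ruid entries.  */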

  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }

  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      if (num_delay_slots (insn) > 0)
	obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && JUMP_LABEL (insn) != 0
	  && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
	      != JUMP_LABEL (insn)))
	redirect_jump (insn, target);
    }

  init_resource_info (epilogue_insn);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Initialize the statistics for this function.  */
  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero ((char *) num_filled_delays, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (1);
      fill_simple_delay_slots (0);
      fill_eager_delay_slots ();
      relax_delay_slots (first);
    }

  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	  && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
	next = delete_insn (insn);
    }

  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);

  if (file)
    {
      register int i, j, need_comma;

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (file, ";; Reorg function #%d\n", i);

	      fprintf (file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (file, ", ");
		    need_comma = 1;
		    fprintf (file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (file, "\n");
	    }
	}
    }
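
  /* The dump then reads something like this (illustrative values only):

	;; Reorg pass #1:
	;; Reorg function #0
	;; 12 insns needing delay slots
	;; 9 got 1 delays, 3 got 0 delays  */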

  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now, as up until this point the destinations of
     JUMPS can be moved around and changed, but past right here that cannot
     happen.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (GET_CODE (insn) == INSN)
	{
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (pat) == SEQUENCE)
	    insn = XVECEXP (pat, 0, 0);
	}
      if (GET_CODE (insn) != JUMP_INSN)
	continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
					    GEN_INT (pred_flags),
					    REG_NOTES (insn));
    }

  free_resource_info ();
}
#endif /* DELAY_SLOTS */