/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.
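
   For example (an illustrative MIPS-style sketch; the register, label,
   and surrounding insns are invented):

	(before)		(after filling the slot)
	addu  $4,$4,1		b     L2
	b     L2		addu  $4,$4,1	# executes in the slot
	nop   # wasted slot

   The independent `addu' moves from before the branch into the delay
   slot, saving the cycle the nop wasted.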

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that, failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets CC0 and the insn that uses
   it.  The insns are allowed to be separated by placing an insn that
   sets CC0 (but not an insn that uses CC0; we could do this, but it
   doesn't seem worthwhile) in a delay slot.  In that case, we point each
   insn at the other with REG_CC_USER and REG_CC_SETTER notes.  Note that
   these restrictions affect very few machines because most RISC
   machines with delay slots will not use CC0 (the RT is the only known
   exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */

#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tm_p.h"
#include "expr.h"
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
#include "resource.h"
#include "except.h"
#include "params.h"

#ifdef DELAY_SLOTS

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;

static int stop_search_p PARAMS ((rtx, int));
static int resource_conflicts_p PARAMS ((struct resources *,
					 struct resources *));
static int insn_references_resource_p PARAMS ((rtx, struct resources *, int));
static int insn_sets_resource_p PARAMS ((rtx, struct resources *, int));
static rtx find_end_label PARAMS ((void));
static rtx emit_delay_sequence PARAMS ((rtx, rtx, int));
static rtx add_to_delay_list PARAMS ((rtx, rtx));
static rtx delete_from_delay_slot PARAMS ((rtx));
static void delete_scheduled_jump PARAMS ((rtx));
static void note_delay_statistics PARAMS ((int, int));
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx optimize_skip PARAMS ((rtx));
#endif
static int get_jump_flags PARAMS ((rtx, rtx));
static int rare_destination PARAMS ((rtx));
static int mostly_true_jump PARAMS ((rtx, rtx));
static rtx get_branch_condition PARAMS ((rtx, rtx));
static int condition_dominates_p PARAMS ((rtx, rtx));
static int redirect_with_delay_slots_safe_p PARAMS ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PARAMS ((rtx, rtx, rtx));
static int check_annul_list_true_false PARAMS ((int, rtx));
static rtx steal_delay_list_from_target PARAMS ((rtx, rtx, rtx, rtx,
						 struct resources *,
						 struct resources *,
						 struct resources *,
						 int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PARAMS ((rtx, rtx, rtx, rtx,
						      struct resources *,
						      struct resources *,
						      struct resources *,
						      int, int *, int *));
static void try_merge_delay_insns PARAMS ((rtx, rtx));
static rtx redundant_insn PARAMS ((rtx, rtx, rtx));
static int own_thread_p PARAMS ((rtx, rtx, int));
static void update_block PARAMS ((rtx, rtx));
static int reorg_redirect_jump PARAMS ((rtx, rtx));
static void update_reg_dead_notes PARAMS ((rtx, rtx));
static void fix_reg_dead_note PARAMS ((rtx, rtx));
static void update_reg_unused_notes PARAMS ((rtx, rtx));
static void fill_simple_delay_slots PARAMS ((int));
static rtx fill_slots_from_thread PARAMS ((rtx, rtx, rtx, rtx, int, int,
					   int, int, int *, rtx));
static void fill_eager_delay_slots PARAMS ((void));
static void relax_delay_slots PARAMS ((rtx));
#ifdef HAVE_return
static void make_return_insns PARAMS ((rtx));
#endif
\f
/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (insn, labels_p)
     rtx insn;
     int labels_p;
{
  if (insn == 0)
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      abort ();
    }
}
\f
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return
   FALSE.  */

static int
resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

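  /* HARD_REG_SET is defined as a macro (rather than a typedef for an
     array) only when the whole register set fits in a single scalar, so
     in that case the two sets can be intersected with one `&'; otherwise
     the component words must be compared one by one.  */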
#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}

/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, resources used
   by the routine called by INSN (if any) are counted as well.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (insn, res, include_delayed_effects)
     rtx insn;
     struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}

/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (insn, res, include_delayed_effects)
     rtx insn;
     struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}
\f
/* Find a label at the end of the function or before a RETURN.  If there is
   none, make one.  */

static rtx
find_end_label ()
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so, put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN
	 insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }

  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;
  else
    {
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      /* If the basic block reorder pass moves the return insn to
	 some other place, try to locate it again and put our
	 end_of_function_label there.  */
      while (insn && ! (GET_CODE (insn) == JUMP_INSN
			&& (GET_CODE (PATTERN (insn)) == RETURN)))
	insn = PREV_INSN (insn);
      if (insn)
	{
	  insn = PREV_INSN (insn);

	  /* Put the label before any USE insns that may precede the
	     RETURN insn.  */
	  while (GET_CODE (insn) == USE)
	    insn = PREV_INSN (insn);

	  emit_label_after (end_of_function_label, insn);
	}
      else
	{
	  /* Otherwise, make a new label and emit a RETURN and BARRIER,
	     if needed.  */
	  emit_label (end_of_function_label);
#ifdef HAVE_return
	  if (HAVE_return)
	    {
	      /* The return we make may have delay slots too.  */
	      rtx insn = gen_return ();
	      insn = emit_jump_insn (insn);
	      emit_barrier ();
	      if (num_delay_slots (insn) > 0)
		obstack_ptr_grow (&unfilled_slots_obstack, insn);
	    }
#endif
	}
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}
\f
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */

static rtx
emit_delay_sequence (insn, list, length)
     rtx insn;
     rtx list;
     int length;
{
  int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);
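  /* The copy, not the original insn, becomes element 0 of the SEQUENCE;
     SEQ_INSN takes the original's place in the insn stream below.  */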

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
    {
      delete_related_insns (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (insn != last)
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (insn != first)
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note, next;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      for (note = REG_NOTES (tem); note; note = next)
	{
	  next = XEXP (note, 1);
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_DEAD:
	      /* Remove any REG_DEAD notes because we can't rely on them now
		 that the insn has been moved.  */
	      remove_note (tem, note);
	      break;

	    case REG_LABEL:
	      /* Keep the label reference count up to date.  */
	      if (GET_CODE (XEXP (note, 0)) == CODE_LABEL)
		LABEL_NUSES (XEXP (note, 0))++;
	      break;

	    default:
	      break;
	    }
	}
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  if (i != length + 1)
    abort ();

  return seq_insn;
}

/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (insn, delay_list)
     rtx insn;
     rtx delay_list;
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
\f
/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (insn)
     rtx insn;
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_related_insns (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else if (GET_CODE (trial) == JUMP_INSN
	   || GET_CODE (trial) == CALL_INSN
	   || GET_CODE (trial) == INSN)
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}
\f
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (insn)
     rtx insn;
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, NULL_RTX))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_related_insns (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_related_insns (insn);
}
\f
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (slots_filled, index)
     int slots_filled, index;
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
\f
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
   use an annulling branch and put that insn in the delay slot.
   Use either a branch that annuls when the condition is true or
   invert the test with a branch that annuls when the condition is
   false.  This saves insns, since otherwise we must copy an insn
   from the L1 target.

	(orig)           (skip)          (otherwise)
	Bcc.n L1         Bcc',a L1       Bcc,a L1'
	insn             insn            insn2
      L1:              L1:             L1:
	insn2            insn2           insn2
	insn3            insn3         L1':
					 insn3

   2.  When a conditional branch skips over only one instruction,
   and after that, it unconditionally branches somewhere else,
   perform a similar optimization.  This saves executing the
   second branch in the case where the inverted condition is true.

	Bcc.n L1         Bcc',a L2
	insn             insn
      L1:              L1:
	Bra L2           Bra L2

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */

static rtx
optimize_skip (insn)
     rtx insn;
{
  rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  rtx target_label;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags))
      || can_throw_internal (trial))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point): Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn))
       && ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
      || (next_trial != 0
	  && GET_CODE (next_trial) == JUMP_INSN
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn), 1))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_related_insns (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	{
	  target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* Recompute the flags based on TARGET_LABEL since threading
	     the jump to TARGET_LABEL may change the direction of the
	     jump (which may change the circumstances in which the
	     delay slot is nullified).  */
	  flags = get_jump_flags (insn, target_label);
	  if (eligible_for_annul_true (insn, 0, trial, flags))
	    reorg_redirect_jump (insn, target_label);
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
\f
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Unconditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (insn, label)
     rtx insn, label;
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots; these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
     direction.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && INSN_UID (insn) <= max_uid
      && label != 0
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If insn is a conditional branch, call mostly_true_jump to
     determine the branch prediction.

     Unconditional branches are predicted as very likely taken.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	case 2:
	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  break;
	case 1:
	  flags |= ATTR_FLAG_likely;
	  break;
	case 0:
	  flags |= ATTR_FLAG_unlikely;
	  break;
	case -1:
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	  break;

	default:
	  abort ();
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}

/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if INSN will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (insn)
     rtx insn;
{
  int jump_count = 0;
  rtx next;

  for (; insn; insn = next)
    {
      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  return 0;
	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;
	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    return 1;
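	  /* Follow at most 10 unconditional jumps; the bound keeps a
	     cycle of jumps from looping us forever.  */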
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;

	default:
	  break;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}

/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0, and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx insn, note;
  int rare_dest = rare_destination (target_label);
  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  note = find_reg_note (jump_insn, REG_BR_PROB, 0);
  if (note)
    {
      int prob = INTVAL (XEXP (note, 0));

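      /* REG_BR_PROB_BASE is 10000, so the cut-offs below correspond to
	 90%, 50% and 10% probability that the branch is taken.  */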
      if (prob >= REG_BR_PROB_BASE * 9 / 10)
	return 2;
      else if (prob >= REG_BR_PROB_BASE / 2)
	return 1;
      else if (prob >= REG_BR_PROB_BASE / 10)
	return 0;
      else
	return -1;
    }

  /* ??? Ought to use estimate_probability instead.  */

  /* If this is a branch outside a loop, it is highly unlikely.  */
  if (GET_CODE (PATTERN (jump_insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
    return -1;

  if (target_label)
    {
      /* If this is the test of a loop, it is very likely true.  We scan
	 backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
	 before the next real insn, we assume the branch is to the top of
	 the loop.  */
      for (insn = PREV_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = PREV_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  return 2;

      /* If this is a jump to the test of a loop, it is likely true.  We scan
	 forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
	 before the next real insn, we assume the branch is to the loop branch
	 test.  */
      for (insn = NEXT_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = NEXT_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
	  return 1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      return 1;
    case EQ:
      return 0;
    case NE:
      return 1;
    case LE:
    case LT:
      if (XEXP (condition, 1) == const0_rtx)
	return 0;
      break;
    case GE:
    case GT:
      if (XEXP (condition, 1) == const0_rtx)
	return 1;
      break;

    default:
      break;
    }

  /* Predict that backward branches are usually taken and forward branches
     usually are not.  If we don't know whether this is forward or backward,
     assume the branch will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}

/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (insn, target)
     rtx insn;
     rtx target;
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    {
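      /* Here TARGET sits in the "else" arm, so INSN branches to it when
	 the tested condition is false; the condition under which the
	 branch is taken is therefore the reversed comparison.  */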
      enum rtx_code rev;
      rev = reversed_comparison_code (XEXP (src, 0), insn);
      if (rev != UNKNOWN)
	return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
			       XEXP (XEXP (src, 0), 0),
			       XEXP (XEXP (src, 0), 1));
    }

  return 0;
}

/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */
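/* For example, (lt (reg) (const_int 0)) is more strict than
   (le (reg) (const_int 0)): whenever the LT condition holds, a branch
   on the LE condition is certain to be taken.  */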

static int
condition_dominates_p (condition, insn)
     rtx condition;
     rtx insn;
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}

/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (jump, newlabel, seq)
     rtx jump, newlabel, seq;
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}

/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
     rtx jump, newlabel, delay_list;
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}

/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (annul_true_p, delay_list)
     int annul_true_p;
     rtx delay_list;
{
  rtx temp;

  if (delay_list)
    {
      for (temp = delay_list; temp; temp = XEXP (temp, 1))
	{
	  rtx trial = XEXP (temp, 0);

	  if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	      || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
	    return 0;
	}
    }

  return 1;
}
\f
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */

static rtx
steal_delay_list_from_target (insn, condition, seq, delay_list,
			      sets, needed, other_needed,
			      slots_to_fill, pslots_filled, pannul_p,
			      pnew_thread)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
     rtx *pnew_thread;
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We cannot steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We cannot do this because we cannot
     change the direction of the jump: the condition codes will affect
     the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
      if (insn_references_resource_p (XVECEXP (seq, 0, 0), &cc_set, 0))
	return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

#ifdef MD_CAN_REDIRECT_BRANCH
  /* On some targets, branches with delay slots can have a limited
     displacement.  Give the back end a chance to tell us we can't do
     this.  */
  if (! MD_CAN_REDIRECT_BRANCH (insn, XVECEXP (seq, 0, 0)))
    return delay_list;
#endif

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay list.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

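      /* TRIAL fills an ordinary slot if it is safe to execute even when
	 the branch goes the other way: either the branch is effectively
	 unconditional here (CONDITION is const_true_rtx) or TRIAL neither
	 traps nor sets anything needed on the other path.  Failing that,
	 fall back to an annul-if-false slot, which is only possible when
	 every insn already collected has the same annulling sense.  */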
      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))
	    && (must_annul = 1,
		check_annul_list_true_false (0, delay_list)
		&& check_annul_list_true_false (0, new_delay_list)
		&& eligible_for_annul_false (insn, total_slots_filled,
					     trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
\f
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (insn, condition, seq,
				   delay_list, sets, needed, other_needed,
				   slots_to_fill, pslots_filled, pannul_p)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )
	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  delete_from_delay_slot (trial);
	  continue;
	}

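      /* As above: prefer an ordinary delay slot; otherwise try an
	 annul-if-true slot, which requires every insn already in the
	 list to share that annulling sense.  */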
      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (must_annul || delay_list == NULL) && (must_annul = 1,
	     check_annul_list_true_false (1, delay_list)
	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}
\f
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (insn, thread)
     rtx insn, thread;
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH,
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
	mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (GET_CODE (trial) == INSN
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_related_insns (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	}

      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (trial, &needed, 1);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (filled_insn, &needed, 1);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &needed, 1)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  rtx new;

		  update_block (dtrial, thread);
		  new = delete_from_delay_slot (dtrial);
		  if (INSN_DELETED_P (thread))
		    thread = new;
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
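		/* Record the merge with SImode as a marker: entries made
		   with SImode came from a delay slot and must later be
		   removed with delete_from_delay_slot, while the VOIDmode
		   entries made above are ordinary insns in the thread.  */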
		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
						  merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	  else
	    {
	      /* Keep track of the set/referenced resources for the delay
		 slots of any trial insns we encounter.  */
	      mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
	      mark_referenced_resources (dtrial, &needed, 1);
	    }
	}
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not do so any more.  In that case delete
     all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each
     insn in the delay list so that we know that it isn't only being used
     at the target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      rtx new;

	      update_block (XEXP (merged_insns, 0), thread);
	      new = delete_from_delay_slot (XEXP (merged_insns, 0));
	      if (INSN_DELETED_P (thread))
		thread = new;
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_related_insns (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
\f
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */

static rtx
redundant_insn (insn, target, delay_list)
     rtx insn;
     rtx target;
     rtx delay_list;
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;
  unsigned insns_to_search;

  /* If INSN has any REG_UNUSED notes, it can't match anything since we
     are allowed to not actually assign to such a register.  */
  if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
    return 0;

  /* Scan backwards looking for a match.  */
1679 for (trial = PREV_INSN (target),
1680 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1681 trial && insns_to_search > 0;
1682 trial = PREV_INSN (trial), --insns_to_search)
1683 {
1684 if (GET_CODE (trial) == CODE_LABEL)
1685 return 0;
1686
1687 if (! INSN_P (trial))
1688 continue;
1689
1690 pat = PATTERN (trial);
1691 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1692 continue;
1693
1694 if (GET_CODE (pat) == SEQUENCE)
1695 {
1696 /* Stop for a CALL and its delay slots because it is difficult to
1697 track its resource needs correctly. */
1698 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1699 return 0;
1700
1701 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1702 slots because it is difficult to track its resource needs
1703 correctly. */
1704
1705 #ifdef INSN_SETS_ARE_DELAYED
1706 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1707 return 0;
1708 #endif
1709
1710 #ifdef INSN_REFERENCES_ARE_DELAYED
1711 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1712 return 0;
1713 #endif
1714
1715 /* See if any of the insns in the delay slot match, updating
1716 resource requirements as we go. */
1717 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1718 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
1719 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
1720 && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
1721 break;
1722
1723 /* If we found a match, exit this loop early. */
1724 if (i > 0)
1725 break;
1726 }
1727
1728 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
1729 && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
1730 break;
1731 }
1732
1733 /* If we didn't find an insn that matches, return 0. */
1734 if (trial == 0)
1735 return 0;
1736
1737 /* See what resources this insn sets and needs. If they overlap, or
1738 if this insn references CC0, it can't be redundant. */
1739
1740 CLEAR_RESOURCE (&needed);
1741 CLEAR_RESOURCE (&set);
1742 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
1743 mark_referenced_resources (insn, &needed, 1);
1744
1745 /* If TARGET is a SEQUENCE, get the main insn. */
1746 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1747 target_main = XVECEXP (PATTERN (target), 0, 0);
1748
1749 if (resource_conflicts_p (&needed, &set)
1750 #ifdef HAVE_cc0
1751 || reg_mentioned_p (cc0_rtx, ipat)
1752 #endif
1753 /* The insn requiring the delay may not set anything needed or set by
1754 INSN. */
1755 || insn_sets_resource_p (target_main, &needed, 1)
1756 || insn_sets_resource_p (target_main, &set, 1))
1757 return 0;
1758
1759 /* Insns we pass may not set either NEEDED or SET, so merge them for
1760 simpler tests. */
1761 needed.memory |= set.memory;
1762 needed.unch_memory |= set.unch_memory;
1763 IOR_HARD_REG_SET (needed.regs, set.regs);
1764
1765 /* This insn isn't redundant if it conflicts with an insn that either is
1766 or will be in a delay slot of TARGET. */
1767
1768 while (delay_list)
1769 {
1770 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
1771 return 0;
1772 delay_list = XEXP (delay_list, 1);
1773 }
1774
1775 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1776 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
1777 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
1778 return 0;
1779
1780 /* Scan backwards until we reach a label or an insn that uses something
1781 INSN sets or sets something INSN uses or sets. */
1782
1783 for (trial = PREV_INSN (target),
1784 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1785 trial && GET_CODE (trial) != CODE_LABEL && insns_to_search > 0;
1786 trial = PREV_INSN (trial), --insns_to_search)
1787 {
1788 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
1789 && GET_CODE (trial) != JUMP_INSN)
1790 continue;
1791
1792 pat = PATTERN (trial);
1793 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1794 continue;
1795
1796 if (GET_CODE (pat) == SEQUENCE)
1797 {
1798 /* If this is a CALL_INSN and its delay slots, it is hard to track
1799 the resource needs properly, so give up. */
1800 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1801 return 0;
1802
1803 /* If this is an INSN or JUMP_INSN with delayed effects, it
1804 is hard to track the resource needs properly, so give up. */
1805
1806 #ifdef INSN_SETS_ARE_DELAYED
1807 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1808 return 0;
1809 #endif
1810
1811 #ifdef INSN_REFERENCES_ARE_DELAYED
1812 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1813 return 0;
1814 #endif
1815
1816 /* See if any of the insns in the delay slot match, updating
1817 resource requirements as we go. */
1818 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1819 {
1820 rtx candidate = XVECEXP (pat, 0, i);
1821
1822 /* If an insn will be annulled if the branch is false, it isn't
1823 considered as a possible duplicate insn. */
1824 if (rtx_equal_p (PATTERN (candidate), ipat)
1825 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1826 && INSN_FROM_TARGET_P (candidate)))
1827 {
1828 /* Show that this insn will be used in the sequel. */
1829 INSN_FROM_TARGET_P (candidate) = 0;
1830 return candidate;
1831 }
1832
1833 /* Unless this is an annulled insn from the target of a branch,
1834 we must stop if it sets anything needed or set by INSN. */
1835 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1836 || ! INSN_FROM_TARGET_P (candidate))
1837 && insn_sets_resource_p (candidate, &needed, 1))
1838 return 0;
1839 }
1840
1841 /* If the insn requiring the delay slot conflicts with INSN, we
1842 must stop. */
1843 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
1844 return 0;
1845 }
1846 else
1847 {
1848 /* See if TRIAL is the same as INSN. */
1849 pat = PATTERN (trial);
1850 if (rtx_equal_p (pat, ipat))
1851 return trial;
1852
1853 /* Can't go any further if TRIAL conflicts with INSN. */
1854 if (insn_sets_resource_p (trial, &needed, 1))
1855 return 0;
1856 }
1857 }
1858
1859 return 0;
1860 }
1861 \f
1862 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
1863 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
1864 is non-zero, we are allowed to fall into this thread; otherwise, we are
1865 not.
1866
1867 If LABEL is used more than once or we pass a label other than LABEL before
1868 finding an active insn, we do not own this thread. */
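
/* For illustration (label names invented): we own the thread at L2 only
   if the branch being scanned is its sole user and nothing falls into it:

	b L2		; sole reference to L2
	...
   L2:	insn A		; we may move A and delete it from here

   If another branch also targets L2, or if ALLOW_FALLTHROUGH is zero and
   execution can fall into L2, deleting A would change that other path,
   so the thread is not ours. */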
1869
1870 static int
1871 own_thread_p (thread, label, allow_fallthrough)
1872 rtx thread;
1873 rtx label;
1874 int allow_fallthrough;
1875 {
1876 rtx active_insn;
1877 rtx insn;
1878
1879 /* We don't own the function end. */
1880 if (thread == 0)
1881 return 0;
1882
1883 /* Get the first active insn, or THREAD, if it is an active insn. */
1884 active_insn = next_active_insn (PREV_INSN (thread));
1885
1886 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
1887 if (GET_CODE (insn) == CODE_LABEL
1888 && (insn != label || LABEL_NUSES (insn) != 1))
1889 return 0;
1890
1891 if (allow_fallthrough)
1892 return 1;
1893
1894 /* Ensure that we reach a BARRIER before any insn or label. */
1895 for (insn = prev_nonnote_insn (thread);
1896 insn == 0 || GET_CODE (insn) != BARRIER;
1897 insn = prev_nonnote_insn (insn))
1898 if (insn == 0
1899 || GET_CODE (insn) == CODE_LABEL
1900 || (GET_CODE (insn) == INSN
1901 && GET_CODE (PATTERN (insn)) != USE
1902 && GET_CODE (PATTERN (insn)) != CLOBBER))
1903 return 0;
1904
1905 return 1;
1906 }
1907 \f
1908 /* Called when INSN is being moved from a location near the target of a jump.
1909 We leave a marker of the form (use (INSN)) immediately in front
1910 of WHERE for mark_target_live_regs. These markers will be deleted when
1911 reorg finishes.
1912
1913 We used to try to update the live status of registers if WHERE is at
1914 the start of a basic block, but that can't work since we may remove a
1915 BARRIER in relax_delay_slots. */
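
/* E.g. (insn numbers invented): if insn 42 is moved away from near the
   jump target, a marker of roughly the form

	(use (insn 42 ...))

   is emitted immediately before WHERE, so that mark_target_live_regs
   still accounts for the values insn 42 makes live. */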
1916
1917 static void
1918 update_block (insn, where)
1919 rtx insn;
1920 rtx where;
1921 {
1922 /* Ignore if this was in a delay slot and it came from the target of
1923 a branch. */
1924 if (INSN_FROM_TARGET_P (insn))
1925 return;
1926
1927 emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
1928
1929 /* INSN might be making a value live in a block where it wasn't live
1930 before. So recompute liveness information for this block. */
1931
1932 incr_ticks_for_insn (insn);
1933 }
1934
1935 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
1936 the basic block containing the jump. */
1937
1938 static int
1939 reorg_redirect_jump (jump, nlabel)
1940 rtx jump;
1941 rtx nlabel;
1942 {
1943 incr_ticks_for_insn (jump);
1944 return redirect_jump (jump, nlabel, 1);
1945 }
1946
1947 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1948 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1949 that reference values used in INSN. If we find one, then we move the
1950 REG_DEAD note to INSN.
1951
1952 This is needed to handle the case where a later insn (after INSN) has a
1953 REG_DEAD note for a register used by INSN, and this later insn subsequently
1954 gets moved before a CODE_LABEL because it is a redundant insn. In this
1955 case, mark_target_live_regs may be confused into thinking the register
1956 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
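
/* A sketch of the hazard (insn and register numbers invented):

	insn 10: ... use of (reg 3) ...		; moving into a delay slot
	insn 11: ...				; has REG_DEAD (reg 3)

   The REG_DEAD note is moved from insn 11 to insn 10; otherwise, if
   insn 11 later migrated above a CODE_LABEL as a redundant insn,
   mark_target_live_regs could wrongly treat (reg 3) as dead at the
   label. */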
1957
1958 static void
1959 update_reg_dead_notes (insn, delayed_insn)
1960 rtx insn, delayed_insn;
1961 {
1962 rtx p, link, next;
1963
1964 for (p = next_nonnote_insn (insn); p != delayed_insn;
1965 p = next_nonnote_insn (p))
1966 for (link = REG_NOTES (p); link; link = next)
1967 {
1968 next = XEXP (link, 1);
1969
1970 if (REG_NOTE_KIND (link) != REG_DEAD
1971 || GET_CODE (XEXP (link, 0)) != REG)
1972 continue;
1973
1974 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1975 {
1976 /* Move the REG_DEAD note from P to INSN. */
1977 remove_note (p, link);
1978 XEXP (link, 1) = REG_NOTES (insn);
1979 REG_NOTES (insn) = link;
1980 }
1981 }
1982 }
1983
1984 /* Called when an insn redundant with start_insn is deleted. If there
1985 is a REG_DEAD note for the target of start_insn between start_insn
1986 and stop_insn, then the REG_DEAD note needs to be deleted since the
1987 value no longer dies there.
1988
1989 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1990 confused into thinking the register is dead. */
1991
1992 static void
1993 fix_reg_dead_note (start_insn, stop_insn)
1994 rtx start_insn, stop_insn;
1995 {
1996 rtx p, link, next;
1997
1998 for (p = next_nonnote_insn (start_insn); p != stop_insn;
1999 p = next_nonnote_insn (p))
2000 for (link = REG_NOTES (p); link; link = next)
2001 {
2002 next = XEXP (link, 1);
2003
2004 if (REG_NOTE_KIND (link) != REG_DEAD
2005 || GET_CODE (XEXP (link, 0)) != REG)
2006 continue;
2007
2008 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
2009 {
2010 remove_note (p, link);
2011 return;
2012 }
2013 }
2014 }
2015
2016 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
2017
2018 This handles the case of udivmodXi4 instructions which optimize their
2019 output depending on whether any REG_UNUSED notes are present.
2020 We must make sure that INSN calculates as many results as REDUNDANT_INSN
2021 does. */
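
/* A sketch with invented registers: suppose the remaining insn I and the
   deleted redundant insn R both compute a quotient and a remainder:

	I: (set (reg q) ...) (set (reg r) ...)	 with REG_UNUSED (reg r)
	R: (set (reg q) ...) (set (reg r) ...)	 no REG_UNUSED note

   Since R's remainder result may be used later, I's REG_UNUSED note for
   (reg r) is removed so that I keeps computing both results. */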
2022
2023 static void
2024 update_reg_unused_notes (insn, redundant_insn)
2025 rtx insn, redundant_insn;
2026 {
2027 rtx link, next;
2028
2029 for (link = REG_NOTES (insn); link; link = next)
2030 {
2031 next = XEXP (link, 1);
2032
2033 if (REG_NOTE_KIND (link) != REG_UNUSED
2034 || GET_CODE (XEXP (link, 0)) != REG)
2035 continue;
2036
2037 if (! find_regno_note (redundant_insn, REG_UNUSED,
2038 REGNO (XEXP (link, 0))))
2039 remove_note (insn, link);
2040 }
2041 }
2042 \f
2043 /* Scan a function looking for insns that need a delay slot and find insns to
2044 put into the delay slot.
2045
2046 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
2047 as calls). We do these first since we don't want jump insns (that are
2048 easier to fill) to get the only insns that could be used for non-jump insns.
2049 When it is zero, only try to fill JUMP_INSNs.
2050
2051 When slots are filled in this manner, the insns (including the
2052 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2053 it is possible to tell whether a delay slot has really been filled
2054 or not. `final' knows how to deal with this, by communicating
2055 through FINAL_SEQUENCE. */
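
#if 0
/* A minimal sketch (deliberately not compiled) of how a filled insn can
   be recognized and its slot count read back; SEQ is assumed to be the
   PATTERN of an insn produced by emit_delay_sequence. Element 0 of the
   SEQUENCE is the insn needing the slots; the remaining elements fill
   them. */

static int
count_filled_slots (seq)
     rtx seq;
{
  return GET_CODE (seq) == SEQUENCE ? XVECLEN (seq, 0) - 1 : 0;
}
#endif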
2056
2057 static void
2058 fill_simple_delay_slots (non_jumps_p)
2059 int non_jumps_p;
2060 {
2061 rtx insn, pat, trial, next_trial;
2062 int i;
2063 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2064 struct resources needed, set;
2065 int slots_to_fill, slots_filled;
2066 rtx delay_list;
2067
2068 for (i = 0; i < num_unfilled_slots; i++)
2069 {
2070 int flags;
2071 /* Get the next insn to fill. If it has already had any slots assigned,
2072 we can't do anything with it. Maybe we'll improve this later. */
2073
2074 insn = unfilled_slots_base[i];
2075 if (insn == 0
2076 || INSN_DELETED_P (insn)
2077 || (GET_CODE (insn) == INSN
2078 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2079 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2080 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2081 continue;
2082
2083 /* It may have been that this insn used to need delay slots, but
2084 now doesn't; ignore in that case. This can happen, for example,
2085 on the HP PA RISC, where the number of delay slots depends on
2086 what insns are nearby. */
2087 slots_to_fill = num_delay_slots (insn);
2088
2089 /* Some machine descriptions have defined instructions to have
2090 delay slots only in certain circumstances which may depend on
2091 nearby insns (which change due to reorg's actions).
2092
2093 For example, the PA port normally has delay slots for unconditional
2094 jumps.
2095
2096 However, the PA port claims such jumps do not have a delay slot
2097 if they are immediate successors of certain CALL_INSNs. This
2098 allows the port to favor filling the delay slot of the call with
2099 the unconditional jump. */
2100 if (slots_to_fill == 0)
2101 continue;
2102
2103 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2104 says how many. After initialization, first try optimizing
2105
2106 call _foo call _foo
2107 nop add %o7,.-L1,%o7
2108 b,a L1
2109 nop
2110
2111 If this case applies, the delay slot of the call is filled with
2112 the unconditional jump. This is done first to avoid having the
2113 delay slot of the call filled in the backward scan. Also, since
2114 the unconditional jump is likely to also have a delay slot, that
2115 insn must exist when it is subsequently scanned.
2116
2117 This is tried on each insn with delay slots as some machines
2118 have insns which perform calls, but are not represented as
2119 CALL_INSNs. */
2120
2121 slots_filled = 0;
2122 delay_list = 0;
2123
2124 if (GET_CODE (insn) == JUMP_INSN)
2125 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2126 else
2127 flags = get_jump_flags (insn, NULL_RTX);
2128
2129 if ((trial = next_active_insn (insn))
2130 && GET_CODE (trial) == JUMP_INSN
2131 && simplejump_p (trial)
2132 && eligible_for_delay (insn, slots_filled, trial, flags)
2133 && no_labels_between_p (insn, trial)
2134 && ! can_throw_internal (trial))
2135 {
2136 rtx *tmp;
2137 slots_filled++;
2138 delay_list = add_to_delay_list (trial, delay_list);
2139
2140 /* TRIAL may have had its delay slot filled, then unfilled. When
2141 the delay slot is unfilled, TRIAL is placed back on the unfilled
2142 slots obstack. Unfortunately, it is placed on the end of the
2143 obstack, not in its original location. Therefore, we must search
2144 from entry i + 1 to the end of the unfilled slots obstack to
2145 try and find TRIAL. */
2146 tmp = &unfilled_slots_base[i + 1];
2147 while (*tmp != trial && tmp != unfilled_slots_next)
2148 tmp++;
2149
2150 /* Remove the unconditional jump from consideration for delay slot
2151 filling and unthread it. */
2152 if (*tmp == trial)
2153 *tmp = 0;
2154 {
2155 rtx next = NEXT_INSN (trial);
2156 rtx prev = PREV_INSN (trial);
2157 if (prev)
2158 NEXT_INSN (prev) = next;
2159 if (next)
2160 PREV_INSN (next) = prev;
2161 }
2162 }
2163
2164 /* Now, scan backwards from the insn to search for a potential
2165 delay-slot candidate. Stop searching when a label or jump is hit.
2166
2167 For each candidate, if it is to go into the delay slot (moved
2168 forward in execution sequence), it must not need or set any resources
2169 that were set by later insns and must not set any resources that
2170 are needed for those insns.
2171
2172 The delay slot insn itself sets resources unless it is a call
2173 (in which case the called routine, not the insn itself, is doing
2174 the setting). */
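
/* E.g., on a hypothetical one-slot machine (names invented):

	add %r1,%r2	; candidate found by the backward scan
	call _bar	; insn needing the delay slot
	 nop
   becomes
	call _bar
	 add %r1,%r2

   which is valid only because nothing between the add and the call, and
   nothing the call itself needs or sets, involves %r1 or %r2. */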
2175
2176 if (slots_filled < slots_to_fill)
2177 {
2178 CLEAR_RESOURCE (&needed);
2179 CLEAR_RESOURCE (&set);
2180 mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
2181 mark_referenced_resources (insn, &needed, 0);
2182
2183 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2184 trial = next_trial)
2185 {
2186 next_trial = prev_nonnote_insn (trial);
2187
2188 /* This must be an INSN or CALL_INSN. */
2189 pat = PATTERN (trial);
2190
2191 /* USE and CLOBBER at this level are just for flow; ignore them. */
2192 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2193 continue;
2194
2195 /* Check for resource conflict first, to avoid unnecessary
2196 splitting. */
2197 if (! insn_references_resource_p (trial, &set, 1)
2198 && ! insn_sets_resource_p (trial, &set, 1)
2199 && ! insn_sets_resource_p (trial, &needed, 1)
2200 #ifdef HAVE_cc0
2201 /* Can't separate set of cc0 from its use. */
2202 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2203 #endif
2204 && ! can_throw_internal (trial))
2205 {
2206 trial = try_split (pat, trial, 1);
2207 next_trial = prev_nonnote_insn (trial);
2208 if (eligible_for_delay (insn, slots_filled, trial, flags))
2209 {
2210 /* In this case, we are searching backward, so if we
2211 find insns to put on the delay list, we want
2212 to put them at the head, rather than the
2213 tail, of the list. */
2214
2215 update_reg_dead_notes (trial, insn);
2216 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2217 trial, delay_list);
2218 update_block (trial, trial);
2219 delete_related_insns (trial);
2220 if (slots_to_fill == ++slots_filled)
2221 break;
2222 continue;
2223 }
2224 }
2225
2226 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2227 mark_referenced_resources (trial, &needed, 1);
2228 }
2229 }
2230
2231 /* If all needed slots haven't been filled, we come here. */
2232
2233 /* Try to optimize case of jumping around a single insn. */
2234 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2235 if (slots_filled != slots_to_fill
2236 && delay_list == 0
2237 && GET_CODE (insn) == JUMP_INSN
2238 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2239 {
2240 delay_list = optimize_skip (insn);
2241 if (delay_list)
2242 slots_filled += 1;
2243 }
2244 #endif
2245
2246 /* Try to get insns from beyond the insn needing the delay slot.
2247 These insns can neither set nor reference resources set in insns being
2248 skipped, cannot set resources in the insn being skipped, and, if this
2249 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2250 call might not return).
2251
2252 There used to be code which continued past the target label if
2253 we saw all uses of the target label. This code did not work,
2254 because it failed to account for some instructions which were
2255 both annulled and marked as from the target. This can happen as a
2256 result of optimize_skip. Since this code was redundant with
2257 fill_eager_delay_slots anyways, it was just deleted. */
2258
2259 if (slots_filled != slots_to_fill
2260 /* If this instruction could throw an exception which is
2261 caught in the same function, then it's not safe to fill
2262 the delay slot with an instruction from beyond this
2263 point. For example, consider:
2264
2265 int i = 2;
2266
2267 try {
2268 f();
2269 i = 3;
2270 } catch (...) {}
2271
2272 return i;
2273
2274 Even though `i' is a local variable, we must be sure not
2275 to put `i = 3' in the delay slot if `f' might throw an
2276 exception.
2277
2278 Presumably, we should also check to see if we could get
2279 back to this function via `setjmp'. */
2280 && ! can_throw_internal (insn)
2281 && (GET_CODE (insn) != JUMP_INSN
2282 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2283 && ! simplejump_p (insn)
2284 && JUMP_LABEL (insn) != 0)))
2285 {
2286 /* Invariant: TARGET is the insn's jump label if INSN is a
2287 JUMP_INSN; otherwise, it is zero. */
2288 rtx target = 0;
2289 int maybe_never = 0;
2290 rtx pat, trial_delay;
2291
2292 CLEAR_RESOURCE (&needed);
2293 CLEAR_RESOURCE (&set);
2294
2295 if (GET_CODE (insn) == CALL_INSN)
2296 {
2297 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2298 mark_referenced_resources (insn, &needed, 1);
2299 maybe_never = 1;
2300 }
2301 else
2302 {
2303 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2304 mark_referenced_resources (insn, &needed, 1);
2305 if (GET_CODE (insn) == JUMP_INSN)
2306 target = JUMP_LABEL (insn);
2307 }
2308
2309 if (target == 0)
2310 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2311 {
2312 next_trial = next_nonnote_insn (trial);
2313
2314 if (GET_CODE (trial) == CODE_LABEL
2315 || GET_CODE (trial) == BARRIER)
2316 break;
2317
2318 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2319 pat = PATTERN (trial);
2320
2321 /* Stand-alone USE and CLOBBER are just for flow. */
2322 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2323 continue;
2324
2325 /* If this already has filled delay slots, get the insn needing
2326 the delay slots. */
2327 if (GET_CODE (pat) == SEQUENCE)
2328 trial_delay = XVECEXP (pat, 0, 0);
2329 else
2330 trial_delay = trial;
2331
2332 /* Stop our search when seeing an unconditional jump. */
2333 if (GET_CODE (trial_delay) == JUMP_INSN)
2334 break;
2335
2336 /* See if we have a resource problem before we try to
2337 split. */
2338 if (GET_CODE (pat) != SEQUENCE
2339 && ! insn_references_resource_p (trial, &set, 1)
2340 && ! insn_sets_resource_p (trial, &set, 1)
2341 && ! insn_sets_resource_p (trial, &needed, 1)
2342 #ifdef HAVE_cc0
2343 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2344 #endif
2345 && ! (maybe_never && may_trap_p (pat))
2346 && (trial = try_split (pat, trial, 0))
2347 && eligible_for_delay (insn, slots_filled, trial, flags)
2348 && ! can_throw_internal(trial))
2349 {
2350 next_trial = next_nonnote_insn (trial);
2351 delay_list = add_to_delay_list (trial, delay_list);
2352
2353 #ifdef HAVE_cc0
2354 if (reg_mentioned_p (cc0_rtx, pat))
2355 link_cc0_insns (trial);
2356 #endif
2357
2358 delete_related_insns (trial);
2359 if (slots_to_fill == ++slots_filled)
2360 break;
2361 continue;
2362 }
2363
2364 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2365 mark_referenced_resources (trial, &needed, 1);
2366
2367 /* Ensure we don't put insns between the setting of cc and the
2368 comparison by moving a setting of cc into an earlier delay
2369 slot since these insns could clobber the condition code. */
2370 set.cc = 1;
2371
2372 /* If this is a call or jump, we might not get here. */
2373 if (GET_CODE (trial_delay) == CALL_INSN
2374 || GET_CODE (trial_delay) == JUMP_INSN)
2375 maybe_never = 1;
2376 }
2377
2378 /* If there are slots left to fill and our search was stopped by an
2379 unconditional branch, try the insn at the branch target. We can
2380 redirect the branch if it works.
2381
2382 Don't do this if the insn at the branch target is a branch. */
2383 if (slots_to_fill != slots_filled
2384 && trial
2385 && GET_CODE (trial) == JUMP_INSN
2386 && simplejump_p (trial)
2387 && (target == 0 || JUMP_LABEL (trial) == target)
2388 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2389 && ! (GET_CODE (next_trial) == INSN
2390 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2391 && GET_CODE (next_trial) != JUMP_INSN
2392 && ! insn_references_resource_p (next_trial, &set, 1)
2393 && ! insn_sets_resource_p (next_trial, &set, 1)
2394 && ! insn_sets_resource_p (next_trial, &needed, 1)
2395 #ifdef HAVE_cc0
2396 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2397 #endif
2398 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
2399 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2400 && eligible_for_delay (insn, slots_filled, next_trial, flags)
2401 && ! can_throw_internal (trial))
2402 {
2403 rtx new_label = next_active_insn (next_trial);
2404
2405 if (new_label != 0)
2406 new_label = get_label_before (new_label);
2407 else
2408 new_label = find_end_label ();
2409
2410 delay_list
2411 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2412 slots_filled++;
2413 reorg_redirect_jump (trial, new_label);
2414
2415 /* If we merged because we both jumped to the same place,
2416 redirect the original insn also. */
2417 if (target)
2418 reorg_redirect_jump (insn, new_label);
2419 }
2420 }
2421
2422 /* If this is an unconditional jump, then try to get insns from the
2423 target of the jump. */
2424 if (GET_CODE (insn) == JUMP_INSN
2425 && simplejump_p (insn)
2426 && slots_filled != slots_to_fill)
2427 delay_list
2428 = fill_slots_from_thread (insn, const_true_rtx,
2429 next_active_insn (JUMP_LABEL (insn)),
2430 NULL, 1, 1,
2431 own_thread_p (JUMP_LABEL (insn),
2432 JUMP_LABEL (insn), 0),
2433 slots_to_fill, &slots_filled,
2434 delay_list);
2435
2436 if (delay_list)
2437 unfilled_slots_base[i]
2438 = emit_delay_sequence (insn, delay_list, slots_filled);
2439
2440 if (slots_to_fill == slots_filled)
2441 unfilled_slots_base[i] = 0;
2442
2443 note_delay_statistics (slots_filled, 0);
2444 }
2445
2446 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2447 /* See if the epilogue needs any delay slots. Try to fill them if so.
2448 The only thing we can do is scan backwards from the end of the
2449 function. If we did this in a previous pass, it is incorrect to do it
2450 again. */
2451 if (current_function_epilogue_delay_list)
2452 return;
2453
2454 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2455 if (slots_to_fill == 0)
2456 return;
2457
2458 slots_filled = 0;
2459 CLEAR_RESOURCE (&set);
2460
2461 /* The frame pointer and stack pointer are needed at the beginning of
2462 the epilogue, so instructions setting them cannot be put in the
2463 epilogue delay slot. However, everything else needed at function
2464 end is safe, so we don't want to use end_of_function_needs here. */
2465 CLEAR_RESOURCE (&needed);
2466 if (frame_pointer_needed)
2467 {
2468 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2469 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2470 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2471 #endif
2472 #ifdef EXIT_IGNORE_STACK
2473 if (! EXIT_IGNORE_STACK
2474 || current_function_sp_is_unchanging)
2475 #endif
2476 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2477 }
2478 else
2479 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2480
2481 #ifdef EPILOGUE_USES
2482 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2483 {
2484 if (EPILOGUE_USES (i))
2485 SET_HARD_REG_BIT (needed.regs, i);
2486 }
2487 #endif
2488
2489 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2490 trial = PREV_INSN (trial))
2491 {
2492 if (GET_CODE (trial) == NOTE)
2493 continue;
2494 pat = PATTERN (trial);
2495 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2496 continue;
2497
2498 if (! insn_references_resource_p (trial, &set, 1)
2499 && ! insn_sets_resource_p (trial, &needed, 1)
2500 && ! insn_sets_resource_p (trial, &set, 1)
2501 #ifdef HAVE_cc0
2502 /* Don't want to mess with cc0 here. */
2503 && ! reg_mentioned_p (cc0_rtx, pat)
2504 #endif
2505 && ! can_throw_internal (trial))
2506 {
2507 trial = try_split (pat, trial, 1);
2508 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2509 {
2510 /* Here as well we are searching backward, so put the
2511 insns we find on the head of the list. */
2512
2513 current_function_epilogue_delay_list
2514 = gen_rtx_INSN_LIST (VOIDmode, trial,
2515 current_function_epilogue_delay_list);
2516 mark_end_of_function_resources (trial, 1);
2517 update_block (trial, trial);
2518 delete_related_insns (trial);
2519
2520 /* Clear deleted bit so final.c will output the insn. */
2521 INSN_DELETED_P (trial) = 0;
2522
2523 if (slots_to_fill == ++slots_filled)
2524 break;
2525 continue;
2526 }
2527 }
2528
2529 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2530 mark_referenced_resources (trial, &needed, 1);
2531 }
2532
2533 note_delay_statistics (slots_filled, 0);
2534 #endif
2535 }
2536 \f
2537 /* Try to find insns to place in delay slots.
2538
2539 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2540 or is an unconditional branch if CONDITION is const_true_rtx.
2541 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2542
2543 THREAD is a flow of control: either the insns to be executed if the
2544 branch is true, or those if it is false; THREAD_IF_TRUE says which.
2545
2546 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2547 to see if any potential delay slot insns set things needed there.
2548
2549 LIKELY is non-zero if it is extremely likely that the branch will be
2550 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2551 end of a loop back up to the top.
2552
2553 OWN_THREAD is true if we are the only user of the thread, i.e., it
2554 is the fallthrough code of our jump or the target of the
2555 jump when we are the only jump going there.
2556
2557 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2558 case, we can only take insns from the head of the thread for our delay
2559 slot. We then adjust the jump to point after the insns we have taken. */
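
/* Pictorially (labels invented), for a conditional branch INSN:

	beq L1		; INSN
	insn F		; fallthrough thread (THREAD_IF_TRUE == 0)
	...
   L1:	insn T		; target thread (THREAD_IF_TRUE != 0)

   Delay-slot candidates are drawn from THREAD; OPPOSITE_THREAD is only
   examined to see whether a candidate clobbers something needed on the
   other path. */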
2560
2561 static rtx
2562 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
2563 thread_if_true, own_thread,
2564 slots_to_fill, pslots_filled, delay_list)
2565 rtx insn;
2566 rtx condition;
2567 rtx thread, opposite_thread;
2568 int likely;
2569 int thread_if_true;
2570 int own_thread;
2571 int slots_to_fill, *pslots_filled;
2572 rtx delay_list;
2573 {
2574 rtx new_thread;
2575 struct resources opposite_needed, set, needed;
2576 rtx trial;
2577 int lose = 0;
2578 int must_annul = 0;
2579 int flags;
2580
2581 /* Validate our arguments. */
2582 if ((condition == const_true_rtx && ! thread_if_true)
2583 || (! own_thread && ! thread_if_true))
2584 abort ();
2585
2586 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2587
2588 /* If our thread is the end of subroutine, we can't get any delay
2589 insns from that. */
2590 if (thread == 0)
2591 return delay_list;
2592
2593 /* If this is an unconditional branch, nothing is needed at the
2594 opposite thread. Otherwise, compute what is needed there. */
2595 if (condition == const_true_rtx)
2596 CLEAR_RESOURCE (&opposite_needed);
2597 else
2598 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2599
2600 /* If the insn at THREAD can be split, do it here to avoid having to
2601 update THREAD and NEW_THREAD if it is done in the loop below. Also
2602 initialize NEW_THREAD. */
2603
2604 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2605
2606 /* Scan insns at THREAD. We are looking for an insn that can be removed
2607 from THREAD (it neither sets nor references resources that were set
2608 ahead of it and it doesn't set anything needed by the insns ahead of
2609 it) and that can either be placed in an annulling insn or isn't
2610 needed at OPPOSITE_THREAD. */
2611
2612 CLEAR_RESOURCE (&needed);
2613 CLEAR_RESOURCE (&set);
2614
2615 /* If we do not own this thread, we must stop as soon as we find
2616 something that we can't put in a delay slot, since all we can do
2617 is branch into THREAD at a later point. Therefore, labels stop
2618 the search if this is not the `true' thread. */
2619
2620 for (trial = thread;
2621 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2622 trial = next_nonnote_insn (trial))
2623 {
2624 rtx pat, old_trial;
2625
2626 /* If we have passed a label, we no longer own this thread. */
2627 if (GET_CODE (trial) == CODE_LABEL)
2628 {
2629 own_thread = 0;
2630 continue;
2631 }
2632
2633 pat = PATTERN (trial);
2634 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2635 continue;
2636
2637 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2638 don't separate or copy insns that set and use CC0. */
2639 if (! insn_references_resource_p (trial, &set, 1)
2640 && ! insn_sets_resource_p (trial, &set, 1)
2641 && ! insn_sets_resource_p (trial, &needed, 1)
2642 #ifdef HAVE_cc0
2643 && ! (reg_mentioned_p (cc0_rtx, pat)
2644 && (! own_thread || ! sets_cc0_p (pat)))
2645 #endif
2646 && ! can_throw_internal (trial))
2647 {
2648 rtx prior_insn;
2649
2650 /* If TRIAL is redundant with some insn before INSN, we don't
2651 actually need to add it to the delay list; we can merely pretend
2652 we did. */
2653 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2654 {
2655 fix_reg_dead_note (prior_insn, insn);
2656 if (own_thread)
2657 {
2658 update_block (trial, thread);
2659 if (trial == thread)
2660 {
2661 thread = next_active_insn (thread);
2662 if (new_thread == trial)
2663 new_thread = thread;
2664 }
2665
2666 delete_related_insns (trial);
2667 }
2668 else
2669 {
2670 update_reg_unused_notes (prior_insn, trial);
2671 new_thread = next_active_insn (trial);
2672 }
2673
2674 continue;
2675 }
2676
2677 /* There are two ways we can win: If TRIAL doesn't set anything
2678 needed at the opposite thread and can't trap, or if it can
2679 go into an annulled delay slot. */
2680 if (!must_annul
2681 && (condition == const_true_rtx
2682 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2683 && ! may_trap_p (pat))))
2684 {
2685 old_trial = trial;
2686 trial = try_split (pat, trial, 0);
2687 if (new_thread == old_trial)
2688 new_thread = trial;
2689 if (thread == old_trial)
2690 thread = trial;
2691 pat = PATTERN (trial);
2692 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2693 goto winner;
2694 }
2695 else if (0
2696 #ifdef ANNUL_IFTRUE_SLOTS
2697 || ! thread_if_true
2698 #endif
2699 #ifdef ANNUL_IFFALSE_SLOTS
2700 || thread_if_true
2701 #endif
2702 )
2703 {
2704 old_trial = trial;
2705 trial = try_split (pat, trial, 0);
2706 if (new_thread == old_trial)
2707 new_thread = trial;
2708 if (thread == old_trial)
2709 thread = trial;
2710 pat = PATTERN (trial);
2711 if ((must_annul || delay_list == NULL) && (thread_if_true
2712 ? check_annul_list_true_false (0, delay_list)
2713 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2714 : check_annul_list_true_false (1, delay_list)
2715 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2716 {
2717 rtx temp;
2718
2719 must_annul = 1;
2720 winner:
2721
2722 #ifdef HAVE_cc0
2723 if (reg_mentioned_p (cc0_rtx, pat))
2724 link_cc0_insns (trial);
2725 #endif
2726
2727 /* If we own this thread, delete the insn. If this is the
2728 destination of a branch, show that a basic block status
2729 may have been updated. In any case, mark the new
2730 starting point of this thread. */
2731 if (own_thread)
2732 {
2733 rtx note;
2734
2735 update_block (trial, thread);
2736 if (trial == thread)
2737 {
2738 thread = next_active_insn (thread);
2739 if (new_thread == trial)
2740 new_thread = thread;
2741 }
2742
2743 /* We are moving this insn, not deleting it. We must
2744 temporarily increment the use count on any referenced
2745 label lest it be deleted by delete_related_insns. */
2746 note = find_reg_note (trial, REG_LABEL, 0);
2747 /* REG_LABEL could be NOTE_INSN_DELETED_LABEL too. */
2748 if (note && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2749 LABEL_NUSES (XEXP (note, 0))++;
2750
2751 delete_related_insns (trial);
2752
2753 if (note && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2754 LABEL_NUSES (XEXP (note, 0))--;
2755 }
2756 else
2757 new_thread = next_active_insn (trial);
2758
2759 temp = own_thread ? trial : copy_rtx (trial);
2760 if (thread_if_true)
2761 INSN_FROM_TARGET_P (temp) = 1;
2762
2763 delay_list = add_to_delay_list (temp, delay_list);
2764
2765 if (slots_to_fill == ++(*pslots_filled))
2766 {
2767 /* Even though we have filled all the slots, we
2768 may be branching to a location that has a
2769 redundant insn. Skip any if so. */
2770 while (new_thread && ! own_thread
2771 && ! insn_sets_resource_p (new_thread, &set, 1)
2772 && ! insn_sets_resource_p (new_thread, &needed, 1)
2773 && ! insn_references_resource_p (new_thread,
2774 &set, 1)
2775 && (prior_insn
2776 = redundant_insn (new_thread, insn,
2777 delay_list)))
2778 {
2779 /* We know we do not own the thread, so no need
2780 to call update_block and delete_insn. */
2781 fix_reg_dead_note (prior_insn, insn);
2782 update_reg_unused_notes (prior_insn, new_thread);
2783 new_thread = next_active_insn (new_thread);
2784 }
2785 break;
2786 }
2787
2788 continue;
2789 }
2790 }
2791 }
2792
2793 /* This insn can't go into a delay slot. */
2794 lose = 1;
2795 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2796 mark_referenced_resources (trial, &needed, 1);
2797
2798 /* Ensure we don't put insns between the setting of cc and the comparison
2799 by moving a setting of cc into an earlier delay slot since these insns
2800 could clobber the condition code. */
2801 set.cc = 1;
2802
2803 /* If this insn is a register-register copy and the next insn has
2804 a use of our destination, change it to use our source. That way,
2805 it will become a candidate for our delay slot the next time
2806 through this loop. This case occurs commonly in loops that
2807 scan a list.
2808
2809 We could check for more complex cases than those tested below,
2810 but it doesn't seem worth it. It might also be a good idea to try
2811 to swap the two insns. That might do better.
2812
2813 We can't do this if the next insn modifies our destination, because
2814 that would make the replacement into the insn invalid. We also can't
2815 do this if it modifies our source, because it might be an earlyclobber
2816 operand. This latter test also prevents updating the contents of
2817 a PRE_INC. */
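
/* E.g., with invented registers, given

	mov %r2,%r1	; TRIAL: a copy that cannot go into the slot
	add %r3,%r2	; next insn reads the copy's destination

   rewriting the add as "add %r3,%r1" removes its dependence on the
   copy, so the add may qualify as a delay-slot candidate on a later
   iteration of this loop. */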
2818
2819 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
2820 && GET_CODE (SET_SRC (pat)) == REG
2821 && GET_CODE (SET_DEST (pat)) == REG)
2822 {
2823 rtx next = next_nonnote_insn (trial);
2824
2825 if (next && GET_CODE (next) == INSN
2826 && GET_CODE (PATTERN (next)) != USE
2827 && ! reg_set_p (SET_DEST (pat), next)
2828 && ! reg_set_p (SET_SRC (pat), next)
2829 && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2830 && ! modified_in_p (SET_DEST (pat), next))
2831 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2832 }
2833 }
2834
2835 /* If we stopped on a branch insn that has delay slots, see if we can
2836 steal some of the insns in those slots. */
2837 if (trial && GET_CODE (trial) == INSN
2838 && GET_CODE (PATTERN (trial)) == SEQUENCE
2839 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
2840 {
2841 /* If this is the `true' thread, we will want to follow the jump,
2842 so we can only do this if we have taken everything up to here. */
2843 if (thread_if_true && trial == new_thread)
2844 {
2845 delay_list
2846 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2847 delay_list, &set, &needed,
2848 &opposite_needed, slots_to_fill,
2849 pslots_filled, &must_annul,
2850 &new_thread);
2851 /* If we owned the thread and are told that it branched
2852 elsewhere, make sure we own the thread at the new location. */
2853 if (own_thread && trial != new_thread)
2854 own_thread = own_thread_p (new_thread, new_thread, 0);
2855 }
2856 else if (! thread_if_true)
2857 delay_list
2858 = steal_delay_list_from_fallthrough (insn, condition,
2859 PATTERN (trial),
2860 delay_list, &set, &needed,
2861 &opposite_needed, slots_to_fill,
2862 pslots_filled, &must_annul);
2863 }
2864
2865 /* If we haven't found anything for this delay slot and it is very
2866 likely that the branch will be taken, see if the insn at our target
2867 increments or decrements a register with an increment that does not
2868 depend on the destination register. If so, try to place the opposite
2869 arithmetic insn after the jump insn and put the arithmetic insn in the
2870 delay slot. If we can't do this, return. */
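
/* A sketch with invented names, for a likely-taken branch whose target
   starts with an increment:

	bne L1				bne L1'
	...			=>	 add %r1,4	; in the slot
					sub %r1,4	; undo on fall through
   L1:	add %r1,4		  L1':	...

   The increment always executes in the delay slot; the compensating
   subtract after the branch cancels it on the not-taken path, and the
   branch is redirected past the original increment. */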
2871 if (delay_list == 0 && likely && new_thread
2872 && GET_CODE (new_thread) == INSN
2873 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2874 && asm_noperands (PATTERN (new_thread)) < 0)
2875 {
2876 rtx pat = PATTERN (new_thread);
2877 rtx dest;
2878 rtx src;
2879
2880 trial = new_thread;
2881 pat = PATTERN (trial);
2882
2883 if (GET_CODE (trial) != INSN
2884 || GET_CODE (pat) != SET
2885 || ! eligible_for_delay (insn, 0, trial, flags)
2886 || can_throw_internal (trial))
2887 return 0;
2888
2889 dest = SET_DEST (pat), src = SET_SRC (pat);
2890 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2891 && rtx_equal_p (XEXP (src, 0), dest)
2892 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
2893 && ! side_effects_p (pat))
2894 {
2895 rtx other = XEXP (src, 1);
2896 rtx new_arith;
2897 rtx ninsn;
2898
2899 /* If this is a constant adjustment, use the same code with
2900 the negated constant. Otherwise, reverse the sense of the
2901 arithmetic. */
2902 if (GET_CODE (other) == CONST_INT)
2903 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2904 negate_rtx (GET_MODE (src), other));
2905 else
2906 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2907 GET_MODE (src), dest, other);
2908
2909 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2910 insn);
2911
2912 if (recog_memoized (ninsn) < 0
2913 || (extract_insn (ninsn), ! constrain_operands (1)))
2914 {
2915 delete_related_insns (ninsn);
2916 return 0;
2917 }
2918
2919 if (own_thread)
2920 {
2921 update_block (trial, thread);
2922 if (trial == thread)
2923 {
2924 thread = next_active_insn (thread);
2925 if (new_thread == trial)
2926 new_thread = thread;
2927 }
2928 delete_related_insns (trial);
2929 }
2930 else
2931 new_thread = next_active_insn (trial);
2932
2933 ninsn = own_thread ? trial : copy_rtx (trial);
2934 if (thread_if_true)
2935 INSN_FROM_TARGET_P (ninsn) = 1;
2936
2937 delay_list = add_to_delay_list (ninsn, NULL_RTX);
2938 (*pslots_filled)++;
2939 }
2940 }
2941
2942 if (delay_list && must_annul)
2943 INSN_ANNULLED_BRANCH_P (insn) = 1;
2944
2945 /* If we are to branch into the middle of this thread, find an appropriate
2946 label or make a new one if none, and redirect INSN to it. If we hit the
2947 end of the function, use the end-of-function label. */
2948 if (new_thread != thread)
2949 {
2950 rtx label;
2951
2952 if (! thread_if_true)
2953 abort ();
2954
2955 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
2956 && (simplejump_p (new_thread)
2957 || GET_CODE (PATTERN (new_thread)) == RETURN)
2958 && redirect_with_delay_list_safe_p (insn,
2959 JUMP_LABEL (new_thread),
2960 delay_list))
2961 new_thread = follow_jumps (JUMP_LABEL (new_thread));
2962
2963 if (new_thread == 0)
2964 label = find_end_label ();
2965 else if (GET_CODE (new_thread) == CODE_LABEL)
2966 label = new_thread;
2967 else
2968 label = get_label_before (new_thread);
2969
2970 reorg_redirect_jump (insn, label);
2971 }
2972
2973 return delay_list;
2974 }
2975 \f
2976 /* Make another attempt to find insns to place in delay slots.
2977
2978 We previously looked for insns located in front of the delay insn
2979 and, for non-jump delay insns, located behind the delay insn.
2980
2981 Here only try to schedule jump insns and try to move insns from either
2982 the target or the following insns into the delay slot. If annulling is
2983 supported, we are likely to do this. Otherwise, we can do this only
2984 if safe. */
2985
2986 static void
2987 fill_eager_delay_slots ()
2988 {
2989 rtx insn;
2990 int i;
2991 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2992
2993 for (i = 0; i < num_unfilled_slots; i++)
2994 {
2995 rtx condition;
2996 rtx target_label, insn_at_target, fallthrough_insn;
2997 rtx delay_list = 0;
2998 int own_target;
2999 int own_fallthrough;
3000 int prediction, slots_to_fill, slots_filled;
3001
3002 insn = unfilled_slots_base[i];
3003 if (insn == 0
3004 || INSN_DELETED_P (insn)
3005 || GET_CODE (insn) != JUMP_INSN
3006 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3007 continue;
3008
3009 slots_to_fill = num_delay_slots (insn);
3010 /* Some machine descriptions have defined instructions to have
3011 delay slots only in certain circumstances which may depend on
3012 nearby insns (which change due to reorg's actions).
3013
3014 For example, the PA port normally has delay slots for unconditional
3015 jumps.
3016
3017 However, the PA port claims such jumps do not have a delay slot
3018 if they are immediate successors of certain CALL_INSNs. This
3019 allows the port to favor filling the delay slot of the call with
3020 the unconditional jump. */
3021 if (slots_to_fill == 0)
3022 continue;
3023
3024 slots_filled = 0;
3025 target_label = JUMP_LABEL (insn);
3026 condition = get_branch_condition (insn, target_label);
3027
3028 if (condition == 0)
3029 continue;
3030
3031 /* Get the next active fallthrough and target insns and see if we own
3032 them. Then see whether the branch is likely true. We don't need
3033 to do a lot of this for unconditional branches. */
3034
3035 insn_at_target = next_active_insn (target_label);
3036 own_target = own_thread_p (target_label, target_label, 0);
3037
3038 if (condition == const_true_rtx)
3039 {
3040 own_fallthrough = 0;
3041 fallthrough_insn = 0;
3042 prediction = 2;
3043 }
3044 else
3045 {
3046 fallthrough_insn = next_active_insn (insn);
3047 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3048 prediction = mostly_true_jump (insn, condition);
3049 }
3050
3051 /* If this insn is expected to branch, first try to get insns from our
3052 target, then our fallthrough insns. If it is not expected to branch,
3053 try the other order. */
3054
3055 if (prediction > 0)
3056 {
3057 delay_list
3058 = fill_slots_from_thread (insn, condition, insn_at_target,
3059 fallthrough_insn, prediction == 2, 1,
3060 own_target,
3061 slots_to_fill, &slots_filled, delay_list);
3062
3063 if (delay_list == 0 && own_fallthrough)
3064 {
3065 /* Even though we didn't find anything for delay slots,
3066 we might have found a redundant insn which we deleted
3067 from the thread that was filled. So we have to recompute
3068 the next insn at the target. */
3069 target_label = JUMP_LABEL (insn);
3070 insn_at_target = next_active_insn (target_label);
3071
3072 delay_list
3073 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3074 insn_at_target, 0, 0,
3075 own_fallthrough,
3076 slots_to_fill, &slots_filled,
3077 delay_list);
3078 }
3079 }
3080 else
3081 {
3082 if (own_fallthrough)
3083 delay_list
3084 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3085 insn_at_target, 0, 0,
3086 own_fallthrough,
3087 slots_to_fill, &slots_filled,
3088 delay_list);
3089
3090 if (delay_list == 0)
3091 delay_list
3092 = fill_slots_from_thread (insn, condition, insn_at_target,
3093 next_active_insn (insn), 0, 1,
3094 own_target,
3095 slots_to_fill, &slots_filled,
3096 delay_list);
3097 }
3098
3099 if (delay_list)
3100 unfilled_slots_base[i]
3101 = emit_delay_sequence (insn, delay_list, slots_filled);
3102
3103 if (slots_to_fill == slots_filled)
3104 unfilled_slots_base[i] = 0;
3105
3106 note_delay_statistics (slots_filled, 1);
3107 }
3108 }
3109 \f
3110 /* Once we have tried two ways to fill a delay slot, make a pass over the
3111 code to try to improve the results and to do such things as more jump
3112 threading. */
3113
3114 static void
3115 relax_delay_slots (first)
3116 rtx first;
3117 {
3118 rtx insn, next, pat;
3119 rtx trial, delay_insn, target_label;
3120
3121 /* Look at every JUMP_INSN and see if we can improve it. */
3122 for (insn = first; insn; insn = next)
3123 {
3124 rtx other;
3125
3126 next = next_active_insn (insn);
3127
3128 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3129 the next insn, or jumps to a label that is not the last of a
3130 group of consecutive labels. */
3131 if (GET_CODE (insn) == JUMP_INSN
3132 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3133 && (target_label = JUMP_LABEL (insn)) != 0)
3134 {
3135 target_label = follow_jumps (target_label);
3136 target_label = prev_label (next_active_insn (target_label));
3137
3138 if (target_label == 0)
3139 target_label = find_end_label ();
3140
3141 if (next_active_insn (target_label) == next
3142 && ! condjump_in_parallel_p (insn))
3143 {
3144 delete_jump (insn);
3145 continue;
3146 }
3147
3148 if (target_label != JUMP_LABEL (insn))
3149 reorg_redirect_jump (insn, target_label);
3150
3151 /* See if this jump branches around an unconditional jump.
3152 If so, invert this jump and point it to the target of the
3153 second jump. */
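
/* I.e., with invented labels:

	beq L1			bne L2
	b L2		=>	...
   L1:	...		   L1:	...

   after which the unconditional jump is deleted. */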
3154 if (next && GET_CODE (next) == JUMP_INSN
3155 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3156 && next_active_insn (target_label) == next_active_insn (next)
3157 && no_labels_between_p (insn, next))
3158 {
3159 rtx label = JUMP_LABEL (next);
3160
3161 /* Be careful how we do this to avoid deleting code or
3162 labels that are momentarily dead. See similar optimization
3163 in jump.c.
3164
3165 We also need to ensure we properly handle the case when
3166 invert_jump fails. */
3167
3168 ++LABEL_NUSES (target_label);
3169 if (label)
3170 ++LABEL_NUSES (label);
3171
3172 if (invert_jump (insn, label, 1))
3173 {
3174 delete_related_insns (next);
3175 next = insn;
3176 }
3177
3178 if (label)
3179 --LABEL_NUSES (label);
3180
3181 if (--LABEL_NUSES (target_label) == 0)
3182 delete_related_insns (target_label);
3183
3184 continue;
3185 }
3186 }
3187
3188 /* If this is an unconditional jump and the previous insn is a
3189 conditional jump, try reversing the condition of the previous
3190 insn and swapping our targets. The next pass might be able to
3191 fill the slots.
3192
3193 Don't do this if we expect the conditional branch to be true, because
3194 we would then be making the more common case longer. */
3195
3196 if (GET_CODE (insn) == JUMP_INSN
3197 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3198 && (other = prev_active_insn (insn)) != 0
3199 && (condjump_p (other) || condjump_in_parallel_p (other))
3200 && no_labels_between_p (other, insn)
3201 && 0 > mostly_true_jump (other,
3202 get_branch_condition (other,
3203 JUMP_LABEL (other))))
3204 {
3205 rtx other_target = JUMP_LABEL (other);
3206 target_label = JUMP_LABEL (insn);
3207
3208 if (invert_jump (other, target_label, 0))
3209 reorg_redirect_jump (insn, other_target);
3210 }
3211
3212 /* Now look only at cases where we have filled a delay slot. */
3213 if (GET_CODE (insn) != INSN
3214 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3215 continue;
3216
3217 pat = PATTERN (insn);
3218 delay_insn = XVECEXP (pat, 0, 0);
3219
3220 /* See if the first insn in the delay slot is redundant with some
3221 previous insn. Remove it from the delay slot if so; then set up
3222 to reprocess this insn. */
3223 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3224 {
3225 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3226 next = prev_active_insn (next);
3227 continue;
3228 }
3229
3230 /* See if we have a RETURN insn with a filled delay slot followed
3231 by a RETURN insn with an unfilled delay slot. If so, we can delete
3232 the first RETURN (but not its delay insn). This gives the same
3233 effect in fewer instructions.
3234
3235 Only do so if optimizing for size since this results in slower, but
3236 smaller code. */
3237 if (optimize_size
3238 && GET_CODE (PATTERN (delay_insn)) == RETURN
3239 && next
3240 && GET_CODE (next) == JUMP_INSN
3241 && GET_CODE (PATTERN (next)) == RETURN)
3242 {
3243 int i;
3244
3245 /* Delete the RETURN and just execute the delay list insns.
3246
3247 We do this by deleting the INSN containing the SEQUENCE, then
3248 re-emitting the insns separately, and then deleting the RETURN.
3249 This allows the count of the jump target to be properly
3250 decremented. */
3251
3252 /* Clear the from target bit, since these insns are no longer
3253 in delay slots. */
3254 for (i = 0; i < XVECLEN (pat, 0); i++)
3255 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3256
3257 trial = PREV_INSN (insn);
3258 delete_related_insns (insn);
3259 emit_insn_after (pat, trial);
3260 delete_scheduled_jump (delay_insn);
3261 continue;
3262 }
3263
3264 /* Now look only at the cases where we have a filled JUMP_INSN. */
3265 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3266 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3267 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3268 continue;
3269
3270 target_label = JUMP_LABEL (delay_insn);
3271
3272 if (target_label)
3273 {
3274 /* If this jump goes to another unconditional jump, thread it, but
3275 don't convert a jump into a RETURN here. */
3276 trial = follow_jumps (target_label);
3277 /* We use next_real_insn instead of next_active_insn, so that
3278 the special USE insns emitted by reorg won't be ignored.
3279 If they are ignored, then they will get deleted if target_label
3280 is now unreachable, and that would cause mark_target_live_regs
3281 to fail. */
3282 trial = prev_label (next_real_insn (trial));
3283 if (trial == 0 && target_label != 0)
3284 trial = find_end_label ();
3285
3286 if (trial != target_label
3287 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3288 {
3289 reorg_redirect_jump (delay_insn, trial);
3290 target_label = trial;
3291 }
3292
3293 /* If the first insn at TARGET_LABEL is redundant with a previous
3294 insn, redirect the jump to the following insn and process again. */
3295 trial = next_active_insn (target_label);
3296 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3297 && redundant_insn (trial, insn, 0)
3298 && ! can_throw_internal (trial))
3299 {
3300 rtx tmp;
3301
3302 /* Figure out where to emit the special USE insn so we don't
3303 later incorrectly compute register live/death info. */
3304 tmp = next_active_insn (trial);
3305 if (tmp == 0)
3306 tmp = find_end_label ();
3307
3308 /* Insert the special USE insn and update dataflow info. */
3309 update_block (trial, tmp);
3310
3311 /* Now emit a label before the special USE insn, and
3312 redirect our jump to the new label. */
3313 target_label = get_label_before (PREV_INSN (tmp));
3314 reorg_redirect_jump (delay_insn, target_label);
3315 next = insn;
3316 continue;
3317 }
3318
3319 /* Similarly, if it is an unconditional jump with one insn in its
3320 delay list and that insn is redundant, thread the jump. */
3321 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3322 && XVECLEN (PATTERN (trial), 0) == 2
3323 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3324 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3325 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3326 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3327 {
3328 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3329 if (target_label == 0)
3330 target_label = find_end_label ();
3331
3332 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
3333 insn))
3334 {
3335 reorg_redirect_jump (delay_insn, target_label);
3336 next = insn;
3337 continue;
3338 }
3339 }
3340 }
3341
3342 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3343 && prev_active_insn (target_label) == insn
3344 && ! condjump_in_parallel_p (delay_insn)
3345 #ifdef HAVE_cc0
3346 /* If the last insn in the delay slot sets CC0 for some insn,
3347 various code assumes that it is in a delay slot. We could
3348 put it back where it belonged and delete the register notes,
3349 but it doesn't seem worthwhile in this uncommon case. */
3350 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3351 REG_CC_USER, NULL_RTX)
3352 #endif
3353 )
3354 {
3355 int i;
3356
3357 /* All this insn does is execute its delay list and jump to the
3358 following insn. So delete the jump and just execute the delay
3359 list insns.
3360
3361 We do this by deleting the INSN containing the SEQUENCE, then
3362 re-emitting the insns separately, and then deleting the jump.
3363 This allows the count of the jump target to be properly
3364 decremented. */
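/* Conceptual before/after (RTL sketch):

        (sequence [(jump L1) (slot insn)])   L1: ...
    =>  (slot insn)                          L1: ...

   The branch only reached the insn that follows it anyway, so the
   delay-list insns are re-emitted in place and the jump removed.  */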
3365
3366 /* Clear the from target bit, since these insns are no longer
3367 in delay slots. */
3368 for (i = 0; i < XVECLEN (pat, 0); i++)
3369 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3370
3371 trial = PREV_INSN (insn);
3372 delete_related_insns (insn);
3373 emit_insn_after (pat, trial);
3374 delete_scheduled_jump (delay_insn);
3375 continue;
3376 }
3377
3378 /* See if this is an unconditional jump around a single insn which is
3379 identical to the one in its delay slot. In this case, we can just
3380 delete the branch and the insn in its delay slot. */
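/* Sketch, assuming a hypothetical one-slot target:

            b L1           ; unconditional, filled
            add r1,r2      ; delay slot
            add r1,r2      ; NEXT, identical to the slot insn
        L1: ...

   Either path executes exactly one ADD before reaching L1, so the
   branch and its slot copy can be deleted, leaving just NEXT.  */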
3381 if (next && GET_CODE (next) == INSN
3382 && prev_label (next_active_insn (next)) == target_label
3383 && simplejump_p (insn)
3384 && XVECLEN (pat, 0) == 2
3385 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3386 {
3387 delete_related_insns (insn);
3388 continue;
3389 }
3390
3391 /* See if this jump (with its delay slots) branches around another
3392 jump (without delay slots). If so, invert this jump and point
3393 it to the target of the second jump. We cannot do this for
3394 annulled jumps, though. Again, don't convert a jump to a RETURN
3395 here. */
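/* Sketch (hypothetical labels):

            beq  L2        ; filled conditional branch
            <delay slots>
            b    L3        ; NEXT, an unfilled unconditional jump
        L2: ...
    =>
            bne  L3        ; inverted, delay slots kept
        L2: ...

   The unconditional jump is deleted; the INSN_FROM_TARGET_P bits on
   the slot insns are flipped below to match the reversed branch.  */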
3396 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3397 && next && GET_CODE (next) == JUMP_INSN
3398 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3399 && next_active_insn (target_label) == next_active_insn (next)
3400 && no_labels_between_p (insn, next))
3401 {
3402 rtx label = JUMP_LABEL (next);
3403 rtx old_label = JUMP_LABEL (delay_insn);
3404
3405 if (label == 0)
3406 label = find_end_label ();
3407
3408 /* find_end_label can generate a new label. Check this first. */
3409 if (no_labels_between_p (insn, next)
3410 && redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3411 {
3412 /* Be careful how we do this to avoid deleting code or labels
3413 that are momentarily dead. See similar optimization in
3414 jump.c.  */
3415 if (old_label)
3416 ++LABEL_NUSES (old_label);
3417
3418 if (invert_jump (delay_insn, label, 1))
3419 {
3420 int i;
3421
3422 /* Must update the INSN_FROM_TARGET_P bits now that
3423 the branch is reversed, so that mark_target_live_regs
3424 will handle the delay slot insn correctly. */
3425 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
3426 {
3427 rtx slot = XVECEXP (PATTERN (insn), 0, i);
3428 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
3429 }
3430
3431 delete_related_insns (next);
3432 next = insn;
3433 }
3434
3435 if (old_label && --LABEL_NUSES (old_label) == 0)
3436 delete_related_insns (old_label);
3437 continue;
3438 }
3439 }
3440
3441 /* If we own the thread opposite the way this insn branches, see if we
3442 can merge its delay slots with following insns. */
3443 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3444 && own_thread_p (NEXT_INSN (insn), 0, 1))
3445 try_merge_delay_insns (insn, next);
3446 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3447 && own_thread_p (target_label, target_label, 0))
3448 try_merge_delay_insns (insn, next_active_insn (target_label));
3449
3450 /* If we get here, we haven't deleted INSN. But we may have deleted
3451 NEXT, so recompute it. */
3452 next = next_active_insn (insn);
3453 }
3454 }
3455 \f
3456 #ifdef HAVE_return
3457
3458 /* Look for filled jumps to the end-of-function label. We can try to convert
3459 them into RETURN insns if the insns in the delay slot are valid for the
3460 RETURN as well. */
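/* Conceptual transformation (sketch):

        (sequence [(jump END_OF_FUNCTION_LABEL) (slot insn)])
    =>  (slot insn) (return)

   provided each slot insn is also valid in a RETURN's delay slots;
   the new RETURN is then queued so its own slots can be refilled.  */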
3461
3462 static void
3463 make_return_insns (first)
3464 rtx first;
3465 {
3466 rtx insn, jump_insn, pat;
3467 rtx real_return_label = end_of_function_label;
3468 int slots, i;
3469
3470 /* See if there is a RETURN insn in the function other than the one we
3471 made for END_OF_FUNCTION_LABEL. If so, redirect anything we can't
3472 convert into a RETURN to jump to it instead. */
3473 for (insn = first; insn; insn = NEXT_INSN (insn))
3474 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
3475 {
3476 real_return_label = get_label_before (insn);
3477 break;
3478 }
3479
3480 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3481 was equal to END_OF_FUNCTION_LABEL. */
3482 LABEL_NUSES (real_return_label)++;
3483
3484 /* Clear the list of insns to fill so we can use it. */
3485 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3486
3487 for (insn = first; insn; insn = NEXT_INSN (insn))
3488 {
3489 int flags;
3490
3491 /* Only look at filled JUMP_INSNs that go to the end-of-function
3492 label. */
3493 if (GET_CODE (insn) != INSN
3494 || GET_CODE (PATTERN (insn)) != SEQUENCE
3495 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3496 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3497 continue;
3498
3499 pat = PATTERN (insn);
3500 jump_insn = XVECEXP (pat, 0, 0);
3501
3502 /* If we can't make the jump into a RETURN, try to redirect it to
3503 REAL_RETURN_LABEL instead and go on to the next insn. */
3504 if (! reorg_redirect_jump (jump_insn, NULL_RTX))
3505 {
3506 /* Make sure redirecting the jump will not invalidate the delay
3507 slot insns. */
3508 if (redirect_with_delay_slots_safe_p (jump_insn,
3509 real_return_label,
3510 insn))
3511 reorg_redirect_jump (jump_insn, real_return_label);
3512 continue;
3513 }
3514
3515 /* See if this RETURN can accept the insns currently in its delay slot.
3516 It can if it has at least as many slots and the contents of each
3517 slot are valid. */
3518
3519 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3520 slots = num_delay_slots (jump_insn);
3521 if (slots >= XVECLEN (pat, 0) - 1)
3522 {
3523 for (i = 1; i < XVECLEN (pat, 0); i++)
3524 if (! (
3525 #ifdef ANNUL_IFFALSE_SLOTS
3526 (INSN_ANNULLED_BRANCH_P (jump_insn)
3527 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3528 ? eligible_for_annul_false (jump_insn, i - 1,
3529 XVECEXP (pat, 0, i), flags) :
3530 #endif
3531 #ifdef ANNUL_IFTRUE_SLOTS
3532 (INSN_ANNULLED_BRANCH_P (jump_insn)
3533 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3534 ? eligible_for_annul_true (jump_insn, i - 1,
3535 XVECEXP (pat, 0, i), flags) :
3536 #endif
3537 eligible_for_delay (jump_insn, i - 1,
3538 XVECEXP (pat, 0, i), flags)))
3539 break;
3540 }
3541 else
3542 i = 0;
3543
3544 if (i == XVECLEN (pat, 0))
3545 continue;
3546
3547 /* We have to do something with this insn. If it is an unconditional
3548 RETURN, delete the SEQUENCE and output the individual insns,
3549 followed by the RETURN. Then set things up so we try to find
3550 insns for its delay slots, if it needs some. */
3551 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
3552 {
3553 rtx prev = PREV_INSN (insn);
3554
3555 delete_related_insns (insn);
3556 for (i = 1; i < XVECLEN (pat, 0); i++)
3557 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3558
3559 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3560 emit_barrier_after (insn);
3561
3562 if (slots)
3563 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3564 }
3565 else
3566 /* It is probably more efficient to keep this with its current
3567 delay slot as a branch to a RETURN. */
3568 reorg_redirect_jump (jump_insn, real_return_label);
3569 }
3570
3571 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3572 new delay slots we have created. */
3573 if (--LABEL_NUSES (real_return_label) == 0)
3574 delete_related_insns (real_return_label);
3575
3576 fill_simple_delay_slots (1);
3577 fill_simple_delay_slots (0);
3578 }
3579 #endif
3580 \f
3581 /* Try to find insns to place in delay slots. */
3582
3583 void
3584 dbr_schedule (first, file)
3585 rtx first;
3586 FILE *file;
3587 {
3588 rtx insn, next, epilogue_insn = 0;
3589 int i;
3590 #if 0
3591 int old_flag_no_peephole = flag_no_peephole;
3592
3593 /* Execute `final' once in prescan mode to delete any insns that won't be
3594 used. Don't let final try to do any peephole optimization--it will
3595 ruin dataflow information for this pass. */
3596
3597 flag_no_peephole = 1;
3598 final (first, 0, NO_DEBUG, 1, 1);
3599 flag_no_peephole = old_flag_no_peephole;
3600 #endif
3601
3602 /* If the current function has no insns other than the prologue and
3603 epilogue, then do not try to fill any delay slots. */
3604 if (n_basic_blocks == 0)
3605 return;
3606
3607 /* Find the highest INSN_UID and allocate and initialize our map from
3608 INSN_UID's to position in code. */
3609 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3610 {
3611 if (INSN_UID (insn) > max_uid)
3612 max_uid = INSN_UID (insn);
3613 if (GET_CODE (insn) == NOTE
3614 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
3615 epilogue_insn = insn;
3616 }
3617
3618 uid_to_ruid = (int *) xmalloc ((max_uid + 1) * sizeof (int));
3619 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3620 uid_to_ruid[INSN_UID (insn)] = i;
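/* uid_to_ruid now maps each INSN_UID to its ordinal position in the
   insn chain, letting later code decide in constant time whether a
   branch is forward or backward.  */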
3621
3622 /* Initialize the list of insns that need filling. */
3623 if (unfilled_firstobj == 0)
3624 {
3625 gcc_obstack_init (&unfilled_slots_obstack);
3626 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
3627 }
3628
3629 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3630 {
3631 rtx target;
3632
3633 INSN_ANNULLED_BRANCH_P (insn) = 0;
3634 INSN_FROM_TARGET_P (insn) = 0;
3635
3636 /* Skip vector tables. We can't get attributes for them. */
3637 if (GET_CODE (insn) == JUMP_INSN
3638 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
3639 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
3640 continue;
3641
3642 if (num_delay_slots (insn) > 0)
3643 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3644
3645 /* Ensure all jumps go to the last of a set of consecutive labels. */
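/* E.g. given the consecutive labels

        L1: L2: insn ...

   a conditional jump to L1 is redirected to L2, giving every such
   jump a single canonical target.  */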
3646 if (GET_CODE (insn) == JUMP_INSN
3647 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3648 && JUMP_LABEL (insn) != 0
3649 && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
3650 != JUMP_LABEL (insn)))
3651 redirect_jump (insn, target, 1);
3652 }
3653
3654 init_resource_info (epilogue_insn);
3655
3656 /* Show we haven't computed an end-of-function label yet. */
3657 end_of_function_label = 0;
3658
3659 /* Initialize the statistics for this function. */
3660 memset ((char *) num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
3661 memset ((char *) num_filled_delays, 0, sizeof num_filled_delays);
3662
3663 /* Now do the delay slot filling. Try everything twice in case earlier
3664 changes make more slots fillable. */
3665
3666 for (reorg_pass_number = 0;
3667 reorg_pass_number < MAX_REORG_PASSES;
3668 reorg_pass_number++)
3669 {
3670 fill_simple_delay_slots (1);
3671 fill_simple_delay_slots (0);
3672 fill_eager_delay_slots ();
3673 relax_delay_slots (first);
3674 }
3675
3676 /* Delete any USE insns made by update_block; subsequent passes don't need
3677 them or know how to deal with them. */
3678 for (insn = first; insn; insn = next)
3679 {
3680 next = NEXT_INSN (insn);
3681
3682 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
3683 && INSN_P (XEXP (PATTERN (insn), 0)))
3684 next = delete_related_insns (insn);
3685 }
3686
3687 /* If we made an end-of-function label, indicate that it is now
3688 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
3689 If it is now unused, delete it. */
3690 if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
3691 delete_related_insns (end_of_function_label);
3692
3693 #ifdef HAVE_return
3694 if (HAVE_return && end_of_function_label != 0)
3695 make_return_insns (first);
3696 #endif
3697
3698 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3699
3700 /* It is not clear why the line below is needed, but it does seem to be. */
3701 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
3702
3703 if (file)
3704 {
3705 int i, j, need_comma;
3706 int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
3707 int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];
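/* The dump emitted below looks roughly like this (numbers are
   illustrative only):

        ;; Reorg pass #1:
        ;; Reorg function #0
        ;; 12 insns needing delay slots
        ;; 7 got 1 delays, 5 got 0 delays  */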
3708
3709 for (reorg_pass_number = 0;
3710 reorg_pass_number < MAX_REORG_PASSES;
3711 reorg_pass_number++)
3712 {
3713 fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
3714 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
3715 {
3716 need_comma = 0;
3717 fprintf (file, ";; Reorg function #%d\n", i);
3718
3719 fprintf (file, ";; %d insns needing delay slots\n;; ",
3720 num_insns_needing_delays[i][reorg_pass_number]);
3721
3722 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3723 if (num_filled_delays[i][j][reorg_pass_number])
3724 {
3725 if (need_comma)
3726 fprintf (file, ", ");
3727 need_comma = 1;
3728 fprintf (file, "%d got %d delays",
3729 num_filled_delays[i][j][reorg_pass_number], j);
3730 }
3731 fprintf (file, "\n");
3732 }
3733 }
3734 memset ((char *) total_delay_slots, 0, sizeof total_delay_slots);
3735 memset ((char *) total_annul_slots, 0, sizeof total_annul_slots);
3736 for (insn = first; insn; insn = NEXT_INSN (insn))
3737 {
3738 if (! INSN_DELETED_P (insn)
3739 && GET_CODE (insn) == INSN
3740 && GET_CODE (PATTERN (insn)) != USE
3741 && GET_CODE (PATTERN (insn)) != CLOBBER)
3742 {
3743 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
3744 {
3745 j = XVECLEN (PATTERN (insn), 0) - 1;
3746 if (j > MAX_DELAY_HISTOGRAM)
3747 j = MAX_DELAY_HISTOGRAM;
3748 if (INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (insn), 0, 0)))
3749 total_annul_slots[j]++;
3750 else
3751 total_delay_slots[j]++;
3752 }
3753 else if (num_delay_slots (insn) > 0)
3754 total_delay_slots[0]++;
3755 }
3756 }
3757 fprintf (file, ";; Reorg totals: ");
3758 need_comma = 0;
3759 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3760 {
3761 if (total_delay_slots[j])
3762 {
3763 if (need_comma)
3764 fprintf (file, ", ");
3765 need_comma = 1;
3766 fprintf (file, "%d got %d delays", total_delay_slots[j], j);
3767 }
3768 }
3769 fprintf (file, "\n");
3770 #if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
3771 fprintf (file, ";; Reorg annuls: ");
3772 need_comma = 0;
3773 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3774 {
3775 if (total_annul_slots[j])
3776 {
3777 if (need_comma)
3778 fprintf (file, ", ");
3779 need_comma = 1;
3780 fprintf (file, "%d got %d delays", total_annul_slots[j], j);
3781 }
3782 }
3783 fprintf (file, "\n");
3784 #endif
3785 fprintf (file, "\n");
3786 }
3787
3788 /* For all JUMP insns, fill in branch prediction notes, so that during
3789 assembler output a target can set branch prediction bits in the code.
3790 We have to do this now: up to this point the destinations of
3791 jumps can still be moved around and changed, but beyond this
3792 point they cannot. */
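/* The note attached below has the form
        (expr_list:REG_BR_PRED (const_int <flags>) ...)
   where <flags> holds the direction/likelihood bits computed by
   get_jump_flags.  */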
3793 for (insn = first; insn; insn = NEXT_INSN (insn))
3794 {
3795 int pred_flags;
3796
3797 if (GET_CODE (insn) == INSN)
3798 {
3799 rtx pat = PATTERN (insn);
3800
3801 if (GET_CODE (pat) == SEQUENCE)
3802 insn = XVECEXP (pat, 0, 0);
3803 }
3804 if (GET_CODE (insn) != JUMP_INSN)
3805 continue;
3806
3807 pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
3808 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
3809 GEN_INT (pred_flags),
3810 REG_NOTES (insn));
3811 }
3812 free_resource_info ();
3813 free (uid_to_ruid);
3814 }
3815 #endif /* DELAY_SLOTS */