1 /* Perform instruction reorganizations for delay slot filling.
2 Copyright (C) 1992, 1993 Free Software Foundation, Inc.
3 Contributed by Richard Kenner (kenner@nyu.edu).
4 Hacked by Michael Tiemann (tiemann@cygnus.com).
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21
22 /* Instruction reorganization pass.
23
24 This pass runs after register allocation and final jump
25 optimization. It should be the last pass to run before peephole.
26 It serves primarily to fill delay slots of insns, typically branch
27 and call insns. Other insns typically involve more complicated
28 interactions of data dependencies and resource constraints, and
29 are better handled by scheduling before register allocation (by the
30 function `schedule_insns').
31
32 The Branch Penalty is the number of extra cycles that are needed to
33 execute a branch insn. On an ideal machine, branches take a single
34 cycle, and the Branch Penalty is 0. Several RISC machines approach
35 branch delays differently:
36
37 The MIPS and AMD 29000 have a single branch delay slot. Most insns
38 (except other branches) can be used to fill this slot. When the
39 slot is filled, two insns execute in two cycles, reducing the
40 branch penalty to zero.
41
42 The Motorola 88000 conditionally exposes its branch delay slot,
43 so code is shorter when it is turned off, but will run faster
44 when useful insns are scheduled there.
45
46 The IBM ROMP has two forms of branch and call insns, both with and
47 without a delay slot. Much like the 88k, insns not using the delay
48 slot can be shorter (2 bytes vs. 4 bytes), but will run slower.
49
50 The SPARC always has a branch delay slot, but its effects can be
51 annulled when the branch is not taken. This means that failing to
52 find other sources of insns, we can hoist an insn from the branch
53 target that would only be safe to execute knowing that the branch
54 is taken.
55
56 The HP-PA always has a branch delay slot. For unconditional branches
57 its effects can be annulled when the branch is taken. The effects
58 of the delay slot in a conditional branch can be nullified for forward
59 taken branches, or for untaken backward branches. This means
60 we can hoist insns from the fall-through path for forward branches or
61 steal insns from the target of backward branches.
62
63 Three techniques for filling delay slots have been implemented so far:
64
65 (1) `fill_simple_delay_slots' is the simplest, most efficient way
66 to fill delay slots. This pass first looks for insns which come
67 from before the branch and which are safe to execute after the
68 branch. Then it searches after the insn requiring delay slots or,
69 in the case of a branch, for insns that are after the point at
70 which the branch merges into the fallthrough code, if such a point
71 exists. When such insns are found, the branch penalty decreases
72 and no code expansion takes place.
73
74 (2) `fill_eager_delay_slots' is more complicated: it is used for
75 scheduling conditional jumps, or for scheduling jumps which cannot
76 be filled using (1). A machine need not have annulled jumps to use
77 this strategy, but it helps (by keeping more options open).
78 `fill_eager_delay_slots' tries to guess the direction the branch
79 will go; if it guesses right 100% of the time, it can reduce the
80 branch penalty as much as `fill_simple_delay_slots' does. If it
81 guesses wrong 100% of the time, it might as well schedule nops (or
82 on the m88k, unexpose the branch slot). When
83 `fill_eager_delay_slots' takes insns from the fall-through path of
84 the jump, usually there is no code expansion; when it takes insns
85 from the branch target, there is code expansion if it is not the
86 only way to reach that target.
87
88 (3) `relax_delay_slots' uses a set of rules to simplify code that
89 has been reorganized by (1) and (2). It finds cases where
90 a conditional test can be eliminated, jumps can be threaded, extra
91 insns can be eliminated, etc. It is the job of (1) and (2) to do a
92 good job of scheduling locally; `relax_delay_slots' takes care of
93 making the various individual schedules work well together. It is
94 especially tuned to handle the control flow interactions of branch
95 insns. It does nothing for insns with delay slots that do not
96 branch.
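
   As a concrete illustration of technique (1), consider a machine with a
   single delay slot and this sequence (illustrative pseudo-assembly, not
   taken from any particular port):

	add  r3,r1,r2		      b    L5
	b    L5			=>    add  r3,r1,r2	; fills the delay slot
	nop			   L5:
     L5:

   The add is moved into the slot, the nop disappears, and the cycle the
   hardware spends resolving the branch now does useful work, so the branch
   penalty drops to zero with no code expansion.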
97
98 On machines that use CC0, we are very conservative. We will not make
99 a copy of an insn involving CC0 since we want to maintain a 1-1 correspondence
100 between the insn that sets CC0 and the insn that uses it. The insns are
101 allowed to be separated by placing an insn that sets CC0 (but not an insn
102 that uses CC0; we could do this, but it doesn't seem worthwhile) in a
103 delay slot. In that case, we point each insn at the other with REG_CC_USER
104 and REG_CC_SETTER notes. Note that these restrictions affect very few
105 machines because most RISC machines with delay slots will not use CC0
106 (the RT is the only known exception at this point).
107
108 Not yet implemented:
109
110 The Acorn Risc Machine can conditionally execute most insns, so
111 it is profitable to move single insns into a position to execute
112 based on the condition code of the previous insn.
113
114 The HP-PA can conditionally nullify insns, providing a similar
115 effect to the ARM, differing mostly in which insn is "in charge". */
116
117 #include <stdio.h>
118 #include "config.h"
119 #include "rtl.h"
120 #include "insn-config.h"
121 #include "conditions.h"
122 #include "hard-reg-set.h"
123 #include "basic-block.h"
124 #include "regs.h"
125 #include "insn-flags.h"
126 #include "recog.h"
127 #include "flags.h"
128 #include "output.h"
129 #include "obstack.h"
130 #include "insn-attr.h"
131
132 #ifdef DELAY_SLOTS
133
134 #define obstack_chunk_alloc xmalloc
135 #define obstack_chunk_free free
136
137 #ifndef ANNUL_IFTRUE_SLOTS
138 #define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
139 #endif
140 #ifndef ANNUL_IFFALSE_SLOTS
141 #define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
142 #endif
143
144 /* Insns which have delay slots that have not yet been filled. */
145
146 static struct obstack unfilled_slots_obstack;
147 static rtx *unfilled_firstobj;
148
149 /* Define macros to refer to the first and last slot containing unfilled
150 insns. These are used because the list may move and its address
151 should be recomputed at each use. */
152
153 #define unfilled_slots_base \
154 ((rtx *) obstack_base (&unfilled_slots_obstack))
155
156 #define unfilled_slots_next \
157 ((rtx *) obstack_next_free (&unfilled_slots_obstack))
158
159 /* This structure is used to indicate which hardware resources are set or
160 needed by insns so far. */
161
162 struct resources
163 {
164 char memory; /* Insn sets or needs a memory location. */
165 char volatil; /* Insn sets or needs a volatile memory loc. */
166 char cc; /* Insn sets or needs the condition codes. */
167 HARD_REG_SET regs; /* Which registers are set or needed. */
168 };
169
170 /* Macro to clear all resources. */
171 #define CLEAR_RESOURCE(RES) \
172 do { (RES)->memory = (RES)->volatil = (RES)->cc = 0; \
173 CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
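
/* The routines below follow a common pattern with these resource sets: clear
   a set, accumulate what the insns already scanned set and need, and then
   test a candidate against the accumulation. A sketch of the pattern (used,
   for example, by try_merge_delay_insns and by similar filtering elsewhere
   in this file):

	struct resources set, needed;

	CLEAR_RESOURCE (&set);
	CLEAR_RESOURCE (&needed);
	...
	mark_set_resources (insn, &set, 0, 1);		record what INSN sets
	mark_referenced_resources (insn, &needed, 1);	and what it needs
	...
	if (! insn_references_resource_p (trial, &set, 1)
	    && ! insn_sets_resource_p (trial, &set, 1)
	    && ! insn_sets_resource_p (trial, &needed, 1))
	  ... TRIAL is still a candidate for a delay slot ...  */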
174
175 /* Indicates what resources are required at the beginning of the epilogue. */
176 static struct resources start_of_epilogue_needs;
177
178 /* Indicates what resources are required at function end. */
179 static struct resources end_of_function_needs;
180
181 /* Points to the label before the end of the function. */
182 static rtx end_of_function_label;
183
184 /* This structure is used to record liveness information at the targets or
185 fallthrough insns of branches. We will most likely need the information
186 at targets again, so save it in a hash table rather than recomputing it
187 each time. */
188
189 struct target_info
190 {
191 int uid; /* INSN_UID of target. */
192 struct target_info *next; /* Next info for same hash bucket. */
193 HARD_REG_SET live_regs; /* Registers live at target. */
194 int block; /* Basic block number containing target. */
195 int bb_tick; /* Generation count of basic block info. */
196 };
197
198 #define TARGET_HASH_PRIME 257
199
200 /* Define the hash table itself. */
201 static struct target_info **target_hash_table;
202
203 /* For each basic block, we maintain a generation number of its basic
204 block info, which is updated each time we move an insn from the
205 target of a jump. This is the generation number indexed by block
206 number. */
207
208 static int *bb_ticks;
209
210 /* Mapping between INSN_UID's and position in the code since INSN_UID's do
211 not always monotonically increase. */
212 static int *uid_to_ruid;
213
214 /* Highest valid index in `uid_to_ruid'. */
215 static int max_uid;
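
/* It is these ruid (ordering) numbers, rather than the UIDs themselves, that
   the direction heuristics below compare; for instance, get_jump_flags and
   mostly_true_jump decide that a jump is forward with a test of the form

	uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)]

   which stays correct even when UIDs are not in code order. */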
216
217 static void mark_referenced_resources PROTO((rtx, struct resources *, int));
218 static void mark_set_resources PROTO((rtx, struct resources *, int, int));
219 static int stop_search_p PROTO((rtx, int));
220 static int resource_conflicts_p PROTO((struct resources *,
221 struct resources *));
222 static int insn_references_resource_p PROTO((rtx, struct resources *, int));
223 static int insn_sets_resources_p PROTO((rtx, struct resources *, int));
224 static rtx find_end_label PROTO((void));
225 static rtx emit_delay_sequence PROTO((rtx, rtx, int, int));
226 static rtx add_to_delay_list PROTO((rtx, rtx));
227 static void delete_from_delay_slot PROTO((rtx));
228 static void delete_scheduled_jump PROTO((rtx));
229 static void note_delay_statistics PROTO((int, int));
230 static rtx optimize_skip PROTO((rtx));
231 static int get_jump_flags PROTO((rtx, rtx));
232 static int rare_destination PROTO((rtx));
233 static int mostly_true_jump PROTO((rtx, rtx));
234 static rtx get_branch_condition PROTO((rtx, rtx));
235 static int condition_dominates_p PROTO((rtx, rtx));
236 static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
237 struct resources *,
238 struct resources *,
239 struct resources *,
240 int, int *, int *, rtx *));
241 static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
242 struct resources *,
243 struct resources *,
244 struct resources *,
245 int, int *, int *));
246 static void try_merge_delay_insns PROTO((rtx, rtx));
247 static int redundant_insn_p PROTO((rtx, rtx, rtx));
248 static int own_thread_p PROTO((rtx, rtx, int));
249 static int find_basic_block PROTO((rtx));
250 static void update_block PROTO((rtx, rtx));
251 static int reorg_redirect_jump PROTO((rtx, rtx));
252 static void update_reg_dead_notes PROTO((rtx, rtx));
253 static void update_live_status PROTO((rtx, rtx));
254 static rtx next_insn_no_annul PROTO((rtx));
255 static void mark_target_live_regs PROTO((rtx, struct resources *));
256 static void fill_simple_delay_slots PROTO((rtx, int));
257 static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
258 int, int, int, int *));
259 static void fill_eager_delay_slots PROTO((rtx));
260 static void relax_delay_slots PROTO((rtx));
261 static void make_return_insns PROTO((rtx));
262 \f
263 /* Given X, some rtl, and RES, a pointer to a `struct resources', mark
264 which resources are referenced by the insn. If INCLUDE_DELAYED_EFFECTS
265 is TRUE, resources used by the called routine will be included for
266 CALL_INSNs. */
267
268 static void
269 mark_referenced_resources (x, res, include_delayed_effects)
270 register rtx x;
271 register struct resources *res;
272 register int include_delayed_effects;
273 {
274 register enum rtx_code code = GET_CODE (x);
275 register int i, j;
276 register char *format_ptr;
277
278 /* Handle leaf items for which we set resource flags. Also, special-case
279 CALL, SET and CLOBBER operators. */
280 switch (code)
281 {
282 case CONST:
283 case CONST_INT:
284 case CONST_DOUBLE:
285 case PC:
286 case SYMBOL_REF:
287 case LABEL_REF:
288 return;
289
290 case SUBREG:
291 if (GET_CODE (SUBREG_REG (x)) != REG)
292 mark_referenced_resources (SUBREG_REG (x), res, 0);
293 else
294 {
295 int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
296 int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
297 for (i = regno; i < last_regno; i++)
298 SET_HARD_REG_BIT (res->regs, i);
299 }
300 return;
301
302 case REG:
303 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
304 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
305 return;
306
307 case MEM:
308 /* If this memory shouldn't change, it really isn't referencing
309 memory. */
310 if (! RTX_UNCHANGING_P (x))
311 res->memory = 1;
312 res->volatil = MEM_VOLATILE_P (x);
313
314 /* Mark registers used to access memory. */
315 mark_referenced_resources (XEXP (x, 0), res, 0);
316 return;
317
318 case CC0:
319 res->cc = 1;
320 return;
321
322 case UNSPEC_VOLATILE:
323 case ASM_INPUT:
324 /* Traditional asm's are always volatile. */
325 res->volatil = 1;
326 return;
327
328 case ASM_OPERANDS:
329 res->volatil = MEM_VOLATILE_P (x);
330
331 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
332 We cannot just fall through here since then we would be confused
333 by the ASM_INPUT rtxs inside ASM_OPERANDS, which, unlike their normal
334 usage, do not indicate traditional asms. */
335
336 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
337 mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
338 return;
339
340 case CALL:
341 /* The first operand will be a (MEM (xxx)) but doesn't really reference
342 memory. The second operand may be referenced, though. */
343 mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
344 mark_referenced_resources (XEXP (x, 1), res, 0);
345 return;
346
347 case SET:
348 /* Usually, the first operand of SET is set, not referenced. But
349 registers used to access memory are referenced. SET_DEST is
350 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
351
352 mark_referenced_resources (SET_SRC (x), res, 0);
353
354 x = SET_DEST (x);
355 if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
356 mark_referenced_resources (x, res, 0);
357 else if (GET_CODE (x) == SUBREG)
358 x = SUBREG_REG (x);
359 if (GET_CODE (x) == MEM)
360 mark_referenced_resources (XEXP (x, 0), res, 0);
361 return;
362
363 case CLOBBER:
364 return;
365
366 case CALL_INSN:
367 if (include_delayed_effects)
368 {
369 /* A CALL references memory, the frame pointer if it exists, the
370 stack pointer, any global registers and any registers given in
371 USE insns immediately in front of the CALL.
372
373 However, we may have moved some of the parameter loading insns
374 into the delay slot of this CALL. If so, the USE's for them
375 don't count and should be skipped. */
376 rtx insn = PREV_INSN (x);
377 rtx sequence = 0;
378 int seq_size = 0;
379 int i;
380
381 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
382 if (NEXT_INSN (insn) != x)
383 {
384 sequence = PATTERN (NEXT_INSN (insn));
385 seq_size = XVECLEN (sequence, 0);
386 if (GET_CODE (sequence) != SEQUENCE)
387 abort ();
388 }
389
390 res->memory = 1;
391 SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
392 if (frame_pointer_needed)
393 SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
394
395 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
396 if (global_regs[i])
397 SET_HARD_REG_BIT (res->regs, i);
398
399 /* Skip any labels between the CALL_INSN and possible USE insns. */
400 while (GET_CODE (insn) == CODE_LABEL)
401 insn = PREV_INSN (insn);
402
403 for ( ; (insn && GET_CODE (insn) == INSN
404 && GET_CODE (PATTERN (insn)) == USE);
405 insn = PREV_INSN (insn))
406 {
407 for (i = 1; i < seq_size; i++)
408 {
409 rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
410 if (GET_CODE (slot_pat) == SET
411 && rtx_equal_p (SET_DEST (slot_pat),
412 XEXP (PATTERN (insn), 0)))
413 break;
414 }
415 if (i >= seq_size)
416 mark_referenced_resources (XEXP (PATTERN (insn), 0), res, 0);
417 }
418 }
419
420 /* ... fall through to other INSN processing ... */
421
422 case INSN:
423 case JUMP_INSN:
424
425 #ifdef INSN_REFERENCES_ARE_DELAYED
426 if (! include_delayed_effects
427 && INSN_REFERENCES_ARE_DELAYED (x))
428 return;
429 #endif
430
431 /* No special processing, just speed up. */
432 mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
433 return;
434 }
435
436 /* Process each sub-expression and flag what it needs. */
437 format_ptr = GET_RTX_FORMAT (code);
438 for (i = 0; i < GET_RTX_LENGTH (code); i++)
439 switch (*format_ptr++)
440 {
441 case 'e':
442 mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
443 break;
444
445 case 'E':
446 for (j = 0; j < XVECLEN (x, i); j++)
447 mark_referenced_resources (XVECEXP (x, i, j), res,
448 include_delayed_effects);
449 break;
450 }
451 }
452 \f
453 /* Given X, a part of an insn, and a pointer to a `struct resources', RES,
454 indicate which resources are modified by the insn. If INCLUDE_DELAYED_EFFECTS
455 is nonzero, also mark resources potentially set by the called routine.
456
457 If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
458 objects are being referenced instead of set.
459
460 We never mark the insn as modifying the condition code unless it explicitly
461 SETs CC0 even though this is not totally correct. The reason for this is
462 that we require a SET of CC0 to immediately precede the reference to CC0.
463 So if some other insn sets CC0 as a side-effect, we know it cannot affect
464 our computation and thus may be placed in a delay slot. */
465
466 static void
467 mark_set_resources (x, res, in_dest, include_delayed_effects)
468 register rtx x;
469 register struct resources *res;
470 int in_dest;
471 int include_delayed_effects;
472 {
473 register enum rtx_code code;
474 register int i, j;
475 register char *format_ptr;
476
477 restart:
478
479 code = GET_CODE (x);
480
481 switch (code)
482 {
483 case NOTE:
484 case BARRIER:
485 case CODE_LABEL:
486 case USE:
487 case CONST_INT:
488 case CONST_DOUBLE:
489 case LABEL_REF:
490 case SYMBOL_REF:
491 case CONST:
492 case PC:
493 /* These don't set any resources. */
494 return;
495
496 case CC0:
497 if (in_dest)
498 res->cc = 1;
499 return;
500
501 case CALL_INSN:
502 /* Called routine modifies the condition code, memory, any registers
503 that aren't saved across calls, global registers and anything
504 explicitly CLOBBERed immediately after the CALL_INSN. */
505
506 if (include_delayed_effects)
507 {
508 rtx next = NEXT_INSN (x);
509
510 res->cc = res->memory = 1;
511 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
512 if (call_used_regs[i] || global_regs[i])
513 SET_HARD_REG_BIT (res->regs, i);
514
515 /* Skip any possible labels between the CALL_INSN and CLOBBERs. */
516 while (GET_CODE (next) == CODE_LABEL)
517 next = NEXT_INSN (next);
518
519 for (; (next && GET_CODE (next) == INSN
520 && GET_CODE (PATTERN (next)) == CLOBBER);
521 next = NEXT_INSN (next))
522 mark_set_resources (XEXP (PATTERN (next), 0), res, 1, 0);
523 }
524
525 /* ... and also what its RTL says it modifies, if anything. */
526
527 case JUMP_INSN:
528 case INSN:
529
530 /* An insn consisting of just a CLOBBER (or USE) is just for flow
531 and doesn't actually do anything, so we ignore it. */
532
533 #ifdef INSN_SETS_ARE_DELAYED
534 if (! include_delayed_effects
535 && INSN_SETS_ARE_DELAYED (x))
536 return;
537 #endif
538
539 x = PATTERN (x);
540 if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
541 goto restart;
542 return;
543
544 case SET:
545 /* If the source of a SET is a CALL, this is actually done by
546 the called routine. So only include it if we are to include the
547 effects of the called routine. */
548
549 mark_set_resources (SET_DEST (x), res,
550 (include_delayed_effects
551 || GET_CODE (SET_SRC (x)) != CALL),
552 0);
553
554 mark_set_resources (SET_SRC (x), res, 0, 0);
555 return;
556
557 case CLOBBER:
558 mark_set_resources (XEXP (x, 0), res, 1, 0);
559 return;
560
561 case SEQUENCE:
562 for (i = 0; i < XVECLEN (x, 0); i++)
563 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
564 && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
565 mark_set_resources (XVECEXP (x, 0, i), res, 0,
566 include_delayed_effects);
567 return;
568
569 case POST_INC:
570 case PRE_INC:
571 case POST_DEC:
572 case PRE_DEC:
573 mark_set_resources (XEXP (x, 0), res, 1, 0);
574 return;
575
576 case ZERO_EXTRACT:
577 mark_set_resources (XEXP (x, 0), res, in_dest, 0);
578 mark_set_resources (XEXP (x, 1), res, 0, 0);
579 mark_set_resources (XEXP (x, 2), res, 0, 0);
580 return;
581
582 case MEM:
583 if (in_dest)
584 {
585 res->memory = 1;
586 res->volatil = MEM_VOLATILE_P (x);
587 }
588
589 mark_set_resources (XEXP (x, 0), res, 0, 0);
590 return;
591
592 case REG:
593 if (in_dest)
594 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
595 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
596 return;
597 }
598
599 /* Process each sub-expression and flag what it needs. */
600 format_ptr = GET_RTX_FORMAT (code);
601 for (i = 0; i < GET_RTX_LENGTH (code); i++)
602 switch (*format_ptr++)
603 {
604 case 'e':
605 mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
606 break;
607
608 case 'E':
609 for (j = 0; j < XVECLEN (x, i); j++)
610 mark_set_resources (XVECEXP (x, i, j), res, in_dest,
611 include_delayed_effects);
612 break;
613 }
614 }
615 \f
616 /* Return TRUE if this insn should stop the search for insns to fill delay
617 slots. LABELS_P indicates that labels should terminate the search.
618 In all cases, jumps terminate the search. */
619
620 static int
621 stop_search_p (insn, labels_p)
622 rtx insn;
623 int labels_p;
624 {
625 if (insn == 0)
626 return 1;
627
628 switch (GET_CODE (insn))
629 {
630 case NOTE:
631 case CALL_INSN:
632 return 0;
633
634 case CODE_LABEL:
635 return labels_p;
636
637 case JUMP_INSN:
638 case BARRIER:
639 return 1;
640
641 case INSN:
642 /* OK unless it contains a delay slot or is an `asm' insn of some type.
643 We don't know anything about these. */
644 return (GET_CODE (PATTERN (insn)) == SEQUENCE
645 || GET_CODE (PATTERN (insn)) == ASM_INPUT
646 || asm_noperands (PATTERN (insn)) >= 0);
647
648 default:
649 abort ();
650 }
651 }
652 \f
653 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
654 resource set contains a volatile memory reference. Otherwise, return FALSE. */
655
656 static int
657 resource_conflicts_p (res1, res2)
658 struct resources *res1, *res2;
659 {
660 if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
661 || res1->volatil || res2->volatil)
662 return 1;
663
664 #ifdef HARD_REG_SET
665 return (res1->regs & res2->regs) != HARD_CONST (0);
666 #else
667 {
668 int i;
669
670 for (i = 0; i < HARD_REG_SET_LONGS; i++)
671 if ((res1->regs[i] & res2->regs[i]) != 0)
672 return 1;
673 return 0;
674 }
675 #endif
676 }
677
678 /* Return TRUE if any resource marked in RES, a `struct resources', is
679 referenced by INSN. If INCLUDE_DELAYED_EFFECTS is set, resources used
680 by the called routine (for a CALL_INSN) are considered as well.
681
682 We compute this by computing all the resources referenced by INSN and
683 seeing if this conflicts with RES. It might be faster to directly check
684 ourselves, and this is the way it used to work, but it means duplicating
685 a large block of complex code. */
686
687 static int
688 insn_references_resource_p (insn, res, include_delayed_effects)
689 register rtx insn;
690 register struct resources *res;
691 int include_delayed_effects;
692 {
693 struct resources insn_res;
694
695 CLEAR_RESOURCE (&insn_res);
696 mark_referenced_resources (insn, &insn_res, include_delayed_effects);
697 return resource_conflicts_p (&insn_res, res);
698 }
699
700 /* Return TRUE if INSN modifies resources that are marked in RES.
701 INCLUDE_DELAYED_EFFECTS is set if the effects of the called routine should
702 be included. CC0 is only modified if it is explicitly set; see comments
703 in front of mark_set_resources for details. */
704
705 static int
706 insn_sets_resource_p (insn, res, include_delayed_effects)
707 register rtx insn;
708 register struct resources *res;
709 int include_delayed_effects;
710 {
711 struct resources insn_sets;
712
713 CLEAR_RESOURCE (&insn_sets);
714 mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
715 return resource_conflicts_p (&insn_sets, res);
716 }
717 \f
718 /* Find a label at the end of the function or before a RETURN. If there is
719 none, make one. */
720
721 static rtx
722 find_end_label ()
723 {
724 rtx insn;
725
726 /* If we found one previously, return it. */
727 if (end_of_function_label)
728 return end_of_function_label;
729
730 /* Otherwise, see if there is a label at the end of the function. If there
731 is, it must be that RETURN insns aren't needed, so that is our return
732 label and we don't have to do anything else. */
733
734 insn = get_last_insn ();
735 while (GET_CODE (insn) == NOTE
736 || (GET_CODE (insn) == INSN
737 && (GET_CODE (PATTERN (insn)) == USE
738 || GET_CODE (PATTERN (insn)) == CLOBBER)))
739 insn = PREV_INSN (insn);
740
741 /* When a target threads its epilogue we might already have a
742 suitable return insn. If so, put a label before it for the
743 end_of_function_label. */
744 if (GET_CODE (insn) == BARRIER
745 && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
746 && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
747 {
748 rtx temp = PREV_INSN (PREV_INSN (insn));
749 end_of_function_label = gen_label_rtx ();
750 LABEL_NUSES (end_of_function_label) = 0;
751
752 /* Put the label before any USE insns that may precede the RETURN insn. */
753 while (GET_CODE (temp) == USE)
754 temp = PREV_INSN (temp);
755
756 emit_label_after (end_of_function_label, temp);
757 }
758
759 else if (GET_CODE (insn) == CODE_LABEL)
760 end_of_function_label = insn;
761 else
762 {
763 /* Otherwise, make a new label and emit a RETURN and BARRIER,
764 if needed. */
765 end_of_function_label = gen_label_rtx ();
766 LABEL_NUSES (end_of_function_label) = 0;
767 emit_label (end_of_function_label);
768 #ifdef HAVE_return
769 if (HAVE_return)
770 {
771 /* The return we make may have delay slots too. */
772 rtx insn = gen_return ();
773 insn = emit_jump_insn (insn);
774 emit_barrier ();
775 if (num_delay_slots (insn) > 0)
776 obstack_ptr_grow (&unfilled_slots_obstack, insn);
777 }
778 #endif
779 }
780
781 /* Show one additional use for this label so it won't go away until
782 we are done. */
783 ++LABEL_NUSES (end_of_function_label);
784
785 return end_of_function_label;
786 }
787 \f
788 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
789 the pattern of INSN with the SEQUENCE.
790
791 Chain the insns so that NEXT_INSN of each insn in the sequence points to
792 the next and NEXT_INSN of the last insn in the sequence points to
793 the first insn after the sequence. Similarly for PREV_INSN. This makes
794 it easier to scan all insns.
795
796 Returns the SEQUENCE that replaces INSN. */
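
/* Schematically (a sketch, not actual compiler output): if INSN is given two
   filled slots S1 and S2, the stream

	prev -> INSN -> next

   becomes

	prev -> SEQ_INSN -> next	where PATTERN (SEQ_INSN)
					      == (sequence [INSN S1 S2])

   and, within the sequence, PREV_INSN (INSN) is prev while NEXT_INSN runs
   INSN -> S1 -> S2 -> next, so passes that step into a SEQUENCE can still
   walk the insns one by one. */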
797
798 static rtx
799 emit_delay_sequence (insn, list, length, avail)
800 rtx insn;
801 rtx list;
802 int length;
803 int avail;
804 {
805 register int i = 1;
806 register rtx li;
807 int had_barrier = 0;
808
809 /* Allocate the rtvec to hold the insns and the SEQUENCE. */
810 rtvec seqv = rtvec_alloc (length + 1);
811 rtx seq = gen_rtx (SEQUENCE, VOIDmode, seqv);
812 rtx seq_insn = make_insn_raw (seq);
813 rtx first = get_insns ();
814 rtx last = get_last_insn ();
815
816 /* Make a copy of the insn having delay slots. */
817 rtx delay_insn = copy_rtx (insn);
818
819 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
820 confuse further processing. Update LAST in case it was the last insn.
821 We will put the BARRIER back in later. */
822 if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
823 {
824 delete_insn (NEXT_INSN (insn));
825 last = get_last_insn ();
826 had_barrier = 1;
827 }
828
829 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
830 NEXT_INSN (seq_insn) = NEXT_INSN (insn);
831 PREV_INSN (seq_insn) = PREV_INSN (insn);
832
833 if (insn == last)
834 set_new_first_and_last_insn (first, seq_insn);
835 else
836 PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;
837
838 if (insn == first)
839 set_new_first_and_last_insn (seq_insn, last);
840 else
841 NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;
842
843 /* Build our SEQUENCE and rebuild the insn chain. */
844 XVECEXP (seq, 0, 0) = delay_insn;
845 INSN_DELETED_P (delay_insn) = 0;
846 PREV_INSN (delay_insn) = PREV_INSN (seq_insn);
847
848 for (li = list; li; li = XEXP (li, 1), i++)
849 {
850 rtx tem = XEXP (li, 0);
851 rtx note;
852
853 /* Show that this copy of the insn isn't deleted. */
854 INSN_DELETED_P (tem) = 0;
855
856 XVECEXP (seq, 0, i) = tem;
857 PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
858 NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;
859
860 /* Remove any REG_DEAD notes because we can't rely on them now
861 that the insn has been moved. */
862 for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
863 if (REG_NOTE_KIND (note) == REG_DEAD)
864 XEXP (note, 0) = const0_rtx;
865 }
866
867 NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);
868
869 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
870 last insn in that SEQUENCE to point to us. Similarly, if the following
871 insn is a SEQUENCE, update the PREV_INSN pointer of its first insn. */
872
873 if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
874 && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
875 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
876 XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
877 = seq_insn;
878
879 if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
880 && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
881 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
882
883 /* If there used to be a BARRIER, put it back. */
884 if (had_barrier)
885 emit_barrier_after (seq_insn);
886
887 if (i != length + 1)
888 abort ();
889
890 return seq_insn;
891 }
892
893 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
894 be in the order in which the insns are to be executed. */
895
896 static rtx
897 add_to_delay_list (insn, delay_list)
898 rtx insn;
899 rtx delay_list;
900 {
901 /* If we have an empty list, just make a new list element. If
902 INSN has its block number recorded, clear it since we may
903 be moving the insn to a new block. */
904
905 if (delay_list == 0)
906 {
907 struct target_info *tinfo;
908
909 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
910 tinfo; tinfo = tinfo->next)
911 if (tinfo->uid == INSN_UID (insn))
912 break;
913
914 if (tinfo)
915 tinfo->block = -1;
916
917 return gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
918 }
919
920 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
921 list. */
922 XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));
923
924 return delay_list;
925 }
926 \f
927 /* Delete INSN from the delay slot of the insn that it is in. This may
928 produce an insn without anything in its delay slots. */
929
930 static void
931 delete_from_delay_slot (insn)
932 rtx insn;
933 {
934 rtx trial, seq_insn, seq, prev;
935 rtx delay_list = 0;
936 int i;
937
938 /* We first must find the insn containing the SEQUENCE with INSN in its
939 delay slot. Do this by finding an insn, TRIAL, where
940 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
941
942 for (trial = insn;
943 PREV_INSN (NEXT_INSN (trial)) == trial;
944 trial = NEXT_INSN (trial))
945 ;
946
947 seq_insn = PREV_INSN (NEXT_INSN (trial));
948 seq = PATTERN (seq_insn);
949
950 /* Create a delay list consisting of all the insns other than the one
951 we are deleting (unless we were the only one). */
952 if (XVECLEN (seq, 0) > 2)
953 for (i = 1; i < XVECLEN (seq, 0); i++)
954 if (XVECEXP (seq, 0, i) != insn)
955 delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);
956
957 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
958 list, and rebuild the delay list if non-empty. */
959 prev = PREV_INSN (seq_insn);
960 trial = XVECEXP (seq, 0, 0);
961 delete_insn (seq_insn);
962 add_insn_after (trial, prev);
963
964 if (GET_CODE (trial) == JUMP_INSN
965 && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
966 emit_barrier_after (trial);
967
968 /* If there are any delay insns, re-emit them. Otherwise clear the
969 annul flag. */
970 if (delay_list)
971 trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2, 0);
972 else
973 INSN_ANNULLED_BRANCH_P (trial) = 0;
974
975 INSN_FROM_TARGET_P (insn) = 0;
976
977 /* Show we need to fill this insn again. */
978 obstack_ptr_grow (&unfilled_slots_obstack, trial);
979 }
980 \f
981 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
982 the insn that sets CC0 for it and delete it too. */
983
984 static void
985 delete_scheduled_jump (insn)
986 rtx insn;
987 {
988 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
989 delete the insn that sets the condition code, but it is hard to find it.
990 Since this case is rare anyway, don't bother trying; there would likely
991 be other insns that became dead anyway, which we wouldn't know to
992 delete. */
993
994 #ifdef HAVE_cc0
995 if (reg_mentioned_p (cc0_rtx, insn))
996 {
997 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
998
999 /* If a reg-note was found, it points to an insn to set CC0. This
1000 insn is in the delay list of some other insn. So delete it from
1001 the delay list it was in. */
1002 if (note)
1003 {
1004 if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
1005 && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
1006 delete_from_delay_slot (XEXP (note, 0));
1007 }
1008 else
1009 {
1010 /* The insn setting CC0 is our previous insn, but it may be in
1011 a delay slot. It will be the last insn in the delay slot, if
1012 it is. */
1013 rtx trial = previous_insn (insn);
1014 if (GET_CODE (trial) == NOTE)
1015 trial = prev_nonnote_insn (trial);
1016 if (sets_cc0_p (PATTERN (trial)) != 1
1017 || FIND_REG_INC_NOTE (trial, 0))
1018 return;
1019 if (PREV_INSN (NEXT_INSN (trial)) == trial)
1020 delete_insn (trial);
1021 else
1022 delete_from_delay_slot (trial);
1023 }
1024 }
1025 #endif
1026
1027 delete_insn (insn);
1028 }
1029 \f
1030 /* Counters for delay-slot filling. */
1031
1032 #define NUM_REORG_FUNCTIONS 2
1033 #define MAX_DELAY_HISTOGRAM 3
1034 #define MAX_REORG_PASSES 2
1035
1036 static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];
1037
1038 static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];
1039
1040 static int reorg_pass_number;
1041
1042 static void
1043 note_delay_statistics (slots_filled, index)
1044 int slots_filled, index;
1045 {
1046 num_insns_needing_delays[index][reorg_pass_number]++;
1047 if (slots_filled > MAX_DELAY_HISTOGRAM)
1048 slots_filled = MAX_DELAY_HISTOGRAM;
1049 num_filled_delays[index][slots_filled][reorg_pass_number]++;
1050 }
1051 \f
1052 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
1053
1054 /* Optimize the following cases:
1055
1056 1. When a conditional branch skips over only one instruction,
1057 use an annulling branch and put that insn in the delay slot.
1058 Use either a branch that annuls when the condition is true, or
1059 invert the test and use a branch that annuls when the condition is
1060 false. This saves insns, since otherwise we must copy an insn
1061 from the L1 target.
1062
1063 (orig) (skip) (otherwise)
1064 Bcc.n L1 Bcc',a L1 Bcc,a L1'
1065 insn insn insn2
1066 L1: L1: L1:
1067 insn2 insn2 insn2
1068 insn3 insn3 L1':
1069 insn3
1070
1071 2. When a conditional branch skips over only one instruction,
1072 and after that, it unconditionally branches somewhere else,
1073 perform a similar optimization. This saves executing the
1074 second branch in the case where the inverted condition is true.
1075
1076 Bcc.n L1 Bcc',a L2
1077 insn insn
1078 L1: L1:
1079 Bra L2 Bra L2
1080
1081 INSN is a JUMP_INSN.
1082
1083 This should be expanded to skip over N insns, where N is the number
1084 of delay slots required. */
1085
1086 static rtx
1087 optimize_skip (insn)
1088 register rtx insn;
1089 {
1090 register rtx trial = next_nonnote_insn (insn);
1091 rtx next_trial = next_active_insn (trial);
1092 rtx delay_list = 0;
1093 rtx target_label;
1094 int flags;
1095
1096 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1097
1098 if (trial == 0
1099 || GET_CODE (trial) != INSN
1100 || GET_CODE (PATTERN (trial)) == SEQUENCE
1101 || recog_memoized (trial) < 0
1102 || (! eligible_for_annul_false (insn, 0, trial, flags)
1103 && ! eligible_for_annul_true (insn, 0, trial, flags)))
1104 return 0;
1105
1106 /* There are two cases where we are just executing one insn (we assume
1107 here that a branch requires only one insn; this should be generalized
1108 at some point): Where the branch goes around a single insn or where
1109 we have one insn followed by a branch to the same label we branch to.
1110 In both of these cases, inverting the jump and annulling the delay
1111 slot give the same effect in fewer insns. */
1112 if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
1113 || (next_trial != 0
1114 && GET_CODE (next_trial) == JUMP_INSN
1115 && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
1116 && (simplejump_p (next_trial)
1117 || GET_CODE (PATTERN (next_trial)) == RETURN)))
1118 {
1119 if (eligible_for_annul_false (insn, 0, trial, flags))
1120 {
1121 if (invert_jump (insn, JUMP_LABEL (insn)))
1122 INSN_FROM_TARGET_P (trial) = 1;
1123 else if (! eligible_for_annul_true (insn, 0, trial, flags))
1124 return 0;
1125 }
1126
1127 delay_list = add_to_delay_list (trial, NULL_RTX);
1128 next_trial = next_active_insn (trial);
1129 update_block (trial, trial);
1130 delete_insn (trial);
1131
1132 /* Also, if we are targeting an unconditional
1133 branch, thread our jump to the target of that branch. Don't
1134 change this into a RETURN here, because it may not accept what
1135 we have in the delay slot. We'll fix this up later. */
1136 if (next_trial && GET_CODE (next_trial) == JUMP_INSN
1137 && (simplejump_p (next_trial)
1138 || GET_CODE (PATTERN (next_trial)) == RETURN))
1139 {
1140 target_label = JUMP_LABEL (next_trial);
1141 if (target_label == 0)
1142 target_label = find_end_label ();
1143 reorg_redirect_jump (insn, target_label);
1144 }
1145
1146 INSN_ANNULLED_BRANCH_P (insn) = 1;
1147 }
1148
1149 return delay_list;
1150 }
1151 #endif
1152 \f
1153
1154 /* Encode and return branch direction and prediction information for
1155 INSN assuming it will jump to LABEL.
1156
1157 Non-conditional branches return no direction information and
1158 are predicted as very likely taken. */
1159 static int
1160 get_jump_flags (insn, label)
1161 rtx insn, label;
1162 {
1163 int flags;
1164
1165 /* get_jump_flags can be passed any insn with delay slots; these may
1166 be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
1167 direction information, and only if they are conditional jumps.
1168
1169 If LABEL is zero, then there is no way to determine the branch
1170 direction. */
1171 if (GET_CODE (insn) == JUMP_INSN
1172 && condjump_p (insn)
1173 && INSN_UID (insn) <= max_uid
1174 && label != 0
1175 && INSN_UID (label) <= max_uid)
1176 flags
1177 = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
1178 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
1179 /* No valid direction information. */
1180 else
1181 flags = 0;
1182
1183 /* If INSN is a conditional branch, call mostly_true_jump to determine
1184 the branch prediction.
1185
1186 Non-conditional branches are predicted as very likely taken. */
1187 if (GET_CODE (insn) == JUMP_INSN
1188 && condjump_p (insn))
1189 {
1190 int prediction;
1191
1192 prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
1193 switch (prediction)
1194 {
1195 case 2:
1196 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
1197 break;
1198 case 1:
1199 flags |= ATTR_FLAG_likely;
1200 break;
1201 case 0:
1202 flags |= ATTR_FLAG_unlikely;
1203 break;
1204 case -1:
1205 flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
1206 break;
1207
1208 default:
1209 abort();
1210 }
1211 }
1212 else
1213 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
1214
1215 return flags;
1216 }
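
/* The flags computed above are passed to eligible_for_delay and the
   eligible_for_annul_* tests, which are generated from the define_delay
   constructs in the machine description. The callers below follow the
   pattern

	flags = get_jump_flags (insn, JUMP_LABEL (insn));
	if (eligible_for_delay (insn, slots_filled, trial, flags))
	  ...

   so a port can make delay-slot eligibility depend on the predicted
   direction and likelihood of the branch. */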
1217
1218 /* Return 1 if DEST is a destination that will be branched to rarely (the
1219 return point of a function); return 2 if DEST will be branched to very
1220 rarely (a call to a function that doesn't return). Otherwise,
1221 return 0. */
1222
1223 static int
1224 rare_destination (insn)
1225 rtx insn;
1226 {
1227 int jump_count = 0;
1228
1229 for (; insn; insn = NEXT_INSN (insn))
1230 {
1231 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
1232 insn = XVECEXP (PATTERN (insn), 0, 0);
1233
1234 switch (GET_CODE (insn))
1235 {
1236 case CODE_LABEL:
1237 return 0;
1238 case BARRIER:
1239 /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN. We
1240 don't scan past JUMP_INSNs, so any barrier we find here must
1241 have been after a CALL_INSN and hence mean the call doesn't
1242 return. */
1243 return 2;
1244 case JUMP_INSN:
1245 if (GET_CODE (PATTERN (insn)) == RETURN)
1246 return 1;
1247 else if (simplejump_p (insn)
1248 && jump_count++ < 10)
1249 insn = JUMP_LABEL (insn);
1250 else
1251 return 0;
1252 }
1253 }
1254
1255 /* If we got here it means we hit the end of the function. So this
1256 is an unlikely destination. */
1257
1258 return 1;
1259 }
1260
1261 /* Return truth value of the statement that this branch
1262 is mostly taken. If we think that the branch is extremely likely
1263 to be taken, we return 2. If the branch is slightly more likely to be
1264 taken, return 1. If the branch is slightly less likely to be taken,
1265 return 0 and if the branch is highly unlikely to be taken, return -1.
1266
1267 CONDITION, if non-zero, is the condition that JUMP_INSN is testing. */
1268
1269 static int
1270 mostly_true_jump (jump_insn, condition)
1271 rtx jump_insn, condition;
1272 {
1273 rtx target_label = JUMP_LABEL (jump_insn);
1274 rtx insn;
1275 int rare_dest = rare_destination (target_label);
1276 int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
1277
1278 /* If this is a branch out of a loop, it is highly unlikely. */
1279 if (GET_CODE (PATTERN (jump_insn)) == SET
1280 && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
1281 && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
1282 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
1283 || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
1284 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
1285 return -1;
1286
1287 if (target_label)
1288 {
1289 /* If this is the test of a loop, it is very likely true. We scan
1290 backwards from the target label. If we find a NOTE_INSN_LOOP_BEG
1291 before the next real insn, we assume the branch is to the top of
1292 the loop. */
1293 for (insn = PREV_INSN (target_label);
1294 insn && GET_CODE (insn) == NOTE;
1295 insn = PREV_INSN (insn))
1296 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1297 return 2;
1298
1299 /* If this is a jump to the test of a loop, it is likely true. We scan
1300 forwards from the target label. If we find a NOTE_INSN_LOOP_VTOP
1301 before the next real insn, we assume the branch is to the loop branch
1302 test. */
1303 for (insn = NEXT_INSN (target_label);
1304 insn && GET_CODE (insn) == NOTE;
1305 insn = NEXT_INSN (insn))
1306 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
1307 return 1;
1308 }
1309
1310 /* Look at the relative rarities of the fallthrough and destination. If
1311 they differ, we can predict the branch that way. */
1312
1313 switch (rare_fallthrough - rare_dest)
1314 {
1315 case -2:
1316 return -1;
1317 case -1:
1318 return 0;
1319 case 0:
1320 break;
1321 case 1:
1322 return 1;
1323 case 2:
1324 return 2;
1325 }
1326
1327 /* If we couldn't figure out what this jump was, assume it won't be
1328 taken. This should be rare. */
1329 if (condition == 0)
1330 return 0;
1331
1332 /* EQ tests are usually false and NE tests are usually true. Also,
1333 most quantities are positive, so we can make the appropriate guesses
1334 about signed comparisons against zero. */
1335 switch (GET_CODE (condition))
1336 {
1337 case CONST_INT:
1338 /* Unconditional branch. */
1339 return 1;
1340 case EQ:
1341 return 0;
1342 case NE:
1343 return 1;
1344 case LE:
1345 case LT:
1346 if (XEXP (condition, 1) == const0_rtx)
1347 return 0;
1348 break;
1349 case GE:
1350 case GT:
1351 if (XEXP (condition, 1) == const0_rtx)
1352 return 1;
1353 break;
1354 }
1355
1356 /* Predict that backward branches are usually taken and forward branches
1357 usually are not. If we don't know whether this is forward or backward,
1358 assume the branch will be taken, since most are. */
1359 return (target_label == 0 || INSN_UID (jump_insn) > max_uid
1360 || INSN_UID (target_label) > max_uid
1361 || (uid_to_ruid[INSN_UID (jump_insn)]
1362 > uid_to_ruid[INSN_UID (target_label)]));
1363 }
1364
1365 /* Return the condition under which INSN will branch to TARGET. If TARGET
1366 is zero, return the condition under which INSN will return. If INSN is
1367 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1368 type of jump, or it doesn't go to TARGET, return 0. */
1369
1370 static rtx
1371 get_branch_condition (insn, target)
1372 rtx insn;
1373 rtx target;
1374 {
1375 rtx pat = PATTERN (insn);
1376 rtx src;
1377
1378 if (GET_CODE (pat) == RETURN)
1379 return target == 0 ? const_true_rtx : 0;
1380
1381 else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
1382 return 0;
1383
1384 src = SET_SRC (pat);
1385 if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
1386 return const_true_rtx;
1387
1388 else if (GET_CODE (src) == IF_THEN_ELSE
1389 && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
1390 || (GET_CODE (XEXP (src, 1)) == LABEL_REF
1391 && XEXP (XEXP (src, 1), 0) == target))
1392 && XEXP (src, 2) == pc_rtx)
1393 return XEXP (src, 0);
1394
1395 else if (GET_CODE (src) == IF_THEN_ELSE
1396 && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
1397 || (GET_CODE (XEXP (src, 2)) == LABEL_REF
1398 && XEXP (XEXP (src, 2), 0) == target))
1399 && XEXP (src, 1) == pc_rtx)
1400 return gen_rtx (reverse_condition (GET_CODE (XEXP (src, 0))),
1401 GET_MODE (XEXP (src, 0)),
1402 XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));
1403
1404 return 0;
1405 }
1406
1407 /* Return non-zero if CONDITION is more strict than the condition of
1408 INSN, i.e., if INSN will always branch if CONDITION is true. */
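
/* For example, if CONDITION is (eq x y) and INSN branches on (ge x y), the
   branch is certain to be taken whenever CONDITION holds, so CONDITION
   dominates. */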
1409
1410 static int
1411 condition_dominates_p (condition, insn)
1412 rtx condition;
1413 rtx insn;
1414 {
1415 rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
1416 enum rtx_code code = GET_CODE (condition);
1417 enum rtx_code other_code;
1418
1419 if (rtx_equal_p (condition, other_condition)
1420 || other_condition == const_true_rtx)
1421 return 1;
1422
1423 else if (condition == const_true_rtx || other_condition == 0)
1424 return 0;
1425
1426 other_code = GET_CODE (other_condition);
1427 if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
1428 || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
1429 || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
1430 return 0;
1431
1432 return comparison_dominates_p (code, other_code);
1433 }
1434
1435 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1436 any insns already in the delay slot of JUMP. */
1437
1438 static int
1439 redirect_with_delay_slots_safe_p (jump, newlabel, seq)
1440 rtx jump, newlabel, seq;
1441 {
1442 int flags, slots, i;
1443 rtx pat = PATTERN (seq);
1444
1445 /* Make sure all the delay slots of this jump would still
1446 be valid after threading the jump. If they are still
1447 valid, then return non-zero. */
1448
1449 flags = get_jump_flags (jump, newlabel);
1450 for (i = 1; i < XVECLEN (pat, 0); i++)
1451 if (! (
1452 #ifdef ANNUL_IFFALSE_SLOTS
1453 (INSN_ANNULLED_BRANCH_P (jump)
1454 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1455 ? eligible_for_annul_false (jump, i - 1,
1456 XVECEXP (pat, 0, i), flags) :
1457 #endif
1458 #ifdef ANNUL_IFTRUE_SLOTS
1459 (INSN_ANNULLED_BRANCH_P (jump)
1460 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1461 ? eligible_for_annul_true (jump, i - 1,
1462 XVECEXP (pat, 0, i), flags) :
1463 #endif
1464 eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
1465 break;
1466
1467 return (i == XVECLEN (pat, 0));
1468 }
1469
1470 \f
1471 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1472 the condition tested by INSN is CONDITION and the resources shown in
1473 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1474 from SEQ's delay list, in addition to whatever insns it may execute
1475 (in DELAY_LIST). SETS and NEEDED denote resources already set and
1476 needed while searching for delay slot insns. Return the concatenated
1477 delay list if possible, otherwise, return 0.
1478
1479 SLOTS_TO_FILL is the total number of slots required by INSN, and
1480 PSLOTS_FILLED points to the number filled so far (also the number of
1481 insns in DELAY_LIST). It is updated with the number that have been
1482 filled from the SEQUENCE, if any.
1483
1484 PANNUL_P points to a non-zero value if we already know that we need
1485 to annul INSN. If this routine determines that annulling is needed,
1486 it may set that value non-zero.
1487
1488 PNEW_THREAD points to a location that is to receive the place at which
1489 execution should continue. */
1490
1491 static rtx
1492 steal_delay_list_from_target (insn, condition, seq, delay_list,
1493 sets, needed, other_needed,
1494 slots_to_fill, pslots_filled, pannul_p,
1495 pnew_thread)
1496 rtx insn, condition;
1497 rtx seq;
1498 rtx delay_list;
1499 struct resources *sets, *needed, *other_needed;
1500 int slots_to_fill;
1501 int *pslots_filled;
1502 int *pannul_p;
1503 rtx *pnew_thread;
1504 {
1505 rtx temp;
1506 int slots_remaining = slots_to_fill - *pslots_filled;
1507 int total_slots_filled = *pslots_filled;
1508 rtx new_delay_list = 0;
1509 int must_annul = *pannul_p;
1510 int i;
1511
1512 /* We can't do anything if there are more delay slots in SEQ than we
1513 can handle, or if we don't know that it will be a taken branch.
1514
1515 We know that it will be a taken branch if it is either an unconditional
1516 branch or a conditional branch with a stricter branch condition. */
1517
1518 if (XVECLEN (seq, 0) - 1 > slots_remaining
1519 || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0)))
1520 return delay_list;
1521
1522 for (i = 1; i < XVECLEN (seq, 0); i++)
1523 {
1524 rtx trial = XVECEXP (seq, 0, i);
1525 int flags;
1526
1527 if (insn_references_resource_p (trial, sets, 0)
1528 || insn_sets_resource_p (trial, needed, 0)
1529 || insn_sets_resource_p (trial, sets, 0)
1530 #ifdef HAVE_cc0
1531 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1532 delay list. */
1533 || find_reg_note (trial, REG_CC_USER, NULL_RTX)
1534 #endif
1535 /* If TRIAL is from the fallthrough code of an annulled branch insn
1536 in SEQ, we cannot use it. */
1537 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
1538 && ! INSN_FROM_TARGET_P (trial)))
1539 return delay_list;
1540
1541 /* If this insn was already done (usually in a previous delay slot),
1542 pretend we put it in our delay slot. */
1543 if (redundant_insn_p (trial, insn, new_delay_list))
1544 continue;
1545
1546 /* We will end up re-vectoring this branch, so compute flags
1547 based on jumping to the new label. */
1548 flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));
1549
1550 if (! must_annul
1551 && ((condition == const_true_rtx
1552 || (! insn_sets_resource_p (trial, other_needed, 0)
1553 && ! may_trap_p (PATTERN (trial)))))
1554 ? eligible_for_delay (insn, total_slots_filled, trial, flags)
1555 : (must_annul = 1,
1556 eligible_for_annul_false (insn, total_slots_filled, trial, flags)))
1557 {
1558 temp = copy_rtx (trial);
1559 INSN_FROM_TARGET_P (temp) = 1;
1560 new_delay_list = add_to_delay_list (temp, new_delay_list);
1561 total_slots_filled++;
1562
1563 if (--slots_remaining == 0)
1564 break;
1565 }
1566 else
1567 return delay_list;
1568 }
1569
1570 /* Show the place to which we will be branching. */
1571 *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
1572
1573 /* Add any new insns to the delay list and update the count of the
1574 number of slots filled. */
1575 *pslots_filled = total_slots_filled;
1576 *pannul_p = must_annul;
1577
1578 if (delay_list == 0)
1579 return new_delay_list;
1580
1581 for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
1582 delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);
1583
1584 return delay_list;
1585 }
1586 \f
1587 /* Similar to steal_delay_list_from_target except that SEQ is on the
1588 fallthrough path of INSN. Here we only do something if the delay insn
1589 of SEQ is an unconditional branch. In that case we steal its delay slot
1590 for INSN since unconditional branches are much easier to fill. */
1591
1592 static rtx
1593 steal_delay_list_from_fallthrough (insn, condition, seq,
1594 delay_list, sets, needed, other_needed,
1595 slots_to_fill, pslots_filled, pannul_p)
1596 rtx insn, condition;
1597 rtx seq;
1598 rtx delay_list;
1599 struct resources *sets, *needed, *other_needed;
1600 int slots_to_fill;
1601 int *pslots_filled;
1602 int *pannul_p;
1603 {
1604 int i;
1605 int flags;
1606
1607 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1608
1609 /* We can't do anything if SEQ's delay insn isn't an
1610 unconditional branch. */
1611
1612 if (! simplejump_p (XVECEXP (seq, 0, 0))
1613 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
1614 return delay_list;
1615
1616 for (i = 1; i < XVECLEN (seq, 0); i++)
1617 {
1618 rtx trial = XVECEXP (seq, 0, i);
1619
1620 /* If TRIAL sets CC0, stealing it will move it too far from the use
1621 of CC0. */
1622 if (insn_references_resource_p (trial, sets, 0)
1623 || insn_sets_resource_p (trial, needed, 0)
1624 || insn_sets_resource_p (trial, sets, 0)
1625 #ifdef HAVE_cc0
1626 || sets_cc0_p (PATTERN (trial))
1627 #endif
1628 )
1629
1630 break;
1631
1632 /* If this insn was already done, we don't need it. */
1633 if (redundant_insn_p (trial, insn, delay_list))
1634 {
1635 delete_from_delay_slot (trial);
1636 continue;
1637 }
1638
1639 if (! *pannul_p
1640 && ((condition == const_true_rtx
1641 || (! insn_sets_resource_p (trial, other_needed, 0)
1642 && ! may_trap_p (PATTERN (trial)))))
1643 ? eligible_for_delay (insn, *pslots_filled, trial, flags)
1644 : (*pannul_p = 1,
1645 eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
1646 {
1647 delete_from_delay_slot (trial);
1648 delay_list = add_to_delay_list (trial, delay_list);
1649
1650 if (++(*pslots_filled) == slots_to_fill)
1651 break;
1652 }
1653 else
1654 break;
1655 }
1656
1657 return delay_list;
1658 }
1659 \f
1660 /* Try merging insns starting at THREAD which match exactly the insns in
1661 INSN's delay list.
1662
1663 If all insns were matched and the insn was previously annulling, the
1664 annul bit will be cleared.
1665
1666 For each insn that is merged, if the branch is or will be non-annulling,
1667 we delete the merged insn. */
1668
1669 static void
1670 try_merge_delay_insns (insn, thread)
1671 rtx insn, thread;
1672 {
1673 rtx trial, next_trial;
1674 rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
1675 int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
1676 int slot_number = 1;
1677 int num_slots = XVECLEN (PATTERN (insn), 0);
1678 rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1679 struct resources set, needed;
1680 rtx merged_insns = 0;
1681 int i;
1682 int flags;
1683
1684 flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
1685
1686 CLEAR_RESOURCE (&needed);
1687 CLEAR_RESOURCE (&set);
1688
1689 /* If this is not an annulling branch, take into account anything needed in
1690 NEXT_TO_MATCH. This prevents two increments from being incorrectly
1691 folded into one. If we are annulling, this would be the correct
1692 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH,
1693 would essentially disable this optimization. This method is somewhat of
1694 a kludge, but I don't see a better way.) */
1695 if (! annul_p)
1696 mark_referenced_resources (next_to_match, &needed, 1);
1697
1698 for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
1699 {
1700 rtx pat = PATTERN (trial);
1701
1702 next_trial = next_nonnote_insn (trial);
1703
1704 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1705 if (GET_CODE (trial) == INSN
1706 && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
1707 continue;
1708
1709 if (GET_CODE (next_to_match) == GET_CODE (trial)
1710 #ifdef HAVE_cc0
1711 /* We can't share an insn that sets cc0. */
1712 && ! sets_cc0_p (pat)
1713 #endif
1714 && ! insn_references_resource_p (trial, &set, 1)
1715 && ! insn_sets_resource_p (trial, &set, 1)
1716 && ! insn_sets_resource_p (trial, &needed, 1)
1717 && (trial = try_split (pat, trial, 0)) != 0
1718 && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
1719 /* We have to test this condition in case the annul condition is
1720 different from (and less restrictive than) the non-annulling one. */
1721 && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
1722 {
1723 next_trial = next_nonnote_insn (trial);
1724
1725 if (! annul_p)
1726 {
1727 update_block (trial, thread);
1728 delete_insn (trial);
1729 INSN_FROM_TARGET_P (next_to_match) = 0;
1730 }
1731 else
1732 merged_insns = gen_rtx (INSN_LIST, VOIDmode, trial, merged_insns);
1733
1734 if (++slot_number == num_slots)
1735 break;
1736
1737 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1738 if (! annul_p)
1739 mark_referenced_resources (next_to_match, &needed, 1);
1740 }
1741
1742 mark_set_resources (trial, &set, 0, 1);
1743 mark_referenced_resources (trial, &needed, 1);
1744 }
1745
1746 /* See if we stopped on a filled insn. If we did, try to see if its
1747 delay slots match. */
1748 if (slot_number != num_slots
1749 && trial && GET_CODE (trial) == INSN
1750 && GET_CODE (PATTERN (trial)) == SEQUENCE
1751 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
1752 {
1753 rtx pat = PATTERN (trial);
1754
1755 for (i = 1; i < XVECLEN (pat, 0); i++)
1756 {
1757 rtx dtrial = XVECEXP (pat, 0, i);
1758
1759 if (! insn_references_resource_p (dtrial, &set, 1)
1760 && ! insn_sets_resource_p (dtrial, &set, 1)
1761 && ! insn_sets_resource_p (dtrial, &needed, 1)
1762 #ifdef HAVE_cc0
1763 && ! sets_cc0_p (PATTERN (dtrial))
1764 #endif
1765 && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
1766 && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
1767 {
1768 if (! annul_p)
1769 {
1770 update_block (dtrial, thread);
1771 delete_from_delay_slot (dtrial);
1772 INSN_FROM_TARGET_P (next_to_match) = 0;
1773 }
1774 else
1775 merged_insns = gen_rtx (INSN_LIST, SImode, dtrial,
1776 merged_insns);
1777
1778 if (++slot_number == num_slots)
1779 break;
1780
1781 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1782 }
1783 }
1784 }
1785
1786 /* If all insns in the delay slot have been matched and we were previously
1787 annulling the branch, we need not do so any more. In that case delete all
1788 the merged insns. Also clear the INSN_FROM_TARGET_P bit of each insn in
1789 the delay list so that we know that it isn't only being used at the
1790 target. */
1791 if (next_to_match == 0 && annul_p)
1792 {
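      /* The mode of each INSN_LIST node in MERGED_INSNS records where the
	 merged insn lives: SImode means it sits in another filled insn's
	 delay slots (remove it with delete_from_delay_slot), while VOIDmode
	 means it is an ordinary insn in the stream (use delete_insn).  */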
1793 for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
1794 {
1795 if (GET_MODE (merged_insns) == SImode)
1796 {
1797 update_block (XEXP (merged_insns, 0), thread);
1798 delete_from_delay_slot (XEXP (merged_insns, 0));
1799 }
1800 else
1801 {
1802 update_block (XEXP (merged_insns, 0), thread);
1803 delete_insn (XEXP (merged_insns, 0));
1804 }
1805 }
1806
1807 INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
1808
1809 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1810 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
1811 }
1812 }
1813 \f
1814 /* See if INSN is redundant with an insn in front of TARGET. Often this
1815 is called when INSN is a candidate for a delay slot of TARGET.
1816 DELAY_LIST is a list of insns that will be placed in delay slots of TARGET
1817 in front of INSN. Often INSN will be redundant with an insn in a delay slot of
1818 some previous insn. This happens when we have a series of branches to the
1819 same label; in that case the first insn at the target might want to go
1820 into each of the delay slots.
1821
1822 If we are not careful, this routine can take up a significant fraction
1823 of the total compilation time (4%), but only wins rarely. Hence we
1824 speed this routine up by making two passes. The first pass goes back
1825 until it hits a label, looking for an insn with an identical
1826 pattern. Only in this (relatively rare) event does it check for
1827 data conflicts.
1828
1829 We do not split insns we encounter. This could cause us not to find a
1830 redundant insn, but the cost of splitting seems greater than the possible
1831 gain in rare cases. */
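/* Concretely, the first pass below walks backwards from TARGET to the
   nearest CODE_LABEL looking only for an insn (or filled-sequence element)
   whose pattern is rtx_equal_p to INSN's, with no resource tracking at all.
   Only when such a match exists do we compute what INSN sets and needs and
   walk backwards again, this time giving up at the first conflict.  */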
1832
1833 static int
1834 redundant_insn_p (insn, target, delay_list)
1835 rtx insn;
1836 rtx target;
1837 rtx delay_list;
1838 {
1839 rtx target_main = target;
1840 rtx ipat = PATTERN (insn);
1841 rtx trial, pat;
1842 struct resources needed, set;
1843 int i;
1844
1845 /* Scan backwards looking for a match. */
1846 for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
1847 {
1848 if (GET_CODE (trial) == CODE_LABEL)
1849 return 0;
1850
1851 if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
1852 continue;
1853
1854 pat = PATTERN (trial);
1855 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1856 continue;
1857
1858 if (GET_CODE (pat) == SEQUENCE)
1859 {
1860 /* Stop for a CALL and its delay slots because it is difficult to
1861 track its resource needs correctly. */
1862 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1863 return 0;
1864
1865 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1866 slots because it is difficult to track its resource needs
1867 correctly. */
1868
1869 #ifdef INSN_SETS_ARE_DELAYED
1870 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1871 return 0;
1872 #endif
1873
1874 #ifdef INSN_REFERENCES_ARE_DELAYED
1875 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1876 return 0;
1877 #endif
1878
1879 /* See if any of the insns in the delay slots have the same code and
1880 pattern as INSN; resource tracking is deferred to the second pass below. */
1881 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1882 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
1883 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat))
1884 break;
1885
1886 /* If found a match, exit this loop early. */
1887 if (i > 0)
1888 break;
1889 }
1890
1891 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat))
1892 break;
1893 }
1894
1895 /* If we didn't find an insn that matches, return 0. */
1896 if (trial == 0)
1897 return 0;
1898
1899 /* See what resources this insn sets and needs. If they overlap, or
1900 if this insn references CC0, it can't be redundant. */
1901
1902 CLEAR_RESOURCE (&needed);
1903 CLEAR_RESOURCE (&set);
1904 mark_set_resources (insn, &set, 0, 1);
1905 mark_referenced_resources (insn, &needed, 1);
1906
1907 /* If TARGET is a SEQUENCE, get the main insn. */
1908 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1909 target_main = XVECEXP (PATTERN (target), 0, 0);
1910
1911 if (resource_conflicts_p (&needed, &set)
1912 #ifdef HAVE_cc0
1913 || reg_mentioned_p (cc0_rtx, ipat)
1914 #endif
1915 /* The insn requiring the delay may not set anything needed or set by
1916 INSN. */
1917 || insn_sets_resource_p (target_main, &needed, 1)
1918 || insn_sets_resource_p (target_main, &set, 1))
1919 return 0;
1920
1921 /* Insns we pass may not set either NEEDED or SET, so merge them for
1922 simpler tests. */
1923 needed.memory |= set.memory;
1924 IOR_HARD_REG_SET (needed.regs, set.regs);
1925
1926 /* This insn isn't redundant if it conflicts with an insn that either is
1927 or will be in a delay slot of TARGET. */
1928
1929 while (delay_list)
1930 {
1931 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
1932 return 0;
1933 delay_list = XEXP (delay_list, 1);
1934 }
1935
1936 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1937 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
1938 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
1939 return 0;
1940
1941 /* Scan backwards until we reach a label or an insn that uses something
1942 INSN sets or sets something INSN uses or sets. */
1943
1944 for (trial = PREV_INSN (target);
1945 trial && GET_CODE (trial) != CODE_LABEL;
1946 trial = PREV_INSN (trial))
1947 {
1948 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
1949 && GET_CODE (trial) != JUMP_INSN)
1950 continue;
1951
1952 pat = PATTERN (trial);
1953 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1954 continue;
1955
1956 if (GET_CODE (pat) == SEQUENCE)
1957 {
1958 /* If this is a CALL_INSN and its delay slots, it is hard to track
1959 the resource needs properly, so give up. */
1960 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1961 return 0;
1962
1963 /* If this is an INSN or JUMP_INSN with delayed effects, it
1964 is hard to track the resource needs properly, so give up. */
1965
1966 #ifdef INSN_SETS_ARE_DELAYED
1967 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1968 return 0;
1969 #endif
1970
1971 #ifdef INSN_REFERENCES_ARE_DELAYED
1972 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1973 return 0;
1974 #endif
1975
1976 /* See if any of the insns in the delay slot match, updating
1977 resource requirements as we go. */
1978 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1979 {
1980 rtx candidate = XVECEXP (pat, 0, i);
1981
1982 /* If an insn will be annulled if the branch is false, it isn't
1983 considered as a possible duplicate insn. */
1984 if (rtx_equal_p (PATTERN (candidate), ipat)
1985 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1986 && INSN_FROM_TARGET_P (candidate)))
1987 {
1988 /* Show that this insn will be used in the sequel. */
1989 INSN_FROM_TARGET_P (candidate) = 0;
1990 return 1;
1991 }
1992
1993 /* Unless this is an annulled insn from the target of a branch,
1994 we must stop if it sets anything needed or set by INSN. */
1995 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1996 || ! INSN_FROM_TARGET_P (candidate))
1997 && insn_sets_resource_p (candidate, &needed, 1))
1998 return 0;
1999 }
2000
2001
2002 /* If the insn requiring the delay slot conflicts with INSN, we
2003 must stop. */
2004 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
2005 return 0;
2006 }
2007 else
2008 {
2009 /* See if TRIAL is the same as INSN. */
2010 pat = PATTERN (trial);
2011 if (rtx_equal_p (pat, ipat))
2012 return 1;
2013
2014 /* Can't go any further if TRIAL conflicts with INSN. */
2015 if (insn_sets_resource_p (trial, &needed, 1))
2016 return 0;
2017 }
2018 }
2019
2020 return 0;
2021 }
2022 \f
2023 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
2024 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
2025 is non-zero, we are allowed to fall into this thread; otherwise, we are
2026 not.
2027
2028 If LABEL is used more than once or we pass a label other than LABEL before
2029 finding an active insn, we do not own this thread. */
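/* "Owning" a thread means control can reach the insns starting at THREAD
   only through the branch being processed, so those insns may safely be
   deleted or rewritten once copies have been placed in delay slots.  */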
2030
2031 static int
2032 own_thread_p (thread, label, allow_fallthrough)
2033 rtx thread;
2034 rtx label;
2035 int allow_fallthrough;
2036 {
2037 rtx active_insn;
2038 rtx insn;
2039
2040 /* We don't own the function end. */
2041 if (thread == 0)
2042 return 0;
2043
2044 /* Get the first active insn, or THREAD, if it is an active insn. */
2045 active_insn = next_active_insn (PREV_INSN (thread));
2046
2047 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
2048 if (GET_CODE (insn) == CODE_LABEL
2049 && (insn != label || LABEL_NUSES (insn) != 1))
2050 return 0;
2051
2052 if (allow_fallthrough)
2053 return 1;
2054
2055 /* Ensure that we reach a BARRIER before any insn or label. */
2056 for (insn = prev_nonnote_insn (thread);
2057 insn == 0 || GET_CODE (insn) != BARRIER;
2058 insn = prev_nonnote_insn (insn))
2059 if (insn == 0
2060 || GET_CODE (insn) == CODE_LABEL
2061 || (GET_CODE (insn) == INSN
2062 && GET_CODE (PATTERN (insn)) != USE
2063 && GET_CODE (PATTERN (insn)) != CLOBBER))
2064 return 0;
2065
2066 return 1;
2067 }
2068 \f
2069 /* Find the number of the basic block that starts closest to INSN. Return -1
2070 if we couldn't find such a basic block. */
2071
2072 static int
2073 find_basic_block (insn)
2074 rtx insn;
2075 {
2076 int i;
2077
2078 /* Scan backwards to the previous BARRIER. Then see if we can find a
2079 label that starts a basic block. Return the basic block number. */
2080
2081 for (insn = prev_nonnote_insn (insn);
2082 insn && GET_CODE (insn) != BARRIER;
2083 insn = prev_nonnote_insn (insn))
2084 ;
2085
2086 /* The start of the function is basic block zero. */
2087 if (insn == 0)
2088 return 0;
2089
2090 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
2091 anything other than a CODE_LABEL or note, we can't find this code. */
2092 for (insn = next_nonnote_insn (insn);
2093 insn && GET_CODE (insn) == CODE_LABEL;
2094 insn = next_nonnote_insn (insn))
2095 {
2096 for (i = 0; i < n_basic_blocks; i++)
2097 if (insn == basic_block_head[i])
2098 return i;
2099 }
2100
2101 return -1;
2102 }
2103 \f
2104 /* Called when INSN is being moved from a location near the target of a jump.
2105 We leave a marker of the form (use (INSN)) immediately in front
2106 of WHERE for mark_target_live_regs. These markers will be deleted when
2107 reorg finishes.
2108
2109 We used to try to update the live status of registers if WHERE is at
2110 the start of a basic block, but that can't work since we may remove a
2111 BARRIER in relax_delay_slots. */
2112
2113 static void
2114 update_block (insn, where)
2115 rtx insn;
2116 rtx where;
2117 {
2118 int b;
2119
2120 /* Ignore if this was in a delay slot and it came from the target of
2121 a branch. */
2122 if (INSN_FROM_TARGET_P (insn))
2123 return;
2124
2125 emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);
2126
2127 /* INSN might be making a value live in a block where it didn't use to
2128 be. So recompute liveness information for this block. */
2129
2130 b = find_basic_block (insn);
2131 if (b != -1)
2132 bb_ticks[b]++;
2133 }
2134
2135 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
2136 the basic block containing the jump. */
2137
2138 static int
2139 reorg_redirect_jump (jump, nlabel)
2140 rtx jump;
2141 rtx nlabel;
2142 {
2143 int b = find_basic_block (jump);
2144
2145 if (b != -1)
2146 bb_ticks[b]++;
2147
2148 return redirect_jump (jump, nlabel);
2149 }
2150
2151 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
2152 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
2153 that reference values used in INSN. If we find one, then we move the
2154 REG_DEAD note to INSN.
2155
2156 This is needed to handle the case where a later insn (after INSN) has a
2157 REG_DEAD note for a register used by INSN, and this later insn subsequently
2158 gets moved before a CODE_LABEL because it is a redundant insn. In this
2159 case, mark_target_live_regs may be confused into thinking the register
2160 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
2161
2162 static void
2163 update_reg_dead_notes (insn, delayed_insn)
2164 rtx insn, delayed_insn;
2165 {
2166 rtx p, link, next;
2167
2168 for (p = next_nonnote_insn (insn); p != delayed_insn;
2169 p = next_nonnote_insn (p))
2170 for (link = REG_NOTES (p); link; link = next)
2171 {
2172 next = XEXP (link, 1);
2173
2174 if (REG_NOTE_KIND (link) != REG_DEAD
2175 || GET_CODE (XEXP (link, 0)) != REG)
2176 continue;
2177
2178 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
2179 {
2180 /* Move the REG_DEAD note from P to INSN. */
2181 remove_note (p, link);
2182 XEXP (link, 1) = REG_NOTES (insn);
2183 REG_NOTES (insn) = link;
2184 }
2185 }
2186 }
2187 \f
2188 /* Marks registers possibly live at the current place being scanned by
2189 mark_target_live_regs. Used only by the next two functions. */
2190
2191 static HARD_REG_SET current_live_regs;
2192
2193 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
2194 Also only used by the next two functions. */
2195
2196 static HARD_REG_SET pending_dead_regs;
2197
2198 /* Utility function called from mark_target_live_regs via note_stores.
2199 It deadens any CLOBBERed registers and livens any SET registers. */
2200
2201 static void
2202 update_live_status (dest, x)
2203 rtx dest;
2204 rtx x;
2205 {
2206 int first_regno, last_regno;
2207 int i;
2208
2209 if (GET_CODE (dest) != REG
2210 && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
2211 return;
2212
2213 if (GET_CODE (dest) == SUBREG)
2214 first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2215 else
2216 first_regno = REGNO (dest);
2217
2218 last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
2219
2220 if (GET_CODE (x) == CLOBBER)
2221 for (i = first_regno; i < last_regno; i++)
2222 CLEAR_HARD_REG_BIT (current_live_regs, i);
2223 else
2224 for (i = first_regno; i < last_regno; i++)
2225 {
2226 SET_HARD_REG_BIT (current_live_regs, i);
2227 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
2228 }
2229 }
2230
2231 /* Similar to next_insn, but ignores insns in the delay slots of
2232 an annulled branch. */
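/* Insns marked INSN_FROM_TARGET_P in the slots of an annulling branch are
   executed only when the branch is taken, so a sequential scan (such as the
   one in mark_target_live_regs) must not let them affect liveness tracking
   along the fall-through path.  */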
2233
2234 static rtx
2235 next_insn_no_annul (insn)
2236 rtx insn;
2237 {
2238 if (insn)
2239 {
2240 /* If INSN is an annulled branch, skip any insns from the target
2241 of the branch. */
2242 if (INSN_ANNULLED_BRANCH_P (insn)
2243 && NEXT_INSN (PREV_INSN (insn)) != insn)
2244 while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
2245 insn = NEXT_INSN (insn);
2246
2247 insn = NEXT_INSN (insn);
2248 if (insn && GET_CODE (insn) == INSN
2249 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2250 insn = XVECEXP (PATTERN (insn), 0, 0);
2251 }
2252
2253 return insn;
2254 }
2255 \f
2256 /* Set the resources that are live at TARGET.
2257
2258 If TARGET is zero, we refer to the end of the current function and can
2259 return our precomputed value.
2260
2261 Otherwise, we try to find out what is live by consulting the basic block
2262 information. This is tricky, because we must consider the actions of
2263 reload and jump optimization, which occur after the basic block information
2264 has been computed.
2265
2266 Accordingly, we proceed as follows:
2267
2268 We find the previous BARRIER and look at all immediately following labels
2269 (with no intervening active insns) to see if any of them start a basic
2270 block. If we hit the start of the function first, we use block 0.
2271
2272 Once we have found a basic block and the corresponding first insn, we can
2273 accurately compute the live status from basic_block_live_regs and
2274 reg_renumber. (By starting at a label following a BARRIER, we are immune
2275 to actions taken by reload and jump.) Then we scan all insns between
2276 that point and our target. For each CLOBBER (or for call-clobbered regs
2277 when we pass a CALL_INSN), mark the appropriate registers as dead. For
2278 a SET, mark them as live.
2279
2280 We have to be careful when using REG_DEAD notes because they are not
2281 updated by such things as find_equiv_reg. So keep track of registers
2282 marked as dead that haven't been assigned to, and mark them dead at the
2283 next CODE_LABEL since reload and jump won't propagate values across labels.
2284
2285 If we cannot find the start of a basic block (should be a very rare
2286 case, if it can happen at all), mark everything as potentially live.
2287
2288 Next, scan forward from TARGET looking for things set or clobbered
2289 before they are used. These are not live.
2290
2291 Because we can be called many times on the same target, save our results
2292 in a hash table indexed by INSN_UID. */
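/* Cached entries are chained from target_hash_table by INSN_UID and stamped
   with the bb_ticks count of their basic block; update_block and
   reorg_redirect_jump bump that count, which invalidates any cached
   liveness information for insns in the affected block.  */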
2293
2294 static void
2295 mark_target_live_regs (target, res)
2296 rtx target;
2297 struct resources *res;
2298 {
2299 int b = -1;
2300 int i;
2301 struct target_info *tinfo;
2302 rtx insn, next;
2303 rtx jump_insn = 0;
2304 rtx jump_target;
2305 HARD_REG_SET scratch;
2306 struct resources set, needed;
2307 int jump_count = 0;
2308
2309 /* Handle end of function. */
2310 if (target == 0)
2311 {
2312 *res = end_of_function_needs;
2313 return;
2314 }
2315
2316 /* We have to assume memory is needed, but the CC isn't. */
2317 res->memory = 1;
2318 res->volatil = 0;
2319 res->cc = 0;
2320
2321 /* See if we have computed this value already. */
2322 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2323 tinfo; tinfo = tinfo->next)
2324 if (tinfo->uid == INSN_UID (target))
2325 break;
2326
2327 /* Start by getting the basic block number. If we have saved information,
2328 we can get it from there unless the insn at the start of the basic block
2329 has been deleted. */
2330 if (tinfo && tinfo->block != -1
2331 && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
2332 b = tinfo->block;
2333
2334 if (b == -1)
2335 b = find_basic_block (target);
2336
2337 if (tinfo)
2338 {
2339 /* If the information is up-to-date, use it. Otherwise, we will
2340 update it below. */
2341 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
2342 {
2343 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
2344 return;
2345 }
2346 }
2347 else
2348 {
2349 /* Allocate a place to put our results and chain it into the
2350 hash table. */
2351 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
2352 tinfo->uid = INSN_UID (target);
2353 tinfo->block = b;
2354 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2355 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
2356 }
2357
2358 CLEAR_HARD_REG_SET (pending_dead_regs);
2359
2360 /* If we found a basic block, get the live registers from it and update
2361 them with anything set or killed between its start and the insn before
2362 TARGET. Otherwise, we must assume everything is live. */
2363 if (b != -1)
2364 {
2365 regset regs_live = basic_block_live_at_start[b];
2366 int offset, j;
2367 REGSET_ELT_TYPE bit;
2368 int regno;
2369 rtx start_insn, stop_insn;
2370
2371 /* Compute hard regs live at start of block -- this is the real hard regs
2372 marked live, plus live pseudo regs that have been renumbered to
2373 hard regs. */
2374
2375 #ifdef HARD_REG_SET
2376 current_live_regs = *regs_live;
2377 #else
2378 COPY_HARD_REG_SET (current_live_regs, regs_live);
2379 #endif
2380
2381 for (offset = 0, i = 0; offset < regset_size; offset++)
2382 {
2383 if (regs_live[offset] == 0)
2384 i += REGSET_ELT_BITS;
2385 else
2386 for (bit = 1; bit && i < max_regno; bit <<= 1, i++)
2387 if ((regs_live[offset] & bit)
2388 && (regno = reg_renumber[i]) >= 0)
2389 for (j = regno;
2390 j < regno + HARD_REGNO_NREGS (regno,
2391 PSEUDO_REGNO_MODE (i));
2392 j++)
2393 SET_HARD_REG_BIT (current_live_regs, j);
2394 }
2395
2396 /* Get starting and ending insn, handling the case where each might
2397 be a SEQUENCE. */
2398 start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
2399 stop_insn = target;
2400
2401 if (GET_CODE (start_insn) == INSN
2402 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
2403 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
2404
2405 if (GET_CODE (stop_insn) == INSN
2406 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
2407 stop_insn = next_insn (PREV_INSN (stop_insn));
2408
2409 for (insn = start_insn; insn != stop_insn;
2410 insn = next_insn_no_annul (insn))
2411 {
2412 rtx link;
2413 rtx real_insn = insn;
2414
2415 /* If this insn is from the target of a branch, it isn't going to
2416 be used in the sequel. If it is used in both cases, this
2417 test will not be true. */
2418 if (INSN_FROM_TARGET_P (insn))
2419 continue;
2420
2421 /* If this insn is a USE made by update_block, we care about the
2422 underlying insn. */
2423 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
2424 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2425 real_insn = XEXP (PATTERN (insn), 0);
2426
2427 if (GET_CODE (real_insn) == CALL_INSN)
2428 {
2429 /* CALL clobbers all call-used regs that aren't fixed except
2430 sp, ap, and fp. Do this before setting the result of the
2431 call live. */
2432 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2433 if (call_used_regs[i]
2434 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
2435 && i != ARG_POINTER_REGNUM
2436 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2437 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
2438 #endif
2439 #ifdef PIC_OFFSET_TABLE_REGNUM
2440 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2441 #endif
2442 )
2443 CLEAR_HARD_REG_BIT (current_live_regs, i);
2444
2445 /* A CALL_INSN sets any global register live, since it may
2446 have been modified by the call. */
2447 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2448 if (global_regs[i])
2449 SET_HARD_REG_BIT (current_live_regs, i);
2450 }
2451
2452 /* Mark anything killed in an insn to be deadened at the next
2453 label. Ignore USE insns; the only REG_DEAD notes will be for
2454 parameters. But they might be early. A CALL_INSN will usually
2455 clobber registers used for parameters. It isn't worth bothering
2456 with the unlikely case when it won't. */
2457 if ((GET_CODE (real_insn) == INSN
2458 && GET_CODE (PATTERN (real_insn)) != USE)
2459 || GET_CODE (real_insn) == JUMP_INSN
2460 || GET_CODE (real_insn) == CALL_INSN)
2461 {
2462 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2463 if (REG_NOTE_KIND (link) == REG_DEAD
2464 && GET_CODE (XEXP (link, 0)) == REG
2465 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2466 {
2467 int first_regno = REGNO (XEXP (link, 0));
2468 int last_regno
2469 = (first_regno
2470 + HARD_REGNO_NREGS (first_regno,
2471 GET_MODE (XEXP (link, 0))));
2472
2473 for (i = first_regno; i < last_regno; i++)
2474 SET_HARD_REG_BIT (pending_dead_regs, i);
2475 }
2476
2477 note_stores (PATTERN (real_insn), update_live_status);
2478
2479 /* If any registers were unused after this insn, kill them.
2480 These notes will always be accurate. */
2481 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2482 if (REG_NOTE_KIND (link) == REG_UNUSED
2483 && GET_CODE (XEXP (link, 0)) == REG
2484 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2485 {
2486 int first_regno = REGNO (XEXP (link, 0));
2487 int last_regno
2488 = (first_regno
2489 + HARD_REGNO_NREGS (first_regno,
2490 GET_MODE (XEXP (link, 0))));
2491
2492 for (i = first_regno; i < last_regno; i++)
2493 CLEAR_HARD_REG_BIT (current_live_regs, i);
2494 }
2495 }
2496
2497 else if (GET_CODE (real_insn) == CODE_LABEL)
2498 {
2499 /* A label clobbers the pending dead registers since neither
2500 reload nor jump will propagate a value across a label. */
2501 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
2502 CLEAR_HARD_REG_SET (pending_dead_regs);
2503 }
2504
2505 /* The beginning of the epilogue corresponds to the end of the
2506 RTL chain when there are no epilogue insns. Certain resources
2507 are implicitly required at that point. */
2508 else if (GET_CODE (real_insn) == NOTE
2509 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
2510 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
2511 }
2512
2513 COPY_HARD_REG_SET (res->regs, current_live_regs);
2514 tinfo->block = b;
2515 tinfo->bb_tick = bb_ticks[b];
2516 }
2517 else
2518 /* We didn't find the start of a basic block. Assume everything
2519 in use. This should happen only extremely rarely. */
2520 SET_HARD_REG_SET (res->regs);
2521
2522 /* Now step forward from TARGET looking for registers that are set before
2523 they are used. These are dead. If we pass a label, any pending dead
2524 registers that weren't yet used can be made dead. Stop when we pass a
2525 conditional JUMP_INSN; follow the first few unconditional branches. */
2526
2527 CLEAR_RESOURCE (&set);
2528 CLEAR_RESOURCE (&needed);
2529
2530 for (insn = target; insn; insn = next)
2531 {
2532 rtx this_jump_insn = insn;
2533
2534 next = NEXT_INSN (insn);
2535 switch (GET_CODE (insn))
2536 {
2537 case CODE_LABEL:
2538 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
2539 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
2540 CLEAR_HARD_REG_SET (pending_dead_regs);
2541 continue;
2542
2543 case BARRIER:
2544 case NOTE:
2545 continue;
2546
2547 case INSN:
2548 if (GET_CODE (PATTERN (insn)) == USE)
2549 {
2550 /* If INSN is a USE made by update_block, we care about the
2551 underlying insn. Any registers set by the underlying insn
2552 are live since the insn is being done somewhere else. */
2553 if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2554 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
2555
2556 /* All other USE insns are to be ignored. */
2557 continue;
2558 }
2559 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
2560 continue;
2561 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
2562 {
2563 /* An unconditional jump can be used to fill the delay slot
2564 of a call, so search for a JUMP_INSN in any position. */
2565 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
2566 {
2567 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
2568 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2569 break;
2570 }
2571 }
2572 }
2573
2574 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2575 {
2576 if (jump_count++ < 10
2577 && (simplejump_p (this_jump_insn)
2578 || GET_CODE (PATTERN (this_jump_insn)) == RETURN))
2579 {
2580 next = next_active_insn (JUMP_LABEL (this_jump_insn));
2581 if (jump_insn == 0)
2582 {
2583 jump_insn = insn;
2584 jump_target = JUMP_LABEL (this_jump_insn);
2585 }
2586 }
2587 else
2588 break;
2589 }
2590
2591 mark_referenced_resources (insn, &needed, 1);
2592 mark_set_resources (insn, &set, 0, 1);
2593
2594 COPY_HARD_REG_SET (scratch, set.regs);
2595 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2596 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2597 }
2598
2599 /* If we hit an unconditional branch, we have another way of finding out
2600 what is live: we can see what is live at the branch target and include
2601 anything used but not set before the branch. The only things that are
2602 live are those that are live using the above test and the test below.
2603
2604 Don't try this if we expired our jump count above, since that would
2605 mean there may be an infinite loop in the function being compiled. */
2606
2607 if (jump_insn && jump_count < 10)
2608 {
2609 struct resources new_resources;
2610 rtx stop_insn = next_active_insn (jump_insn);
2611
2612 mark_target_live_regs (next_active_insn (jump_target), &new_resources);
2613 CLEAR_RESOURCE (&set);
2614 CLEAR_RESOURCE (&needed);
2615
2616 /* Include JUMP_INSN in the needed registers. */
2617 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
2618 {
2619 mark_referenced_resources (insn, &needed, 1);
2620
2621 COPY_HARD_REG_SET (scratch, needed.regs);
2622 AND_COMPL_HARD_REG_SET (scratch, set.regs);
2623 IOR_HARD_REG_SET (new_resources.regs, scratch);
2624
2625 mark_set_resources (insn, &set, 0, 1);
2626 }
2627
2628 AND_HARD_REG_SET (res->regs, new_resources.regs);
2629 }
2630
2631 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
2632 }
2633 \f
2634 /* Scan a function looking for insns that need a delay slot and find insns to
2635 put into the delay slot.
2636
2637 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
2638 as calls). We do these first since we don't want jump insns (that are
2639 easier to fill) to get the only insns that could be used for non-jump insns.
2640 When it is zero, only try to fill JUMP_INSNs.
2641
2642 When slots are filled in this manner, the insns (including the
2643 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2644 it is possible to tell whether a delay slot has really been filled
2645 or not. `final' knows how to deal with this, by communicating
2646 through FINAL_SEQUENCE. */
2647
2648 static void
2649 fill_simple_delay_slots (first, non_jumps_p)
2650 rtx first;
2651 int non_jumps_p;
2652 {
2653 register rtx insn, pat, trial, next_trial;
2654 register int i, j;
2655 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2656 struct resources needed, set;
2657 register int slots_to_fill, slots_filled;
2658 rtx delay_list;
2659
2660 for (i = 0; i < num_unfilled_slots; i++)
2661 {
2662 int flags;
2663 /* Get the next insn to fill. If it has already had any slots assigned,
2664 we can't do anything with it. Maybe we'll improve this later. */
2665
2666 insn = unfilled_slots_base[i];
2667 if (insn == 0
2668 || INSN_DELETED_P (insn)
2669 || (GET_CODE (insn) == INSN
2670 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2671 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2672 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2673 continue;
2674
2675 if (GET_CODE (insn) == JUMP_INSN)
2676 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2677 else
2678 flags = get_jump_flags (insn, NULL_RTX);
2679 slots_to_fill = num_delay_slots (insn);
2680 if (slots_to_fill == 0)
2681 abort ();
2682
2683 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2684 says how many. After initialization, first try optimizing
2685
2686 call _foo call _foo
2687 nop add %o7,.-L1,%o7
2688 b,a L1
2689 nop
2690
2691 If this case applies, the delay slot of the call is filled with
2692 the unconditional jump. This is done first to avoid having the
2693 delay slot of the call filled in the backward scan. Also, since
2694 the unconditional jump is likely to also have a delay slot, that
2695 insn must exist when it is subsequently scanned.
2696
2697 This is tried on each insn with delay slots as some machines
2698 have insns which perform calls, but are not represented as
2699 CALL_INSNs. */
2700
2701 slots_filled = 0;
2702 delay_list = 0;
2703
2704 if ((trial = next_active_insn (insn))
2705 && GET_CODE (trial) == JUMP_INSN
2706 && simplejump_p (trial)
2707 && eligible_for_delay (insn, slots_filled, trial, flags)
2708 && no_labels_between_p (insn, trial))
2709 {
2710 slots_filled++;
2711 delay_list = add_to_delay_list (trial, delay_list);
2712 /* Remove the unconditional jump from consideration for delay slot
2713 filling and unthread it. */
2714 if (unfilled_slots_base[i + 1] == trial)
2715 unfilled_slots_base[i + 1] = 0;
2716 {
2717 rtx next = NEXT_INSN (trial);
2718 rtx prev = PREV_INSN (trial);
2719 if (prev)
2720 NEXT_INSN (prev) = next;
2721 if (next)
2722 PREV_INSN (next) = prev;
2723 }
2724 }
2725
2726 /* Now, scan backwards from the insn to search for a potential
2727 delay-slot candidate. Stop searching when a label or jump is hit.
2728
2729 For each candidate, if it is to go into the delay slot (moved
2730 forward in execution sequence), it must not need or set any resources
2731 that were set by later insns and must not set any resources that
2732 are needed for those insns.
2733
2734 The delay slot insn itself sets resources unless it is a call
2735 (in which case the called routine, not the insn itself, is doing
2736 the setting). */
2737
2738 if (slots_filled < slots_to_fill)
2739 {
2740 CLEAR_RESOURCE (&needed);
2741 CLEAR_RESOURCE (&set);
2742 mark_set_resources (insn, &set, 0, 0);
2743 mark_referenced_resources (insn, &needed, 0);
2744
2745 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2746 trial = next_trial)
2747 {
2748 next_trial = prev_nonnote_insn (trial);
2749
2750 /* This must be an INSN or CALL_INSN. */
2751 pat = PATTERN (trial);
2752
2753 /* A USE or CLOBBER at this level is just for flow; ignore it. */
2754 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2755 continue;
2756
2757 /* Check for resource conflict first, to avoid unnecessary
2758 splitting. */
2759 if (! insn_references_resource_p (trial, &set, 1)
2760 && ! insn_sets_resource_p (trial, &set, 1)
2761 && ! insn_sets_resource_p (trial, &needed, 1)
2762 #ifdef HAVE_cc0
2763 /* Can't separate set of cc0 from its use. */
2764 && ! (reg_mentioned_p (cc0_rtx, pat)
2765 && ! sets_cc0_p (pat))
2766 #endif
2767 )
2768 {
2769 trial = try_split (pat, trial, 1);
2770 next_trial = prev_nonnote_insn (trial);
2771 if (eligible_for_delay (insn, slots_filled, trial, flags))
2772 {
2773 /* In this case, we are searching backward, so if we
2774 find insns to put on the delay list, we want
2775 to put them at the head, rather than the
2776 tail, of the list. */
2777
2778 update_reg_dead_notes (trial, insn);
2779 delay_list = gen_rtx (INSN_LIST, VOIDmode,
2780 trial, delay_list);
2781 update_block (trial, trial);
2782 delete_insn (trial);
2783 if (slots_to_fill == ++slots_filled)
2784 break;
2785 continue;
2786 }
2787 }
2788
2789 mark_set_resources (trial, &set, 0, 1);
2790 mark_referenced_resources (trial, &needed, 1);
2791 }
2792 }
2793
2794 /* If all needed slots haven't been filled, we come here. */
2795
2796 /* Try to optimize case of jumping around a single insn. */
2797 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2798 if (slots_filled != slots_to_fill
2799 && delay_list == 0
2800 && GET_CODE (insn) == JUMP_INSN && condjump_p (insn))
2801 {
2802 delay_list = optimize_skip (insn);
2803 if (delay_list)
2804 slots_filled += 1;
2805 }
2806 #endif
2807
2808 /* Try to get insns from beyond the insn needing the delay slot.
2809 These insns can neither set nor reference resources set in insns being
2810 skipped, cannot set resources needed by the insns being skipped, and, if this
2811 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2812 call might not return).
2813
2814 If this is a conditional jump, see if it merges back to us early
2815 enough for us to pick up insns from the merge point. Don't do
2816 this if there is another branch to our label unless we pass all of
2817 them.
2818
2819 Another similar merge is if we jump to the same place that a
2820 later unconditional jump branches to. In that case, we don't
2821 care about the number of uses of our label. */
2822
2823 if (slots_filled != slots_to_fill
2824 && (GET_CODE (insn) != JUMP_INSN
2825 || (condjump_p (insn) && ! simplejump_p (insn)
2826 && JUMP_LABEL (insn) != 0)))
2827 {
2828 rtx target = 0;
2829 int maybe_never = 0;
2830 int passed_label = 0;
2831 int target_uses;
2832 struct resources needed_at_jump;
2833
2834 CLEAR_RESOURCE (&needed);
2835 CLEAR_RESOURCE (&set);
2836
2837 if (GET_CODE (insn) == CALL_INSN)
2838 {
2839 mark_set_resources (insn, &set, 0, 1);
2840 mark_referenced_resources (insn, &needed, 1);
2841 maybe_never = 1;
2842 }
2843 else
2844 {
2845 mark_set_resources (insn, &set, 0, 1);
2846 mark_referenced_resources (insn, &needed, 1);
2847 if (GET_CODE (insn) == JUMP_INSN)
2848 {
2849 /* Get our target and show how many more uses we want to
2850 see before we hit the label. */
2851 target = JUMP_LABEL (insn);
2852 target_uses = LABEL_NUSES (target) - 1;
2853 }
2854
2855 }
2856
2857 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2858 {
2859 rtx pat, trial_delay;
2860
2861 next_trial = next_nonnote_insn (trial);
2862
2863 if (GET_CODE (trial) == CODE_LABEL)
2864 {
2865 passed_label = 1;
2866
2867 /* If this is our target, see if we have seen all its uses.
2868 If so, indicate we have passed our target and ignore it.
2869 All other labels cause us to stop our search. */
2870 if (trial == target && target_uses == 0)
2871 {
2872 target = 0;
2873 continue;
2874 }
2875 else
2876 break;
2877 }
2878 else if (GET_CODE (trial) == BARRIER)
2879 break;
2880
2881 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2882 pat = PATTERN (trial);
2883
2884 /* Stand-alone USE and CLOBBER are just for flow. */
2885 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2886 continue;
2887
2888 /* If this already has filled delay slots, get the insn needing
2889 the delay slots. */
2890 if (GET_CODE (pat) == SEQUENCE)
2891 trial_delay = XVECEXP (pat, 0, 0);
2892 else
2893 trial_delay = trial;
2894
2895 /* If this is a jump insn to our target, indicate that we have
2896 seen another jump to it. If we aren't handling a conditional
2897 jump, stop our search. Otherwise, compute the needs at its
2898 target and add them to NEEDED. */
2899 if (GET_CODE (trial_delay) == JUMP_INSN)
2900 {
2901 if (target == 0)
2902 break;
2903 else if (JUMP_LABEL (trial_delay) == target)
2904 target_uses--;
2905 else
2906 {
2907 mark_target_live_regs
2908 (next_active_insn (JUMP_LABEL (trial_delay)),
2909 &needed_at_jump);
2910 needed.memory |= needed_at_jump.memory;
2911 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
2912 }
2913 }
2914
2915 /* See if we have a resource problem before we try to
2916 split. */
2917 if (target == 0
2918 && GET_CODE (pat) != SEQUENCE
2919 && ! insn_references_resource_p (trial, &set, 1)
2920 && ! insn_sets_resource_p (trial, &set, 1)
2921 && ! insn_sets_resource_p (trial, &needed, 1)
2922 #ifdef HAVE_cc0
2923 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2924 #endif
2925 && ! (maybe_never && may_trap_p (pat))
2926 && (trial = try_split (pat, trial, 0))
2927 && eligible_for_delay (insn, slots_filled, trial, flags))
2928 {
2929 next_trial = next_nonnote_insn (trial);
2930 delay_list = add_to_delay_list (trial, delay_list);
2931
2932 #ifdef HAVE_cc0
2933 if (reg_mentioned_p (cc0_rtx, pat))
2934 link_cc0_insns (trial);
2935 #endif
2936
2937 if (passed_label)
2938 update_block (trial, trial);
2939 delete_insn (trial);
2940 if (slots_to_fill == ++slots_filled)
2941 break;
2942 continue;
2943 }
2944
2945 mark_set_resources (trial, &set, 0, 1);
2946 mark_referenced_resources (trial, &needed, 1);
2947
2948 /* Ensure we don't put insns between the setting of cc and the
2949 comparison by moving a setting of cc into an earlier delay
2950 slot since these insns could clobber the condition code. */
2951 set.cc = 1;
2952
2953 /* If this is a call or jump, we might not get here. */
2954 if (GET_CODE (trial) == CALL_INSN
2955 || GET_CODE (trial) == JUMP_INSN)
2956 maybe_never = 1;
2957 }
2958
2959 /* If there are slots left to fill and our search was stopped by an
2960 unconditional branch, try the insn at the branch target. We can
2961 redirect the branch if it works. */
2962 if (slots_to_fill != slots_filled
2963 && trial
2964 && GET_CODE (trial) == JUMP_INSN
2965 && simplejump_p (trial)
2966 && (target == 0 || JUMP_LABEL (trial) == target)
2967 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2968 && ! (GET_CODE (next_trial) == INSN
2969 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2970 && ! insn_references_resource_p (next_trial, &set, 1)
2971 && ! insn_sets_resource_p (next_trial, &set, 1)
2972 && ! insn_sets_resource_p (next_trial, &needed, 1)
2973 #ifdef HAVE_cc0
2974 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2975 #endif
2976 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
2977 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2978 && eligible_for_delay (insn, slots_filled, next_trial, flags))
2979 {
2980 rtx new_label = next_active_insn (next_trial);
2981
2982 if (new_label != 0)
2983 new_label = get_label_before (new_label);
2984
2985 delay_list
2986 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2987 slots_filled++;
2988 reorg_redirect_jump (trial, new_label);
2989
2990 /* If we merged because we both jumped to the same place,
2991 redirect the original insn also. */
2992 if (target)
2993 reorg_redirect_jump (insn, new_label);
2994 }
2995 }
2996
2997 if (delay_list)
2998 unfilled_slots_base[i]
2999 = emit_delay_sequence (insn, delay_list,
3000 slots_filled, slots_to_fill);
3001
3002 if (slots_to_fill == slots_filled)
3003 unfilled_slots_base[i] = 0;
3004
3005 note_delay_statistics (slots_filled, 0);
3006 }
3007
3008 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3009 /* See if the epilogue needs any delay slots. Try to fill them if so.
3010 The only thing we can do is scan backwards from the end of the
3011 function. If we did this in a previous pass, it is incorrect to do it
3012 again. */
3013 if (current_function_epilogue_delay_list)
3014 return;
3015
3016 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
3017 if (slots_to_fill == 0)
3018 return;
3019
3020 slots_filled = 0;
3021 CLEAR_RESOURCE (&needed);
3022 CLEAR_RESOURCE (&set);
3023
3024 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
3025 trial = PREV_INSN (trial))
3026 {
3027 if (GET_CODE (trial) == NOTE)
3028 continue;
3029 pat = PATTERN (trial);
3030 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3031 continue;
3032
3033 if (! insn_references_resource_p (trial, &set, 1)
3034 && ! insn_sets_resource_p (trial, &needed, 1)
3035 #ifdef HAVE_cc0
3036 /* Don't want to mess with cc0 here. */
3037 && ! reg_mentioned_p (cc0_rtx, pat)
3038 #endif
3039 )
3040 {
3041 trial = try_split (pat, trial, 1);
3042 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
3043 {
3044 /* Here as well we are searching backward, so put the
3045 insns we find on the head of the list. */
3046
3047 current_function_epilogue_delay_list
3048 = gen_rtx (INSN_LIST, VOIDmode, trial,
3049 current_function_epilogue_delay_list);
3050 mark_referenced_resources (trial, &end_of_function_needs, 1);
3051 update_block (trial, trial);
3052 delete_insn (trial);
3053
3054 /* Clear deleted bit so final.c will output the insn. */
3055 INSN_DELETED_P (trial) = 0;
3056
3057 if (slots_to_fill == ++slots_filled)
3058 break;
3059 continue;
3060 }
3061 }
3062
3063 mark_set_resources (trial, &set, 0, 1);
3064 mark_referenced_resources (trial, &needed, 1);
3065 }
3066
3067 note_delay_statistics (slots_filled, 0);
3068 #endif
3069 }
3070 \f
3071 /* Try to find insns to place in delay slots.
3072
3073 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
3074 or is an unconditional branch if CONDITION is const_true_rtx.
3075 *PSLOTS_FILLED is updated with the number of slots that we have filled.
3076
3077 THREAD is a flow-of-control, either the insns to be executed if the
3078 branch is true or if the branch is false, THREAD_IF_TRUE says which.
3079
3080 OPPOSITE_THREAD is the thread in the opposite direction. It is used
3081 to see if any potential delay slot insns set things needed there.
3082
3083 LIKELY is non-zero if it is extremely likely that the branch will be
3084 taken and THREAD_IF_TRUE is set. This is used for the branch at the
3085 end of a loop that jumps back up to the top.
3086
3087 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
3088 thread. I.e., it is the fallthrough code of our jump or the target of the
3089 jump when we are the only jump going there.
3090
3091 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
3092 case, we can only take insns from the head of the thread for our delay
3093 slot. We then adjust the jump to point after the insns we have taken. */
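/* NEW_THREAD records how far into the thread insns have been consumed.  When
   we own the thread, a consumed insn is deleted in place; otherwise it is
   copied into the delay slot and NEW_THREAD is advanced past it, so that at
   the end INSN can be redirected to the first insn we did not take.  */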
3094
3095 static rtx
3096 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
3097 thread_if_true, own_thread, own_opposite_thread,
3098 slots_to_fill, pslots_filled)
3099 rtx insn;
3100 rtx condition;
3101 rtx thread, opposite_thread;
3102 int likely;
3103 int thread_if_true;
3104 int own_thread, own_opposite_thread;
3105 int slots_to_fill, *pslots_filled;
3106 {
3107 rtx new_thread;
3108 rtx delay_list = 0;
3109 struct resources opposite_needed, set, needed;
3110 rtx trial;
3111 int lose = 0;
3112 int must_annul = 0;
3113 int flags;
3114
3115 /* Validate our arguments. */
3116 if ((condition == const_true_rtx && ! thread_if_true)
3117 || (! own_thread && ! thread_if_true))
3118 abort ();
3119
3120 flags = get_jump_flags (insn, JUMP_LABEL (insn));
3121
3122 /* If our thread is the end of subroutine, we can't get any delay
3123 insns from that. */
3124 if (thread == 0)
3125 return 0;
3126
3127 /* If this is an unconditional branch, nothing is needed at the
3128 opposite thread. Otherwise, compute what is needed there. */
3129 if (condition == const_true_rtx)
3130 CLEAR_RESOURCE (&opposite_needed);
3131 else
3132 mark_target_live_regs (opposite_thread, &opposite_needed);
3133
3134 /* If the insn at THREAD can be split, do it here to avoid having to
3135 update THREAD and NEW_THREAD if it is done in the loop below. Also
3136 initialize NEW_THREAD. */
3137
3138 new_thread = thread = try_split (PATTERN (thread), thread, 0);
3139
3140 /* Scan insns at THREAD. We are looking for an insn that can be removed
3141 from THREAD (it neither sets nor references resources that were set
3142 ahead of it and it doesn't set anything needed by the insns ahead of
3143 it) and that either can be placed in an annulling insn or doesn't set
3144 anything needed at OPPOSITE_THREAD. */
3145
3146 CLEAR_RESOURCE (&needed);
3147 CLEAR_RESOURCE (&set);
3148
3149 /* If we do not own this thread, we must stop as soon as we find
3150 something that we can't put in a delay slot, since all we can do
3151 is branch into THREAD at a later point. Therefore, labels stop
3152 the search if this is not the `true' thread. */
3153
3154 for (trial = thread;
3155 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
3156 trial = next_nonnote_insn (trial))
3157 {
3158 rtx pat, old_trial;
3159
3160 /* If we have passed a label, we no longer own this thread. */
3161 if (GET_CODE (trial) == CODE_LABEL)
3162 {
3163 own_thread = 0;
3164 continue;
3165 }
3166
3167 pat = PATTERN (trial);
3168 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3169 continue;
3170
3171 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
3172 don't separate or copy insns that set and use CC0. */
3173 if (! insn_references_resource_p (trial, &set, 1)
3174 && ! insn_sets_resource_p (trial, &set, 1)
3175 && ! insn_sets_resource_p (trial, &needed, 1)
3176 #ifdef HAVE_cc0
3177 && ! (reg_mentioned_p (cc0_rtx, pat)
3178 && (! own_thread || ! sets_cc0_p (pat)))
3179 #endif
3180 )
3181 {
3182 /* If TRIAL is redundant with some insn before INSN, we don't
3183 actually need to add it to the delay list; we can merely pretend
3184 we did. */
3185 if (redundant_insn_p (trial, insn, delay_list))
3186 {
3187 if (own_thread)
3188 {
3189 update_block (trial, thread);
3190 delete_insn (trial);
3191 }
3192 else
3193 new_thread = next_active_insn (trial);
3194
3195 continue;
3196 }
3197
3198 /* There are two ways we can win: If TRIAL doesn't set anything
3199 needed at the opposite thread and can't trap, or if it can
3200 go into an annulled delay slot. */
3201 if (condition == const_true_rtx
3202 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
3203 && ! may_trap_p (pat)))
3204 {
3205 old_trial = trial;
3206 trial = try_split (pat, trial, 0);
3207 if (new_thread == old_trial)
3208 new_thread = trial;
3209 pat = PATTERN (trial);
3210 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
3211 goto winner;
3212 }
3213 else if (0
3214 #ifdef ANNUL_IFTRUE_SLOTS
3215 || ! thread_if_true
3216 #endif
3217 #ifdef ANNUL_IFFALSE_SLOTS
3218 || thread_if_true
3219 #endif
3220 )
3221 {
3222 old_trial = trial;
3223 trial = try_split (pat, trial, 0);
3224 if (new_thread == old_trial)
3225 new_thread = trial;
3226 pat = PATTERN (trial);
3227 if ((thread_if_true
3228 ? eligible_for_annul_false (insn, *pslots_filled, trial, flags)
3229 : eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
3230 {
3231 rtx temp;
3232
3233 must_annul = 1;
3234 winner:
3235
3236 #ifdef HAVE_cc0
3237 if (reg_mentioned_p (cc0_rtx, pat))
3238 link_cc0_insns (trial);
3239 #endif
3240
3241 /* If we own this thread, delete the insn. If this is the
3242 destination of a branch, show that a basic block status
3243 may have been updated. In any case, mark the new
3244 starting point of this thread. */
3245 if (own_thread)
3246 {
3247 update_block (trial, thread);
3248 delete_insn (trial);
3249 }
3250 else
3251 new_thread = next_active_insn (trial);
3252
3253 temp = own_thread ? trial : copy_rtx (trial);
3254 if (thread_if_true)
3255 INSN_FROM_TARGET_P (temp) = 1;
3256
3257 delay_list = add_to_delay_list (temp, delay_list);
3258
3259 if (slots_to_fill == ++(*pslots_filled))
3260 {
3261 /* Even though we have filled all the slots, we
3262 may be branching to a location that has a
3263 redundant insn. Skip any if so. */
3264 while (new_thread && ! own_thread
3265 && ! insn_sets_resource_p (new_thread, &set, 1)
3266 && ! insn_sets_resource_p (new_thread, &needed, 1)
3267 && ! insn_references_resource_p (new_thread,
3268 &set, 1)
3269 && redundant_insn_p (new_thread, insn,
3270 delay_list))
3271 new_thread = next_active_insn (new_thread);
3272 break;
3273 }
3274
3275 continue;
3276 }
3277 }
3278 }
3279
3280 /* This insn can't go into a delay slot. */
3281 lose = 1;
3282 mark_set_resources (trial, &set, 0, 1);
3283 mark_referenced_resources (trial, &needed, 1);
3284
3285 /* Ensure we don't put insns between the setting of cc and the comparison
3286 by moving a setting of cc into an earlier delay slot since these insns
3287 could clobber the condition code. */
3288 set.cc = 1;
3289
3290 /* If this insn is a register-register copy and the next insn has
3291 a use of our destination, change it to use our source. That way,
3292 it will become a candidate for our delay slot the next time
3293 through this loop. This case occurs commonly in loops that
3294 scan a list.
3295
3296 We could check for more complex cases than those tested below,
3297 but it doesn't seem worth it. It might also be a good idea to try
3298 to swap the two insns. That might do better.
3299
3300 We can't do this if the next insn modifies our destination, because
3301 that would make the replacement into the insn invalid. We also can't
3302 do this if it modifies our source, because it might be an earlyclobber
3303 operand. This latter test also prevents updating the contents of
3304 a PRE_INC. */
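	  /* For example, given

		r2 = r1
		r3 = r3 + r2

	     rewriting the second insn as "r3 = r3 + r1" removes its
	     dependence on the copy we just stepped over, so that insn can
	     be considered for the delay slot on the next iteration.  */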
3305
3306 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
3307 && GET_CODE (SET_SRC (pat)) == REG
3308 && GET_CODE (SET_DEST (pat)) == REG)
3309 {
3310 rtx next = next_nonnote_insn (trial);
3311
3312 if (next && GET_CODE (next) == INSN
3313 && GET_CODE (PATTERN (next)) != USE
3314 && ! reg_set_p (SET_DEST (pat), next)
3315 && ! reg_set_p (SET_SRC (pat), next)
3316 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
3317 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
3318 }
3319 }
3320
3321 /* If we stopped on a branch insn that has delay slots, see if we can
3322 steal some of the insns in those slots. */
3323 if (trial && GET_CODE (trial) == INSN
3324 && GET_CODE (PATTERN (trial)) == SEQUENCE
3325 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
3326 {
3327 /* If this is the `true' thread, we will want to follow the jump,
3328 so we can only do this if we have taken everything up to here. */
3329 if (thread_if_true && trial == new_thread)
3330 delay_list
3331 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
3332 delay_list, &set, &needed,
3333 &opposite_needed, slots_to_fill,
3334 pslots_filled, &must_annul,
3335 &new_thread);
3336 else if (! thread_if_true)
3337 delay_list
3338 = steal_delay_list_from_fallthrough (insn, condition,
3339 PATTERN (trial),
3340 delay_list, &set, &needed,
3341 &opposite_needed, slots_to_fill,
3342 pslots_filled, &must_annul);
3343 }
3344
3345 /* If we haven't found anything for this delay slot and it is very
3346 likely that the branch will be taken, see if the insn at our target
3347 increments or decrements a register with an increment that does not
3348 depend on the destination register. If so, try to place the opposite
3349 arithmetic insn after the jump insn and put the arithmetic insn in the
3350 delay slot. If we can't do this, return. */
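  /* For example, if this is a likely-taken loop branch whose target begins
     with "i = i + 1", we can emit a compensating "i = i - 1" on the
     fall-through path right after the branch, put "i = i + 1" in the
     (non-annulled) delay slot, and delete the original increment at the
     target (or redirect the branch past it): the taken path still sees one
     net increment and the rare fall-through path is immediately undone.  */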
3351 if (delay_list == 0 && likely && new_thread && GET_CODE (new_thread) == INSN)
3352 {
3353 rtx pat = PATTERN (new_thread);
3354 rtx dest;
3355 rtx src;
3356
3357 trial = new_thread;
3358 pat = PATTERN (trial);
3359
3360 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
3361 || ! eligible_for_delay (insn, 0, trial, flags))
3362 return 0;
3363
3364 dest = SET_DEST (pat), src = SET_SRC (pat);
3365 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
3366 && rtx_equal_p (XEXP (src, 0), dest)
3367 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
3368 {
3369 rtx other = XEXP (src, 1);
3370 rtx new_arith;
3371 rtx ninsn;
3372
3373 /* If this is a constant adjustment, use the same code with
3374 the negated constant. Otherwise, reverse the sense of the
3375 arithmetic. */
3376 if (GET_CODE (other) == CONST_INT)
3377 new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
3378 negate_rtx (GET_MODE (src), other));
3379 else
3380 new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
3381 GET_MODE (src), dest, other);
3382
3383 ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
3384 insn);
3385
3386 if (recog_memoized (ninsn) < 0
3387 || (insn_extract (ninsn),
3388 ! constrain_operands (INSN_CODE (ninsn), 1)))
3389 {
3390 delete_insn (ninsn);
3391 return 0;
3392 }
3393
3394 if (own_thread)
3395 {
3396 update_block (trial, thread);
3397 delete_insn (trial);
3398 }
3399 else
3400 new_thread = next_active_insn (trial);
3401
3402 ninsn = own_thread ? trial : copy_rtx (trial);
3403 if (thread_if_true)
3404 INSN_FROM_TARGET_P (ninsn) = 1;
3405
3406 delay_list = add_to_delay_list (ninsn, NULL_RTX);
3407 (*pslots_filled)++;
3408 }
3409 }
3410
3411 if (delay_list && must_annul)
3412 INSN_ANNULLED_BRANCH_P (insn) = 1;
3413
3414 /* If we are to branch into the middle of this thread, find an appropriate
3415 label or make a new one if none, and redirect INSN to it. If we hit the
3416 end of the function, use the end-of-function label. */
3417 if (new_thread != thread)
3418 {
3419 rtx label;
3420
3421 if (! thread_if_true)
3422 abort ();
3423
3424 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
3425 && (simplejump_p (new_thread)
3426 || GET_CODE (PATTERN (new_thread)) == RETURN))
3427 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3428
3429 if (new_thread == 0)
3430 label = find_end_label ();
3431 else if (GET_CODE (new_thread) == CODE_LABEL)
3432 label = new_thread;
3433 else
3434 label = get_label_before (new_thread);
3435
3436 reorg_redirect_jump (insn, label);
3437 }
3438
3439 return delay_list;
3440 }
3441 \f
3442 /* Make another attempt to find insns to place in delay slots.
3443
3444 We previously looked for insns located in front of the delay insn
3445 and, for non-jump delay insns, located behind the delay insn.
3446
3447 Here we try only to schedule jump insns, by moving insns from either
3448 the target or the following insns into the delay slot. If annulling is
3449 supported, we are likely to be able to do this. Otherwise, we can do
3450 this only if it is safe. */
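 /* A rough sketch of the strategy (pseudo-assembly, hypothetical operands).
 For a conditional branch predicted taken, on a machine that can annul the
 delay slot we may hoist the first insn of the target into the slot, annul
 it on the fall-through path, and redirect the branch past the original:

 before:  beq   r1,L1       ; slot unfilled
          nop
          ...
 L1:      add   r2,r2,1
 L2:      ...

 after:   beq,a r1,L2       ; slot annulled when not taken
          add   r2,r2,1
          ...
 L1:      add   r2,r2,1
 L2:      ...

 When the branch is predicted not taken, the fall-through thread is tried
 first instead.  */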
3451
3452 static void
3453 fill_eager_delay_slots (first)
3454 rtx first;
3455 {
3456 register rtx insn;
3457 register int i;
3458 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3459
3460 for (i = 0; i < num_unfilled_slots; i++)
3461 {
3462 rtx condition;
3463 rtx target_label, insn_at_target, fallthrough_insn;
3464 rtx delay_list = 0;
3465 int own_target;
3466 int own_fallthrough;
3467 int prediction, slots_to_fill, slots_filled;
3468
3469 insn = unfilled_slots_base[i];
3470 if (insn == 0
3471 || INSN_DELETED_P (insn)
3472 || GET_CODE (insn) != JUMP_INSN
3473 || ! condjump_p (insn))
3474 continue;
3475
3476 slots_to_fill = num_delay_slots (insn);
3477 if (slots_to_fill == 0)
3478 abort ();
3479
3480 slots_filled = 0;
3481 target_label = JUMP_LABEL (insn);
3482 condition = get_branch_condition (insn, target_label);
3483
3484 if (condition == 0)
3485 continue;
3486
3487 /* Get the next active fallthrough and target insns and see if we own
3488 them. Then see whether the branch is likely true. We don't need
3489 to do a lot of this for unconditional branches. */
3490
3491 insn_at_target = next_active_insn (target_label);
3492 own_target = own_thread_p (target_label, target_label, 0);
3493
3494 if (condition == const_true_rtx)
3495 {
3496 own_fallthrough = 0;
3497 fallthrough_insn = 0;
3498 prediction = 2;
3499 }
3500 else
3501 {
3502 fallthrough_insn = next_active_insn (insn);
3503 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3504 prediction = mostly_true_jump (insn, condition);
3505 }
3506
3507 /* If this insn is expected to branch, first try to get insns from our
3508 target, then our fallthrough insns. If it is not expected to branch,
3509 try the other order. */
3510
3511 if (prediction > 0)
3512 {
3513 delay_list
3514 = fill_slots_from_thread (insn, condition, insn_at_target,
3515 fallthrough_insn, prediction == 2, 1,
3516 own_target, own_fallthrough,
3517 slots_to_fill, &slots_filled);
3518
3519 if (delay_list == 0 && own_fallthrough)
3520 {
3521 /* Even though we didn't find anything for delay slots,
3522 we might have found a redundant insn which we deleted
3523 from the thread that was filled. So we have to recompute
3524 the next insn at the target. */
3525 target_label = JUMP_LABEL (insn);
3526 insn_at_target = next_active_insn (target_label);
3527
3528 delay_list
3529 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3530 insn_at_target, 0, 0,
3531 own_fallthrough, own_target,
3532 slots_to_fill, &slots_filled);
3533 }
3534 }
3535 else
3536 {
3537 if (own_fallthrough)
3538 delay_list
3539 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3540 insn_at_target, 0, 0,
3541 own_fallthrough, own_target,
3542 slots_to_fill, &slots_filled);
3543
3544 if (delay_list == 0)
3545 delay_list
3546 = fill_slots_from_thread (insn, condition, insn_at_target,
3547 next_active_insn (insn), 0, 1,
3548 own_target, own_fallthrough,
3549 slots_to_fill, &slots_filled);
3550 }
3551
3552 if (delay_list)
3553 unfilled_slots_base[i]
3554 = emit_delay_sequence (insn, delay_list,
3555 slots_filled, slots_to_fill);
3556
3557 if (slots_to_fill == slots_filled)
3558 unfilled_slots_base[i] = 0;
3559
3560 note_delay_statistics (slots_filled, 1);
3561 }
3562 }
3563 \f
3564 /* Once we have tried two ways to fill a delay slot, make a pass over the
3565 code to try to improve the results and to do such things as more jump
3566 threading. */
3567
3568 static void
3569 relax_delay_slots (first)
3570 rtx first;
3571 {
3572 register rtx insn, next, pat;
3573 register rtx trial, delay_insn, target_label;
3574
3575 /* Look at every JUMP_INSN and see if we can improve it. */
3576 for (insn = first; insn; insn = next)
3577 {
3578 rtx other;
3579
3580 next = next_active_insn (insn);
3581
3582 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3583 the next insn, or jumps to a label that is not the last of a
3584 group of consecutive labels. */
3585 if (GET_CODE (insn) == JUMP_INSN
3586 && condjump_p (insn)
3587 && (target_label = JUMP_LABEL (insn)) != 0)
3588 {
3589 target_label = follow_jumps (target_label);
3590 target_label = prev_label (next_active_insn (target_label));
3591
3592 if (target_label == 0)
3593 target_label = find_end_label ();
3594
3595 if (next_active_insn (target_label) == next)
3596 {
3597 delete_jump (insn);
3598 continue;
3599 }
3600
3601 if (target_label != JUMP_LABEL (insn))
3602 reorg_redirect_jump (insn, target_label);
3603
3604 /* See if this jump branches around an unconditional jump.
3605 If so, invert this jump and point it to the target of the
3606 second jump. */
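 /* A minimal illustration (pseudo-assembly, hypothetical labels):

 beq  r1,L1
 jmp  L2
 L1:  ...

 becomes

 bne  r1,L2
 L1:  ...

 The conditional jump is inverted to aim at L2, and the unconditional
 jump, now unreachable, is deleted.  */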
3607 if (next && GET_CODE (next) == JUMP_INSN
3608 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3609 && next_active_insn (target_label) == next_active_insn (next)
3610 && no_labels_between_p (insn, next))
3611 {
3612 rtx label = JUMP_LABEL (next);
3613
3614 /* Be careful how we do this to avoid deleting code or
3615 labels that are momentarily dead. See similar optimization
3616 in jump.c.
3617
3618 We also need to ensure we properly handle the case when
3619 invert_jump fails. */
3620
3621 ++LABEL_NUSES (target_label);
3622 if (label)
3623 ++LABEL_NUSES (label);
3624
3625 if (invert_jump (insn, label))
3626 {
3627 delete_insn (next);
3628 next = insn;
3629 }
3630
3631 if (label)
3632 --LABEL_NUSES (label);
3633
3634 if (--LABEL_NUSES (target_label) == 0)
3635 delete_insn (target_label);
3636
3637 continue;
3638 }
3639 }
3640
3641 /* If this is an unconditional jump and the previous insn is a
3642 conditional jump, try reversing the condition of the previous
3643 insn and swapping our targets. The next pass might be able to
3644 fill the slots.
3645
3646 Don't do this if we expect the conditional branch to be true, because
3647 we would then be making the more common case longer. */
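 /* A minimal illustration (pseudo-assembly, hypothetical labels), assuming
 the conditional jump is mostly not taken:

 beq  r1,L1
 jmp  L2

 becomes

 bne  r1,L2
 jmp  L1

 The rewritten conditional branch may acquire delay slot insns on the next
 filling pass, and the common path now executes a single branch instead of
 a fall-through plus an unconditional jump.  */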
3648
3649 if (GET_CODE (insn) == JUMP_INSN
3650 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3651 && (other = prev_active_insn (insn)) != 0
3652 && condjump_p (other)
3653 && no_labels_between_p (other, insn)
3654 && 0 > mostly_true_jump (other,
3655 get_branch_condition (other,
3656 JUMP_LABEL (other))))
3657 {
3658 rtx other_target = JUMP_LABEL (other);
3659 target_label = JUMP_LABEL (insn);
3660
3661 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
3662 as we move the label. */
3663 if (other_target)
3664 ++LABEL_NUSES (other_target);
3665
3666 if (invert_jump (other, target_label))
3667 reorg_redirect_jump (insn, other_target);
3668
3669 if (other_target)
3670 --LABEL_NUSES (other_target);
3671 }
3672
3673 /* Now look only at cases where we have filled a delay slot. */
3674 if (GET_CODE (insn) != INSN
3675 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3676 continue;
3677
3678 pat = PATTERN (insn);
3679 delay_insn = XVECEXP (pat, 0, 0);
3680
3681 /* See if the first insn in the delay slot is redundant with some
3682 previous insn. Remove it from the delay slot if so; then set up
3683 to reprocess this insn. */
3684 if (redundant_insn_p (XVECEXP (pat, 0, 1), delay_insn, 0))
3685 {
3686 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3687 next = prev_active_insn (next);
3688 continue;
3689 }
3690
3691 /* Now look only at the cases where we have a filled JUMP_INSN. */
3692 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3693 || ! condjump_p (XVECEXP (PATTERN (insn), 0, 0)))
3694 continue;
3695
3696 target_label = JUMP_LABEL (delay_insn);
3697
3698 if (target_label)
3699 {
3700 /* If this jump goes to another unconditional jump, thread it, but
3701 don't convert a jump into a RETURN here. */
3702 trial = follow_jumps (target_label);
3703 trial = prev_label (next_active_insn (trial));
3704 if (trial == 0 && target_label != 0)
3705 trial = find_end_label ();
3706
3707 if (trial != target_label
3708 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3709 {
3710 reorg_redirect_jump (delay_insn, trial);
3711 target_label = trial;
3712 }
3713
3714 /* If the first insn at TARGET_LABEL is redundant with a previous
3715 insn, redirect the jump to the following insn and process it again. */
3716 trial = next_active_insn (target_label);
3717 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3718 && redundant_insn_p (trial, insn, 0))
3719 {
3720 trial = next_active_insn (trial);
3721 if (trial == 0)
3722 target_label = find_end_label ();
3723 else
3724 target_label = get_label_before (trial);
3725 reorg_redirect_jump (delay_insn, target_label);
3726 next = insn;
3727 continue;
3728 }
3729
3730 /* Similarly, if it is an unconditional jump with one insn in its
3731 delay list and that insn is redundant, thread the jump. */
3732 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3733 && XVECLEN (PATTERN (trial), 0) == 2
3734 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3735 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3736 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3737 && redundant_insn_p (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3738 {
3739 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3740 if (target_label == 0)
3741 target_label = find_end_label ();
3742
3743 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
3744 insn))
3745 {
3746 reorg_redirect_jump (delay_insn, target_label);
3747 next = insn;
3748 continue;
3749 }
3750 }
3751 }
3752
3753 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3754 && prev_active_insn (target_label) == insn
3755 #ifdef HAVE_cc0
3756 /* If the last insn in the delay slot sets CC0 for some insn,
3757 various code assumes that it is in a delay slot. We could
3758 put it back where it belonged and delete the register notes,
3759 but it doesn't seem worthwhile in this uncommon case. */
3760 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3761 REG_CC_USER, NULL_RTX)
3762 #endif
3763 )
3764 {
3765 int i;
3766
3767 /* All this insn does is execute its delay list and jump to the
3768 following insn. So delete the jump and just execute the delay
3769 list insns.
3770
3771 We do this by deleting the INSN containing the SEQUENCE, then
3772 re-emitting the insns separately, and then deleting the jump.
3773 This allows the count of the jump target to be properly
3774 decremented. */
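 /* A minimal illustration (pseudo-assembly):  a filled jump whose target is
 simply the next active insn,

 jmp  L1
 add  r2,r2,1   ; delay slot
 L1:  ...

 is replaced by re-emitting the delay list insn by itself and deleting the
 jump, leaving

 add  r2,r2,1
 L1:  ...  */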
3775
3776 /* Clear the from target bit, since these insns are no longer
3777 in delay slots. */
3778 for (i = 0; i < XVECLEN (pat, 0); i++)
3779 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3780
3781 trial = PREV_INSN (insn);
3782 delete_insn (insn);
3783 emit_insn_after (pat, trial);
3784 delete_scheduled_jump (delay_insn);
3785 continue;
3786 }
3787
3788 /* See if this is an unconditional jump around a single insn which is
3789 identical to the one in its delay slot. In this case, we can just
3790 delete the branch and the insn in its delay slot. */
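 /* A minimal illustration (pseudo-assembly, hypothetical operands):

 jmp  L1
 add  r2,r2,1   ; delay slot
 add  r2,r2,1
 L1:  ...

 Both paths execute the same single insn, so the jump and its delay slot
 insn can be deleted, leaving only the copy that falls through into L1.  */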
3791 if (next && GET_CODE (next) == INSN
3792 && prev_label (next_active_insn (next)) == target_label
3793 && simplejump_p (insn)
3794 && XVECLEN (pat, 0) == 2
3795 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3796 {
3797 delete_insn (insn);
3798 continue;
3799 }
3800
3801 /* See if this jump (with its delay slots) branches around another
3802 jump (without delay slots). If so, invert this jump and point
3803 it to the target of the second jump. We cannot do this for
3804 annulled jumps, though. Again, don't convert a jump to a RETURN
3805 here. */
3806 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3807 && next && GET_CODE (next) == JUMP_INSN
3808 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3809 && next_active_insn (target_label) == next_active_insn (next)
3810 && no_labels_between_p (insn, next))
3811 {
3812 rtx label = JUMP_LABEL (next);
3813 rtx old_label = JUMP_LABEL (delay_insn);
3814
3815 if (label == 0)
3816 label = find_end_label ();
3817
3818 if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3819 {
3820 /* Be careful how we do this to avoid deleting code or labels
3821 that are momentarily dead. See similar optimization in
3822 jump.c */
3823 if (old_label)
3824 ++LABEL_NUSES (old_label);
3825
3826 if (invert_jump (delay_insn, label))
3827 {
3828 delete_insn (next);
3829 next = insn;
3830 }
3831
3832 if (old_label && --LABEL_NUSES (old_label) == 0)
3833 delete_insn (old_label);
3834 continue;
3835 }
3836 }
3837
3838 /* If we own the thread opposite the way this insn branches, see if we
3839 can merge its delay slots with following insns. */
3840 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3841 && own_thread_p (NEXT_INSN (insn), 0, 1))
3842 try_merge_delay_insns (insn, next);
3843 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3844 && own_thread_p (target_label, target_label, 0))
3845 try_merge_delay_insns (insn, next_active_insn (target_label));
3846
3847 /* If we get here, we haven't deleted INSN. But we may have deleted
3848 NEXT, so recompute it. */
3849 next = next_active_insn (insn);
3850 }
3851 }
3852 \f
3853 #ifdef HAVE_return
3854
3855 /* Look for filled jumps to the end of function label. We can try to convert
3856 them into RETURN insns if the insns in the delay slot are valid for the
3857 RETURN as well. */
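 /* A minimal illustration (pseudo-assembly):  a filled jump to the
 end-of-function label, e.g.

 jmp   Lend
 restore       ; delay slot
 ...
 Lend: ret

 may be rewritten as

 ret
 restore       ; delay slot

 provided each insn already in its delay list is also valid in a delay
 slot of the RETURN.  */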
3858
3859 static void
3860 make_return_insns (first)
3861 rtx first;
3862 {
3863 rtx insn, jump_insn, pat;
3864 rtx real_return_label = end_of_function_label;
3865 int slots, i;
3866
3867 /* See if there is a RETURN insn in the function other than the one we
3868 made for END_OF_FUNCTION_LABEL. If so, arrange for any jump we can't
3869 convert into a RETURN to be redirected there instead. */
3870 for (insn = first; insn; insn = NEXT_INSN (insn))
3871 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
3872 {
3873 real_return_label = get_label_before (insn);
3874 break;
3875 }
3876
3877 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3878 was equal to END_OF_FUNCTION_LABEL. */
3879 LABEL_NUSES (real_return_label)++;
3880
3881 /* Clear the list of insns to fill so we can use it. */
3882 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3883
3884 for (insn = first; insn; insn = NEXT_INSN (insn))
3885 {
3886 int flags;
3887
3888 /* Only look at filled JUMP_INSNs that go to the end of function
3889 label. */
3890 if (GET_CODE (insn) != INSN
3891 || GET_CODE (PATTERN (insn)) != SEQUENCE
3892 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3893 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3894 continue;
3895
3896 pat = PATTERN (insn);
3897 jump_insn = XVECEXP (pat, 0, 0);
3898
3899 /* If we can't make the jump into a RETURN, redirect it to the best
3900 RETURN and go on to the next insn. */
3901 if (! reorg_redirect_jump (jump_insn, NULL_RTX))
3902 {
3903 reorg_redirect_jump (jump_insn, real_return_label);
3904 continue;
3905 }
3906
3907 /* See if this RETURN can accept the insns currently in its delay slot.
3908 It can if it has at least as many slots and the contents of each
3909 slot are valid. */
3910
3911 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3912 slots = num_delay_slots (jump_insn);
3913 if (slots >= XVECLEN (pat, 0) - 1)
3914 {
3915 for (i = 1; i < XVECLEN (pat, 0); i++)
3916 if (! (
3917 #ifdef ANNUL_IFFALSE_SLOTS
3918 (INSN_ANNULLED_BRANCH_P (jump_insn)
3919 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3920 ? eligible_for_annul_false (jump_insn, i - 1,
3921 XVECEXP (pat, 0, i), flags) :
3922 #endif
3923 #ifdef ANNUL_IFTRUE_SLOTS
3924 (INSN_ANNULLED_BRANCH_P (jump_insn)
3925 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3926 ? eligible_for_annul_true (jump_insn, i - 1,
3927 XVECEXP (pat, 0, i), flags) :
3928 #endif
3929 eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
3930 break;
3931 }
3932 else
3933 i = 0;
3934
3935 if (i == XVECLEN (pat, 0))
3936 continue;
3937
3938 /* We have to do something with this insn. If it is an unconditional
3939 RETURN, delete the SEQUENCE and output the individual insns,
3940 followed by the RETURN. Then set things up so we try to find
3941 insns for its delay slots, if it needs some. */
3942 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
3943 {
3944 rtx prev = PREV_INSN (insn);
3945
3946 delete_insn (insn);
3947 for (i = 1; i < XVECLEN (pat, 0); i++)
3948 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3949
3950 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3951 emit_barrier_after (insn);
3952
3953 if (slots)
3954 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3955 }
3956 else
3957 /* It is probably more efficient to keep this with its current
3958 delay slot as a branch to a RETURN. */
3959 reorg_redirect_jump (jump_insn, real_return_label);
3960 }
3961
3962 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3963 new delay slots we have created. */
3964 if (--LABEL_NUSES (real_return_label) == 0)
3965 delete_insn (real_return_label);
3966
3967 fill_simple_delay_slots (first, 1);
3968 fill_simple_delay_slots (first, 0);
3969 }
3970 #endif
3971 \f
3972 /* Try to find insns to place in delay slots. */
3973
3974 void
3975 dbr_schedule (first, file)
3976 rtx first;
3977 FILE *file;
3978 {
3979 rtx insn, next, epilogue_insn = 0;
3980 int i;
3981 #if 0
3982 int old_flag_no_peephole = flag_no_peephole;
3983
3984 /* Execute `final' once in prescan mode to delete any insns that won't be
3985 used. Don't let final try to do any peephole optimization--it will
3986 ruin dataflow information for this pass. */
3987
3988 flag_no_peephole = 1;
3989 final (first, 0, NO_DEBUG, 1, 1);
3990 flag_no_peephole = old_flag_no_peephole;
3991 #endif
3992
3993 /* If the current function has no insns other than the prologue and
3994 epilogue, then do not try to fill any delay slots. */
3995 if (n_basic_blocks == 0)
3996 return;
3997
3998 /* Find the highest INSN_UID and allocate and initialize our map from
3999 INSN_UID's to position in code. */
4000 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
4001 {
4002 if (INSN_UID (insn) > max_uid)
4003 max_uid = INSN_UID (insn);
4004 if (GET_CODE (insn) == NOTE
4005 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4006 epilogue_insn = insn;
4007 }
4008
4009 uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
4010 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
4011 uid_to_ruid[INSN_UID (insn)] = i;
4012
4013 /* Initialize the list of insns that need filling. */
4014 if (unfilled_firstobj == 0)
4015 {
4016 gcc_obstack_init (&unfilled_slots_obstack);
4017 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
4018 }
4019
4020 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
4021 {
4022 rtx target;
4023
4024 INSN_ANNULLED_BRANCH_P (insn) = 0;
4025 INSN_FROM_TARGET_P (insn) = 0;
4026
4027 /* Skip vector tables. We can't get attributes for them. */
4028 if (GET_CODE (insn) == JUMP_INSN
4029 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4030 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4031 continue;
4032
4033 if (num_delay_slots (insn) > 0)
4034 obstack_ptr_grow (&unfilled_slots_obstack, insn);
4035
4036 /* Ensure all jumps go to the last of a set of consecutive labels. */
4037 if (GET_CODE (insn) == JUMP_INSN && condjump_p (insn)
4038 && JUMP_LABEL (insn) != 0
4039 && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
4040 != JUMP_LABEL (insn)))
4041 redirect_jump (insn, target);
4042 }
4043
4044 /* Indicate what resources are required to be valid at the end of the current
4045 function. The condition code never is and memory always is. If the
4046 frame pointer is needed, it is and so is the stack pointer unless
4047 EXIT_IGNORE_STACK is non-zero. If the frame pointer is not needed, the
4048 stack pointer is. Registers used to return the function value are
4049 needed. Registers holding global variables are needed. */
4050
4051 end_of_function_needs.cc = 0;
4052 end_of_function_needs.memory = 1;
4053 CLEAR_HARD_REG_SET (end_of_function_needs.regs);
4054
4055 if (frame_pointer_needed)
4056 {
4057 SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
4058 #ifdef EXIT_IGNORE_STACK
4059 if (! EXIT_IGNORE_STACK)
4060 #endif
4061 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
4062 }
4063 else
4064 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
4065
4066 if (current_function_return_rtx != 0
4067 && GET_CODE (current_function_return_rtx) == REG)
4068 mark_referenced_resources (current_function_return_rtx,
4069 &end_of_function_needs, 1);
4070
4071 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4072 if (global_regs[i])
4073 SET_HARD_REG_BIT (end_of_function_needs.regs, i);
4074
4075 /* The registers required to be live at the end of the function are
4076 represented in the flow information as being dead just prior to
4077 reaching the end of the function. For example, the return of a value
4078 might be represented by a USE of the return register immediately
4079 followed by an unconditional jump to the return label where the
4080 return label is the end of the RTL chain. The end of the RTL chain
4081 is then taken to mean that the return register is live.
4082
4083 This sequence is no longer maintained when epilogue instructions are
4084 added to the RTL chain. To reconstruct the original meaning, the
4085 start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
4086 point where these registers become live (start_of_epilogue_needs).
4087 If epilogue instructions are present, the registers set by those
4088 instructions won't have been processed by flow. Thus, those
4089 registers are additionally required at the end of the RTL chain
4090 (end_of_function_needs). */
4091
4092 start_of_epilogue_needs = end_of_function_needs;
4093
4094 while (epilogue_insn = next_nonnote_insn (epilogue_insn))
4095 mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);
4096
4097 /* Show we haven't computed an end-of-function label yet. */
4098 end_of_function_label = 0;
4099
4100 /* Allocate and initialize the tables used by mark_target_live_regs. */
4101 target_hash_table
4102 = (struct target_info **) alloca ((TARGET_HASH_PRIME
4103 * sizeof (struct target_info *)));
4104 bzero (target_hash_table, TARGET_HASH_PRIME * sizeof (struct target_info *));
4105
4106 bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
4107 bzero (bb_ticks, n_basic_blocks * sizeof (int));
4108
4109 /* Initialize the statistics for this function. */
4110 bzero (num_insns_needing_delays, sizeof num_insns_needing_delays);
4111 bzero (num_filled_delays, sizeof num_filled_delays);
4112
4113 /* Now do the delay slot filling. Try everything twice in case earlier
4114 changes make more slots fillable. */
4115
4116 for (reorg_pass_number = 0;
4117 reorg_pass_number < MAX_REORG_PASSES;
4118 reorg_pass_number++)
4119 {
4120 fill_simple_delay_slots (first, 1);
4121 fill_simple_delay_slots (first, 0);
4122 fill_eager_delay_slots (first);
4123 relax_delay_slots (first);
4124 }
4125
4126 /* Delete any USE insns made by update_block; subsequent passes don't need
4127 them or know how to deal with them. */
4128 for (insn = first; insn; insn = next)
4129 {
4130 next = NEXT_INSN (insn);
4131
4132 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
4133 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
4134 next = delete_insn (insn);
4135 }
4136
4137 /* If we made an end of function label, indicate that it is now
4138 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
4139 If it is now unused, delete it. */
4140 if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
4141 delete_insn (end_of_function_label);
4142
4143 #ifdef HAVE_return
4144 if (HAVE_return && end_of_function_label != 0)
4145 make_return_insns (first);
4146 #endif
4147
4148 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
4149
4150 /* It is not clear why the line below is needed, but it does seem to be. */
4151 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
4152
4153 /* Reposition the prologue and epilogue notes in case we moved the
4154 prologue/epilogue insns. */
4155 reposition_prologue_and_epilogue_notes (first);
4156
4157 if (file)
4158 {
4159 register int i, j, need_comma;
4160
4161 for (reorg_pass_number = 0;
4162 reorg_pass_number < MAX_REORG_PASSES;
4163 reorg_pass_number++)
4164 {
4165 fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
4166 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
4167 {
4168 need_comma = 0;
4169 fprintf (file, ";; Reorg function #%d\n", i);
4170
4171 fprintf (file, ";; %d insns needing delay slots\n;; ",
4172 num_insns_needing_delays[i][reorg_pass_number]);
4173
4174 for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
4175 if (num_filled_delays[i][j][reorg_pass_number])
4176 {
4177 if (need_comma)
4178 fprintf (file, ", ");
4179 need_comma = 1;
4180 fprintf (file, "%d got %d delays",
4181 num_filled_delays[i][j][reorg_pass_number], j);
4182 }
4183 fprintf (file, "\n");
4184 }
4185 }
4186 }
4187 }
4188 #endif /* DELAY_SLOTS */