1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 /* This is the jump-optimization pass of the compiler.
22 It is run two or three times: once before cse, sometimes once after cse,
23 and once after reload (before final).
25 jump_optimize deletes unreachable code and labels that are not used.
26 It also deletes jumps that jump to the following insn,
27 and simplifies jumps around unconditional jumps and jumps
28 to unconditional jumps.
30 Each CODE_LABEL has a count of the times it is used
31 stored in the LABEL_NUSES internal field, and each JUMP_INSN
32 has one label that it refers to stored in the
33 JUMP_LABEL internal field. With this we can detect labels that
34 become unused because of the deletion of all the jumps that
35 formerly used them. The JUMP_LABEL info is sometimes looked
38 Optionally, cross-jumping can be done. Currently it is done
39 only the last time (when after reload and before final).
40 In fact, the code for cross-jumping now assumes that register
41 allocation has been done, since it uses `rtx_renumbered_equal_p'.
43 Jump optimization is done after cse when cse's constant-propagation
44 causes jumps to become unconditional or to be deleted.
46 Unreachable loops are not detected here, because the labels
47 have references and the insns appear reachable from the labels.
48 find_basic_blocks in flow.c finds and deletes such loops.
50 The subroutines delete_insn, redirect_jump, and invert_jump are used
51 from other passes as well. */
56 #include "hard-reg-set.h"
59 #include "insn-config.h"
60 #include "insn-flags.h"
63 /* ??? Eventually must record somehow the labels used by jumps
64 from nested functions. */
65 /* Pre-record the next or previous real insn for each label?
66 No, this pass is very fast anyway. */
67 /* Condense consecutive labels?
68 This would make life analysis faster, maybe. */
69 /* Optimize jump y; x: ... y: jumpif... x?
70 Don't know if it is worth bothering with. */
71 /* Optimize two cases of conditional jump to conditional jump?
72 This can never delete any instruction or make anything dead,
73 or even change what is live at any point.
74 So perhaps let combiner do it. */
76 /* Vector indexed by uid.
77 For each CODE_LABEL, index by its uid to get first unconditional jump
78 that jumps to the label.
79 For each JUMP_INSN, index by its uid to get the next unconditional jump
80 that jumps to the same label.
81 Element 0 is the start of a chain of all return insns.
82 (It is safe to use element 0 because insn uid 0 is not used.)  */
/* (See comment above.)  Allocated with alloca in jump_optimize and sized
   by max_jump_chain; element 0 chains all RETURN insns.  */
static rtx *jump_chain;
86 /* List of labels referred to from initializers.
87 These can never be deleted. */
90 /* Maximum index in jump_chain. */
/* Number of elements allocated for jump_chain: set in jump_optimize to
   max_uid * 14 / 10 (extra room for labels and duplicated exit tests).  */
static int max_jump_chain;
94 /* Set nonzero by jump_optimize if control can fall through
95 to the end of the function. */
98 /* Indicates whether death notes are significant in cross jump analysis.
99 Normally they are not significant, because if A and B jump to C,
100 and R dies in A, it must die in B. But this might not be true after
101 stack register conversion, and we must compare death notes in that case.  */
/* Nonzero exactly when jump_optimize was called with cross_jump == 2
   (set at the top of jump_optimize).  */
static int cross_jump_death_matters = 0;
/* Forward declarations of subroutines local to this file
   (old-style, unprototyped declarations).  */
static int duplicate_loop_exit_test ();
void redirect_tablejump ();
static int delete_labelref_insn ();
static void mark_jump_label ();
static void delete_from_jump_chain ();
static int tension_vector_labels ();
static void find_cross_jump ();
static void do_cross_jump ();
static int jump_back_p ();

/* Defined outside this file; returns an rtx (presumably a jump pattern --
   generated insn-emit routine).  */
extern rtx gen_jump ();
119 /* Delete no-op jumps and optimize jumps to jumps
120 and jumps around jumps.
121 Delete unused labels and unreachable code.
123 If CROSS_JUMP is 1, detect matching code
124 before a jump and its destination and unify them.
125 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
127 If NOOP_MOVES is nonzero, delete no-op move insns.
129 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
130 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
132 If `optimize' is zero, don't change any code,
133 just determine whether control drops off the end of the function.
134 This case occurs when we have -W and not -O.
135 It works because `delete_insn' checks the value of `optimize'
136 and refrains from actually deleting when that is 0. */
139 jump_optimize (f
, cross_jump
, noop_moves
, after_regscan
)
151 cross_jump_death_matters
= (cross_jump
== 2);
153 /* Initialize LABEL_NUSES and JUMP_LABEL fields. */
155 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
157 if (GET_CODE (insn
) == CODE_LABEL
)
158 LABEL_NUSES (insn
) = (LABEL_PRESERVE_P (insn
) != 0);
159 else if (GET_CODE (insn
) == JUMP_INSN
)
160 JUMP_LABEL (insn
) = 0;
161 if (INSN_UID (insn
) > max_uid
)
162 max_uid
= INSN_UID (insn
);
167 /* Delete insns following barriers, up to next label. */
169 for (insn
= f
; insn
;)
171 if (GET_CODE (insn
) == BARRIER
)
173 insn
= NEXT_INSN (insn
);
174 while (insn
!= 0 && GET_CODE (insn
) != CODE_LABEL
)
176 if (GET_CODE (insn
) == NOTE
177 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
)
178 insn
= NEXT_INSN (insn
);
180 insn
= delete_insn (insn
);
182 /* INSN is now the code_label. */
185 insn
= NEXT_INSN (insn
);
188 /* Leave some extra room for labels and duplicate exit test insns we make.  */
190 max_jump_chain
= max_uid
* 14 / 10;
191 jump_chain
= (rtx
*) alloca (max_jump_chain
* sizeof (rtx
));
192 bzero (jump_chain
, max_jump_chain
* sizeof (rtx
));
194 /* Mark the label each jump jumps to.
195 Combine consecutive labels, and count uses of labels.
197 For each label, make a chain (using `jump_chain')
198 of all the *unconditional* jumps that jump to it;
199 also make a chain of all returns. */
201 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
202 if ((GET_CODE (insn
) == JUMP_INSN
|| GET_CODE (insn
) == INSN
203 || GET_CODE (insn
) == CALL_INSN
)
204 && ! INSN_DELETED_P (insn
))
206 mark_jump_label (PATTERN (insn
), insn
, cross_jump
);
207 if (GET_CODE (insn
) == JUMP_INSN
)
209 if (JUMP_LABEL (insn
) != 0 && simplejump_p (insn
))
211 jump_chain
[INSN_UID (insn
)]
212 = jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
213 jump_chain
[INSN_UID (JUMP_LABEL (insn
))] = insn
;
215 if (GET_CODE (PATTERN (insn
)) == RETURN
)
217 jump_chain
[INSN_UID (insn
)] = jump_chain
[0];
218 jump_chain
[0] = insn
;
223 /* Keep track of labels used from static data;
224 they cannot ever be deleted. */
226 for (insn
= forced_labels
; insn
; insn
= XEXP (insn
, 1))
227 LABEL_NUSES (XEXP (insn
, 0))++;
229 /* Delete all labels already not referenced.
230 Also find the last insn. */
233 for (insn
= f
; insn
; )
235 if (GET_CODE (insn
) == CODE_LABEL
&& LABEL_NUSES (insn
) == 0)
236 insn
= delete_insn (insn
);
240 insn
= NEXT_INSN (insn
);
246 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
247 If so record that this function can drop off the end. */
253 /* One label can follow the end-note: the return label. */
254 && ((GET_CODE (insn
) == CODE_LABEL
&& n_labels
-- > 0)
255 /* Ordinary insns can follow it if returning a structure. */
256 || GET_CODE (insn
) == INSN
257 /* If machine uses explicit RETURN insns, no epilogue,
258 then one of them follows the note. */
259 || (GET_CODE (insn
) == JUMP_INSN
260 && GET_CODE (PATTERN (insn
)) == RETURN
)
261 /* Other kinds of notes can follow also. */
262 || (GET_CODE (insn
) == NOTE
263 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
)))
264 insn
= PREV_INSN (insn
);
267 /* Report if control can fall through at the end of the function. */
268 if (insn
&& GET_CODE (insn
) == NOTE
269 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_END
270 && ! INSN_DELETED_P (insn
))
273 /* Zero the "deleted" flag of all the "deleted" insns. */
274 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
275 INSN_DELETED_P (insn
) = 0;
282 /* If we fall through to the epilogue, see if we can insert a RETURN insn
283 in front of it. If the machine allows it at this point (we might be
284 after reload for a leaf routine), it will improve optimization for it to be there.  */
286 insn
= get_last_insn ();
287 while (insn
&& GET_CODE (insn
) == NOTE
)
288 insn
= PREV_INSN (insn
);
290 if (insn
&& GET_CODE (insn
) != BARRIER
)
292 emit_jump_insn (gen_return ());
299 for (insn
= f
; insn
; )
301 register rtx next
= NEXT_INSN (insn
);
303 if (GET_CODE (insn
) == INSN
)
305 register rtx body
= PATTERN (insn
);
307 /* Combine stack_adjusts with following push_insns. */
309 if (GET_CODE (body
) == SET
310 && SET_DEST (body
) == stack_pointer_rtx
311 && GET_CODE (SET_SRC (body
)) == PLUS
312 && XEXP (SET_SRC (body
), 0) == stack_pointer_rtx
313 && GET_CODE (XEXP (SET_SRC (body
), 1)) == CONST_INT
314 && INTVAL (XEXP (SET_SRC (body
), 1)) > 0)
317 rtx stack_adjust_insn
= insn
;
318 int stack_adjust_amount
= INTVAL (XEXP (SET_SRC (body
), 1));
319 int total_pushed
= 0;
322 /* Find all successive push insns. */
324 /* Don't convert more than three pushes;
325 that starts adding too many displaced addresses
326 and the whole thing starts becoming a losing proposition.  */
331 p
= next_nonnote_insn (p
);
332 if (p
== 0 || GET_CODE (p
) != INSN
)
335 if (GET_CODE (pbody
) != SET
)
337 dest
= SET_DEST (pbody
);
338 /* Allow a no-op move between the adjust and the push. */
339 if (GET_CODE (dest
) == REG
340 && GET_CODE (SET_SRC (pbody
)) == REG
341 && REGNO (dest
) == REGNO (SET_SRC (pbody
)))
343 if (! (GET_CODE (dest
) == MEM
344 && GET_CODE (XEXP (dest
, 0)) == POST_INC
345 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
))
348 if (total_pushed
+ GET_MODE_SIZE (SET_DEST (pbody
))
349 > stack_adjust_amount
)
351 total_pushed
+= GET_MODE_SIZE (SET_DEST (pbody
));
354 /* Discard the amount pushed from the stack adjust;
355 maybe eliminate it entirely. */
356 if (total_pushed
>= stack_adjust_amount
)
358 delete_insn (stack_adjust_insn
);
359 total_pushed
= stack_adjust_amount
;
362 XEXP (SET_SRC (PATTERN (stack_adjust_insn
)), 1)
363 = GEN_INT (stack_adjust_amount
- total_pushed
);
365 /* Change the appropriate push insns to ordinary stores. */
367 while (total_pushed
> 0)
370 p
= next_nonnote_insn (p
);
371 if (GET_CODE (p
) != INSN
)
374 if (GET_CODE (pbody
) == SET
)
376 dest
= SET_DEST (pbody
);
377 if (! (GET_CODE (dest
) == MEM
378 && GET_CODE (XEXP (dest
, 0)) == POST_INC
379 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
))
381 total_pushed
-= GET_MODE_SIZE (SET_DEST (pbody
));
382 /* If this push doesn't fully fit in the space
383 of the stack adjust that we deleted,
384 make another stack adjust here for what we
385 didn't use up. There should be peepholes
386 to recognize the resulting sequence of insns. */
387 if (total_pushed
< 0)
389 emit_insn_before (gen_add2_insn (stack_pointer_rtx
,
390 GEN_INT (- total_pushed
)),
395 = plus_constant (stack_pointer_rtx
, total_pushed
);
400 /* Detect and delete no-op move instructions
401 resulting from not allocating a parameter in a register. */
403 if (GET_CODE (body
) == SET
404 && (SET_DEST (body
) == SET_SRC (body
)
405 || (GET_CODE (SET_DEST (body
)) == MEM
406 && GET_CODE (SET_SRC (body
)) == MEM
407 && rtx_equal_p (SET_SRC (body
), SET_DEST (body
))))
408 && ! (GET_CODE (SET_DEST (body
)) == MEM
409 && MEM_VOLATILE_P (SET_DEST (body
)))
410 && ! (GET_CODE (SET_SRC (body
)) == MEM
411 && MEM_VOLATILE_P (SET_SRC (body
))))
414 /* Detect and ignore no-op move instructions
415 resulting from smart or fortuitous register allocation. */
417 else if (GET_CODE (body
) == SET
)
419 int sreg
= true_regnum (SET_SRC (body
));
420 int dreg
= true_regnum (SET_DEST (body
));
422 if (sreg
== dreg
&& sreg
>= 0)
424 else if (sreg
>= 0 && dreg
>= 0)
427 rtx tem
= find_equiv_reg (NULL_RTX
, insn
, 0,
428 sreg
, NULL_PTR
, dreg
,
429 GET_MODE (SET_SRC (body
)));
431 #ifdef PRESERVE_DEATH_INFO_REGNO_P
432 /* Deleting insn could lose a death-note for SREG or DREG
433 so don't do it if final needs accurate death-notes. */
434 if (! PRESERVE_DEATH_INFO_REGNO_P (sreg
)
435 && ! PRESERVE_DEATH_INFO_REGNO_P (dreg
))
438 /* DREG may have been the target of a REG_DEAD note in
439 the insn which makes INSN redundant. If so, reorg
440 would still think it is dead. So search for such a
441 note and delete it if we find it. */
442 for (trial
= prev_nonnote_insn (insn
);
443 trial
&& GET_CODE (trial
) != CODE_LABEL
;
444 trial
= prev_nonnote_insn (trial
))
445 if (find_regno_note (trial
, REG_DEAD
, dreg
))
447 remove_death (dreg
, trial
);
452 && GET_MODE (tem
) == GET_MODE (SET_DEST (body
)))
456 else if (dreg
>= 0 && CONSTANT_P (SET_SRC (body
))
457 && find_equiv_reg (SET_SRC (body
), insn
, 0, dreg
,
459 GET_MODE (SET_DEST (body
))))
461 /* This handles the case where we have two consecutive
462 assignments of the same constant to pseudos that didn't
463 get a hard reg. Each SET from the constant will be
464 converted into a SET of the spill register and an
465 output reload will be made following it. This produces
466 two loads of the same constant into the same spill register.  */
471 /* Look back for a death note for the first reg.
472 If there is one, it is no longer accurate. */
473 while (in_insn
&& GET_CODE (in_insn
) != CODE_LABEL
)
475 if ((GET_CODE (in_insn
) == INSN
476 || GET_CODE (in_insn
) == JUMP_INSN
)
477 && find_regno_note (in_insn
, REG_DEAD
, dreg
))
479 remove_death (dreg
, in_insn
);
482 in_insn
= PREV_INSN (in_insn
);
485 /* Delete the second load of the value. */
489 else if (GET_CODE (body
) == PARALLEL
)
491 /* If each part is a set between two identical registers or
492 a USE or CLOBBER, delete the insn. */
496 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
498 tem
= XVECEXP (body
, 0, i
);
499 if (GET_CODE (tem
) == USE
|| GET_CODE (tem
) == CLOBBER
)
502 if (GET_CODE (tem
) != SET
503 || (sreg
= true_regnum (SET_SRC (tem
))) < 0
504 || (dreg
= true_regnum (SET_DEST (tem
))) < 0
512 #if !BYTES_BIG_ENDIAN /* Not worth the hair to detect this
513 in the big-endian case. */
514 /* Also delete insns to store bit fields if they are no-ops. */
515 else if (GET_CODE (body
) == SET
516 && GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
517 && XEXP (SET_DEST (body
), 2) == const0_rtx
518 && XEXP (SET_DEST (body
), 0) == SET_SRC (body
)
519 && ! (GET_CODE (SET_SRC (body
)) == MEM
520 && MEM_VOLATILE_P (SET_SRC (body
))))
522 #endif /* not BYTES_BIG_ENDIAN */
527 /* Now iterate optimizing jumps until nothing changes over one pass. */
534 for (insn
= f
; insn
; insn
= next
)
537 rtx temp
, temp1
, temp2
, temp3
, temp4
, temp5
;
539 int this_is_simplejump
, this_is_condjump
, reversep
;
541 /* If NOT the first iteration, if this is the last jump pass
542 (just before final), do the special peephole optimizations.
543 Avoiding the first iteration gives ordinary jump opts
544 a chance to work before peephole opts. */
546 if (reload_completed
&& !first
&& !flag_no_peephole
)
547 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
)
551 /* That could have deleted some insns after INSN, so check now
552 what the following insn is. */
554 next
= NEXT_INSN (insn
);
556 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
557 jump. Try to optimize by duplicating the loop exit test if so.
558 This is only safe immediately after regscan, because it uses
559 the values of regno_first_uid and regno_last_uid. */
560 if (after_regscan
&& GET_CODE (insn
) == NOTE
561 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
562 && (temp1
= next_nonnote_insn (insn
)) != 0
563 && simplejump_p (temp1
))
565 temp
= PREV_INSN (insn
);
566 if (duplicate_loop_exit_test (insn
))
569 next
= NEXT_INSN (temp
);
574 if (GET_CODE (insn
) != JUMP_INSN
)
577 this_is_simplejump
= simplejump_p (insn
);
578 this_is_condjump
= condjump_p (insn
);
580 /* Tension the labels in dispatch tables. */
582 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
)
583 changed
|= tension_vector_labels (PATTERN (insn
), 0);
584 if (GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
585 changed
|= tension_vector_labels (PATTERN (insn
), 1);
587 /* If a dispatch table always goes to the same place,
588 get rid of it and replace the insn that uses it. */
590 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
591 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
594 rtx pat
= PATTERN (insn
);
595 int diff_vec_p
= GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
;
596 int len
= XVECLEN (pat
, diff_vec_p
);
597 rtx dispatch
= prev_real_insn (insn
);
599 for (i
= 0; i
< len
; i
++)
600 if (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)
601 != XEXP (XVECEXP (pat
, diff_vec_p
, 0), 0))
604 && GET_CODE (dispatch
) == JUMP_INSN
605 && JUMP_LABEL (dispatch
) != 0
606 /* Don't mess with a casesi insn. */
607 && !(GET_CODE (PATTERN (dispatch
)) == SET
608 && (GET_CODE (SET_SRC (PATTERN (dispatch
)))
610 && next_real_insn (JUMP_LABEL (dispatch
)) == insn
)
612 redirect_tablejump (dispatch
,
613 XEXP (XVECEXP (pat
, diff_vec_p
, 0), 0));
618 reallabelprev
= prev_active_insn (JUMP_LABEL (insn
));
620 /* If a jump references the end of the function, try to turn
621 it into a RETURN insn, possibly a conditional one. */
622 if (JUMP_LABEL (insn
)
623 && next_active_insn (JUMP_LABEL (insn
)) == 0)
624 changed
|= redirect_jump (insn
, NULL_RTX
);
626 /* Detect jump to following insn. */
627 if (reallabelprev
== insn
&& condjump_p (insn
))
634 /* If we have an unconditional jump preceded by a USE, try to put
635 the USE before the target and jump there. This simplifies many
636 of the optimizations below since we don't have to worry about
637 dealing with these USE insns. We only do this if the label
638 being branch to already has the identical USE or if code
639 never falls through to that label. */
641 if (this_is_simplejump
642 && (temp
= prev_nonnote_insn (insn
)) != 0
643 && GET_CODE (temp
) == INSN
&& GET_CODE (PATTERN (temp
)) == USE
644 && (temp1
= prev_nonnote_insn (JUMP_LABEL (insn
))) != 0
645 && (GET_CODE (temp1
) == BARRIER
646 || (GET_CODE (temp1
) == INSN
647 && rtx_equal_p (PATTERN (temp
), PATTERN (temp1
)))))
649 if (GET_CODE (temp1
) == BARRIER
)
651 reorder_insns (temp
, temp
, temp1
);
652 temp1
= NEXT_INSN (temp1
);
657 redirect_jump (insn
, get_label_before (temp1
));
658 reallabelprev
= prev_real_insn (temp1
);
662 /* Simplify if (...) x = a; else x = b; by converting it
663 to x = b; if (...) x = a;
664 if B is sufficiently simple, the test doesn't involve X,
665 and nothing in the test modifies B or X.
667 If we have small register classes, we also can't do this if X
670 If the "x = b;" insn has any REG_NOTES, we don't do this because
671 of the possibility that we are running after CSE and there is a
672 REG_EQUAL note that is only valid if the branch has already been
673 taken. If we move the insn with the REG_EQUAL note, we may
674 fold the comparison to always be false in a later CSE pass.
675 (We could also delete the REG_NOTES when moving the insn, but it
676 seems simpler to not move it.) An exception is that we can move
677 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
678 value is the same as "b".
680 INSN is the branch over the `else' part.
684 TEMP to the jump insn preceding "x = a;"
686 TEMP2 to the insn that sets "x = b;"
687 TEMP3 to the insn that sets "x = a;" */
689 if (this_is_simplejump
690 && (temp3
= prev_active_insn (insn
)) != 0
691 && GET_CODE (temp3
) == INSN
692 && GET_CODE (PATTERN (temp3
)) == SET
693 && GET_CODE (temp1
= SET_DEST (PATTERN (temp3
))) == REG
694 #ifdef SMALL_REGISTER_CLASSES
695 && REGNO (temp1
) >= FIRST_PSEUDO_REGISTER
697 && (temp2
= next_active_insn (insn
)) != 0
698 && GET_CODE (temp2
) == INSN
699 && GET_CODE (PATTERN (temp2
)) == SET
700 && rtx_equal_p (SET_DEST (PATTERN (temp2
)), temp1
)
701 && (GET_CODE (SET_SRC (PATTERN (temp2
))) == REG
702 || CONSTANT_P (SET_SRC (PATTERN (temp2
))))
703 && (REG_NOTES (temp2
) == 0
704 || ((REG_NOTE_KIND (REG_NOTES (temp2
)) == REG_EQUAL
705 || REG_NOTE_KIND (REG_NOTES (temp2
)) == REG_EQUIV
)
706 && XEXP (REG_NOTES (temp2
), 1) == 0
707 && rtx_equal_p (XEXP (REG_NOTES (temp2
), 0),
708 SET_SRC (PATTERN (temp2
)))))
709 && (temp
= prev_active_insn (temp3
)) != 0
710 && condjump_p (temp
) && ! simplejump_p (temp
)
711 /* TEMP must skip over the "x = a;" insn */
712 && prev_real_insn (JUMP_LABEL (temp
)) == insn
713 && no_labels_between_p (insn
, JUMP_LABEL (temp
))
714 /* There must be no other entries to the "x = b;" insn. */
715 && no_labels_between_p (JUMP_LABEL (temp
), temp2
)
716 /* INSN must either branch to the insn after TEMP2 or the insn
717 after TEMP2 must branch to the same place as INSN. */
718 && (reallabelprev
== temp2
719 || ((temp4
= next_active_insn (temp2
)) != 0
720 && simplejump_p (temp4
)
721 && JUMP_LABEL (temp4
) == JUMP_LABEL (insn
))))
723 /* The test expression, X, may be a complicated test with
724 multiple branches. See if we can find all the uses of
725 the label that TEMP branches to without hitting a CALL_INSN
726 or a jump to somewhere else. */
727 rtx target
= JUMP_LABEL (temp
);
728 int nuses
= LABEL_NUSES (target
);
731 /* Set P to the first jump insn that goes around "x = a;". */
732 for (p
= temp
; nuses
&& p
; p
= prev_nonnote_insn (p
))
734 if (GET_CODE (p
) == JUMP_INSN
)
736 if (condjump_p (p
) && ! simplejump_p (p
)
737 && JUMP_LABEL (p
) == target
)
746 else if (GET_CODE (p
) == CALL_INSN
)
751 /* We cannot insert anything between a set of cc and its use
752 so if P uses cc0, we must back up to the previous insn. */
753 q
= prev_nonnote_insn (p
);
754 if (q
&& GET_RTX_CLASS (GET_CODE (q
)) == 'i'
755 && sets_cc0_p (PATTERN (q
)))
762 /* If we found all the uses and there was no data conflict, we
763 can move the assignment unless we can branch into the middle of the block.  */
766 && no_labels_between_p (p
, insn
)
767 && ! reg_referenced_between_p (temp1
, p
, NEXT_INSN (temp3
))
768 && ! reg_set_between_p (temp1
, p
, temp3
)
769 && (GET_CODE (SET_SRC (PATTERN (temp2
))) == CONST_INT
770 || ! reg_set_between_p (SET_SRC (PATTERN (temp2
)),
773 reorder_insns_with_line_notes (temp2
, temp2
, p
);
775 /* Set NEXT to an insn that we know won't go away. */
776 next
= next_active_insn (insn
);
778 /* Delete the jump around the set. Note that we must do
779 this before we redirect the test jumps so that it won't
780 delete the code immediately following the assignment
781 we moved (which might be a jump). */
785 /* We either have two consecutive labels or a jump to
786 a jump, so adjust all the JUMP_INSNs to branch to where INSN branches to.  */
788 for (p
= NEXT_INSN (p
); p
!= next
; p
= NEXT_INSN (p
))
789 if (GET_CODE (p
) == JUMP_INSN
)
790 redirect_jump (p
, target
);
797 /* We deal with four cases:
799 1) x = a; if (...) x = b; and either A or B is zero,
800 2) if (...) x = 0; and jumps are expensive,
801 3) x = a; if (...) x = b; and A and B are constants where all the
802 set bits in A are also set in B and jumps are expensive, and
803 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
806 In each of these try to use a store-flag insn to avoid the jump.
807 (If the jump would be faster, the machine should not have
808 defined the scc insns!). These cases are often made by the
809 previous optimization.
811 INSN here is the jump around the store. We set:
813 TEMP to the "x = b;" insn.
815 TEMP2 to B (const0_rtx in the second case).
816 TEMP3 to A (X in the second case).
817 TEMP4 to the condition being tested.
818 TEMP5 to the earliest insn used to find the condition. */
820 if (/* We can't do this after reload has completed. */
822 && this_is_condjump
&& ! this_is_simplejump
823 /* Set TEMP to the "x = b;" insn. */
824 && (temp
= next_nonnote_insn (insn
)) != 0
825 && GET_CODE (temp
) == INSN
826 && GET_CODE (PATTERN (temp
)) == SET
827 && GET_CODE (temp1
= SET_DEST (PATTERN (temp
))) == REG
828 #ifdef SMALL_REGISTER_CLASSES
829 && REGNO (temp1
) >= FIRST_PSEUDO_REGISTER
831 && GET_MODE_CLASS (GET_MODE (temp1
)) == MODE_INT
832 && (GET_CODE (temp2
= SET_SRC (PATTERN (temp
))) == REG
833 || GET_CODE (temp2
) == CONST_INT
)
834 /* Allow either form, but prefer the former if both apply. */
835 && (((temp3
= reg_set_last (temp1
, insn
)) != 0
836 && ((GET_CODE (temp3
) == REG
837 #ifdef SMALL_REGISTER_CLASSES
838 && REGNO (temp3
) >= FIRST_PSEUDO_REGISTER
841 || GET_CODE (temp3
) == CONST_INT
))
842 /* Make the latter case look like x = x; if (...) x = 0; */
843 || ((temp3
= temp1
, BRANCH_COST
>= 2)
844 && temp2
== const0_rtx
))
845 /* INSN must either branch to the insn after TEMP or the insn
846 after TEMP must branch to the same place as INSN. */
847 && (reallabelprev
== temp
848 || ((temp4
= next_active_insn (temp
)) != 0
849 && simplejump_p (temp4
)
850 && JUMP_LABEL (temp4
) == JUMP_LABEL (insn
)))
851 && (temp4
= get_condition (insn
, &temp5
)) != 0
853 /* If B is zero, OK; if A is zero, can only do (1) if we
854 can reverse the condition. See if (3) applies possibly
855 by reversing the condition. Prefer reversing to (4) when
856 branches are very expensive. */
857 && ((reversep
= 0, temp2
== const0_rtx
)
858 || (temp3
== const0_rtx
859 && (reversep
= can_reverse_comparison_p (temp4
, insn
)))
861 && GET_CODE (temp2
) == CONST_INT
862 && GET_CODE (temp3
) == CONST_INT
863 && ((INTVAL (temp2
) & INTVAL (temp3
)) == INTVAL (temp2
)
864 || ((INTVAL (temp2
) & INTVAL (temp3
)) == INTVAL (temp3
)
865 && (reversep
= can_reverse_comparison_p (temp4
,
867 || BRANCH_COST
>= 3))
869 enum rtx_code code
= GET_CODE (temp4
);
870 rtx uval
, cval
, var
= temp1
;
874 /* If necessary, reverse the condition. */
876 code
= reverse_condition (code
), uval
= temp2
, cval
= temp3
;
878 uval
= temp3
, cval
= temp2
;
880 /* See if we can do this with a store-flag insn. */
883 /* If CVAL is non-zero, normalize to -1. Otherwise,
884 if UVAL is the constant 1, it is best to just compute
885 the result directly. If UVAL is constant and STORE_FLAG_VALUE
886 includes all of its bits, it is best to compute the flag
887 value unnormalized and `and' it with UVAL. Otherwise,
888 normalize to -1 and `and' with UVAL. */
889 normalizep
= (cval
!= const0_rtx
? -1
890 : (uval
== const1_rtx
? 1
891 : (GET_CODE (uval
) == CONST_INT
892 && (INTVAL (uval
) & ~STORE_FLAG_VALUE
) == 0)
895 /* We will be putting the store-flag insn immediately in
896 front of the comparison that was originally being done,
897 so we know all the variables in TEMP4 will be valid.
898 However, this might be in front of the assignment of
899 A to VAR. If it is, it would clobber the store-flag
902 Therefore, emit into a temporary which will be copied to
903 VAR immediately after TEMP. */
905 target
= emit_store_flag (gen_reg_rtx (GET_MODE (var
)), code
,
906 XEXP (temp4
, 0), XEXP (temp4
, 1),
908 (code
== LTU
|| code
== LEU
909 || code
== GEU
|| code
== GTU
),
915 /* Both CVAL and UVAL are non-zero. */
916 if (cval
!= const0_rtx
&& uval
!= const0_rtx
)
920 tem1
= expand_and (uval
, target
, NULL_RTX
);
921 if (GET_CODE (cval
) == CONST_INT
922 && GET_CODE (uval
) == CONST_INT
923 && (INTVAL (cval
) & INTVAL (uval
)) == INTVAL (cval
))
927 tem2
= expand_unop (GET_MODE (var
), one_cmpl_optab
,
928 target
, NULL_RTX
, 0);
929 tem2
= expand_and (cval
, tem2
, tem2
);
932 /* If we usually make new pseudos, do so here. This
933 turns out to help machines that have conditional move insns.  */
936 if (flag_expensive_optimizations
)
939 target
= expand_binop (GET_MODE (var
), ior_optab
,
943 else if (normalizep
!= 1)
944 target
= expand_and (uval
, target
,
945 (GET_CODE (target
) == REG
946 && ! preserve_subexpressions_p ()
947 ? target
: NULL_RTX
));
949 seq
= gen_sequence ();
951 emit_insn_before (seq
, temp5
);
952 emit_insn_after (gen_move_insn (var
, target
), insn
);
954 next
= NEXT_INSN (insn
);
956 delete_insn (prev_nonnote_insn (insn
));
966 /* If branches are expensive, convert
967 if (foo) bar++; to bar += (foo != 0);
968 and similarly for "bar--;"
970 INSN is the conditional branch around the arithmetic. We set:
972 TEMP is the arithmetic insn.
973 TEMP1 is the SET doing the arithmetic.
974 TEMP2 is the operand being incremented or decremented.
975 TEMP3 to the condition being tested.
976 TEMP4 to the earliest insn used to find the condition. */
979 && ! reload_completed
980 && this_is_condjump
&& ! this_is_simplejump
981 && (temp
= next_nonnote_insn (insn
)) != 0
982 && (temp1
= single_set (temp
)) != 0
983 && (temp2
= SET_DEST (temp1
),
984 GET_MODE_CLASS (GET_MODE (temp2
)) == MODE_INT
)
985 && GET_CODE (SET_SRC (temp1
)) == PLUS
986 && (XEXP (SET_SRC (temp1
), 1) == const1_rtx
987 || XEXP (SET_SRC (temp1
), 1) == constm1_rtx
)
988 && rtx_equal_p (temp2
, XEXP (SET_SRC (temp1
), 0))
989 /* INSN must either branch to the insn after TEMP or the insn
990 after TEMP must branch to the same place as INSN. */
991 && (reallabelprev
== temp
992 || ((temp3
= next_active_insn (temp
)) != 0
993 && simplejump_p (temp3
)
994 && JUMP_LABEL (temp3
) == JUMP_LABEL (insn
)))
995 && (temp3
= get_condition (insn
, &temp4
)) != 0
996 && can_reverse_comparison_p (temp3
, insn
))
998 rtx temp6
, target
= 0, seq
, init_insn
= 0, init
= temp2
;
999 enum rtx_code code
= reverse_condition (GET_CODE (temp3
));
1003 /* It must be the case that TEMP2 is not modified in the range
1004 [TEMP4, INSN). The one exception we make is if the insn
1005 before INSN sets TEMP2 to something which is also unchanged
1006 in that range. In that case, we can move the initialization
1007 into our sequence. */
1009 if ((temp5
= prev_active_insn (insn
)) != 0
1010 && GET_CODE (temp5
) == INSN
1011 && (temp6
= single_set (temp5
)) != 0
1012 && rtx_equal_p (temp2
, SET_DEST (temp6
))
1013 && (CONSTANT_P (SET_SRC (temp6
))
1014 || GET_CODE (SET_SRC (temp6
)) == REG
1015 || GET_CODE (SET_SRC (temp6
)) == SUBREG
))
1017 emit_insn (PATTERN (temp5
));
1019 init
= SET_SRC (temp6
);
1022 if (CONSTANT_P (init
)
1023 || ! reg_set_between_p (init
, PREV_INSN (temp4
), insn
))
1024 target
= emit_store_flag (gen_reg_rtx (GET_MODE (temp2
)), code
,
1025 XEXP (temp3
, 0), XEXP (temp3
, 1),
1027 (code
== LTU
|| code
== LEU
1028 || code
== GTU
|| code
== GEU
), 1);
1030 /* If we can do the store-flag, do the addition or subtraction.  */
1034 target
= expand_binop (GET_MODE (temp2
),
1035 (XEXP (SET_SRC (temp1
), 1) == const1_rtx
1036 ? add_optab
: sub_optab
),
1037 temp2
, target
, temp2
, OPTAB_WIDEN
);
1041 /* Put the result back in temp2 in case it isn't already.
1042 Then replace the jump, possibly a CC0-setting insn in
1043 front of the jump, and TEMP, with the sequence we have made.  */
1046 if (target
!= temp2
)
1047 emit_move_insn (temp2
, target
);
1052 emit_insns_before (seq
, temp4
);
1056 delete_insn (init_insn
);
1058 next
= NEXT_INSN (insn
);
1060 delete_insn (prev_nonnote_insn (insn
));
1070 /* Simplify if (...) x = 1; else {...} if (x) ...
1071 We recognize this case scanning backwards as well.
1073 TEMP is the assignment to x;
1074 TEMP1 is the label at the head of the second if. */
1075 /* ?? This should call get_condition to find the values being
1076 compared, instead of looking for a COMPARE insn when HAVE_cc0
1077 is not defined. This would allow it to work on the m88k. */
1078 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1079 is not defined and the condition is tested by a separate compare
1080 insn. This is because the code below assumes that the result
1081 of the compare dies in the following branch.
1083 Not only that, but there might be other insns between the
1084 compare and branch whose results are live. Those insns need
1087 A way to fix this is to move the insns at JUMP_LABEL (insn)
1088 to before INSN. If we are running before flow, they will
1089 be deleted if they aren't needed. But this doesn't work
1092 This is really a special-case of jump threading, anyway. The
1093 right thing to do is to replace this and jump threading with
1094 much simpler code in cse.
1096 This code has been turned off in the non-cc0 case in the
1100 else if (this_is_simplejump
1101 /* Safe to skip USE and CLOBBER insns here
1102 since they will not be deleted. */
1103 && (temp
= prev_active_insn (insn
))
1104 && no_labels_between_p (temp
, insn
)
1105 && GET_CODE (temp
) == INSN
1106 && GET_CODE (PATTERN (temp
)) == SET
1107 && GET_CODE (SET_DEST (PATTERN (temp
))) == REG
1108 && CONSTANT_P (SET_SRC (PATTERN (temp
)))
1109 && (temp1
= next_active_insn (JUMP_LABEL (insn
)))
1110 /* If we find that the next value tested is `x'
1111 (TEMP1 is the insn where this happens), win. */
1112 && GET_CODE (temp1
) == INSN
1113 && GET_CODE (PATTERN (temp1
)) == SET
1115 /* Does temp1 `tst' the value of x? */
1116 && SET_SRC (PATTERN (temp1
)) == SET_DEST (PATTERN (temp
))
1117 && SET_DEST (PATTERN (temp1
)) == cc0_rtx
1118 && (temp1
= next_nonnote_insn (temp1
))
1120 /* Does temp1 compare the value of x against zero? */
1121 && GET_CODE (SET_SRC (PATTERN (temp1
))) == COMPARE
1122 && XEXP (SET_SRC (PATTERN (temp1
)), 1) == const0_rtx
1123 && (XEXP (SET_SRC (PATTERN (temp1
)), 0)
1124 == SET_DEST (PATTERN (temp
)))
1125 && GET_CODE (SET_DEST (PATTERN (temp1
))) == REG
1126 && (temp1
= find_next_ref (SET_DEST (PATTERN (temp1
)), temp1
))
1128 && condjump_p (temp1
))
1130 /* Get the if_then_else from the condjump. */
1131 rtx choice
= SET_SRC (PATTERN (temp1
));
1132 if (GET_CODE (choice
) == IF_THEN_ELSE
)
1134 enum rtx_code code
= GET_CODE (XEXP (choice
, 0));
1135 rtx val
= SET_SRC (PATTERN (temp
));
1137 = simplify_relational_operation (code
, GET_MODE (SET_DEST (PATTERN (temp
))),
1141 if (cond
== const_true_rtx
)
1142 ultimate
= XEXP (choice
, 1);
1143 else if (cond
== const0_rtx
)
1144 ultimate
= XEXP (choice
, 2);
1148 if (ultimate
== pc_rtx
)
1149 ultimate
= get_label_after (temp1
);
1150 else if (ultimate
&& GET_CODE (ultimate
) != RETURN
)
1151 ultimate
= XEXP (ultimate
, 0);
1154 changed
|= redirect_jump (insn
, ultimate
);
1160 /* @@ This needs a bit of work before it will be right.
1162 Any type of comparison can be accepted for the first and
1163 second compare. When rewriting the first jump, we must
1164 compute the what conditions can reach label3, and use the
1165 appropriate code. We can not simply reverse/swap the code
1166 of the first jump. In some cases, the second jump must be
1170 < == converts to > ==
1171 < != converts to == >
1174 If the code is written to only accept an '==' test for the second
1175 compare, then all that needs to be done is to swap the condition
1176 of the first branch.
1178 It is questionable whether we want this optimization anyways,
1179 since if the user wrote code like this because he/she knew that
1180 the jump to label1 is taken most of the time, then rewriting
1181 this gives slower code. */
1182 /* @@ This should call get_condition to find the values being
1183 compared, instead of looking for a COMPARE insn when HAVE_cc0
1184 is not defined. This would allow it to work on the m88k. */
1185 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1186 is not defined and the condition is tested by a separate compare
1187 insn. This is because the code below assumes that the result
1188 of the compare dies in the following branch. */
1190 /* Simplify test a ~= b
1204 where ~= is an inequality, e.g. >, and ~~= is the swapped
1207 We recognize this case scanning backwards.
1209 TEMP is the conditional jump to `label2';
1210 TEMP1 is the test for `a == b';
1211 TEMP2 is the conditional jump to `label1';
1212 TEMP3 is the test for `a ~= b'. */
1213 else if (this_is_simplejump
1214 && (temp
= prev_active_insn (insn
))
1215 && no_labels_between_p (temp
, insn
)
1216 && condjump_p (temp
)
1217 && (temp1
= prev_active_insn (temp
))
1218 && no_labels_between_p (temp1
, temp
)
1219 && GET_CODE (temp1
) == INSN
1220 && GET_CODE (PATTERN (temp1
)) == SET
1222 && sets_cc0_p (PATTERN (temp1
)) == 1
1224 && GET_CODE (SET_SRC (PATTERN (temp1
))) == COMPARE
1225 && GET_CODE (SET_DEST (PATTERN (temp1
))) == REG
1226 && (temp
== find_next_ref (SET_DEST (PATTERN (temp1
)), temp1
))
1228 && (temp2
= prev_active_insn (temp1
))
1229 && no_labels_between_p (temp2
, temp1
)
1230 && condjump_p (temp2
)
1231 && JUMP_LABEL (temp2
) == next_nonnote_insn (NEXT_INSN (insn
))
1232 && (temp3
= prev_active_insn (temp2
))
1233 && no_labels_between_p (temp3
, temp2
)
1234 && GET_CODE (PATTERN (temp3
)) == SET
1235 && rtx_equal_p (SET_DEST (PATTERN (temp3
)),
1236 SET_DEST (PATTERN (temp1
)))
1237 && rtx_equal_p (SET_SRC (PATTERN (temp1
)),
1238 SET_SRC (PATTERN (temp3
)))
1239 && ! inequality_comparisons_p (PATTERN (temp
))
1240 && inequality_comparisons_p (PATTERN (temp2
)))
1242 rtx fallthrough_label
= JUMP_LABEL (temp2
);
1244 ++LABEL_NUSES (fallthrough_label
);
1245 if (swap_jump (temp2
, JUMP_LABEL (insn
)))
1251 if (--LABEL_NUSES (fallthrough_label
) == 0)
1252 delete_insn (fallthrough_label
);
1255 /* Simplify if (...) {... x = 1;} if (x) ...
1257 We recognize this case backwards.
1259 TEMP is the test of `x';
1260 TEMP1 is the assignment to `x' at the end of the
1261 previous statement. */
1262 /* @@ This should call get_condition to find the values being
1263 compared, instead of looking for a COMPARE insn when HAVE_cc0
1264 is not defined. This would allow it to work on the m88k. */
1265 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1266 is not defined and the condition is tested by a separate compare
1267 insn. This is because the code below assumes that the result
1268 of the compare dies in the following branch. */
1270 /* ??? This has to be turned off. The problem is that the
1271 unconditional jump might indirectly end up branching to the
1272 label between TEMP1 and TEMP. We can't detect this, in general,
1273 since it may become a jump to there after further optimizations.
1274 If that jump is done, it will be deleted, so we will retry
1275 this optimization in the next pass, thus an infinite loop.
1277 The present code prevents this by putting the jump after the
1278 label, but this is not logically correct. */
1280 else if (this_is_condjump
1281 /* Safe to skip USE and CLOBBER insns here
1282 since they will not be deleted. */
1283 && (temp
= prev_active_insn (insn
))
1284 && no_labels_between_p (temp
, insn
)
1285 && GET_CODE (temp
) == INSN
1286 && GET_CODE (PATTERN (temp
)) == SET
1288 && sets_cc0_p (PATTERN (temp
)) == 1
1289 && GET_CODE (SET_SRC (PATTERN (temp
))) == REG
1291 /* Temp must be a compare insn, we can not accept a register
1292 to register move here, since it may not be simply a
1294 && GET_CODE (SET_SRC (PATTERN (temp
))) == COMPARE
1295 && XEXP (SET_SRC (PATTERN (temp
)), 1) == const0_rtx
1296 && GET_CODE (XEXP (SET_SRC (PATTERN (temp
)), 0)) == REG
1297 && GET_CODE (SET_DEST (PATTERN (temp
))) == REG
1298 && insn
== find_next_ref (SET_DEST (PATTERN (temp
)), temp
)
1300 /* May skip USE or CLOBBER insns here
1301 for checking for opportunity, since we
1302 take care of them later. */
1303 && (temp1
= prev_active_insn (temp
))
1304 && GET_CODE (temp1
) == INSN
1305 && GET_CODE (PATTERN (temp1
)) == SET
1307 && SET_SRC (PATTERN (temp
)) == SET_DEST (PATTERN (temp1
))
1309 && (XEXP (SET_SRC (PATTERN (temp
)), 0)
1310 == SET_DEST (PATTERN (temp1
)))
1312 && CONSTANT_P (SET_SRC (PATTERN (temp1
)))
1313 /* If this isn't true, cse will do the job. */
1314 && ! no_labels_between_p (temp1
, temp
))
1316 /* Get the if_then_else from the condjump. */
1317 rtx choice
= SET_SRC (PATTERN (insn
));
1318 if (GET_CODE (choice
) == IF_THEN_ELSE
1319 && (GET_CODE (XEXP (choice
, 0)) == EQ
1320 || GET_CODE (XEXP (choice
, 0)) == NE
))
1322 int want_nonzero
= (GET_CODE (XEXP (choice
, 0)) == NE
);
1327 /* Get the place that condjump will jump to
1328 if it is reached from here. */
1329 if ((SET_SRC (PATTERN (temp1
)) != const0_rtx
)
1331 ultimate
= XEXP (choice
, 1);
1333 ultimate
= XEXP (choice
, 2);
1334 /* Get it as a CODE_LABEL. */
1335 if (ultimate
== pc_rtx
)
1336 ultimate
= get_label_after (insn
);
1338 /* Get the label out of the LABEL_REF. */
1339 ultimate
= XEXP (ultimate
, 0);
1341 /* Insert the jump immediately before TEMP, specifically
1342 after the label that is between TEMP1 and TEMP. */
1343 last_insn
= PREV_INSN (temp
);
1345 /* If we would be branching to the next insn, the jump
1346 would immediately be deleted and the re-inserted in
1347 a subsequent pass over the code. So don't do anything
1349 if (next_active_insn (last_insn
)
1350 != next_active_insn (ultimate
))
1352 emit_barrier_after (last_insn
);
1353 p
= emit_jump_insn_after (gen_jump (ultimate
),
1355 JUMP_LABEL (p
) = ultimate
;
1356 ++LABEL_NUSES (ultimate
);
1357 if (INSN_UID (ultimate
) < max_jump_chain
1358 && INSN_CODE (p
) < max_jump_chain
)
1360 jump_chain
[INSN_UID (p
)]
1361 = jump_chain
[INSN_UID (ultimate
)];
1362 jump_chain
[INSN_UID (ultimate
)] = p
;
1370 /* Detect a conditional jump going to the same place
1371 as an immediately following unconditional jump. */
1372 else if (this_is_condjump
1373 && (temp
= next_active_insn (insn
)) != 0
1374 && simplejump_p (temp
)
1375 && (next_active_insn (JUMP_LABEL (insn
))
1376 == next_active_insn (JUMP_LABEL (temp
))))
1382 /* Detect a conditional jump jumping over an unconditional jump. */
1384 else if (this_is_condjump
&& ! this_is_simplejump
1385 && reallabelprev
!= 0
1386 && GET_CODE (reallabelprev
) == JUMP_INSN
1387 && prev_active_insn (reallabelprev
) == insn
1388 && no_labels_between_p (insn
, reallabelprev
)
1389 && simplejump_p (reallabelprev
))
1391 /* When we invert the unconditional jump, we will be
1392 decrementing the usage count of its old label.
1393 Make sure that we don't delete it now because that
1394 might cause the following code to be deleted. */
1395 rtx prev_uses
= prev_nonnote_insn (reallabelprev
);
1396 rtx prev_label
= JUMP_LABEL (insn
);
1398 ++LABEL_NUSES (prev_label
);
1400 if (invert_jump (insn
, JUMP_LABEL (reallabelprev
)))
1402 /* It is very likely that if there are USE insns before
1403 this jump, they hold REG_DEAD notes. These REG_DEAD
1404 notes are no longer valid due to this optimization,
1405 and will cause the life-analysis that following passes
1406 (notably delayed-branch scheduling) to think that
1407 these registers are dead when they are not.
1409 To prevent this trouble, we just remove the USE insns
1410 from the insn chain. */
1412 while (prev_uses
&& GET_CODE (prev_uses
) == INSN
1413 && GET_CODE (PATTERN (prev_uses
)) == USE
)
1415 rtx useless
= prev_uses
;
1416 prev_uses
= prev_nonnote_insn (prev_uses
);
1417 delete_insn (useless
);
1420 delete_insn (reallabelprev
);
1425 /* We can now safely delete the label if it is unreferenced
1426 since the delete_insn above has deleted the BARRIER. */
1427 if (--LABEL_NUSES (prev_label
) == 0)
1428 delete_insn (prev_label
);
1433 /* Detect a jump to a jump. */
1435 nlabel
= follow_jumps (JUMP_LABEL (insn
));
1436 if (nlabel
!= JUMP_LABEL (insn
)
1437 && redirect_jump (insn
, nlabel
))
1443 /* Look for if (foo) bar; else break; */
1444 /* The insns look like this:
1445 insn = condjump label1;
1446 ...range1 (some insns)...
1449 ...range2 (some insns)...
1450 jump somewhere unconditionally
1453 rtx label1
= next_label (insn
);
1454 rtx range1end
= label1
? prev_active_insn (label1
) : 0;
1455 /* Don't do this optimization on the first round, so that
1456 jump-around-a-jump gets simplified before we ask here
1457 whether a jump is unconditional.
1459 Also don't do it when we are called after reload since
1460 it will confuse reorg. */
1462 && (reload_completed
? ! flag_delayed_branch
: 1)
1463 /* Make sure INSN is something we can invert. */
1464 && condjump_p (insn
)
1466 && JUMP_LABEL (insn
) == label1
1467 && LABEL_NUSES (label1
) == 1
1468 && GET_CODE (range1end
) == JUMP_INSN
1469 && simplejump_p (range1end
))
1471 rtx label2
= next_label (label1
);
1472 rtx range2end
= label2
? prev_active_insn (label2
) : 0;
1473 if (range1end
!= range2end
1474 && JUMP_LABEL (range1end
) == label2
1475 && GET_CODE (range2end
) == JUMP_INSN
1476 && GET_CODE (NEXT_INSN (range2end
)) == BARRIER
1477 /* Invert the jump condition, so we
1478 still execute the same insns in each case. */
1479 && invert_jump (insn
, label1
))
1481 rtx range1beg
= next_active_insn (insn
);
1482 rtx range2beg
= next_active_insn (label1
);
1483 rtx range1after
, range2after
;
1484 rtx range1before
, range2before
;
1486 /* Include in each range any line number before it. */
1487 while (PREV_INSN (range1beg
)
1488 && GET_CODE (PREV_INSN (range1beg
)) == NOTE
1489 && NOTE_LINE_NUMBER (PREV_INSN (range1beg
)) > 0)
1490 range1beg
= PREV_INSN (range1beg
);
1492 while (PREV_INSN (range2beg
)
1493 && GET_CODE (PREV_INSN (range2beg
)) == NOTE
1494 && NOTE_LINE_NUMBER (PREV_INSN (range2beg
)) > 0)
1495 range2beg
= PREV_INSN (range2beg
);
1497 /* Don't move NOTEs for blocks or loops; shift them
1498 outside the ranges, where they'll stay put. */
1499 range1beg
= squeeze_notes (range1beg
, range1end
);
1500 range2beg
= squeeze_notes (range2beg
, range2end
);
1502 /* Get current surrounds of the 2 ranges. */
1503 range1before
= PREV_INSN (range1beg
);
1504 range2before
= PREV_INSN (range2beg
);
1505 range1after
= NEXT_INSN (range1end
);
1506 range2after
= NEXT_INSN (range2end
);
1508 /* Splice range2 where range1 was. */
1509 NEXT_INSN (range1before
) = range2beg
;
1510 PREV_INSN (range2beg
) = range1before
;
1511 NEXT_INSN (range2end
) = range1after
;
1512 PREV_INSN (range1after
) = range2end
;
1513 /* Splice range1 where range2 was. */
1514 NEXT_INSN (range2before
) = range1beg
;
1515 PREV_INSN (range1beg
) = range2before
;
1516 NEXT_INSN (range1end
) = range2after
;
1517 PREV_INSN (range2after
) = range1end
;
1524 /* Now that the jump has been tensioned,
1525 try cross jumping: check for identical code
1526 before the jump and before its target label. */
1528 /* First, cross jumping of conditional jumps: */
1530 if (cross_jump
&& condjump_p (insn
))
1532 rtx newjpos
, newlpos
;
1533 rtx x
= prev_real_insn (JUMP_LABEL (insn
));
1535 /* A conditional jump may be crossjumped
1536 only if the place it jumps to follows
1537 an opposing jump that comes back here. */
1539 if (x
!= 0 && ! jump_back_p (x
, insn
))
1540 /* We have no opposing jump;
1541 cannot cross jump this insn. */
1545 /* TARGET is nonzero if it is ok to cross jump
1546 to code before TARGET. If so, see if matches. */
1548 find_cross_jump (insn
, x
, 2,
1549 &newjpos
, &newlpos
);
1553 do_cross_jump (insn
, newjpos
, newlpos
);
1554 /* Make the old conditional jump
1555 into an unconditional one. */
1556 SET_SRC (PATTERN (insn
))
1557 = gen_rtx (LABEL_REF
, VOIDmode
, JUMP_LABEL (insn
));
1558 INSN_CODE (insn
) = -1;
1559 emit_barrier_after (insn
);
1560 /* Add to jump_chain unless this is a new label
1561 whose UID is too large. */
1562 if (INSN_UID (JUMP_LABEL (insn
)) < max_jump_chain
)
1564 jump_chain
[INSN_UID (insn
)]
1565 = jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
1566 jump_chain
[INSN_UID (JUMP_LABEL (insn
))] = insn
;
1573 /* Cross jumping of unconditional jumps:
1574 a few differences. */
1576 if (cross_jump
&& simplejump_p (insn
))
1578 rtx newjpos
, newlpos
;
1583 /* TARGET is nonzero if it is ok to cross jump
1584 to code before TARGET. If so, see if matches. */
1585 find_cross_jump (insn
, JUMP_LABEL (insn
), 1,
1586 &newjpos
, &newlpos
);
1588 /* If cannot cross jump to code before the label,
1589 see if we can cross jump to another jump to
1591 /* Try each other jump to this label. */
1592 if (INSN_UID (JUMP_LABEL (insn
)) < max_uid
)
1593 for (target
= jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
1594 target
!= 0 && newjpos
== 0;
1595 target
= jump_chain
[INSN_UID (target
)])
1597 && JUMP_LABEL (target
) == JUMP_LABEL (insn
)
1598 /* Ignore TARGET if it's deleted. */
1599 && ! INSN_DELETED_P (target
))
1600 find_cross_jump (insn
, target
, 2,
1601 &newjpos
, &newlpos
);
1605 do_cross_jump (insn
, newjpos
, newlpos
);
1611 /* This code was dead in the previous jump.c! */
1612 if (cross_jump
&& GET_CODE (PATTERN (insn
)) == RETURN
)
1614 /* Return insns all "jump to the same place"
1615 so we can cross-jump between any two of them. */
1617 rtx newjpos
, newlpos
, target
;
1621 /* If cannot cross jump to code before the label,
1622 see if we can cross jump to another jump to
1624 /* Try each other jump to this label. */
1625 for (target
= jump_chain
[0];
1626 target
!= 0 && newjpos
== 0;
1627 target
= jump_chain
[INSN_UID (target
)])
1629 && ! INSN_DELETED_P (target
)
1630 && GET_CODE (PATTERN (target
)) == RETURN
)
1631 find_cross_jump (insn
, target
, 2,
1632 &newjpos
, &newlpos
);
1636 do_cross_jump (insn
, newjpos
, newlpos
);
1647 /* Delete extraneous line number notes.
1648 Note that two consecutive notes for different lines are not really
1649 extraneous. There should be some indication where that line belonged,
1650 even if it became empty. */
1655 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
1656 if (GET_CODE (insn
) == NOTE
&& NOTE_LINE_NUMBER (insn
) >= 0)
1658 /* Delete this note if it is identical to previous note. */
1660 && NOTE_SOURCE_FILE (insn
) == NOTE_SOURCE_FILE (last_note
)
1661 && NOTE_LINE_NUMBER (insn
) == NOTE_LINE_NUMBER (last_note
))
1671 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
1672 If so, delete it, and record that this function can drop off the end. */
1678 /* One label can follow the end-note: the return label. */
1679 && ((GET_CODE (insn
) == CODE_LABEL
&& n_labels
-- > 0)
1680 /* Ordinary insns can follow it if returning a structure. */
1681 || GET_CODE (insn
) == INSN
1682 /* If machine uses explicit RETURN insns, no epilogue,
1683 then one of them follows the note. */
1684 || (GET_CODE (insn
) == JUMP_INSN
1685 && GET_CODE (PATTERN (insn
)) == RETURN
)
1686 /* Other kinds of notes can follow also. */
1687 || (GET_CODE (insn
) == NOTE
1688 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
)))
1689 insn
= PREV_INSN (insn
);
1692 /* Report if control can fall through at the end of the function. */
1693 if (insn
&& GET_CODE (insn
) == NOTE
1694 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_END
)
1700 /* Show JUMP_CHAIN no longer valid. */
1704 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
1705 jump. Assume that this unconditional jump is to the exit test code. If
1706 the code is sufficiently simple, make a copy of it before INSN,
1707 followed by a jump to the exit of the loop. Then delete the unconditional
1710 Note that it is possible we can get confused here if the jump immediately
1711 after the loop start branches outside the loop but within an outer loop.
1712 If we are near the exit of that loop, we will copy its exit test. This
1713 will not generate incorrect code, but could suppress some optimizations.
1714 However, such cases are degenerate loops anyway.
1716 Return 1 if we made the change, else 0.
1718 This is only safe immediately after a regscan pass because it uses the
1719 values of regno_first_uid and regno_last_uid. */
1722 duplicate_loop_exit_test (loop_start
)
1728 rtx exitcode
= NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start
)));
1730 int max_reg
= max_reg_num ();
1733 /* Scan the exit code. We do not perform this optimization if any insn:
1737 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
1738 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
1739 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
1742 Also, don't do this if the exit code is more than 20 insns. */
1744 for (insn
= exitcode
;
1746 && ! (GET_CODE (insn
) == NOTE
1747 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
);
1748 insn
= NEXT_INSN (insn
))
1750 switch (GET_CODE (insn
))
1756 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
1757 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
1758 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
1763 if (++num_insns
> 20
1764 || find_reg_note (insn
, REG_RETVAL
, NULL_RTX
)
1765 || find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
))
1771 /* Unless INSN is zero, we can do the optimization. */
1777 /* See if any insn sets a register only used in the loop exit code and
1778 not a user variable. If so, replace it with a new register. */
1779 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
1780 if (GET_CODE (insn
) == INSN
1781 && (set
= single_set (insn
)) != 0
1782 && GET_CODE (SET_DEST (set
)) == REG
1783 && REGNO (SET_DEST (set
)) >= FIRST_PSEUDO_REGISTER
1784 && regno_first_uid
[REGNO (SET_DEST (set
))] == INSN_UID (insn
))
1786 for (p
= NEXT_INSN (insn
); p
!= lastexit
; p
= NEXT_INSN (p
))
1787 if (regno_last_uid
[REGNO (SET_DEST (set
))] == INSN_UID (p
))
1792 /* We can do the replacement. Allocate reg_map if this is the
1793 first replacement we found. */
1796 reg_map
= (rtx
*) alloca (max_reg
* sizeof (rtx
));
1797 bzero (reg_map
, max_reg
* sizeof (rtx
));
1800 REG_LOOP_TEST_P (SET_DEST (set
)) = 1;
1802 reg_map
[REGNO (SET_DEST (set
))]
1803 = gen_reg_rtx (GET_MODE (SET_DEST (set
)));
1807 /* Now copy each insn. */
1808 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
1809 switch (GET_CODE (insn
))
1812 copy
= emit_barrier_before (loop_start
);
1815 /* Only copy line-number notes. */
1816 if (NOTE_LINE_NUMBER (insn
) >= 0)
1818 copy
= emit_note_before (NOTE_LINE_NUMBER (insn
), loop_start
);
1819 NOTE_SOURCE_FILE (copy
) = NOTE_SOURCE_FILE (insn
);
1824 copy
= emit_insn_before (copy_rtx (PATTERN (insn
)), loop_start
);
1826 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
1828 mark_jump_label (PATTERN (copy
), copy
, 0);
1830 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
1832 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1833 if (REG_NOTE_KIND (link
) != REG_LABEL
)
1835 = copy_rtx (gen_rtx (EXPR_LIST
, REG_NOTE_KIND (link
),
1836 XEXP (link
, 0), REG_NOTES (copy
)));
1837 if (reg_map
&& REG_NOTES (copy
))
1838 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
1842 copy
= emit_jump_insn_before (copy_rtx (PATTERN (insn
)), loop_start
);
1844 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
1845 mark_jump_label (PATTERN (copy
), copy
, 0);
1846 if (REG_NOTES (insn
))
1848 REG_NOTES (copy
) = copy_rtx (REG_NOTES (insn
));
1850 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
1853 /* If this is a simple jump, add it to the jump chain. */
1855 if (INSN_UID (copy
) < max_jump_chain
&& JUMP_LABEL (copy
)
1856 && simplejump_p (copy
))
1858 jump_chain
[INSN_UID (copy
)]
1859 = jump_chain
[INSN_UID (JUMP_LABEL (copy
))];
1860 jump_chain
[INSN_UID (JUMP_LABEL (copy
))] = copy
;
1868 /* Now clean up by emitting a jump to the end label and deleting the jump
1869 at the start of the loop. */
1870 if (GET_CODE (copy
) != BARRIER
)
1872 copy
= emit_jump_insn_before (gen_jump (get_label_after (insn
)),
1874 mark_jump_label (PATTERN (copy
), copy
, 0);
1875 if (INSN_UID (copy
) < max_jump_chain
1876 && INSN_UID (JUMP_LABEL (copy
)) < max_jump_chain
)
1878 jump_chain
[INSN_UID (copy
)]
1879 = jump_chain
[INSN_UID (JUMP_LABEL (copy
))];
1880 jump_chain
[INSN_UID (JUMP_LABEL (copy
))] = copy
;
1882 emit_barrier_before (loop_start
);
1885 delete_insn (next_nonnote_insn (loop_start
));
1887 /* Mark the exit code as the virtual top of the converted loop. */
1888 emit_note_before (NOTE_INSN_LOOP_VTOP
, exitcode
);
1893 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
1894 loop-end notes between START and END out before START. Assume that
1895 END is not such a note. START may be such a note. Returns the value
1896 of the new starting insn, which may be different if the original start
1900 squeeze_notes (start
, end
)
1906 for (insn
= start
; insn
!= end
; insn
= next
)
1908 next
= NEXT_INSN (insn
);
1909 if (GET_CODE (insn
) == NOTE
1910 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
1911 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
1912 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
1913 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
1914 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
1915 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_VTOP
))
1921 rtx prev
= PREV_INSN (insn
);
1922 PREV_INSN (insn
) = PREV_INSN (start
);
1923 NEXT_INSN (insn
) = start
;
1924 NEXT_INSN (PREV_INSN (insn
)) = insn
;
1925 PREV_INSN (NEXT_INSN (insn
)) = insn
;
1926 NEXT_INSN (prev
) = next
;
1927 PREV_INSN (next
) = prev
;
1935 /* Compare the instructions before insn E1 with those before E2
1936 to find an opportunity for cross jumping.
1937 (This means detecting identical sequences of insns followed by
1938 jumps to the same place, or followed by a label and a jump
1939 to that label, and replacing one with a jump to the other.)
1941 Assume E1 is a jump that jumps to label E2
1942 (that is not always true but it might as well be).
1943 Find the longest possible equivalent sequences
1944 and store the first insns of those sequences into *F1 and *F2.
1945 Store zero there if no equivalent preceding instructions are found.
1947 We give up if we find a label in stream 1.
1948 Actually we could transfer that label into stream 2. */
1951 find_cross_jump (e1
, e2
, minimum
, f1
, f2
)
1956 register rtx i1
= e1
, i2
= e2
;
1957 register rtx p1
, p2
;
1960 rtx last1
= 0, last2
= 0;
1961 rtx afterlast1
= 0, afterlast2
= 0;
1969 i1
= prev_nonnote_insn (i1
);
1971 i2
= PREV_INSN (i2
);
1972 while (i2
&& (GET_CODE (i2
) == NOTE
|| GET_CODE (i2
) == CODE_LABEL
))
1973 i2
= PREV_INSN (i2
);
1978 /* Don't allow the range of insns preceding E1 or E2
1979 to include the other (E2 or E1). */
1980 if (i2
== e1
|| i1
== e2
)
1983 /* If we will get to this code by jumping, those jumps will be
1984 tensioned to go directly to the new label (before I2),
1985 so this cross-jumping won't cost extra. So reduce the minimum. */
1986 if (GET_CODE (i1
) == CODE_LABEL
)
1992 if (i2
== 0 || GET_CODE (i1
) != GET_CODE (i2
))
1999 /* If cross_jump_death_matters is not 0, the insn's mode
2000 indicates whether or not the insn contains any stack-like
2003 if (cross_jump_death_matters
&& GET_MODE (i1
) == QImode
)
2005 /* If register stack conversion has already been done, then
2006 death notes must also be compared before it is certain that
2007 the two instruction streams match. */
2010 HARD_REG_SET i1_regset
, i2_regset
;
2012 CLEAR_HARD_REG_SET (i1_regset
);
2013 CLEAR_HARD_REG_SET (i2_regset
);
2015 for (note
= REG_NOTES (i1
); note
; note
= XEXP (note
, 1))
2016 if (REG_NOTE_KIND (note
) == REG_DEAD
2017 && STACK_REG_P (XEXP (note
, 0)))
2018 SET_HARD_REG_BIT (i1_regset
, REGNO (XEXP (note
, 0)));
2020 for (note
= REG_NOTES (i2
); note
; note
= XEXP (note
, 1))
2021 if (REG_NOTE_KIND (note
) == REG_DEAD
2022 && STACK_REG_P (XEXP (note
, 0)))
2023 SET_HARD_REG_BIT (i2_regset
, REGNO (XEXP (note
, 0)));
2025 GO_IF_HARD_REG_EQUAL (i1_regset
, i2_regset
, done
);
2034 if (lose
|| GET_CODE (p1
) != GET_CODE (p2
)
2035 || ! rtx_renumbered_equal_p (p1
, p2
))
2037 /* The following code helps take care of G++ cleanups. */
2041 if (!lose
&& GET_CODE (p1
) == GET_CODE (p2
)
2042 && ((equiv1
= find_reg_note (i1
, REG_EQUAL
, NULL_RTX
)) != 0
2043 || (equiv1
= find_reg_note (i1
, REG_EQUIV
, NULL_RTX
)) != 0)
2044 && ((equiv2
= find_reg_note (i2
, REG_EQUAL
, NULL_RTX
)) != 0
2045 || (equiv2
= find_reg_note (i2
, REG_EQUIV
, NULL_RTX
)) != 0)
2046 /* If the equivalences are not to a constant, they may
2047 reference pseudos that no longer exist, so we can't
2049 && CONSTANT_P (XEXP (equiv1
, 0))
2050 && rtx_equal_p (XEXP (equiv1
, 0), XEXP (equiv2
, 0)))
2052 rtx s1
= single_set (i1
);
2053 rtx s2
= single_set (i2
);
2054 if (s1
!= 0 && s2
!= 0
2055 && rtx_renumbered_equal_p (SET_DEST (s1
), SET_DEST (s2
)))
2057 validate_change (i1
, &SET_SRC (s1
), XEXP (equiv1
, 0), 1);
2058 validate_change (i2
, &SET_SRC (s2
), XEXP (equiv2
, 0), 1);
2059 if (! rtx_renumbered_equal_p (p1
, p2
))
2061 else if (apply_change_group ())
2066 /* Insns fail to match; cross jumping is limited to the following
2070 /* Don't allow the insn after a compare to be shared by
2071 cross-jumping unless the compare is also shared.
2072 Here, if either of these non-matching insns is a compare,
2073 exclude the following insn from possible cross-jumping. */
2074 if (sets_cc0_p (p1
) || sets_cc0_p (p2
))
2075 last1
= afterlast1
, last2
= afterlast2
, ++minimum
;
2078 /* If cross-jumping here will feed a jump-around-jump
2079 optimization, this jump won't cost extra, so reduce
2081 if (GET_CODE (i1
) == JUMP_INSN
2083 && prev_real_insn (JUMP_LABEL (i1
)) == e1
)
2089 if (GET_CODE (p1
) != USE
&& GET_CODE (p1
) != CLOBBER
)
2091 /* Ok, this insn is potentially includable in a cross-jump here. */
2092 afterlast1
= last1
, afterlast2
= last2
;
2093 last1
= i1
, last2
= i2
, --minimum
;
2097 /* We have to be careful that we do not cross-jump into the middle of
2098 USE-CALL_INSN-CLOBBER sequence. This sequence is used instead of
2099 putting the USE and CLOBBERs inside the CALL_INSN. The delay slot
2100 scheduler needs to know what registers are used and modified by the
2101 CALL_INSN and needs the adjacent USE and CLOBBERs to do so.
2103 ??? At some point we should probably change this so that these are
2104 part of the CALL_INSN. The way we are doing it now is a kludge that
2105 is now causing trouble. */
2107 if (last1
!= 0 && GET_CODE (last1
) == CALL_INSN
2108 && (prev1
= prev_nonnote_insn (last1
))
2109 && GET_CODE (prev1
) == INSN
2110 && GET_CODE (PATTERN (prev1
)) == USE
)
2112 /* Remove this CALL_INSN from the range we can cross-jump. */
2113 last1
= next_real_insn (last1
);
2114 last2
= next_real_insn (last2
);
2119 /* Skip past CLOBBERS since they may be right after a CALL_INSN. It
2120 isn't worth checking for the CALL_INSN. */
2121 while (last1
!= 0 && GET_CODE (PATTERN (last1
)) == CLOBBER
)
2122 last1
= next_real_insn (last1
), last2
= next_real_insn (last2
);
2124 if (minimum
<= 0 && last1
!= 0 && last1
!= e1
)
2125 *f1
= last1
, *f2
= last2
;
2129 do_cross_jump (insn
, newjpos
, newlpos
)
2130 rtx insn
, newjpos
, newlpos
;
2132 /* Find an existing label at this point
2133 or make a new one if there is none. */
2134 register rtx label
= get_label_before (newlpos
);
2136 /* Make the same jump insn jump to the new point. */
2137 if (GET_CODE (PATTERN (insn
)) == RETURN
)
2139 /* Remove from jump chain of returns. */
2140 delete_from_jump_chain (insn
);
2141 /* Change the insn. */
2142 PATTERN (insn
) = gen_jump (label
);
2143 INSN_CODE (insn
) = -1;
2144 JUMP_LABEL (insn
) = label
;
2145 LABEL_NUSES (label
)++;
2146 /* Add to new the jump chain. */
2147 if (INSN_UID (label
) < max_jump_chain
2148 && INSN_UID (insn
) < max_jump_chain
)
2150 jump_chain
[INSN_UID (insn
)] = jump_chain
[INSN_UID (label
)];
2151 jump_chain
[INSN_UID (label
)] = insn
;
2155 redirect_jump (insn
, label
);
2157 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
2158 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
2159 the NEWJPOS stream. */
2161 while (newjpos
!= insn
)
2165 for (lnote
= REG_NOTES (newlpos
); lnote
; lnote
= XEXP (lnote
, 1))
2166 if ((REG_NOTE_KIND (lnote
) == REG_EQUAL
2167 || REG_NOTE_KIND (lnote
) == REG_EQUIV
)
2168 && ! find_reg_note (newjpos
, REG_EQUAL
, XEXP (lnote
, 0))
2169 && ! find_reg_note (newjpos
, REG_EQUIV
, XEXP (lnote
, 0)))
2170 remove_note (newlpos
, lnote
);
2172 delete_insn (newjpos
);
2173 newjpos
= next_real_insn (newjpos
);
2174 newlpos
= next_real_insn (newlpos
);
2178 /* Return the label before INSN, or put a new label there. */
2181 get_label_before (insn
)
2186 /* Find an existing label at this point
2187 or make a new one if there is none. */
2188 label
= prev_nonnote_insn (insn
);
2190 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
2192 rtx prev
= PREV_INSN (insn
);
2194 /* Don't put a label between a CALL_INSN and USE insns that precede
2197 if (GET_CODE (insn
) == CALL_INSN
2198 || (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
2199 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == CALL_INSN
))
2200 while (GET_CODE (prev
) == INSN
&& GET_CODE (PATTERN (prev
)) == USE
)
2201 prev
= PREV_INSN (prev
);
2203 label
= gen_label_rtx ();
2204 emit_label_after (label
, prev
);
2205 LABEL_NUSES (label
) = 0;
2210 /* Return the label after INSN, or put a new label there. */
2213 get_label_after (insn
)
2218 /* Find an existing label at this point
2219 or make a new one if there is none. */
2220 label
= next_nonnote_insn (insn
);
2222 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
2224 /* Don't put a label between a CALL_INSN and CLOBBER insns
2227 if (GET_CODE (insn
) == CALL_INSN
2228 || (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
2229 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == CALL_INSN
))
2230 while (GET_CODE (NEXT_INSN (insn
)) == INSN
2231 && GET_CODE (PATTERN (NEXT_INSN (insn
))) == CLOBBER
)
2232 insn
= NEXT_INSN (insn
);
2234 label
= gen_label_rtx ();
2235 emit_label_after (label
, insn
);
2236 LABEL_NUSES (label
) = 0;
2241 /* Return 1 if INSN is a jump that jumps to right after TARGET
2242 only on the condition that TARGET itself would drop through.
2243 Assumes that TARGET is a conditional jump. */
2246 jump_back_p (insn
, target
)
2250 enum rtx_code codei
, codet
;
2252 if (simplejump_p (insn
) || ! condjump_p (insn
)
2253 || simplejump_p (target
)
2254 || target
!= prev_real_insn (JUMP_LABEL (insn
)))
2257 cinsn
= XEXP (SET_SRC (PATTERN (insn
)), 0);
2258 ctarget
= XEXP (SET_SRC (PATTERN (target
)), 0);
2260 codei
= GET_CODE (cinsn
);
2261 codet
= GET_CODE (ctarget
);
2263 if (XEXP (SET_SRC (PATTERN (insn
)), 1) == pc_rtx
)
2265 if (! can_reverse_comparison_p (cinsn
, insn
))
2267 codei
= reverse_condition (codei
);
2270 if (XEXP (SET_SRC (PATTERN (target
)), 2) == pc_rtx
)
2272 if (! can_reverse_comparison_p (ctarget
, target
))
2274 codet
= reverse_condition (codet
);
2277 return (codei
== codet
2278 && rtx_renumbered_equal_p (XEXP (cinsn
, 0), XEXP (ctarget
, 0))
2279 && rtx_renumbered_equal_p (XEXP (cinsn
, 1), XEXP (ctarget
, 1)));
2282 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
2283 return non-zero if it is safe to reverse this comparison. It is if our
2284 floating-point is not IEEE, if this is an NE or EQ comparison, or if
2285 this is known to be an integer comparison. */
2288 can_reverse_comparison_p (comparison
, insn
)
2294 /* If this is not actually a comparison, we can't reverse it. */
2295 if (GET_RTX_CLASS (GET_CODE (comparison
)) != '<')
2298 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
2299 /* If this is an NE comparison, it is safe to reverse it to an EQ
2300 comparison and vice versa, even for floating point. If no operands
2301 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
2302 always false and NE is always true, so the reversal is also valid. */
2303 || GET_CODE (comparison
) == NE
2304 || GET_CODE (comparison
) == EQ
)
2307 arg0
= XEXP (comparison
, 0);
2309 /* Make sure ARG0 is one of the actual objects being compared. If we
2310 can't do this, we can't be sure the comparison can be reversed.
2312 Handle cc0 and a MODE_CC register. */
2313 if ((GET_CODE (arg0
) == REG
&& GET_MODE_CLASS (GET_MODE (arg0
)) == MODE_CC
)
2319 rtx prev
= prev_nonnote_insn (insn
);
2320 rtx set
= single_set (prev
);
2322 if (set
== 0 || SET_DEST (set
) != arg0
)
2325 arg0
= SET_SRC (set
);
2327 if (GET_CODE (arg0
) == COMPARE
)
2328 arg0
= XEXP (arg0
, 0);
2331 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
2332 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
2333 return (GET_CODE (arg0
) == CONST_INT
2334 || (GET_MODE (arg0
) != VOIDmode
2335 && GET_MODE_CLASS (GET_MODE (arg0
)) != MODE_CC
2336 && GET_MODE_CLASS (GET_MODE (arg0
)) != MODE_FLOAT
));
2339 /* Given an rtx-code for a comparison, return the code
2340 for the negated comparison.
2341 WATCH OUT! reverse_condition is not safe to use on a jump
2342 that might be acting on the results of an IEEE floating point comparison,
2343 because of the special treatment of non-signaling nans in comparisons.
2344 Use can_reverse_comparison_p to be sure. */
2347 reverse_condition (code
)
2388 /* Similar, but return the code when two operands of a comparison are swapped.
2389 This IS safe for IEEE floating-point. */
2392 swap_condition (code
)
2431 /* Given a comparison CODE, return the corresponding unsigned comparison.
2432 If CODE is an equality comparison or already an unsigned comparison,
2433 CODE is returned. */
2436 unsigned_condition (code
)
2466 /* Similarly, return the signed version of a comparison. */
2469 signed_condition (code
)
2499 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2500 truth of CODE1 implies the truth of CODE2. */
2503 comparison_dominates_p (code1
, code2
)
2504 enum rtx_code code1
, code2
;
2512 if (code2
== LE
|| code2
== LEU
|| code2
== GE
|| code2
== GEU
)
2540 /* Return 1 if INSN is an unconditional jump and nothing else. */
2546 return (GET_CODE (insn
) == JUMP_INSN
2547 && GET_CODE (PATTERN (insn
)) == SET
2548 && GET_CODE (SET_DEST (PATTERN (insn
))) == PC
2549 && GET_CODE (SET_SRC (PATTERN (insn
))) == LABEL_REF
);
2552 /* Return nonzero if INSN is a (possibly) conditional jump
2553 and nothing more. */
2559 register rtx x
= PATTERN (insn
);
2560 if (GET_CODE (x
) != SET
)
2562 if (GET_CODE (SET_DEST (x
)) != PC
)
2564 if (GET_CODE (SET_SRC (x
)) == LABEL_REF
)
2566 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
2568 if (XEXP (SET_SRC (x
), 2) == pc_rtx
2569 && (GET_CODE (XEXP (SET_SRC (x
), 1)) == LABEL_REF
2570 || GET_CODE (XEXP (SET_SRC (x
), 1)) == RETURN
))
2572 if (XEXP (SET_SRC (x
), 1) == pc_rtx
2573 && (GET_CODE (XEXP (SET_SRC (x
), 2)) == LABEL_REF
2574 || GET_CODE (XEXP (SET_SRC (x
), 2)) == RETURN
))
2579 /* Return 1 if X is an RTX that does nothing but set the condition codes
2580 and CLOBBER or USE registers.
2581 Return -1 if X does explicitly set the condition codes,
2582 but also does other things. */
2589 if (GET_CODE (x
) == SET
&& SET_DEST (x
) == cc0_rtx
)
2591 if (GET_CODE (x
) == PARALLEL
)
2595 int other_things
= 0;
2596 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
2598 if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
2599 && SET_DEST (XVECEXP (x
, 0, i
)) == cc0_rtx
)
2601 else if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
)
2604 return ! sets_cc0
? 0 : other_things
? -1 : 1;
2612 /* Follow any unconditional jump at LABEL;
2613 return the ultimate label reached by any such chain of jumps.
2614 If LABEL is not followed by a jump, return LABEL.
2615 If the chain loops or we can't find end, return LABEL,
2616 since that tells caller to avoid changing the insn.
2618 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2619 a USE or CLOBBER. */
2622 follow_jumps (label
)
2627 register rtx value
= label
;
2632 && (insn
= next_active_insn (value
)) != 0
2633 && GET_CODE (insn
) == JUMP_INSN
2634 && (JUMP_LABEL (insn
) != 0 || GET_CODE (PATTERN (insn
)) == RETURN
)
2635 && (next
= NEXT_INSN (insn
))
2636 && GET_CODE (next
) == BARRIER
);
2639 /* Don't chain through the insn that jumps into a loop
2640 from outside the loop,
2641 since that would create multiple loop entry jumps
2642 and prevent loop optimization. */
2644 if (!reload_completed
)
2645 for (tem
= value
; tem
!= insn
; tem
= NEXT_INSN (tem
))
2646 if (GET_CODE (tem
) == NOTE
2647 && NOTE_LINE_NUMBER (tem
) == NOTE_INSN_LOOP_BEG
)
2650 /* If we have found a cycle, make the insn jump to itself. */
2651 if (JUMP_LABEL (insn
) == label
)
2653 value
= JUMP_LABEL (insn
);
2660 /* Assuming that field IDX of X is a vector of label_refs,
2661 replace each of them by the ultimate label reached by it.
2662 Return nonzero if a change is made.
2663 If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG. */
2666 tension_vector_labels (x
, idx
)
2672 for (i
= XVECLEN (x
, idx
) - 1; i
>= 0; i
--)
2674 register rtx olabel
= XEXP (XVECEXP (x
, idx
, i
), 0);
2675 register rtx nlabel
= follow_jumps (olabel
);
2676 if (nlabel
&& nlabel
!= olabel
)
2678 XEXP (XVECEXP (x
, idx
, i
), 0) = nlabel
;
2679 ++LABEL_NUSES (nlabel
);
2680 if (--LABEL_NUSES (olabel
) == 0)
2681 delete_insn (olabel
);
2688 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2689 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2690 in INSN, then store one of them in JUMP_LABEL (INSN).
2691 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2692 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2693 Also, when there are consecutive labels, canonicalize on the last of them.
2695 Note that two labels separated by a loop-beginning note
2696 must be kept distinct if we have not yet done loop-optimization,
2697 because the gap between them is where loop-optimize
2698 will want to move invariant code to. CROSS_JUMP tells us
2699 that loop-optimization is done with.
2701 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2702 two labels distinct if they are separated by only USE or CLOBBER insns. */
2705 mark_jump_label (x
, insn
, cross_jump
)
2710 register RTX_CODE code
= GET_CODE (x
);
2728 /* If this is a constant-pool reference, see if it is a label. */
2729 if (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
2730 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)))
2731 mark_jump_label (get_pool_constant (XEXP (x
, 0)), insn
, cross_jump
);
2736 register rtx label
= XEXP (x
, 0);
2738 if (GET_CODE (label
) != CODE_LABEL
)
2740 /* Ignore references to labels of containing functions. */
2741 if (LABEL_REF_NONLOCAL_P (x
))
2743 /* If there are other labels following this one,
2744 replace it with the last of the consecutive labels. */
2745 for (next
= NEXT_INSN (label
); next
; next
= NEXT_INSN (next
))
2747 if (GET_CODE (next
) == CODE_LABEL
)
2749 else if (cross_jump
&& GET_CODE (next
) == INSN
2750 && (GET_CODE (PATTERN (next
)) == USE
2751 || GET_CODE (PATTERN (next
)) == CLOBBER
))
2753 else if (GET_CODE (next
) != NOTE
)
2755 else if (! cross_jump
2756 && (NOTE_LINE_NUMBER (next
) == NOTE_INSN_LOOP_BEG
2757 || NOTE_LINE_NUMBER (next
) == NOTE_INSN_FUNCTION_END
))
2760 XEXP (x
, 0) = label
;
2761 ++LABEL_NUSES (label
);
2764 if (GET_CODE (insn
) == JUMP_INSN
)
2765 JUMP_LABEL (insn
) = label
;
2766 else if (! find_reg_note (insn
, REG_LABEL
, label
))
2768 rtx next
= next_real_insn (label
);
2769 /* Don't record labels that refer to dispatch tables.
2770 This is not necessary, since the tablejump
2771 references the same label.
2772 And if we did record them, flow.c would make worse code. */
2774 || ! (GET_CODE (next
) == JUMP_INSN
2775 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
2776 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
)))
2777 REG_NOTES (insn
) = gen_rtx (EXPR_LIST
, REG_LABEL
, label
,
2784 /* Do walk the labels in a vector, but not the first operand of an
2785 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
2789 int eltnum
= code
== ADDR_DIFF_VEC
? 1 : 0;
2791 for (i
= 0; i
< XVECLEN (x
, eltnum
); i
++)
2792 mark_jump_label (XVECEXP (x
, eltnum
, i
), NULL_RTX
, cross_jump
);
2797 fmt
= GET_RTX_FORMAT (code
);
2798 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2801 mark_jump_label (XEXP (x
, i
), insn
, cross_jump
);
2802 else if (fmt
[i
] == 'E')
2805 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2806 mark_jump_label (XVECEXP (x
, i
, j
), insn
, cross_jump
);
2811 /* If all INSN does is set the pc, delete it,
2812 and delete the insn that set the condition codes for it
2813 if that's what the previous thing was. */
2819 register rtx x
= PATTERN (insn
);
2822 if (GET_CODE (x
) == SET
2823 && GET_CODE (SET_DEST (x
)) == PC
)
2825 prev
= prev_nonnote_insn (insn
);
2827 /* We assume that at this stage
2828 CC's are always set explicitly
2829 and always immediately before the jump that
2830 will use them. So if the previous insn
2831 exists to set the CC's, delete it
2832 (unless it performs auto-increments, etc.). */
2833 if (prev
&& GET_CODE (prev
) == INSN
2834 && sets_cc0_p (PATTERN (prev
)))
2836 if (sets_cc0_p (PATTERN (prev
)) > 0
2837 && !FIND_REG_INC_NOTE (prev
, NULL_RTX
))
2840 /* Otherwise, show that cc0 won't be used. */
2841 REG_NOTES (prev
) = gen_rtx (EXPR_LIST
, REG_UNUSED
,
2842 cc0_rtx
, REG_NOTES (prev
));
2848 /* If we are running before flow.c, we need do nothing since flow.c
2849 will delete the set of the condition code if it is dead. We also
2850 can't know if the register being used as the condition code is
2851 dead or not at this point.
2853 Otherwise, look at all our REG_DEAD notes. If a previous insn
2854 does nothing other than set a register that dies in this jump,
2855 we can delete the insn. */
2857 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
2861 if (REG_NOTE_KIND (note
) != REG_DEAD
2862 /* Verify that the REG_NOTE has a legal value. */
2863 || GET_CODE (XEXP (note
, 0)) != REG
)
2866 for (our_prev
= prev_nonnote_insn (insn
);
2867 our_prev
&& GET_CODE (our_prev
) == INSN
;
2868 our_prev
= prev_nonnote_insn (our_prev
))
2870 /* If we reach a SEQUENCE, it is too complex to try to
2871 do anything with it, so give up. */
2872 if (GET_CODE (PATTERN (our_prev
)) == SEQUENCE
)
2875 if (GET_CODE (PATTERN (our_prev
)) == USE
2876 && GET_CODE (XEXP (PATTERN (our_prev
), 0)) == INSN
)
2877 /* reorg creates USEs that look like this. We leave them
2878 alone because reorg needs them for its own purposes. */
2881 if (reg_set_p (XEXP (note
, 0), PATTERN (our_prev
)))
2883 if (FIND_REG_INC_NOTE (our_prev
, NULL_RTX
))
2886 if (GET_CODE (PATTERN (our_prev
)) == PARALLEL
)
2888 /* If we find a SET of something else, we can't
2893 for (i
= 0; i
< XVECLEN (PATTERN (our_prev
), 0); i
++)
2895 rtx part
= XVECEXP (PATTERN (our_prev
), 0, i
);
2897 if (GET_CODE (part
) == SET
2898 && SET_DEST (part
) != XEXP (note
, 0))
2902 if (i
== XVECLEN (PATTERN (our_prev
), 0))
2903 delete_insn (our_prev
);
2905 else if (GET_CODE (PATTERN (our_prev
)) == SET
2906 && SET_DEST (PATTERN (our_prev
)) == XEXP (note
, 0))
2907 delete_insn (our_prev
);
2912 /* If OUR_PREV references the register that dies here,
2913 it is an additional use. Hence any prior SET isn't
2915 if (reg_overlap_mentioned_p (XEXP (note
, 0),
2916 PATTERN (our_prev
)))
2922 /* Now delete the jump insn itself. */
2927 /* Delete insn INSN from the chain of insns and update label ref counts.
2928 May delete some following insns as a consequence; may even delete
2929 a label elsewhere and insns that follow it.
2931 Returns the first insn after INSN that was not deleted. */
2937 register rtx next
= NEXT_INSN (insn
);
2938 register rtx prev
= PREV_INSN (insn
);
2939 register int was_code_label
= (GET_CODE (insn
) == CODE_LABEL
);
2940 register int dont_really_delete
= 0;
2942 while (next
&& INSN_DELETED_P (next
))
2943 next
= NEXT_INSN (next
);
2945 /* This insn is already deleted => return first following nondeleted. */
2946 if (INSN_DELETED_P (insn
))
2949 /* Don't delete user-declared labels. Convert them to special NOTEs
2951 if (was_code_label
&& LABEL_NAME (insn
) != 0)
2953 PUT_CODE (insn
, NOTE
);
2954 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED_LABEL
;
2955 NOTE_SOURCE_FILE (insn
) = 0;
2956 dont_really_delete
= 1;
2959 /* Mark this insn as deleted. */
2960 INSN_DELETED_P (insn
) = 1;
2962 /* If this is an unconditional jump, delete it from the jump chain. */
2963 if (simplejump_p (insn
))
2964 delete_from_jump_chain (insn
);
2966 /* If instruction is followed by a barrier,
2967 delete the barrier too. */
2969 if (next
!= 0 && GET_CODE (next
) == BARRIER
)
2971 INSN_DELETED_P (next
) = 1;
2972 next
= NEXT_INSN (next
);
2975 /* Patch out INSN (and the barrier if any) */
2977 if (optimize
&& ! dont_really_delete
)
2981 NEXT_INSN (prev
) = next
;
2982 if (GET_CODE (prev
) == INSN
&& GET_CODE (PATTERN (prev
)) == SEQUENCE
)
2983 NEXT_INSN (XVECEXP (PATTERN (prev
), 0,
2984 XVECLEN (PATTERN (prev
), 0) - 1)) = next
;
2989 PREV_INSN (next
) = prev
;
2990 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == SEQUENCE
)
2991 PREV_INSN (XVECEXP (PATTERN (next
), 0, 0)) = prev
;
2994 if (prev
&& NEXT_INSN (prev
) == 0)
2995 set_last_insn (prev
);
2998 /* If deleting a jump, decrement the count of the label,
2999 and delete the label if it is now unused. */
3001 if (GET_CODE (insn
) == JUMP_INSN
&& JUMP_LABEL (insn
))
3002 if (--LABEL_NUSES (JUMP_LABEL (insn
)) == 0)
3004 /* This can delete NEXT or PREV,
3005 either directly if NEXT is JUMP_LABEL (INSN),
3006 or indirectly through more levels of jumps. */
3007 delete_insn (JUMP_LABEL (insn
));
3008 /* I feel a little doubtful about this loop,
3009 but I see no clean and sure alternative way
3010 to find the first insn after INSN that is not now deleted.
3011 I hope this works. */
3012 while (next
&& INSN_DELETED_P (next
))
3013 next
= NEXT_INSN (next
);
3017 while (prev
&& (INSN_DELETED_P (prev
) || GET_CODE (prev
) == NOTE
))
3018 prev
= PREV_INSN (prev
);
3020 /* If INSN was a label and a dispatch table follows it,
3021 delete the dispatch table. The tablejump must have gone already.
3022 It isn't useful to fall through into a table. */
3025 && NEXT_INSN (insn
) != 0
3026 && GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
3027 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
3028 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
3029 next
= delete_insn (NEXT_INSN (insn
));
3031 /* If INSN was a label, delete insns following it if now unreachable. */
3033 if (was_code_label
&& prev
&& GET_CODE (prev
) == BARRIER
)
3035 register RTX_CODE code
;
3037 && ((code
= GET_CODE (next
)) == INSN
3038 || code
== JUMP_INSN
|| code
== CALL_INSN
3040 || (code
== CODE_LABEL
&& INSN_DELETED_P (next
))))
3043 && NOTE_LINE_NUMBER (next
) != NOTE_INSN_FUNCTION_END
)
3044 next
= NEXT_INSN (next
);
3045 /* Keep going past other deleted labels to delete what follows. */
3046 else if (code
== CODE_LABEL
&& INSN_DELETED_P (next
))
3047 next
= NEXT_INSN (next
);
3049 /* Note: if this deletes a jump, it can cause more
3050 deletion of unreachable code, after a different label.
3051 As long as the value from this recursive call is correct,
3052 this invocation functions correctly. */
3053 next
= delete_insn (next
);
3060 /* Advance from INSN till reaching something not deleted
3061 then return that. May return INSN itself. */
3064 next_nondeleted_insn (insn
)
3067 while (INSN_DELETED_P (insn
))
3068 insn
= NEXT_INSN (insn
);
3072 /* Delete a range of insns from FROM to TO, inclusive.
3073 This is for the sake of peephole optimization, so assume
3074 that whatever these insns do will still be done by a new
3075 peephole insn that will replace them. */
3078 delete_for_peephole (from
, to
)
3079 register rtx from
, to
;
3081 register rtx insn
= from
;
3085 register rtx next
= NEXT_INSN (insn
);
3086 register rtx prev
= PREV_INSN (insn
);
3088 if (GET_CODE (insn
) != NOTE
)
3090 INSN_DELETED_P (insn
) = 1;
3092 /* Patch this insn out of the chain. */
3093 /* We don't do this all at once, because we
3094 must preserve all NOTEs. */
3096 NEXT_INSN (prev
) = next
;
3099 PREV_INSN (next
) = prev
;
3107 /* Note that if TO is an unconditional jump
3108 we *do not* delete the BARRIER that follows,
3109 since the peephole that replaces this sequence
3110 is also an unconditional jump in that case. */
3113 /* Invert the condition of the jump JUMP, and make it jump
3114 to label NLABEL instead of where it jumps now. */
3117 invert_jump (jump
, nlabel
)
3120 register rtx olabel
= JUMP_LABEL (jump
);
3122 /* We have to either invert the condition and change the label or
3123 do neither. Either operation could fail. We first try to invert
3124 the jump. If that succeeds, we try changing the label. If that fails,
3125 we invert the jump back to what it was. */
3127 if (! invert_exp (PATTERN (jump
), jump
))
3130 if (redirect_jump (jump
, nlabel
))
3133 if (! invert_exp (PATTERN (jump
), jump
))
3134 /* This should just be putting it back the way it was. */
3140 /* Invert the jump condition of rtx X contained in jump insn, INSN.
3142 Return 1 if we can do so, 0 if we cannot find a way to do so that
3143 matches a pattern. */
3146 invert_exp (x
, insn
)
3150 register RTX_CODE code
;
3154 code
= GET_CODE (x
);
3156 if (code
== IF_THEN_ELSE
)
3158 register rtx comp
= XEXP (x
, 0);
3161 /* We can do this in two ways: The preferable way, which can only
3162 be done if this is not an integer comparison, is to reverse
3163 the comparison code. Otherwise, swap the THEN-part and ELSE-part
3164 of the IF_THEN_ELSE. If we can't do either, fail. */
3166 if (can_reverse_comparison_p (comp
, insn
)
3167 && validate_change (insn
, &XEXP (x
, 0),
3168 gen_rtx (reverse_condition (GET_CODE (comp
)),
3169 GET_MODE (comp
), XEXP (comp
, 0),
3170 XEXP (comp
, 1)), 0))
3174 validate_change (insn
, &XEXP (x
, 1), XEXP (x
, 2), 1);
3175 validate_change (insn
, &XEXP (x
, 2), tem
, 1);
3176 return apply_change_group ();
3179 fmt
= GET_RTX_FORMAT (code
);
3180 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3183 if (! invert_exp (XEXP (x
, i
), insn
))
3188 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3189 if (!invert_exp (XVECEXP (x
, i
, j
), insn
))
3197 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
3198 If the old jump target label is unused as a result,
3199 it and the code following it may be deleted.
3201 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3204 The return value will be 1 if the change was made, 0 if it wasn't (this
3205 can only occur for NLABEL == 0). */
3208 redirect_jump (jump
, nlabel
)
3211 register rtx olabel
= JUMP_LABEL (jump
);
3213 if (nlabel
== olabel
)
3216 if (! redirect_exp (&PATTERN (jump
), olabel
, nlabel
, jump
))
3219 /* If this is an unconditional branch, delete it from the jump_chain of
3220 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3221 have UID's in range and JUMP_CHAIN is valid). */
3222 if (jump_chain
&& (simplejump_p (jump
)
3223 || GET_CODE (PATTERN (jump
)) == RETURN
))
3225 int label_index
= nlabel
? INSN_UID (nlabel
) : 0;
3227 delete_from_jump_chain (jump
);
3228 if (label_index
< max_jump_chain
3229 && INSN_UID (jump
) < max_jump_chain
)
3231 jump_chain
[INSN_UID (jump
)] = jump_chain
[label_index
];
3232 jump_chain
[label_index
] = jump
;
3236 JUMP_LABEL (jump
) = nlabel
;
3238 ++LABEL_NUSES (nlabel
);
3240 if (olabel
&& --LABEL_NUSES (olabel
) == 0)
3241 delete_insn (olabel
);
3246 /* Delete the instruction JUMP from any jump chain it might be on. */
3249 delete_from_jump_chain (jump
)
3253 rtx olabel
= JUMP_LABEL (jump
);
3255 /* Handle unconditional jumps. */
3256 if (jump_chain
&& olabel
!= 0
3257 && INSN_UID (olabel
) < max_jump_chain
3258 && simplejump_p (jump
))
3259 index
= INSN_UID (olabel
);
3260 /* Handle return insns. */
3261 else if (jump_chain
&& GET_CODE (PATTERN (jump
)) == RETURN
)
3265 if (jump_chain
[index
] == jump
)
3266 jump_chain
[index
] = jump_chain
[INSN_UID (jump
)];
3271 for (insn
= jump_chain
[index
];
3273 insn
= jump_chain
[INSN_UID (insn
)])
3274 if (jump_chain
[INSN_UID (insn
)] == jump
)
3276 jump_chain
[INSN_UID (insn
)] = jump_chain
[INSN_UID (jump
)];
3282 /* If NLABEL is nonzero, throughout the rtx at LOC,
3283 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
3284 zero, alter (RETURN) to (LABEL_REF NLABEL).
3286 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
3287 validity with validate_change. Convert (set (pc) (label_ref olabel))
3290 Return 0 if we found a change we would like to make but it is invalid.
3291 Otherwise, return 1. */
3294 redirect_exp (loc
, olabel
, nlabel
, insn
)
3299 register rtx x
= *loc
;
3300 register RTX_CODE code
= GET_CODE (x
);
3304 if (code
== LABEL_REF
)
3306 if (XEXP (x
, 0) == olabel
)
3309 XEXP (x
, 0) = nlabel
;
3311 return validate_change (insn
, loc
, gen_rtx (RETURN
, VOIDmode
), 0);
3315 else if (code
== RETURN
&& olabel
== 0)
3317 x
= gen_rtx (LABEL_REF
, VOIDmode
, nlabel
);
3318 if (loc
== &PATTERN (insn
))
3319 x
= gen_rtx (SET
, VOIDmode
, pc_rtx
, x
);
3320 return validate_change (insn
, loc
, x
, 0);
3323 if (code
== SET
&& nlabel
== 0 && SET_DEST (x
) == pc_rtx
3324 && GET_CODE (SET_SRC (x
)) == LABEL_REF
3325 && XEXP (SET_SRC (x
), 0) == olabel
)
3326 return validate_change (insn
, loc
, gen_rtx (RETURN
, VOIDmode
), 0);
3328 fmt
= GET_RTX_FORMAT (code
);
3329 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3332 if (! redirect_exp (&XEXP (x
, i
), olabel
, nlabel
, insn
))
3337 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3338 if (! redirect_exp (&XVECEXP (x
, i
, j
), olabel
, nlabel
, insn
))
3346 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3348 If the old jump target label (before the dispatch table) becomes unused,
3349 it and the dispatch table may be deleted. In that case, find the insn
3350 before the jump references that label and delete it and logical successors
3354 redirect_tablejump (jump
, nlabel
)
3357 register rtx olabel
= JUMP_LABEL (jump
);
3359 /* Add this jump to the jump_chain of NLABEL. */
3360 if (jump_chain
&& INSN_UID (nlabel
) < max_jump_chain
3361 && INSN_UID (jump
) < max_jump_chain
)
3363 jump_chain
[INSN_UID (jump
)] = jump_chain
[INSN_UID (nlabel
)];
3364 jump_chain
[INSN_UID (nlabel
)] = jump
;
3367 PATTERN (jump
) = gen_jump (nlabel
);
3368 JUMP_LABEL (jump
) = nlabel
;
3369 ++LABEL_NUSES (nlabel
);
3370 INSN_CODE (jump
) = -1;
3372 if (--LABEL_NUSES (olabel
) == 0)
3374 delete_labelref_insn (jump
, olabel
, 0);
3375 delete_insn (olabel
);
3379 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3380 If we found one, delete it and then delete this insn if DELETE_THIS is
3381 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3384 delete_labelref_insn (insn
, label
, delete_this
)
3391 if (GET_CODE (insn
) != NOTE
3392 && reg_mentioned_p (label
, PATTERN (insn
)))
3403 for (link
= LOG_LINKS (insn
); link
; link
= XEXP (link
, 1))
3404 if (delete_labelref_insn (XEXP (link
, 0), label
, 1))
3418 /* Like rtx_equal_p except that it considers two REGs as equal
3419 if they renumber to the same value. */
3422 rtx_renumbered_equal_p (x
, y
)
3426 register RTX_CODE code
= GET_CODE (x
);
3431 if ((code
== REG
|| (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == REG
))
3432 && (GET_CODE (y
) == REG
|| (GET_CODE (y
) == SUBREG
3433 && GET_CODE (SUBREG_REG (y
)) == REG
)))
3437 if (GET_MODE (x
) != GET_MODE (y
))
3440 /* If we haven't done any renumbering, don't
3441 make any assumptions. */
3442 if (reg_renumber
== 0)
3443 return rtx_equal_p (x
, y
);
3447 i
= REGNO (SUBREG_REG (x
));
3448 if (reg_renumber
[i
] >= 0)
3449 i
= reg_renumber
[i
];
3450 i
+= SUBREG_WORD (x
);
3455 if (reg_renumber
[i
] >= 0)
3456 i
= reg_renumber
[i
];
3458 if (GET_CODE (y
) == SUBREG
)
3460 j
= REGNO (SUBREG_REG (y
));
3461 if (reg_renumber
[j
] >= 0)
3462 j
= reg_renumber
[j
];
3463 j
+= SUBREG_WORD (y
);
3468 if (reg_renumber
[j
] >= 0)
3469 j
= reg_renumber
[j
];
3473 /* Now we have disposed of all the cases
3474 in which different rtx codes can match. */
3475 if (code
!= GET_CODE (y
))
3486 return XINT (x
, 0) == XINT (y
, 0);
3489 /* We can't assume nonlocal labels have their following insns yet. */
3490 if (LABEL_REF_NONLOCAL_P (x
) || LABEL_REF_NONLOCAL_P (y
))
3491 return XEXP (x
, 0) == XEXP (y
, 0);
3492 /* Two label-refs are equivalent if they point at labels
3493 in the same position in the instruction stream. */
3494 return (next_real_insn (XEXP (x
, 0))
3495 == next_real_insn (XEXP (y
, 0)));
3498 return XSTR (x
, 0) == XSTR (y
, 0);
3501 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3503 if (GET_MODE (x
) != GET_MODE (y
))
3506 /* Compare the elements. If any pair of corresponding elements
3507 fail to match, return 0 for the whole things. */
3509 fmt
= GET_RTX_FORMAT (code
);
3510 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3516 if (XWINT (x
, i
) != XWINT (y
, i
))
3521 if (XINT (x
, i
) != XINT (y
, i
))
3526 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
3531 if (! rtx_renumbered_equal_p (XEXP (x
, i
), XEXP (y
, i
)))
3536 if (XEXP (x
, i
) != XEXP (y
, i
))
3543 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
3545 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
3546 if (!rtx_renumbered_equal_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
)))
3557 /* If X is a hard register or equivalent to one or a subregister of one,
3558 return the hard register number. If X is a pseudo register that was not
3559 assigned a hard register, return the pseudo register number. Otherwise,
3560 return -1. Any rtx is valid for X. */
3566 if (GET_CODE (x
) == REG
)
3568 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
&& reg_renumber
[REGNO (x
)] >= 0)
3569 return reg_renumber
[REGNO (x
)];
3572 if (GET_CODE (x
) == SUBREG
)
3574 int base
= true_regnum (SUBREG_REG (x
));
3575 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
3576 return SUBREG_WORD (x
) + base
;
3581 /* Optimize code of the form:
3583 for (x = a[i]; x; ...)
3585 for (x = a[i]; x; ...)
3589 Loop optimize will change the above code into
3593 { ...; if (! (x = ...)) break; }
3596 { ...; if (! (x = ...)) break; }
3599 In general, if the first test fails, the program can branch
3600 directly to `foo' and skip the second try which is doomed to fail.
3601 We run this after loop optimization and before flow analysis. */
3603 /* When comparing the insn patterns, we track the fact that different
3604 pseudo-register numbers may have been used in each computation.
3605 The following array stores an equivalence -- same_regs[I] == J means
3606 that pseudo register I was used in the first set of tests in a context
3607 where J was used in the second set. We also count the number of such
3608 pending equivalences. If nonzero, the expressions really aren't the
3611 static short *same_regs
;
3613 static int num_same_regs
;
3615 /* Track any registers modified between the target of the first jump and
3616 the second jump. They never compare equal. */
3618 static char *modified_regs
;
3620 /* Record if memory was modified. */
3622 static int modified_mem
;
3624 /* Called via note_stores on each insn between the target of the first
3625 branch and the second branch. It marks any changed registers. */
3628 mark_modified_reg (dest
, x
)
3634 if (GET_CODE (dest
) == SUBREG
)
3635 dest
= SUBREG_REG (dest
);
3637 if (GET_CODE (dest
) == MEM
)
3640 if (GET_CODE (dest
) != REG
)
3643 regno
= REGNO (dest
);
3644 if (regno
>= FIRST_PSEUDO_REGISTER
)
3645 modified_regs
[regno
] = 1;
3647 for (i
= 0; i
< HARD_REGNO_NREGS (regno
, GET_MODE (dest
)); i
++)
3648 modified_regs
[regno
+ i
] = 1;
3651 /* F is the first insn in the chain of insns. */
/* NOTE(review): garbled extraction -- "36xx" tokens are line-number
   artifacts, and many original lines are missing throughout this function
   (the return type, the K&R parameter declarations, locals such as `i',
   `all_reset' and `new_label', braces, `continue'/`break' statements, the
   resets of `modified_mem' and `num_same_regs', and the body of the
   quick-reset initialization loop).  Code left byte-identical; only
   comments added.  */
3654 thread_jumps (f
, max_reg
, verbose
)
3659 /* Basic algorithm is to find a conditional branch,
3660 the label it may branch to, and the branch after
3661 that label. If the two branches test the same condition,
3662 walk back from both branch paths until the insn patterns
3663 differ, or code labels are hit. If we make it back to
3664 the target of the first branch, then we know that the first branch
3665 will either always succeed or always fail depending on the relative
3666 senses of the two branches. So adjust the first branch accordingly
3669 rtx label
, b1
, b2
, t1
, t2
;
3670 enum rtx_code code1
, code2
;
3671 rtx b1op0
, b1op1
, b2op0
, b2op1
;
3676 /* Allocate register tables and quick-reset table. */
3677 modified_regs
= (char *) alloca (max_reg
* sizeof (char));
3678 same_regs
= (short *) alloca (max_reg
* sizeof (short));
3679 all_reset
= (short *) alloca (max_reg
* sizeof (short));
3680 for (i
= 0; i
< max_reg
; i
++)
/* (Loop body missing from this extraction -- presumably it fills
   `all_reset' with -1 so `same_regs' can be reset wholesale by the
   bcopy below; TODO confirm.)  */
/* Scan every insn looking for a candidate first branch B1.  */
3687 for (b1
= f
; b1
; b1
= NEXT_INSN (b1
))
3689 /* Get to a candidate branch insn. */
3690 if (GET_CODE (b1
) != JUMP_INSN
3691 || ! condjump_p (b1
) || simplejump_p (b1
)
3692 || JUMP_LABEL (b1
) == 0)
/* Reset the per-candidate modification and equivalence tables.  */
3695 bzero (modified_regs
, max_reg
* sizeof (char));
3698 bcopy (all_reset
, same_regs
, max_reg
* sizeof (short));
3701 label
= JUMP_LABEL (b1
);
3703 /* Look for a branch after the target. Record any registers and
3704 memory modified between the target and the branch. Stop when we
3705 get to a label since we can't know what was changed there. */
3706 for (b2
= NEXT_INSN (label
); b2
; b2
= NEXT_INSN (b2
))
3708 if (GET_CODE (b2
) == CODE_LABEL
)
3711 else if (GET_CODE (b2
) == JUMP_INSN
)
3713 /* If this is an unconditional jump and is the only use of
3714 its target label, we can follow it. */
3715 if (simplejump_p (b2
)
3716 && JUMP_LABEL (b2
) != 0
3717 && LABEL_NUSES (JUMP_LABEL (b2
)) == 1)
3719 b2
= JUMP_LABEL (b2
);
3726 if (GET_CODE (b2
) != CALL_INSN
&& GET_CODE (b2
) != INSN
)
/* A call clobbers every call-used, non-fixed hard register (except the
   stack, frame, and arg pointers); mark them all modified.  */
3729 if (GET_CODE (b2
) == CALL_INSN
)
3732 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3733 if (call_used_regs
[i
] && ! fixed_regs
[i
]
3734 && i
!= STACK_POINTER_REGNUM
3735 && i
!= FRAME_POINTER_REGNUM
3736 && i
!= ARG_POINTER_REGNUM
)
3737 modified_regs
[i
] = 1;
/* Record every store this insn makes via mark_modified_reg.  */
3740 note_stores (PATTERN (b2
), mark_modified_reg
);
3743 /* Check the next candidate branch insn from the label
3746 || GET_CODE (b2
) != JUMP_INSN
3748 || ! condjump_p (b2
)
3749 || simplejump_p (b2
))
3752 /* Get the comparison codes and operands, reversing the
3753 codes if appropriate. If we don't have comparison codes,
3754 we can't do anything. */
3755 b1op0
= XEXP (XEXP (SET_SRC (PATTERN (b1
)), 0), 0);
3756 b1op1
= XEXP (XEXP (SET_SRC (PATTERN (b1
)), 0), 1);
3757 code1
= GET_CODE (XEXP (SET_SRC (PATTERN (b1
)), 0));
3758 if (XEXP (SET_SRC (PATTERN (b1
)), 1) == pc_rtx
)
3759 code1
= reverse_condition (code1
);
3761 b2op0
= XEXP (XEXP (SET_SRC (PATTERN (b2
)), 0), 0);
3762 b2op1
= XEXP (XEXP (SET_SRC (PATTERN (b2
)), 0), 1);
3763 code2
= GET_CODE (XEXP (SET_SRC (PATTERN (b2
)), 0));
3764 if (XEXP (SET_SRC (PATTERN (b2
)), 1) == pc_rtx
)
3765 code2
= reverse_condition (code2
);
3767 /* If they test the same things and knowing that B1 branches
3768 tells us whether or not B2 branches, check if we
3769 can thread the branch. */
3770 if (rtx_equal_for_thread_p (b1op0
, b2op0
, b2
)
3771 && rtx_equal_for_thread_p (b1op1
, b2op1
, b2
)
3772 && (comparison_dominates_p (code1
, code2
)
3773 || comparison_dominates_p (code1
, reverse_condition (code2
))))
/* Walk backwards from both branches in lockstep, comparing insn
   patterns, until they diverge or we reach the first branch.  */
3775 t1
= prev_nonnote_insn (b1
);
3776 t2
= prev_nonnote_insn (b2
);
3778 while (t1
!= 0 && t2
!= 0)
3780 if (t1
== 0 || t2
== 0)
3785 /* We have reached the target of the first branch.
3786 If there are no pending register equivalents,
3787 we know that this branch will either always
3788 succeed (if the senses of the two branches are
3789 the same) or always fail (if not). */
3792 if (num_same_regs
!= 0)
3795 if (comparison_dominates_p (code1
, code2
))
3796 new_label
= JUMP_LABEL (b2
);
3798 new_label
= get_label_after (b2
);
/* Retarget B1 at the label it is now known to reach.  */
3800 if (JUMP_LABEL (b1
) != new_label
3801 && redirect_jump (b1
, new_label
))
3806 /* If either of these is not a normal insn (it might be
3807 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
3808 have already been skipped above.) Similarly, fail
3809 if the insns are different. */
3810 if (GET_CODE (t1
) != INSN
|| GET_CODE (t2
) != INSN
3811 || recog_memoized (t1
) != recog_memoized (t2
)
3812 || ! rtx_equal_for_thread_p (PATTERN (t1
),
3816 t1
= prev_nonnote_insn (t1
);
3817 t2
= prev_nonnote_insn (t2
);
3824 /* This is like RTX_EQUAL_P except that it knows about our handling of
3825 possibly equivalent registers and knows to consider volatile and
3826 modified objects as not equal.
3828 YINSN is the insn containing Y. */
3831 rtx_equal_for_thread_p (x
, y
, yinsn
)
3837 register enum rtx_code code
;
3840 code
= GET_CODE (x
);
3841 /* Rtx's of different codes cannot be equal. */
3842 if (code
!= GET_CODE (y
))
3845 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
3846 (REG:SI x) and (REG:HI x) are NOT equivalent. */
3848 if (GET_MODE (x
) != GET_MODE (y
))
3851 /* Handle special-cases first. */
3855 if (REGNO (x
) == REGNO (y
) && ! modified_regs
[REGNO (x
)])
3858 /* If neither is user variable or hard register, check for possible
3860 if (REG_USERVAR_P (x
) || REG_USERVAR_P (y
)
3861 || REGNO (x
) < FIRST_PSEUDO_REGISTER
3862 || REGNO (y
) < FIRST_PSEUDO_REGISTER
)
3865 if (same_regs
[REGNO (x
)] == -1)
3867 same_regs
[REGNO (x
)] = REGNO (y
);
3870 /* If this is the first time we are seeing a register on the `Y'
3871 side, see if it is the last use. If not, we can't thread the
3872 jump, so mark it as not equivalent. */
3873 if (regno_last_uid
[REGNO (y
)] != INSN_UID (yinsn
))
3879 return (same_regs
[REGNO (x
)] == REGNO (y
));
3884 /* If memory modified or either volatile, not equivalent.
3885 Else, check address. */
3886 if (modified_mem
|| MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
3889 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
3892 if (MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
3898 /* Cancel a pending `same_regs' if setting equivalenced registers.
3899 Then process source. */
3900 if (GET_CODE (SET_DEST (x
)) == REG
3901 && GET_CODE (SET_DEST (y
)) == REG
)
3903 if (same_regs
[REGNO (SET_DEST (x
))] == REGNO (SET_DEST (y
)))
3905 same_regs
[REGNO (SET_DEST (x
))] = -1;
3908 else if (REGNO (SET_DEST (x
)) != REGNO (SET_DEST (y
)))
3912 if (rtx_equal_for_thread_p (SET_DEST (x
), SET_DEST (y
), yinsn
) == 0)
3915 return rtx_equal_for_thread_p (SET_SRC (x
), SET_SRC (y
), yinsn
);
3918 return XEXP (x
, 0) == XEXP (y
, 0);
3921 return XSTR (x
, 0) == XSTR (y
, 0);
3927 fmt
= GET_RTX_FORMAT (code
);
3928 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3933 if (XWINT (x
, i
) != XWINT (y
, i
))
3939 if (XINT (x
, i
) != XINT (y
, i
))
3945 /* Two vectors must have the same length. */
3946 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
3949 /* And the corresponding elements must match. */
3950 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3951 if (rtx_equal_for_thread_p (XVECEXP (x
, i
, j
),
3952 XVECEXP (y
, i
, j
), yinsn
) == 0)
3957 if (rtx_equal_for_thread_p (XEXP (x
, i
), XEXP (y
, i
), yinsn
) == 0)
3963 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
3968 /* These are just backpointers, so they don't matter. */
3974 /* It is believed that rtx's at this level will never
3975 contain anything but integers and other rtx's,
3976 except for within LABEL_REFs and SYMBOL_REFs. */