1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 /* This is the jump-optimization pass of the compiler.
22 It is run two or three times: once before cse, sometimes once after cse,
23 and once after reload (before final).
25 jump_optimize deletes unreachable code and labels that are not used.
26 It also deletes jumps that jump to the following insn,
27 and simplifies jumps around unconditional jumps and jumps
28 to unconditional jumps.
30 Each CODE_LABEL has a count of the times it is used
31 stored in the LABEL_NUSES internal field, and each JUMP_INSN
32 has one label that it refers to stored in the
33 JUMP_LABEL internal field. With this we can detect labels that
34 become unused because of the deletion of all the jumps that
35 formerly used them. The JUMP_LABEL info is sometimes looked
38 Optionally, cross-jumping can be done. Currently it is done
39 only the last time (when after reload and before final).
40 In fact, the code for cross-jumping now assumes that register
41 allocation has been done, since it uses `rtx_renumbered_equal_p'.
43 Jump optimization is done after cse when cse's constant-propagation
44 causes jumps to become unconditional or to be deleted.
46 Unreachable loops are not detected here, because the labels
47 have references and the insns appear reachable from the labels.
48 find_basic_blocks in flow.c finds and deletes such loops.
50 The subroutines delete_insn, redirect_jump, and invert_jump are used
51 from other passes as well. */
56 #include "hard-reg-set.h"
59 #include "insn-config.h"
60 #include "insn-flags.h"
63 /* ??? Eventually must record somehow the labels used by jumps
64 from nested functions. */
65 /* Pre-record the next or previous real insn for each label?
66 No, this pass is very fast anyway. */
67 /* Condense consecutive labels?
68 This would make life analysis faster, maybe. */
69 /* Optimize jump y; x: ... y: jumpif... x?
70 Don't know if it is worth bothering with. */
71 /* Optimize two cases of conditional jump to conditional jump?
72 This can never delete any instruction or make anything dead,
73 or even change what is live at any point.
74 So perhaps let combiner do it. */
/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */

static rtx *jump_chain;
86 /* List of labels referred to from initializers.
87 These can never be deleted. */
/* Maximum index in jump_chain; also the number of elements allocated
   for the jump_chain vector.  */

static int max_jump_chain;
94 /* Set nonzero by jump_optimize if control can fall through
95 to the end of the function. */
/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C,
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  Set nonzero when cross-jumping with attention to death notes
   is requested (CROSS_JUMP == 2).  */

static int cross_jump_death_matters = 0;
/* Forward declarations of the routines defined in this file
   (old-style, pre-ANSI declarations: no parameter lists).  */

static int duplicate_loop_exit_test ();
void redirect_tablejump ();
static int delete_labelref_insn ();
static void mark_jump_label ();
void delete_computation ();
static void delete_from_jump_chain ();
static int tension_vector_labels ();
static void find_cross_jump ();
static void do_cross_jump ();
static int jump_back_p ();

extern rtx gen_jump ();
120 /* Delete no-op jumps and optimize jumps to jumps
121 and jumps around jumps.
122 Delete unused labels and unreachable code.
124 If CROSS_JUMP is 1, detect matching code
125 before a jump and its destination and unify them.
126 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
128 If NOOP_MOVES is nonzero, delete no-op move insns.
130 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
131 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
133 If `optimize' is zero, don't change any code,
134 just determine whether control drops off the end of the function.
135 This case occurs when we have -W and not -O.
136 It works because `delete_insn' checks the value of `optimize'
137 and refrains from actually deleting when that is 0. */
140 jump_optimize (f
, cross_jump
, noop_moves
, after_regscan
)
146 register rtx insn
, next
;
152 cross_jump_death_matters
= (cross_jump
== 2);
154 /* Initialize LABEL_NUSES and JUMP_LABEL fields. */
156 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
158 if (GET_CODE (insn
) == CODE_LABEL
)
159 LABEL_NUSES (insn
) = (LABEL_PRESERVE_P (insn
) != 0);
160 else if (GET_CODE (insn
) == JUMP_INSN
)
161 JUMP_LABEL (insn
) = 0;
162 if (INSN_UID (insn
) > max_uid
)
163 max_uid
= INSN_UID (insn
);
168 /* Delete insns following barriers, up to next label. */
170 for (insn
= f
; insn
;)
172 if (GET_CODE (insn
) == BARRIER
)
174 insn
= NEXT_INSN (insn
);
175 while (insn
!= 0 && GET_CODE (insn
) != CODE_LABEL
)
177 if (GET_CODE (insn
) == NOTE
178 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
)
179 insn
= NEXT_INSN (insn
);
181 insn
= delete_insn (insn
);
183 /* INSN is now the code_label. */
186 insn
= NEXT_INSN (insn
);
189 /* Leave some extra room for labels and duplicate exit test insns
191 max_jump_chain
= max_uid
* 14 / 10;
192 jump_chain
= (rtx
*) alloca (max_jump_chain
* sizeof (rtx
));
193 bzero (jump_chain
, max_jump_chain
* sizeof (rtx
));
195 /* Mark the label each jump jumps to.
196 Combine consecutive labels, and count uses of labels.
198 For each label, make a chain (using `jump_chain')
199 of all the *unconditional* jumps that jump to it;
200 also make a chain of all returns. */
202 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
203 if ((GET_CODE (insn
) == JUMP_INSN
|| GET_CODE (insn
) == INSN
204 || GET_CODE (insn
) == CALL_INSN
)
205 && ! INSN_DELETED_P (insn
))
207 mark_jump_label (PATTERN (insn
), insn
, cross_jump
);
208 if (GET_CODE (insn
) == JUMP_INSN
)
210 if (JUMP_LABEL (insn
) != 0 && simplejump_p (insn
))
212 jump_chain
[INSN_UID (insn
)]
213 = jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
214 jump_chain
[INSN_UID (JUMP_LABEL (insn
))] = insn
;
216 if (GET_CODE (PATTERN (insn
)) == RETURN
)
218 jump_chain
[INSN_UID (insn
)] = jump_chain
[0];
219 jump_chain
[0] = insn
;
224 /* Keep track of labels used from static data;
225 they cannot ever be deleted. */
227 for (insn
= forced_labels
; insn
; insn
= XEXP (insn
, 1))
228 LABEL_NUSES (XEXP (insn
, 0))++;
230 /* Delete all labels already not referenced.
231 Also find the last insn. */
234 for (insn
= f
; insn
; )
236 if (GET_CODE (insn
) == CODE_LABEL
&& LABEL_NUSES (insn
) == 0)
237 insn
= delete_insn (insn
);
241 insn
= NEXT_INSN (insn
);
247 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
248 If so record that this function can drop off the end. */
254 /* One label can follow the end-note: the return label. */
255 && ((GET_CODE (insn
) == CODE_LABEL
&& n_labels
-- > 0)
256 /* Ordinary insns can follow it if returning a structure. */
257 || GET_CODE (insn
) == INSN
258 /* If machine uses explicit RETURN insns, no epilogue,
259 then one of them follows the note. */
260 || (GET_CODE (insn
) == JUMP_INSN
261 && GET_CODE (PATTERN (insn
)) == RETURN
)
262 /* Other kinds of notes can follow also. */
263 || (GET_CODE (insn
) == NOTE
264 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
)))
265 insn
= PREV_INSN (insn
);
268 /* Report if control can fall through at the end of the function. */
269 if (insn
&& GET_CODE (insn
) == NOTE
270 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_END
271 && ! INSN_DELETED_P (insn
))
274 /* Zero the "deleted" flag of all the "deleted" insns. */
275 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
276 INSN_DELETED_P (insn
) = 0;
283 /* If we fall through to the epilogue, see if we can insert a RETURN insn
284 in front of it. If the machine allows it at this point (we might be
285 after reload for a leaf routine), it will improve optimization for it
287 insn
= get_last_insn ();
288 while (insn
&& GET_CODE (insn
) == NOTE
)
289 insn
= PREV_INSN (insn
);
291 if (insn
&& GET_CODE (insn
) != BARRIER
)
293 emit_jump_insn (gen_return ());
300 for (insn
= f
; insn
; )
302 next
= NEXT_INSN (insn
);
304 if (GET_CODE (insn
) == INSN
)
306 register rtx body
= PATTERN (insn
);
308 /* Combine stack_adjusts with following push_insns. */
310 if (GET_CODE (body
) == SET
311 && SET_DEST (body
) == stack_pointer_rtx
312 && GET_CODE (SET_SRC (body
)) == PLUS
313 && XEXP (SET_SRC (body
), 0) == stack_pointer_rtx
314 && GET_CODE (XEXP (SET_SRC (body
), 1)) == CONST_INT
315 && INTVAL (XEXP (SET_SRC (body
), 1)) > 0)
318 rtx stack_adjust_insn
= insn
;
319 int stack_adjust_amount
= INTVAL (XEXP (SET_SRC (body
), 1));
320 int total_pushed
= 0;
323 /* Find all successive push insns. */
325 /* Don't convert more than three pushes;
326 that starts adding too many displaced addresses
327 and the whole thing starts becoming a losing
332 p
= next_nonnote_insn (p
);
333 if (p
== 0 || GET_CODE (p
) != INSN
)
336 if (GET_CODE (pbody
) != SET
)
338 dest
= SET_DEST (pbody
);
339 /* Allow a no-op move between the adjust and the push. */
340 if (GET_CODE (dest
) == REG
341 && GET_CODE (SET_SRC (pbody
)) == REG
342 && REGNO (dest
) == REGNO (SET_SRC (pbody
)))
344 if (! (GET_CODE (dest
) == MEM
345 && GET_CODE (XEXP (dest
, 0)) == POST_INC
346 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
))
349 if (total_pushed
+ GET_MODE_SIZE (SET_DEST (pbody
))
350 > stack_adjust_amount
)
352 total_pushed
+= GET_MODE_SIZE (SET_DEST (pbody
));
355 /* Discard the amount pushed from the stack adjust;
356 maybe eliminate it entirely. */
357 if (total_pushed
>= stack_adjust_amount
)
359 delete_insn (stack_adjust_insn
);
360 total_pushed
= stack_adjust_amount
;
363 XEXP (SET_SRC (PATTERN (stack_adjust_insn
)), 1)
364 = GEN_INT (stack_adjust_amount
- total_pushed
);
366 /* Change the appropriate push insns to ordinary stores. */
368 while (total_pushed
> 0)
371 p
= next_nonnote_insn (p
);
372 if (GET_CODE (p
) != INSN
)
375 if (GET_CODE (pbody
) == SET
)
377 dest
= SET_DEST (pbody
);
378 if (! (GET_CODE (dest
) == MEM
379 && GET_CODE (XEXP (dest
, 0)) == POST_INC
380 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
))
382 total_pushed
-= GET_MODE_SIZE (SET_DEST (pbody
));
383 /* If this push doesn't fully fit in the space
384 of the stack adjust that we deleted,
385 make another stack adjust here for what we
386 didn't use up. There should be peepholes
387 to recognize the resulting sequence of insns. */
388 if (total_pushed
< 0)
390 emit_insn_before (gen_add2_insn (stack_pointer_rtx
,
391 GEN_INT (- total_pushed
)),
396 = plus_constant (stack_pointer_rtx
, total_pushed
);
401 /* Detect and delete no-op move instructions
402 resulting from not allocating a parameter in a register. */
404 if (GET_CODE (body
) == SET
405 && (SET_DEST (body
) == SET_SRC (body
)
406 || (GET_CODE (SET_DEST (body
)) == MEM
407 && GET_CODE (SET_SRC (body
)) == MEM
408 && rtx_equal_p (SET_SRC (body
), SET_DEST (body
))))
409 && ! (GET_CODE (SET_DEST (body
)) == MEM
410 && MEM_VOLATILE_P (SET_DEST (body
)))
411 && ! (GET_CODE (SET_SRC (body
)) == MEM
412 && MEM_VOLATILE_P (SET_SRC (body
))))
415 /* Detect and ignore no-op move instructions
416 resulting from smart or fortuitous register allocation. */
418 else if (GET_CODE (body
) == SET
)
420 int sreg
= true_regnum (SET_SRC (body
));
421 int dreg
= true_regnum (SET_DEST (body
));
423 if (sreg
== dreg
&& sreg
>= 0)
425 else if (sreg
>= 0 && dreg
>= 0)
428 rtx tem
= find_equiv_reg (NULL_RTX
, insn
, 0,
429 sreg
, NULL_PTR
, dreg
,
430 GET_MODE (SET_SRC (body
)));
432 #ifdef PRESERVE_DEATH_INFO_REGNO_P
433 /* Deleting insn could lose a death-note for SREG or DREG
434 so don't do it if final needs accurate death-notes. */
435 if (! PRESERVE_DEATH_INFO_REGNO_P (sreg
)
436 && ! PRESERVE_DEATH_INFO_REGNO_P (dreg
))
439 /* DREG may have been the target of a REG_DEAD note in
440 the insn which makes INSN redundant. If so, reorg
441 would still think it is dead. So search for such a
442 note and delete it if we find it. */
443 for (trial
= prev_nonnote_insn (insn
);
444 trial
&& GET_CODE (trial
) != CODE_LABEL
;
445 trial
= prev_nonnote_insn (trial
))
446 if (find_regno_note (trial
, REG_DEAD
, dreg
))
448 remove_death (dreg
, trial
);
453 && GET_MODE (tem
) == GET_MODE (SET_DEST (body
)))
457 else if (dreg
>= 0 && CONSTANT_P (SET_SRC (body
))
458 && find_equiv_reg (SET_SRC (body
), insn
, 0, dreg
,
460 GET_MODE (SET_DEST (body
))))
462 /* This handles the case where we have two consecutive
463 assignments of the same constant to pseudos that didn't
464 get a hard reg. Each SET from the constant will be
465 converted into a SET of the spill register and an
466 output reload will be made following it. This produces
467 two loads of the same constant into the same spill
472 /* Look back for a death note for the first reg.
473 If there is one, it is no longer accurate. */
474 while (in_insn
&& GET_CODE (in_insn
) != CODE_LABEL
)
476 if ((GET_CODE (in_insn
) == INSN
477 || GET_CODE (in_insn
) == JUMP_INSN
)
478 && find_regno_note (in_insn
, REG_DEAD
, dreg
))
480 remove_death (dreg
, in_insn
);
483 in_insn
= PREV_INSN (in_insn
);
486 /* Delete the second load of the value. */
490 else if (GET_CODE (body
) == PARALLEL
)
492 /* If each part is a set between two identical registers or
493 a USE or CLOBBER, delete the insn. */
497 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
499 tem
= XVECEXP (body
, 0, i
);
500 if (GET_CODE (tem
) == USE
|| GET_CODE (tem
) == CLOBBER
)
503 if (GET_CODE (tem
) != SET
504 || (sreg
= true_regnum (SET_SRC (tem
))) < 0
505 || (dreg
= true_regnum (SET_DEST (tem
))) < 0
513 #if !BYTES_BIG_ENDIAN /* Not worth the hair to detect this
514 in the big-endian case. */
515 /* Also delete insns to store bit fields if they are no-ops. */
516 else if (GET_CODE (body
) == SET
517 && GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
518 && XEXP (SET_DEST (body
), 2) == const0_rtx
519 && XEXP (SET_DEST (body
), 0) == SET_SRC (body
)
520 && ! (GET_CODE (SET_SRC (body
)) == MEM
521 && MEM_VOLATILE_P (SET_SRC (body
))))
523 #endif /* not BYTES_BIG_ENDIAN */
528 /* If we haven't yet gotten to reload and we have just run regscan,
529 delete any insn that sets a register that isn't used elsewhere.
530 This helps some of the optimizations below by having less insns
531 being jumped around. */
533 if (! reload_completed
&& after_regscan
)
534 for (insn
= f
; insn
; insn
= next
)
536 rtx set
= single_set (insn
);
538 next
= NEXT_INSN (insn
);
540 if (set
&& GET_CODE (SET_DEST (set
)) == REG
541 && REGNO (SET_DEST (set
)) >= FIRST_PSEUDO_REGISTER
542 && regno_first_uid
[REGNO (SET_DEST (set
))] == INSN_UID (insn
)
543 && regno_last_uid
[REGNO (SET_DEST (set
))] == INSN_UID (insn
)
544 && ! side_effects_p (SET_SRC (set
)))
548 /* Now iterate optimizing jumps until nothing changes over one pass. */
554 for (insn
= f
; insn
; insn
= next
)
557 rtx temp
, temp1
, temp2
, temp3
, temp4
, temp5
, temp6
;
559 int this_is_simplejump
, this_is_condjump
, reversep
;
561 /* If NOT the first iteration, if this is the last jump pass
562 (just before final), do the special peephole optimizations.
563 Avoiding the first iteration gives ordinary jump opts
564 a chance to work before peephole opts. */
566 if (reload_completed
&& !first
&& !flag_no_peephole
)
567 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
)
571 /* That could have deleted some insns after INSN, so check now
572 what the following insn is. */
574 next
= NEXT_INSN (insn
);
576 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
577 jump. Try to optimize by duplicating the loop exit test if so.
578 This is only safe immediately after regscan, because it uses
579 the values of regno_first_uid and regno_last_uid. */
580 if (after_regscan
&& GET_CODE (insn
) == NOTE
581 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
582 && (temp1
= next_nonnote_insn (insn
)) != 0
583 && simplejump_p (temp1
))
585 temp
= PREV_INSN (insn
);
586 if (duplicate_loop_exit_test (insn
))
589 next
= NEXT_INSN (temp
);
594 if (GET_CODE (insn
) != JUMP_INSN
)
597 this_is_simplejump
= simplejump_p (insn
);
598 this_is_condjump
= condjump_p (insn
);
600 /* Tension the labels in dispatch tables. */
602 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
)
603 changed
|= tension_vector_labels (PATTERN (insn
), 0);
604 if (GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
605 changed
|= tension_vector_labels (PATTERN (insn
), 1);
607 /* If a dispatch table always goes to the same place,
608 get rid of it and replace the insn that uses it. */
610 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
611 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
614 rtx pat
= PATTERN (insn
);
615 int diff_vec_p
= GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
;
616 int len
= XVECLEN (pat
, diff_vec_p
);
617 rtx dispatch
= prev_real_insn (insn
);
619 for (i
= 0; i
< len
; i
++)
620 if (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)
621 != XEXP (XVECEXP (pat
, diff_vec_p
, 0), 0))
625 && GET_CODE (dispatch
) == JUMP_INSN
626 && JUMP_LABEL (dispatch
) != 0
627 /* Don't mess with a casesi insn. */
628 && !(GET_CODE (PATTERN (dispatch
)) == SET
629 && (GET_CODE (SET_SRC (PATTERN (dispatch
)))
631 && next_real_insn (JUMP_LABEL (dispatch
)) == insn
)
633 redirect_tablejump (dispatch
,
634 XEXP (XVECEXP (pat
, diff_vec_p
, 0), 0));
639 reallabelprev
= prev_active_insn (JUMP_LABEL (insn
));
641 /* If a jump references the end of the function, try to turn
642 it into a RETURN insn, possibly a conditional one. */
643 if (JUMP_LABEL (insn
)
644 && next_active_insn (JUMP_LABEL (insn
)) == 0)
645 changed
|= redirect_jump (insn
, NULL_RTX
);
647 /* Detect jump to following insn. */
648 if (reallabelprev
== insn
&& condjump_p (insn
))
655 /* If we have an unconditional jump preceded by a USE, try to put
656 the USE before the target and jump there. This simplifies many
657 of the optimizations below since we don't have to worry about
658 dealing with these USE insns. We only do this if the label
659 being branch to already has the identical USE or if code
660 never falls through to that label. */
662 if (this_is_simplejump
663 && (temp
= prev_nonnote_insn (insn
)) != 0
664 && GET_CODE (temp
) == INSN
&& GET_CODE (PATTERN (temp
)) == USE
665 && (temp1
= prev_nonnote_insn (JUMP_LABEL (insn
))) != 0
666 && (GET_CODE (temp1
) == BARRIER
667 || (GET_CODE (temp1
) == INSN
668 && rtx_equal_p (PATTERN (temp
), PATTERN (temp1
)))))
670 if (GET_CODE (temp1
) == BARRIER
)
672 emit_insn_after (PATTERN (temp
), temp1
);
673 temp1
= NEXT_INSN (temp1
);
677 redirect_jump (insn
, get_label_before (temp1
));
678 reallabelprev
= prev_real_insn (temp1
);
682 /* Simplify if (...) x = a; else x = b; by converting it
683 to x = b; if (...) x = a;
684 if B is sufficiently simple, the test doesn't involve X,
685 and nothing in the test modifies B or X.
687 If we have small register classes, we also can't do this if X
690 If the "x = b;" insn has any REG_NOTES, we don't do this because
691 of the possibility that we are running after CSE and there is a
692 REG_EQUAL note that is only valid if the branch has already been
693 taken. If we move the insn with the REG_EQUAL note, we may
694 fold the comparison to always be false in a later CSE pass.
695 (We could also delete the REG_NOTES when moving the insn, but it
696 seems simpler to not move it.) An exception is that we can move
697 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
698 value is the same as "b".
700 INSN is the branch over the `else' part.
704 TEMP to the jump insn preceding "x = a;"
706 TEMP2 to the insn that sets "x = b;"
707 TEMP3 to the insn that sets "x = a;"
708 TEMP4 to the set of "x = b"; */
710 if (this_is_simplejump
711 && (temp3
= prev_active_insn (insn
)) != 0
712 && GET_CODE (temp3
) == INSN
713 && (temp4
= single_set (temp3
)) != 0
714 && GET_CODE (temp1
= SET_DEST (temp4
)) == REG
715 #ifdef SMALL_REGISTER_CLASSES
716 && REGNO (temp1
) >= FIRST_PSEUDO_REGISTER
718 && (temp2
= next_active_insn (insn
)) != 0
719 && GET_CODE (temp2
) == INSN
720 && (temp4
= single_set (temp2
)) != 0
721 && rtx_equal_p (SET_DEST (temp4
), temp1
)
722 && (GET_CODE (SET_SRC (temp4
)) == REG
723 || GET_CODE (SET_SRC (temp4
)) == SUBREG
724 || CONSTANT_P (SET_SRC (temp4
)))
725 && (REG_NOTES (temp2
) == 0
726 || ((REG_NOTE_KIND (REG_NOTES (temp2
)) == REG_EQUAL
727 || REG_NOTE_KIND (REG_NOTES (temp2
)) == REG_EQUIV
)
728 && XEXP (REG_NOTES (temp2
), 1) == 0
729 && rtx_equal_p (XEXP (REG_NOTES (temp2
), 0),
731 && (temp
= prev_active_insn (temp3
)) != 0
732 && condjump_p (temp
) && ! simplejump_p (temp
)
733 /* TEMP must skip over the "x = a;" insn */
734 && prev_real_insn (JUMP_LABEL (temp
)) == insn
735 && no_labels_between_p (insn
, JUMP_LABEL (temp
))
736 /* There must be no other entries to the "x = b;" insn. */
737 && no_labels_between_p (JUMP_LABEL (temp
), temp2
)
738 /* INSN must either branch to the insn after TEMP2 or the insn
739 after TEMP2 must branch to the same place as INSN. */
740 && (reallabelprev
== temp2
741 || ((temp5
= next_active_insn (temp2
)) != 0
742 && simplejump_p (temp5
)
743 && JUMP_LABEL (temp5
) == JUMP_LABEL (insn
))))
745 /* The test expression, X, may be a complicated test with
746 multiple branches. See if we can find all the uses of
747 the label that TEMP branches to without hitting a CALL_INSN
748 or a jump to somewhere else. */
749 rtx target
= JUMP_LABEL (temp
);
750 int nuses
= LABEL_NUSES (target
);
753 /* Set P to the first jump insn that goes around "x = a;". */
754 for (p
= temp
; nuses
&& p
; p
= prev_nonnote_insn (p
))
756 if (GET_CODE (p
) == JUMP_INSN
)
758 if (condjump_p (p
) && ! simplejump_p (p
)
759 && JUMP_LABEL (p
) == target
)
768 else if (GET_CODE (p
) == CALL_INSN
)
773 /* We cannot insert anything between a set of cc and its use
774 so if P uses cc0, we must back up to the previous insn. */
775 q
= prev_nonnote_insn (p
);
776 if (q
&& GET_RTX_CLASS (GET_CODE (q
)) == 'i'
777 && sets_cc0_p (PATTERN (q
)))
784 /* If we found all the uses and there was no data conflict, we
785 can move the assignment unless we can branch into the middle
788 && no_labels_between_p (p
, insn
)
789 && ! reg_referenced_between_p (temp1
, p
, NEXT_INSN (temp3
))
790 && ! reg_set_between_p (temp1
, p
, temp3
)
791 && (GET_CODE (SET_SRC (temp4
)) == CONST_INT
792 || ! reg_set_between_p (SET_SRC (temp4
), p
, temp2
)))
794 emit_insn_after_with_line_notes (PATTERN (temp2
), p
, temp2
);
797 /* Set NEXT to an insn that we know won't go away. */
798 next
= next_active_insn (insn
);
800 /* Delete the jump around the set. Note that we must do
801 this before we redirect the test jumps so that it won't
802 delete the code immediately following the assignment
803 we moved (which might be a jump). */
807 /* We either have two consecutive labels or a jump to
808 a jump, so adjust all the JUMP_INSNs to branch to where
810 for (p
= NEXT_INSN (p
); p
!= next
; p
= NEXT_INSN (p
))
811 if (GET_CODE (p
) == JUMP_INSN
)
812 redirect_jump (p
, target
);
820 /* If we have if (...) x = exp; and branches are expensive,
821 EXP is a single insn, does not have any side effects, cannot
822 trap, and is not too costly, convert this to
823 t = exp; if (...) x = t;
825 Don't do this when we have CC0 because it is unlikely to help
826 and we'd need to worry about where to place the new insn and
827 the potential for conflicts. We also can't do this when we have
828 notes on the insn for the same reason as above.
832 TEMP to the "x = exp;" insn.
833 TEMP1 to the single set in the "x = exp; insn.
836 if (! reload_completed
837 && this_is_condjump
&& ! this_is_simplejump
839 && (temp
= next_nonnote_insn (insn
)) != 0
840 && GET_CODE (temp
) == INSN
841 && REG_NOTES (temp
) == 0
842 && (reallabelprev
== temp
843 || ((temp2
= next_active_insn (temp
)) != 0
844 && simplejump_p (temp2
)
845 && JUMP_LABEL (temp2
) == JUMP_LABEL (insn
)))
846 && (temp1
= single_set (temp
)) != 0
847 && (temp2
= SET_DEST (temp1
), GET_CODE (temp2
) == REG
)
848 && GET_MODE_CLASS (GET_MODE (temp2
)) == MODE_INT
849 #ifdef SMALL_REGISTER_CLASSES
850 && REGNO (temp2
) >= FIRST_PSEUDO_REGISTER
852 && GET_CODE (SET_SRC (temp1
)) != REG
853 && GET_CODE (SET_SRC (temp1
)) != SUBREG
854 && GET_CODE (SET_SRC (temp1
)) != CONST_INT
855 && ! side_effects_p (SET_SRC (temp1
))
856 && ! may_trap_p (SET_SRC (temp1
))
857 && rtx_cost (SET_SRC (temp1
)) < 10)
859 rtx
new = gen_reg_rtx (GET_MODE (temp2
));
861 if (validate_change (temp
, &SET_DEST (temp1
), new, 0))
863 next
= emit_insn_after (gen_move_insn (temp2
, new), insn
);
864 emit_insn_after_with_line_notes (PATTERN (temp
),
865 PREV_INSN (insn
), temp
);
870 /* Similarly, if it takes two insns to compute EXP but they
871 have the same destination. Here TEMP3 will be the second
872 insn and TEMP4 the SET from that insn. */
874 if (! reload_completed
875 && this_is_condjump
&& ! this_is_simplejump
877 && (temp
= next_nonnote_insn (insn
)) != 0
878 && GET_CODE (temp
) == INSN
879 && REG_NOTES (temp
) == 0
880 && (temp3
= next_nonnote_insn (temp
)) != 0
881 && GET_CODE (temp3
) == INSN
882 && REG_NOTES (temp3
) == 0
883 && (reallabelprev
== temp3
884 || ((temp2
= next_active_insn (temp3
)) != 0
885 && simplejump_p (temp2
)
886 && JUMP_LABEL (temp2
) == JUMP_LABEL (insn
)))
887 && (temp1
= single_set (temp
)) != 0
888 && (temp2
= SET_DEST (temp1
), GET_CODE (temp2
) == REG
)
889 && GET_MODE_CLASS (GET_MODE (temp2
)) == MODE_INT
890 #ifdef SMALL_REGISTER_CLASSES
891 && REGNO (temp2
) >= FIRST_PSEUDO_REGISTER
893 && ! side_effects_p (SET_SRC (temp1
))
894 && ! may_trap_p (SET_SRC (temp1
))
895 && rtx_cost (SET_SRC (temp1
)) < 10
896 && (temp4
= single_set (temp3
)) != 0
897 && rtx_equal_p (SET_DEST (temp4
), temp2
)
898 && ! side_effects_p (SET_SRC (temp4
))
899 && ! may_trap_p (SET_SRC (temp4
))
900 && rtx_cost (SET_SRC (temp4
)) < 10)
902 rtx
new = gen_reg_rtx (GET_MODE (temp2
));
904 if (validate_change (temp
, &SET_DEST (temp1
), new, 0))
906 next
= emit_insn_after (gen_move_insn (temp2
, new), insn
);
907 emit_insn_after_with_line_notes (PATTERN (temp
),
908 PREV_INSN (insn
), temp
);
909 emit_insn_after_with_line_notes
910 (replace_rtx (PATTERN (temp3
), temp2
, new),
911 PREV_INSN (insn
), temp3
);
917 /* Finally, handle the case where two insns are used to
918 compute EXP but a temporary register is used. Here we must
919 ensure that the temporary register is not used anywhere else. */
921 if (! reload_completed
923 && this_is_condjump
&& ! this_is_simplejump
925 && (temp
= next_nonnote_insn (insn
)) != 0
926 && GET_CODE (temp
) == INSN
927 && REG_NOTES (temp
) == 0
928 && (temp3
= next_nonnote_insn (temp
)) != 0
929 && GET_CODE (temp3
) == INSN
930 && REG_NOTES (temp3
) == 0
931 && (reallabelprev
== temp3
932 || ((temp2
= next_active_insn (temp3
)) != 0
933 && simplejump_p (temp2
)
934 && JUMP_LABEL (temp2
) == JUMP_LABEL (insn
)))
935 && (temp1
= single_set (temp
)) != 0
936 && (temp5
= SET_DEST (temp1
), GET_CODE (temp5
) == REG
)
937 && REGNO (temp5
) >= FIRST_PSEUDO_REGISTER
938 && regno_first_uid
[REGNO (temp5
)] == INSN_UID (temp
)
939 && regno_last_uid
[REGNO (temp5
)] == INSN_UID (temp3
)
940 && ! side_effects_p (SET_SRC (temp1
))
941 && ! may_trap_p (SET_SRC (temp1
))
942 && rtx_cost (SET_SRC (temp1
)) < 10
943 && (temp4
= single_set (temp3
)) != 0
944 && (temp2
= SET_DEST (temp4
), GET_CODE (temp2
) == REG
)
945 && GET_MODE_CLASS (GET_MODE (temp2
)) == MODE_INT
946 #ifdef SMALL_REGISTER_CLASSES
947 && REGNO (temp2
) >= FIRST_PSEUDO_REGISTER
949 && rtx_equal_p (SET_DEST (temp4
), temp2
)
950 && ! side_effects_p (SET_SRC (temp4
))
951 && ! may_trap_p (SET_SRC (temp4
))
952 && rtx_cost (SET_SRC (temp4
)) < 10)
954 rtx
new = gen_reg_rtx (GET_MODE (temp2
));
956 if (validate_change (temp3
, &SET_DEST (temp4
), new, 0))
958 next
= emit_insn_after (gen_move_insn (temp2
, new), insn
);
959 emit_insn_after_with_line_notes (PATTERN (temp
),
960 PREV_INSN (insn
), temp
);
961 emit_insn_after_with_line_notes (PATTERN (temp3
),
962 PREV_INSN (insn
), temp3
);
967 #endif /* HAVE_cc0 */
969 /* We deal with four cases:
971 1) x = a; if (...) x = b; and either A or B is zero,
972 2) if (...) x = 0; and jumps are expensive,
973 3) x = a; if (...) x = b; and A and B are constants where all the
974 set bits in A are also set in B and jumps are expensive, and
975 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
977 5) if (...) x = b; if jumps are even more expensive.
979 In each of these try to use a store-flag insn to avoid the jump.
980 (If the jump would be faster, the machine should not have
981 defined the scc insns!). These cases are often made by the
982 previous optimization.
984 INSN here is the jump around the store. We set:
986 TEMP to the "x = b;" insn.
988 TEMP2 to B (const0_rtx in the second case).
989 TEMP3 to A (X in the second case).
990 TEMP4 to the condition being tested.
991 TEMP5 to the earliest insn used to find the condition. */
993 if (/* We can't do this after reload has completed. */
995 && this_is_condjump
&& ! this_is_simplejump
996 /* Set TEMP to the "x = b;" insn. */
997 && (temp
= next_nonnote_insn (insn
)) != 0
998 && GET_CODE (temp
) == INSN
999 && GET_CODE (PATTERN (temp
)) == SET
1000 && GET_CODE (temp1
= SET_DEST (PATTERN (temp
))) == REG
1001 #ifdef SMALL_REGISTER_CLASSES
1002 && REGNO (temp1
) >= FIRST_PSEUDO_REGISTER
1004 && GET_MODE_CLASS (GET_MODE (temp1
)) == MODE_INT
1005 && (GET_CODE (temp2
= SET_SRC (PATTERN (temp
))) == REG
1006 || GET_CODE (temp2
) == SUBREG
1007 || GET_CODE (temp2
) == CONST_INT
)
1008 /* Allow either form, but prefer the former if both apply.
1009 There is no point in using the old value of TEMP1 if
1010 it is a register, since cse will alias them. It can
1011 lose if the old value were a hard register since CSE
1012 won't replace hard registers. */
1013 && (((temp3
= reg_set_last (temp1
, insn
)) != 0
1014 && GET_CODE (temp3
) == CONST_INT
)
1015 /* Make the latter case look like x = x; if (...) x = 0; */
1018 && temp2
== const0_rtx
)
1019 || BRANCH_COST
>= 3)))
1020 /* INSN must either branch to the insn after TEMP or the insn
1021 after TEMP must branch to the same place as INSN. */
1022 && (reallabelprev
== temp
1023 || ((temp4
= next_active_insn (temp
)) != 0
1024 && simplejump_p (temp4
)
1025 && JUMP_LABEL (temp4
) == JUMP_LABEL (insn
)))
1026 && (temp4
= get_condition (insn
, &temp5
)) != 0
1027 /* We must be comparing objects whose modes imply the size.
1028 We could handle BLKmode if (1) emit_store_flag could
1029 and (2) we could find the size reliably. */
1030 && GET_MODE (XEXP (temp4
, 0)) != BLKmode
1032 /* If B is zero, OK; if A is zero, can only do (1) if we
1033 can reverse the condition. See if (3) applies possibly
1034 by reversing the condition. Prefer reversing to (4) when
1035 branches are very expensive. */
1036 && ((reversep
= 0, temp2
== const0_rtx
)
1037 || (temp3
== const0_rtx
1038 && (reversep
= can_reverse_comparison_p (temp4
, insn
)))
1039 || (BRANCH_COST
>= 2
1040 && GET_CODE (temp2
) == CONST_INT
1041 && GET_CODE (temp3
) == CONST_INT
1042 && ((INTVAL (temp2
) & INTVAL (temp3
)) == INTVAL (temp2
)
1043 || ((INTVAL (temp2
) & INTVAL (temp3
)) == INTVAL (temp3
)
1044 && (reversep
= can_reverse_comparison_p (temp4
,
1046 || BRANCH_COST
>= 3)
1048 /* If the previous insn sets CC0 and something else, we can't
1049 do this since we are going to delete that insn. */
1051 && ! ((temp6
= prev_nonnote_insn (insn
)) != 0
1052 && GET_CODE (temp6
) == INSN
1053 && (sets_cc0_p (PATTERN (temp6
)) == -1
1054 || (sets_cc0_p (PATTERN (temp6
)) == 1
1055 && FIND_REG_INC_NOTE (temp6
, NULL_RTX
))))
1059 enum rtx_code code
= GET_CODE (temp4
);
1060 rtx uval
, cval
, var
= temp1
;
1064 /* If necessary, reverse the condition. */
1066 code
= reverse_condition (code
), uval
= temp2
, cval
= temp3
;
1068 uval
= temp3
, cval
= temp2
;
1070 /* See if we can do this with a store-flag insn. */
1073 /* If CVAL is non-zero, normalize to -1. Otherwise,
1074 if UVAL is the constant 1, it is best to just compute
1075 the result directly. If UVAL is constant and STORE_FLAG_VALUE
1076 includes all of its bits, it is best to compute the flag
1077 value unnormalized and `and' it with UVAL. Otherwise,
1078 normalize to -1 and `and' with UVAL. */
1079 normalizep
= (cval
!= const0_rtx
? -1
1080 : (uval
== const1_rtx
? 1
1081 : (GET_CODE (uval
) == CONST_INT
1082 && (INTVAL (uval
) & ~STORE_FLAG_VALUE
) == 0)
1085 /* We will be putting the store-flag insn immediately in
1086 front of the comparison that was originally being done,
1087 so we know all the variables in TEMP4 will be valid.
1088 However, this might be in front of the assignment of
1089 A to VAR. If it is, it would clobber the store-flag
1090 we will be emitting.
1092 Therefore, emit into a temporary which will be copied to
1093 VAR immediately after TEMP. */
1095 target
= emit_store_flag (gen_reg_rtx (GET_MODE (var
)), code
,
1096 XEXP (temp4
, 0), XEXP (temp4
, 1),
1098 (code
== LTU
|| code
== LEU
1099 || code
== GEU
|| code
== GTU
),
1106 /* Put the store-flag insns in front of the first insn
1107 used to compute the condition to ensure that we
1108 use the same values of them as the current
1109 comparison. However, the remainder of the insns we
1110 generate will be placed directly in front of the
1111 jump insn, in case any of the pseudos we use
1112 are modified earlier. */
1117 emit_insns_before (seq
, temp5
);
1121 /* Both CVAL and UVAL are non-zero. */
1122 if (cval
!= const0_rtx
&& uval
!= const0_rtx
)
1126 tem1
= expand_and (uval
, target
, NULL_RTX
);
1127 if (GET_CODE (cval
) == CONST_INT
1128 && GET_CODE (uval
) == CONST_INT
1129 && (INTVAL (cval
) & INTVAL (uval
)) == INTVAL (cval
))
1133 tem2
= expand_unop (GET_MODE (var
), one_cmpl_optab
,
1134 target
, NULL_RTX
, 0);
1135 tem2
= expand_and (cval
, tem2
,
1136 (GET_CODE (tem2
) == REG
1140 /* If we usually make new pseudos, do so here. This
1141 turns out to help machines that have conditional
1144 if (flag_expensive_optimizations
)
1147 target
= expand_binop (GET_MODE (var
), ior_optab
,
1151 else if (normalizep
!= 1)
1152 target
= expand_and (uval
, target
,
1153 (GET_CODE (target
) == REG
1154 && ! preserve_subexpressions_p ()
1155 ? target
: NULL_RTX
));
1157 emit_move_insn (var
, target
);
1162 /* If INSN uses CC0, we must not separate it from the
1163 insn that sets cc0. */
1165 if (reg_mentioned_p (cc0_rtx
, PATTERN (before
)))
1166 before
= prev_nonnote_insn (before
);
1169 emit_insns_before (seq
, before
);
1172 next
= NEXT_INSN (insn
);
1182 /* If branches are expensive, convert
1183 if (foo) bar++; to bar += (foo != 0);
1184 and similarly for "bar--;"
1186 INSN is the conditional branch around the arithmetic. We set:
1188 TEMP is the arithmetic insn.
1189 TEMP1 is the SET doing the arithmetic.
1190 TEMP2 is the operand being incremented or decremented.
1191 TEMP3 to the condition being tested.
1192 TEMP4 to the earliest insn used to find the condition. */
1194 if ((BRANCH_COST
>= 2
1195 #if defined (HAVE_incscc) || defined (HAVE_decscc)
1200 && ! reload_completed
1201 && this_is_condjump
&& ! this_is_simplejump
1202 && (temp
= next_nonnote_insn (insn
)) != 0
1203 && (temp1
= single_set (temp
)) != 0
1204 && (temp2
= SET_DEST (temp1
),
1205 GET_MODE_CLASS (GET_MODE (temp2
)) == MODE_INT
)
1206 && GET_CODE (SET_SRC (temp1
)) == PLUS
1207 && (XEXP (SET_SRC (temp1
), 1) == const1_rtx
1208 || XEXP (SET_SRC (temp1
), 1) == constm1_rtx
)
1209 && rtx_equal_p (temp2
, XEXP (SET_SRC (temp1
), 0))
1210 /* INSN must either branch to the insn after TEMP or the insn
1211 after TEMP must branch to the same place as INSN. */
1212 && (reallabelprev
== temp
1213 || ((temp3
= next_active_insn (temp
)) != 0
1214 && simplejump_p (temp3
)
1215 && JUMP_LABEL (temp3
) == JUMP_LABEL (insn
)))
1216 && (temp3
= get_condition (insn
, &temp4
)) != 0
1217 /* We must be comparing objects whose modes imply the size.
1218 We could handle BLKmode if (1) emit_store_flag could
1219 and (2) we could find the size reliably. */
1220 && GET_MODE (XEXP (temp3
, 0)) != BLKmode
1221 && can_reverse_comparison_p (temp3
, insn
))
1223 rtx temp6
, target
= 0, seq
, init_insn
= 0, init
= temp2
;
1224 enum rtx_code code
= reverse_condition (GET_CODE (temp3
));
1228 /* It must be the case that TEMP2 is not modified in the range
1229 [TEMP4, INSN). The one exception we make is if the insn
1230 before INSN sets TEMP2 to something which is also unchanged
1231 in that range. In that case, we can move the initialization
1232 into our sequence. */
1234 if ((temp5
= prev_active_insn (insn
)) != 0
1235 && GET_CODE (temp5
) == INSN
1236 && (temp6
= single_set (temp5
)) != 0
1237 && rtx_equal_p (temp2
, SET_DEST (temp6
))
1238 && (CONSTANT_P (SET_SRC (temp6
))
1239 || GET_CODE (SET_SRC (temp6
)) == REG
1240 || GET_CODE (SET_SRC (temp6
)) == SUBREG
))
1242 emit_insn (PATTERN (temp5
));
1244 init
= SET_SRC (temp6
);
1247 if (CONSTANT_P (init
)
1248 || ! reg_set_between_p (init
, PREV_INSN (temp4
), insn
))
1249 target
= emit_store_flag (gen_reg_rtx (GET_MODE (temp2
)), code
,
1250 XEXP (temp3
, 0), XEXP (temp3
, 1),
1252 (code
== LTU
|| code
== LEU
1253 || code
== GTU
|| code
== GEU
), 1);
1255 /* If we can do the store-flag, do the addition or
1259 target
= expand_binop (GET_MODE (temp2
),
1260 (XEXP (SET_SRC (temp1
), 1) == const1_rtx
1261 ? add_optab
: sub_optab
),
1262 temp2
, target
, temp2
, 0, OPTAB_WIDEN
);
1266 /* Put the result back in temp2 in case it isn't already.
1267 Then replace the jump, possible a CC0-setting insn in
1268 front of the jump, and TEMP, with the sequence we have
1271 if (target
!= temp2
)
1272 emit_move_insn (temp2
, target
);
1277 emit_insns_before (seq
, temp4
);
1281 delete_insn (init_insn
);
1283 next
= NEXT_INSN (insn
);
1285 delete_insn (prev_nonnote_insn (insn
));
1295 /* Simplify if (...) x = 1; else {...} if (x) ...
1296 We recognize this case scanning backwards as well.
1298 TEMP is the assignment to x;
1299 TEMP1 is the label at the head of the second if. */
1300 /* ?? This should call get_condition to find the values being
1301 compared, instead of looking for a COMPARE insn when HAVE_cc0
1302 is not defined. This would allow it to work on the m88k. */
1303 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1304 is not defined and the condition is tested by a separate compare
1305 insn. This is because the code below assumes that the result
1306 of the compare dies in the following branch.
1308 Not only that, but there might be other insns between the
1309 compare and branch whose results are live. Those insns need
1312 A way to fix this is to move the insns at JUMP_LABEL (insn)
1313 to before INSN. If we are running before flow, they will
1314 be deleted if they aren't needed. But this doesn't work
1317 This is really a special-case of jump threading, anyway. The
1318 right thing to do is to replace this and jump threading with
1319 much simpler code in cse.
1321 This code has been turned off in the non-cc0 case in the
1325 else if (this_is_simplejump
1326 /* Safe to skip USE and CLOBBER insns here
1327 since they will not be deleted. */
1328 && (temp
= prev_active_insn (insn
))
1329 && no_labels_between_p (temp
, insn
)
1330 && GET_CODE (temp
) == INSN
1331 && GET_CODE (PATTERN (temp
)) == SET
1332 && GET_CODE (SET_DEST (PATTERN (temp
))) == REG
1333 && CONSTANT_P (SET_SRC (PATTERN (temp
)))
1334 && (temp1
= next_active_insn (JUMP_LABEL (insn
)))
1335 /* If we find that the next value tested is `x'
1336 (TEMP1 is the insn where this happens), win. */
1337 && GET_CODE (temp1
) == INSN
1338 && GET_CODE (PATTERN (temp1
)) == SET
1340 /* Does temp1 `tst' the value of x? */
1341 && SET_SRC (PATTERN (temp1
)) == SET_DEST (PATTERN (temp
))
1342 && SET_DEST (PATTERN (temp1
)) == cc0_rtx
1343 && (temp1
= next_nonnote_insn (temp1
))
1345 /* Does temp1 compare the value of x against zero? */
1346 && GET_CODE (SET_SRC (PATTERN (temp1
))) == COMPARE
1347 && XEXP (SET_SRC (PATTERN (temp1
)), 1) == const0_rtx
1348 && (XEXP (SET_SRC (PATTERN (temp1
)), 0)
1349 == SET_DEST (PATTERN (temp
)))
1350 && GET_CODE (SET_DEST (PATTERN (temp1
))) == REG
1351 && (temp1
= find_next_ref (SET_DEST (PATTERN (temp1
)), temp1
))
1353 && condjump_p (temp1
))
1355 /* Get the if_then_else from the condjump. */
1356 rtx choice
= SET_SRC (PATTERN (temp1
));
1357 if (GET_CODE (choice
) == IF_THEN_ELSE
)
1359 enum rtx_code code
= GET_CODE (XEXP (choice
, 0));
1360 rtx val
= SET_SRC (PATTERN (temp
));
1362 = simplify_relational_operation (code
, GET_MODE (SET_DEST (PATTERN (temp
))),
1366 if (cond
== const_true_rtx
)
1367 ultimate
= XEXP (choice
, 1);
1368 else if (cond
== const0_rtx
)
1369 ultimate
= XEXP (choice
, 2);
1373 if (ultimate
== pc_rtx
)
1374 ultimate
= get_label_after (temp1
);
1375 else if (ultimate
&& GET_CODE (ultimate
) != RETURN
)
1376 ultimate
= XEXP (ultimate
, 0);
1379 changed
|= redirect_jump (insn
, ultimate
);
1385 /* @@ This needs a bit of work before it will be right.
1387 Any type of comparison can be accepted for the first and
1388 second compare. When rewriting the first jump, we must
1389 compute the what conditions can reach label3, and use the
1390 appropriate code. We can not simply reverse/swap the code
1391 of the first jump. In some cases, the second jump must be
1395 < == converts to > ==
1396 < != converts to == >
1399 If the code is written to only accept an '==' test for the second
1400 compare, then all that needs to be done is to swap the condition
1401 of the first branch.
1403 It is questionable whether we want this optimization anyways,
1404 since if the user wrote code like this because he/she knew that
1405 the jump to label1 is taken most of the time, then rewriting
1406 this gives slower code. */
1407 /* @@ This should call get_condition to find the values being
1408 compared, instead of looking for a COMPARE insn when HAVE_cc0
1409 is not defined. This would allow it to work on the m88k. */
1410 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1411 is not defined and the condition is tested by a separate compare
1412 insn. This is because the code below assumes that the result
1413 of the compare dies in the following branch. */
1415 /* Simplify test a ~= b
1429 where ~= is an inequality, e.g. >, and ~~= is the swapped
1432 We recognize this case scanning backwards.
1434 TEMP is the conditional jump to `label2';
1435 TEMP1 is the test for `a == b';
1436 TEMP2 is the conditional jump to `label1';
1437 TEMP3 is the test for `a ~= b'. */
1438 else if (this_is_simplejump
1439 && (temp
= prev_active_insn (insn
))
1440 && no_labels_between_p (temp
, insn
)
1441 && condjump_p (temp
)
1442 && (temp1
= prev_active_insn (temp
))
1443 && no_labels_between_p (temp1
, temp
)
1444 && GET_CODE (temp1
) == INSN
1445 && GET_CODE (PATTERN (temp1
)) == SET
1447 && sets_cc0_p (PATTERN (temp1
)) == 1
1449 && GET_CODE (SET_SRC (PATTERN (temp1
))) == COMPARE
1450 && GET_CODE (SET_DEST (PATTERN (temp1
))) == REG
1451 && (temp
== find_next_ref (SET_DEST (PATTERN (temp1
)), temp1
))
1453 && (temp2
= prev_active_insn (temp1
))
1454 && no_labels_between_p (temp2
, temp1
)
1455 && condjump_p (temp2
)
1456 && JUMP_LABEL (temp2
) == next_nonnote_insn (NEXT_INSN (insn
))
1457 && (temp3
= prev_active_insn (temp2
))
1458 && no_labels_between_p (temp3
, temp2
)
1459 && GET_CODE (PATTERN (temp3
)) == SET
1460 && rtx_equal_p (SET_DEST (PATTERN (temp3
)),
1461 SET_DEST (PATTERN (temp1
)))
1462 && rtx_equal_p (SET_SRC (PATTERN (temp1
)),
1463 SET_SRC (PATTERN (temp3
)))
1464 && ! inequality_comparisons_p (PATTERN (temp
))
1465 && inequality_comparisons_p (PATTERN (temp2
)))
1467 rtx fallthrough_label
= JUMP_LABEL (temp2
);
1469 ++LABEL_NUSES (fallthrough_label
);
1470 if (swap_jump (temp2
, JUMP_LABEL (insn
)))
1476 if (--LABEL_NUSES (fallthrough_label
) == 0)
1477 delete_insn (fallthrough_label
);
1480 /* Simplify if (...) {... x = 1;} if (x) ...
1482 We recognize this case backwards.
1484 TEMP is the test of `x';
1485 TEMP1 is the assignment to `x' at the end of the
1486 previous statement. */
1487 /* @@ This should call get_condition to find the values being
1488 compared, instead of looking for a COMPARE insn when HAVE_cc0
1489 is not defined. This would allow it to work on the m88k. */
1490 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1491 is not defined and the condition is tested by a separate compare
1492 insn. This is because the code below assumes that the result
1493 of the compare dies in the following branch. */
1495 /* ??? This has to be turned off. The problem is that the
1496 unconditional jump might indirectly end up branching to the
1497 label between TEMP1 and TEMP. We can't detect this, in general,
1498 since it may become a jump to there after further optimizations.
1499 If that jump is done, it will be deleted, so we will retry
1500 this optimization in the next pass, thus an infinite loop.
1502 The present code prevents this by putting the jump after the
1503 label, but this is not logically correct. */
1505 else if (this_is_condjump
1506 /* Safe to skip USE and CLOBBER insns here
1507 since they will not be deleted. */
1508 && (temp
= prev_active_insn (insn
))
1509 && no_labels_between_p (temp
, insn
)
1510 && GET_CODE (temp
) == INSN
1511 && GET_CODE (PATTERN (temp
)) == SET
1513 && sets_cc0_p (PATTERN (temp
)) == 1
1514 && GET_CODE (SET_SRC (PATTERN (temp
))) == REG
1516 /* Temp must be a compare insn, we can not accept a register
1517 to register move here, since it may not be simply a
1519 && GET_CODE (SET_SRC (PATTERN (temp
))) == COMPARE
1520 && XEXP (SET_SRC (PATTERN (temp
)), 1) == const0_rtx
1521 && GET_CODE (XEXP (SET_SRC (PATTERN (temp
)), 0)) == REG
1522 && GET_CODE (SET_DEST (PATTERN (temp
))) == REG
1523 && insn
== find_next_ref (SET_DEST (PATTERN (temp
)), temp
)
1525 /* May skip USE or CLOBBER insns here
1526 for checking for opportunity, since we
1527 take care of them later. */
1528 && (temp1
= prev_active_insn (temp
))
1529 && GET_CODE (temp1
) == INSN
1530 && GET_CODE (PATTERN (temp1
)) == SET
1532 && SET_SRC (PATTERN (temp
)) == SET_DEST (PATTERN (temp1
))
1534 && (XEXP (SET_SRC (PATTERN (temp
)), 0)
1535 == SET_DEST (PATTERN (temp1
)))
1537 && CONSTANT_P (SET_SRC (PATTERN (temp1
)))
1538 /* If this isn't true, cse will do the job. */
1539 && ! no_labels_between_p (temp1
, temp
))
1541 /* Get the if_then_else from the condjump. */
1542 rtx choice
= SET_SRC (PATTERN (insn
));
1543 if (GET_CODE (choice
) == IF_THEN_ELSE
1544 && (GET_CODE (XEXP (choice
, 0)) == EQ
1545 || GET_CODE (XEXP (choice
, 0)) == NE
))
1547 int want_nonzero
= (GET_CODE (XEXP (choice
, 0)) == NE
);
1552 /* Get the place that condjump will jump to
1553 if it is reached from here. */
1554 if ((SET_SRC (PATTERN (temp1
)) != const0_rtx
)
1556 ultimate
= XEXP (choice
, 1);
1558 ultimate
= XEXP (choice
, 2);
1559 /* Get it as a CODE_LABEL. */
1560 if (ultimate
== pc_rtx
)
1561 ultimate
= get_label_after (insn
);
1563 /* Get the label out of the LABEL_REF. */
1564 ultimate
= XEXP (ultimate
, 0);
1566 /* Insert the jump immediately before TEMP, specifically
1567 after the label that is between TEMP1 and TEMP. */
1568 last_insn
= PREV_INSN (temp
);
1570 /* If we would be branching to the next insn, the jump
1571 would immediately be deleted and the re-inserted in
1572 a subsequent pass over the code. So don't do anything
1574 if (next_active_insn (last_insn
)
1575 != next_active_insn (ultimate
))
1577 emit_barrier_after (last_insn
);
1578 p
= emit_jump_insn_after (gen_jump (ultimate
),
1580 JUMP_LABEL (p
) = ultimate
;
1581 ++LABEL_NUSES (ultimate
);
1582 if (INSN_UID (ultimate
) < max_jump_chain
1583 && INSN_CODE (p
) < max_jump_chain
)
1585 jump_chain
[INSN_UID (p
)]
1586 = jump_chain
[INSN_UID (ultimate
)];
1587 jump_chain
[INSN_UID (ultimate
)] = p
;
1595 /* Detect a conditional jump going to the same place
1596 as an immediately following unconditional jump. */
1597 else if (this_is_condjump
1598 && (temp
= next_active_insn (insn
)) != 0
1599 && simplejump_p (temp
)
1600 && (next_active_insn (JUMP_LABEL (insn
))
1601 == next_active_insn (JUMP_LABEL (temp
))))
1607 /* Detect a conditional jump jumping over an unconditional jump. */
1609 else if (this_is_condjump
&& ! this_is_simplejump
1610 && reallabelprev
!= 0
1611 && GET_CODE (reallabelprev
) == JUMP_INSN
1612 && prev_active_insn (reallabelprev
) == insn
1613 && no_labels_between_p (insn
, reallabelprev
)
1614 && simplejump_p (reallabelprev
))
1616 /* When we invert the unconditional jump, we will be
1617 decrementing the usage count of its old label.
1618 Make sure that we don't delete it now because that
1619 might cause the following code to be deleted. */
1620 rtx prev_uses
= prev_nonnote_insn (reallabelprev
);
1621 rtx prev_label
= JUMP_LABEL (insn
);
1623 ++LABEL_NUSES (prev_label
);
1625 if (invert_jump (insn
, JUMP_LABEL (reallabelprev
)))
1627 /* It is very likely that if there are USE insns before
1628 this jump, they hold REG_DEAD notes. These REG_DEAD
1629 notes are no longer valid due to this optimization,
1630 and will cause the life-analysis that following passes
1631 (notably delayed-branch scheduling) to think that
1632 these registers are dead when they are not.
1634 To prevent this trouble, we just remove the USE insns
1635 from the insn chain. */
1637 while (prev_uses
&& GET_CODE (prev_uses
) == INSN
1638 && GET_CODE (PATTERN (prev_uses
)) == USE
)
1640 rtx useless
= prev_uses
;
1641 prev_uses
= prev_nonnote_insn (prev_uses
);
1642 delete_insn (useless
);
1645 delete_insn (reallabelprev
);
1650 /* We can now safely delete the label if it is unreferenced
1651 since the delete_insn above has deleted the BARRIER. */
1652 if (--LABEL_NUSES (prev_label
) == 0)
1653 delete_insn (prev_label
);
1658 /* Detect a jump to a jump. */
1660 nlabel
= follow_jumps (JUMP_LABEL (insn
));
1661 if (nlabel
!= JUMP_LABEL (insn
)
1662 && redirect_jump (insn
, nlabel
))
1668 /* Look for if (foo) bar; else break; */
1669 /* The insns look like this:
1670 insn = condjump label1;
1671 ...range1 (some insns)...
1674 ...range2 (some insns)...
1675 jump somewhere unconditionally
1678 rtx label1
= next_label (insn
);
1679 rtx range1end
= label1
? prev_active_insn (label1
) : 0;
1680 /* Don't do this optimization on the first round, so that
1681 jump-around-a-jump gets simplified before we ask here
1682 whether a jump is unconditional.
1684 Also don't do it when we are called after reload since
1685 it will confuse reorg. */
1687 && (reload_completed
? ! flag_delayed_branch
: 1)
1688 /* Make sure INSN is something we can invert. */
1689 && condjump_p (insn
)
1691 && JUMP_LABEL (insn
) == label1
1692 && LABEL_NUSES (label1
) == 1
1693 && GET_CODE (range1end
) == JUMP_INSN
1694 && simplejump_p (range1end
))
1696 rtx label2
= next_label (label1
);
1697 rtx range2end
= label2
? prev_active_insn (label2
) : 0;
1698 if (range1end
!= range2end
1699 && JUMP_LABEL (range1end
) == label2
1700 && GET_CODE (range2end
) == JUMP_INSN
1701 && GET_CODE (NEXT_INSN (range2end
)) == BARRIER
1702 /* Invert the jump condition, so we
1703 still execute the same insns in each case. */
1704 && invert_jump (insn
, label1
))
1706 rtx range1beg
= next_active_insn (insn
);
1707 rtx range2beg
= next_active_insn (label1
);
1708 rtx range1after
, range2after
;
1709 rtx range1before
, range2before
;
1711 /* Include in each range any line number before it. */
1712 while (PREV_INSN (range1beg
)
1713 && GET_CODE (PREV_INSN (range1beg
)) == NOTE
1714 && NOTE_LINE_NUMBER (PREV_INSN (range1beg
)) > 0)
1715 range1beg
= PREV_INSN (range1beg
);
1717 while (PREV_INSN (range2beg
)
1718 && GET_CODE (PREV_INSN (range2beg
)) == NOTE
1719 && NOTE_LINE_NUMBER (PREV_INSN (range2beg
)) > 0)
1720 range2beg
= PREV_INSN (range2beg
);
1722 /* Don't move NOTEs for blocks or loops; shift them
1723 outside the ranges, where they'll stay put. */
1724 range1beg
= squeeze_notes (range1beg
, range1end
);
1725 range2beg
= squeeze_notes (range2beg
, range2end
);
1727 /* Get current surrounds of the 2 ranges. */
1728 range1before
= PREV_INSN (range1beg
);
1729 range2before
= PREV_INSN (range2beg
);
1730 range1after
= NEXT_INSN (range1end
);
1731 range2after
= NEXT_INSN (range2end
);
1733 /* Splice range2 where range1 was. */
1734 NEXT_INSN (range1before
) = range2beg
;
1735 PREV_INSN (range2beg
) = range1before
;
1736 NEXT_INSN (range2end
) = range1after
;
1737 PREV_INSN (range1after
) = range2end
;
1738 /* Splice range1 where range2 was. */
1739 NEXT_INSN (range2before
) = range1beg
;
1740 PREV_INSN (range1beg
) = range2before
;
1741 NEXT_INSN (range1end
) = range2after
;
1742 PREV_INSN (range2after
) = range1end
;
1749 /* Now that the jump has been tensioned,
1750 try cross jumping: check for identical code
1751 before the jump and before its target label. */
1753 /* First, cross jumping of conditional jumps: */
1755 if (cross_jump
&& condjump_p (insn
))
1757 rtx newjpos
, newlpos
;
1758 rtx x
= prev_real_insn (JUMP_LABEL (insn
));
1760 /* A conditional jump may be crossjumped
1761 only if the place it jumps to follows
1762 an opposing jump that comes back here. */
1764 if (x
!= 0 && ! jump_back_p (x
, insn
))
1765 /* We have no opposing jump;
1766 cannot cross jump this insn. */
1770 /* TARGET is nonzero if it is ok to cross jump
1771 to code before TARGET. If so, see if matches. */
1773 find_cross_jump (insn
, x
, 2,
1774 &newjpos
, &newlpos
);
1778 do_cross_jump (insn
, newjpos
, newlpos
);
1779 /* Make the old conditional jump
1780 into an unconditional one. */
1781 SET_SRC (PATTERN (insn
))
1782 = gen_rtx (LABEL_REF
, VOIDmode
, JUMP_LABEL (insn
));
1783 INSN_CODE (insn
) = -1;
1784 emit_barrier_after (insn
);
1785 /* Add to jump_chain unless this is a new label
1786 whose UID is too large. */
1787 if (INSN_UID (JUMP_LABEL (insn
)) < max_jump_chain
)
1789 jump_chain
[INSN_UID (insn
)]
1790 = jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
1791 jump_chain
[INSN_UID (JUMP_LABEL (insn
))] = insn
;
1798 /* Cross jumping of unconditional jumps:
1799 a few differences. */
1801 if (cross_jump
&& simplejump_p (insn
))
1803 rtx newjpos
, newlpos
;
1808 /* TARGET is nonzero if it is ok to cross jump
1809 to code before TARGET. If so, see if matches. */
1810 find_cross_jump (insn
, JUMP_LABEL (insn
), 1,
1811 &newjpos
, &newlpos
);
1813 /* If cannot cross jump to code before the label,
1814 see if we can cross jump to another jump to
1816 /* Try each other jump to this label. */
1817 if (INSN_UID (JUMP_LABEL (insn
)) < max_uid
)
1818 for (target
= jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
1819 target
!= 0 && newjpos
== 0;
1820 target
= jump_chain
[INSN_UID (target
)])
1822 && JUMP_LABEL (target
) == JUMP_LABEL (insn
)
1823 /* Ignore TARGET if it's deleted. */
1824 && ! INSN_DELETED_P (target
))
1825 find_cross_jump (insn
, target
, 2,
1826 &newjpos
, &newlpos
);
1830 do_cross_jump (insn
, newjpos
, newlpos
);
1836 /* This code was dead in the previous jump.c! */
1837 if (cross_jump
&& GET_CODE (PATTERN (insn
)) == RETURN
)
1839 /* Return insns all "jump to the same place"
1840 so we can cross-jump between any two of them. */
1842 rtx newjpos
, newlpos
, target
;
1846 /* If cannot cross jump to code before the label,
1847 see if we can cross jump to another jump to
1849 /* Try each other jump to this label. */
1850 for (target
= jump_chain
[0];
1851 target
!= 0 && newjpos
== 0;
1852 target
= jump_chain
[INSN_UID (target
)])
1854 && ! INSN_DELETED_P (target
)
1855 && GET_CODE (PATTERN (target
)) == RETURN
)
1856 find_cross_jump (insn
, target
, 2,
1857 &newjpos
, &newlpos
);
1861 do_cross_jump (insn
, newjpos
, newlpos
);
1872 /* Delete extraneous line number notes.
1873 Note that two consecutive notes for different lines are not really
1874 extraneous. There should be some indication where that line belonged,
1875 even if it became empty. */
1880 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
1881 if (GET_CODE (insn
) == NOTE
&& NOTE_LINE_NUMBER (insn
) >= 0)
1883 /* Delete this note if it is identical to previous note. */
1885 && NOTE_SOURCE_FILE (insn
) == NOTE_SOURCE_FILE (last_note
)
1886 && NOTE_LINE_NUMBER (insn
) == NOTE_LINE_NUMBER (last_note
))
1896 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
1897 If so, delete it, and record that this function can drop off the end. */
1903 /* One label can follow the end-note: the return label. */
1904 && ((GET_CODE (insn
) == CODE_LABEL
&& n_labels
-- > 0)
1905 /* Ordinary insns can follow it if returning a structure. */
1906 || GET_CODE (insn
) == INSN
1907 /* If machine uses explicit RETURN insns, no epilogue,
1908 then one of them follows the note. */
1909 || (GET_CODE (insn
) == JUMP_INSN
1910 && GET_CODE (PATTERN (insn
)) == RETURN
)
1911 /* Other kinds of notes can follow also. */
1912 || (GET_CODE (insn
) == NOTE
1913 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
)))
1914 insn
= PREV_INSN (insn
);
1917 /* Report if control can fall through at the end of the function. */
1918 if (insn
&& GET_CODE (insn
) == NOTE
1919 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_END
)
1925 /* Show JUMP_CHAIN no longer valid. */
1929 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
1930 jump. Assume that this unconditional jump is to the exit test code. If
1931 the code is sufficiently simple, make a copy of it before INSN,
1932 followed by a jump to the exit of the loop. Then delete the unconditional
1935 Note that it is possible we can get confused here if the jump immediately
1936 after the loop start branches outside the loop but within an outer loop.
1937 If we are near the exit of that loop, we will copy its exit test. This
1938 will not generate incorrect code, but could suppress some optimizations.
1939 However, such cases are degenerate loops anyway.
1941 Return 1 if we made the change, else 0.
1943 This is only safe immediately after a regscan pass because it uses the
1944 values of regno_first_uid and regno_last_uid. */
1947 duplicate_loop_exit_test (loop_start
)
1953 rtx exitcode
= NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start
)));
1955 int max_reg
= max_reg_num ();
1958 /* Scan the exit code. We do not perform this optimization if any insn:
1962 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
1963 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
1964 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
1967 Also, don't do this if the exit code is more than 20 insns. */
1969 for (insn
= exitcode
;
1971 && ! (GET_CODE (insn
) == NOTE
1972 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
);
1973 insn
= NEXT_INSN (insn
))
1975 switch (GET_CODE (insn
))
1981 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
1982 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
1983 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
1988 if (++num_insns
> 20
1989 || find_reg_note (insn
, REG_RETVAL
, NULL_RTX
)
1990 || find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
))
1996 /* Unless INSN is zero, we can do the optimization. */
2002 /* See if any insn sets a register only used in the loop exit code and
2003 not a user variable. If so, replace it with a new register. */
2004 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
2005 if (GET_CODE (insn
) == INSN
2006 && (set
= single_set (insn
)) != 0
2007 && GET_CODE (SET_DEST (set
)) == REG
2008 && REGNO (SET_DEST (set
)) >= FIRST_PSEUDO_REGISTER
2009 && regno_first_uid
[REGNO (SET_DEST (set
))] == INSN_UID (insn
))
2011 for (p
= NEXT_INSN (insn
); p
!= lastexit
; p
= NEXT_INSN (p
))
2012 if (regno_last_uid
[REGNO (SET_DEST (set
))] == INSN_UID (p
))
2017 /* We can do the replacement. Allocate reg_map if this is the
2018 first replacement we found. */
2021 reg_map
= (rtx
*) alloca (max_reg
* sizeof (rtx
));
2022 bzero (reg_map
, max_reg
* sizeof (rtx
));
2025 REG_LOOP_TEST_P (SET_DEST (set
)) = 1;
2027 reg_map
[REGNO (SET_DEST (set
))]
2028 = gen_reg_rtx (GET_MODE (SET_DEST (set
)));
2032 /* Now copy each insn. */
2033 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
2034 switch (GET_CODE (insn
))
2037 copy
= emit_barrier_before (loop_start
);
2040 /* Only copy line-number notes. */
2041 if (NOTE_LINE_NUMBER (insn
) >= 0)
2043 copy
= emit_note_before (NOTE_LINE_NUMBER (insn
), loop_start
);
2044 NOTE_SOURCE_FILE (copy
) = NOTE_SOURCE_FILE (insn
);
2049 copy
= emit_insn_before (copy_rtx (PATTERN (insn
)), loop_start
);
2051 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
2053 mark_jump_label (PATTERN (copy
), copy
, 0);
2055 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2057 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
2058 if (REG_NOTE_KIND (link
) != REG_LABEL
)
2060 = copy_rtx (gen_rtx (EXPR_LIST
, REG_NOTE_KIND (link
),
2061 XEXP (link
, 0), REG_NOTES (copy
)));
2062 if (reg_map
&& REG_NOTES (copy
))
2063 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
2067 copy
= emit_jump_insn_before (copy_rtx (PATTERN (insn
)), loop_start
);
2069 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
2070 mark_jump_label (PATTERN (copy
), copy
, 0);
2071 if (REG_NOTES (insn
))
2073 REG_NOTES (copy
) = copy_rtx (REG_NOTES (insn
));
2075 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
2078 /* If this is a simple jump, add it to the jump chain. */
2080 if (INSN_UID (copy
) < max_jump_chain
&& JUMP_LABEL (copy
)
2081 && simplejump_p (copy
))
2083 jump_chain
[INSN_UID (copy
)]
2084 = jump_chain
[INSN_UID (JUMP_LABEL (copy
))];
2085 jump_chain
[INSN_UID (JUMP_LABEL (copy
))] = copy
;
2093 /* Now clean up by emitting a jump to the end label and deleting the jump
2094 at the start of the loop. */
2095 if (GET_CODE (copy
) != BARRIER
)
2097 copy
= emit_jump_insn_before (gen_jump (get_label_after (insn
)),
2099 mark_jump_label (PATTERN (copy
), copy
, 0);
2100 if (INSN_UID (copy
) < max_jump_chain
2101 && INSN_UID (JUMP_LABEL (copy
)) < max_jump_chain
)
2103 jump_chain
[INSN_UID (copy
)]
2104 = jump_chain
[INSN_UID (JUMP_LABEL (copy
))];
2105 jump_chain
[INSN_UID (JUMP_LABEL (copy
))] = copy
;
2107 emit_barrier_before (loop_start
);
2110 delete_insn (next_nonnote_insn (loop_start
));
2112 /* Mark the exit code as the virtual top of the converted loop. */
2113 emit_note_before (NOTE_INSN_LOOP_VTOP
, exitcode
);
2118 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2119 loop-end notes between START and END out before START. Assume that
2120 END is not such a note. START may be such a note. Returns the value
2121 of the new starting insn, which may be different if the original start
2125 squeeze_notes (start
, end
)
2131 for (insn
= start
; insn
!= end
; insn
= next
)
2133 next
= NEXT_INSN (insn
);
2134 if (GET_CODE (insn
) == NOTE
2135 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
2136 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
2137 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
2138 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
2139 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
2140 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_VTOP
))
2146 rtx prev
= PREV_INSN (insn
);
2147 PREV_INSN (insn
) = PREV_INSN (start
);
2148 NEXT_INSN (insn
) = start
;
2149 NEXT_INSN (PREV_INSN (insn
)) = insn
;
2150 PREV_INSN (NEXT_INSN (insn
)) = insn
;
2151 NEXT_INSN (prev
) = next
;
2152 PREV_INSN (next
) = prev
;
2160 /* Compare the instructions before insn E1 with those before E2
2161 to find an opportunity for cross jumping.
2162 (This means detecting identical sequences of insns followed by
2163 jumps to the same place, or followed by a label and a jump
2164 to that label, and replacing one with a jump to the other.)
2166 Assume E1 is a jump that jumps to label E2
2167 (that is not always true but it might as well be).
2168 Find the longest possible equivalent sequences
2169 and store the first insns of those sequences into *F1 and *F2.
2170 Store zero there if no equivalent preceding instructions are found.
2172 We give up if we find a label in stream 1.
2173 Actually we could transfer that label into stream 2. */
2176 find_cross_jump (e1
, e2
, minimum
, f1
, f2
)
2181 register rtx i1
= e1
, i2
= e2
;
2182 register rtx p1
, p2
;
2185 rtx last1
= 0, last2
= 0;
2186 rtx afterlast1
= 0, afterlast2
= 0;
2194 i1
= prev_nonnote_insn (i1
);
2196 i2
= PREV_INSN (i2
);
2197 while (i2
&& (GET_CODE (i2
) == NOTE
|| GET_CODE (i2
) == CODE_LABEL
))
2198 i2
= PREV_INSN (i2
);
2203 /* Don't allow the range of insns preceding E1 or E2
2204 to include the other (E2 or E1). */
2205 if (i2
== e1
|| i1
== e2
)
2208 /* If we will get to this code by jumping, those jumps will be
2209 tensioned to go directly to the new label (before I2),
2210 so this cross-jumping won't cost extra. So reduce the minimum. */
2211 if (GET_CODE (i1
) == CODE_LABEL
)
2217 if (i2
== 0 || GET_CODE (i1
) != GET_CODE (i2
))
2224 /* If cross_jump_death_matters is not 0, the insn's mode
2225 indicates whether or not the insn contains any stack-like
2228 if (cross_jump_death_matters
&& GET_MODE (i1
) == QImode
)
2230 /* If register stack conversion has already been done, then
2231 death notes must also be compared before it is certain that
2232 the two instruction streams match. */
2235 HARD_REG_SET i1_regset
, i2_regset
;
2237 CLEAR_HARD_REG_SET (i1_regset
);
2238 CLEAR_HARD_REG_SET (i2_regset
);
2240 for (note
= REG_NOTES (i1
); note
; note
= XEXP (note
, 1))
2241 if (REG_NOTE_KIND (note
) == REG_DEAD
2242 && STACK_REG_P (XEXP (note
, 0)))
2243 SET_HARD_REG_BIT (i1_regset
, REGNO (XEXP (note
, 0)));
2245 for (note
= REG_NOTES (i2
); note
; note
= XEXP (note
, 1))
2246 if (REG_NOTE_KIND (note
) == REG_DEAD
2247 && STACK_REG_P (XEXP (note
, 0)))
2248 SET_HARD_REG_BIT (i2_regset
, REGNO (XEXP (note
, 0)));
2250 GO_IF_HARD_REG_EQUAL (i1_regset
, i2_regset
, done
);
2259 if (lose
|| GET_CODE (p1
) != GET_CODE (p2
)
2260 || ! rtx_renumbered_equal_p (p1
, p2
))
2262 /* The following code helps take care of G++ cleanups. */
2266 if (!lose
&& GET_CODE (p1
) == GET_CODE (p2
)
2267 && ((equiv1
= find_reg_note (i1
, REG_EQUAL
, NULL_RTX
)) != 0
2268 || (equiv1
= find_reg_note (i1
, REG_EQUIV
, NULL_RTX
)) != 0)
2269 && ((equiv2
= find_reg_note (i2
, REG_EQUAL
, NULL_RTX
)) != 0
2270 || (equiv2
= find_reg_note (i2
, REG_EQUIV
, NULL_RTX
)) != 0)
2271 /* If the equivalences are not to a constant, they may
2272 reference pseudos that no longer exist, so we can't
2274 && CONSTANT_P (XEXP (equiv1
, 0))
2275 && rtx_equal_p (XEXP (equiv1
, 0), XEXP (equiv2
, 0)))
2277 rtx s1
= single_set (i1
);
2278 rtx s2
= single_set (i2
);
2279 if (s1
!= 0 && s2
!= 0
2280 && rtx_renumbered_equal_p (SET_DEST (s1
), SET_DEST (s2
)))
2282 validate_change (i1
, &SET_SRC (s1
), XEXP (equiv1
, 0), 1);
2283 validate_change (i2
, &SET_SRC (s2
), XEXP (equiv2
, 0), 1);
2284 if (! rtx_renumbered_equal_p (p1
, p2
))
2286 else if (apply_change_group ())
2291 /* Insns fail to match; cross jumping is limited to the following
2295 /* Don't allow the insn after a compare to be shared by
2296 cross-jumping unless the compare is also shared.
2297 Here, if either of these non-matching insns is a compare,
2298 exclude the following insn from possible cross-jumping. */
2299 if (sets_cc0_p (p1
) || sets_cc0_p (p2
))
2300 last1
= afterlast1
, last2
= afterlast2
, ++minimum
;
2303 /* If cross-jumping here will feed a jump-around-jump
2304 optimization, this jump won't cost extra, so reduce
2306 if (GET_CODE (i1
) == JUMP_INSN
2308 && prev_real_insn (JUMP_LABEL (i1
)) == e1
)
2314 if (GET_CODE (p1
) != USE
&& GET_CODE (p1
) != CLOBBER
)
2316 /* Ok, this insn is potentially includable in a cross-jump here. */
2317 afterlast1
= last1
, afterlast2
= last2
;
2318 last1
= i1
, last2
= i2
, --minimum
;
2322 /* We have to be careful that we do not cross-jump into the middle of
2323 USE-CALL_INSN-CLOBBER sequence. This sequence is used instead of
2324 putting the USE and CLOBBERs inside the CALL_INSN. The delay slot
2325 scheduler needs to know what registers are used and modified by the
2326 CALL_INSN and needs the adjacent USE and CLOBBERs to do so.
2328 ??? At some point we should probably change this so that these are
2329 part of the CALL_INSN. The way we are doing it now is a kludge that
2330 is now causing trouble. */
2332 if (last1
!= 0 && GET_CODE (last1
) == CALL_INSN
2333 && (prev1
= prev_nonnote_insn (last1
))
2334 && GET_CODE (prev1
) == INSN
2335 && GET_CODE (PATTERN (prev1
)) == USE
)
2337 /* Remove this CALL_INSN from the range we can cross-jump. */
2338 last1
= next_real_insn (last1
);
2339 last2
= next_real_insn (last2
);
2344 /* Skip past CLOBBERS since they may be right after a CALL_INSN. It
2345 isn't worth checking for the CALL_INSN. */
2346 while (last1
!= 0 && GET_CODE (PATTERN (last1
)) == CLOBBER
)
2347 last1
= next_real_insn (last1
), last2
= next_real_insn (last2
);
2349 if (minimum
<= 0 && last1
!= 0 && last1
!= e1
)
2350 *f1
= last1
, *f2
= last2
;
2354 do_cross_jump (insn
, newjpos
, newlpos
)
2355 rtx insn
, newjpos
, newlpos
;
2357 /* Find an existing label at this point
2358 or make a new one if there is none. */
2359 register rtx label
= get_label_before (newlpos
);
2361 /* Make the same jump insn jump to the new point. */
2362 if (GET_CODE (PATTERN (insn
)) == RETURN
)
2364 /* Remove from jump chain of returns. */
2365 delete_from_jump_chain (insn
);
2366 /* Change the insn. */
2367 PATTERN (insn
) = gen_jump (label
);
2368 INSN_CODE (insn
) = -1;
2369 JUMP_LABEL (insn
) = label
;
2370 LABEL_NUSES (label
)++;
2371 /* Add to new the jump chain. */
2372 if (INSN_UID (label
) < max_jump_chain
2373 && INSN_UID (insn
) < max_jump_chain
)
2375 jump_chain
[INSN_UID (insn
)] = jump_chain
[INSN_UID (label
)];
2376 jump_chain
[INSN_UID (label
)] = insn
;
2380 redirect_jump (insn
, label
);
2382 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
2383 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
2384 the NEWJPOS stream. */
2386 while (newjpos
!= insn
)
2390 for (lnote
= REG_NOTES (newlpos
); lnote
; lnote
= XEXP (lnote
, 1))
2391 if ((REG_NOTE_KIND (lnote
) == REG_EQUAL
2392 || REG_NOTE_KIND (lnote
) == REG_EQUIV
)
2393 && ! find_reg_note (newjpos
, REG_EQUAL
, XEXP (lnote
, 0))
2394 && ! find_reg_note (newjpos
, REG_EQUIV
, XEXP (lnote
, 0)))
2395 remove_note (newlpos
, lnote
);
2397 delete_insn (newjpos
);
2398 newjpos
= next_real_insn (newjpos
);
2399 newlpos
= next_real_insn (newlpos
);
2403 /* Return the label before INSN, or put a new label there. */
2406 get_label_before (insn
)
2411 /* Find an existing label at this point
2412 or make a new one if there is none. */
2413 label
= prev_nonnote_insn (insn
);
2415 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
2417 rtx prev
= PREV_INSN (insn
);
2419 /* Don't put a label between a CALL_INSN and USE insns that precede
2422 if (GET_CODE (insn
) == CALL_INSN
2423 || (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
2424 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == CALL_INSN
))
2425 while (GET_CODE (prev
) == INSN
&& GET_CODE (PATTERN (prev
)) == USE
)
2426 prev
= PREV_INSN (prev
);
2428 label
= gen_label_rtx ();
2429 emit_label_after (label
, prev
);
2430 LABEL_NUSES (label
) = 0;
2435 /* Return the label after INSN, or put a new label there. */
2438 get_label_after (insn
)
2443 /* Find an existing label at this point
2444 or make a new one if there is none. */
2445 label
= next_nonnote_insn (insn
);
2447 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
2449 /* Don't put a label between a CALL_INSN and CLOBBER insns
2452 if (GET_CODE (insn
) == CALL_INSN
2453 || (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
2454 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == CALL_INSN
))
2455 while (GET_CODE (NEXT_INSN (insn
)) == INSN
2456 && GET_CODE (PATTERN (NEXT_INSN (insn
))) == CLOBBER
)
2457 insn
= NEXT_INSN (insn
);
2459 label
= gen_label_rtx ();
2460 emit_label_after (label
, insn
);
2461 LABEL_NUSES (label
) = 0;
2466 /* Return 1 if INSN is a jump that jumps to right after TARGET
2467 only on the condition that TARGET itself would drop through.
2468 Assumes that TARGET is a conditional jump. */
2471 jump_back_p (insn
, target
)
2475 enum rtx_code codei
, codet
;
2477 if (simplejump_p (insn
) || ! condjump_p (insn
)
2478 || simplejump_p (target
)
2479 || target
!= prev_real_insn (JUMP_LABEL (insn
)))
2482 cinsn
= XEXP (SET_SRC (PATTERN (insn
)), 0);
2483 ctarget
= XEXP (SET_SRC (PATTERN (target
)), 0);
2485 codei
= GET_CODE (cinsn
);
2486 codet
= GET_CODE (ctarget
);
2488 if (XEXP (SET_SRC (PATTERN (insn
)), 1) == pc_rtx
)
2490 if (! can_reverse_comparison_p (cinsn
, insn
))
2492 codei
= reverse_condition (codei
);
2495 if (XEXP (SET_SRC (PATTERN (target
)), 2) == pc_rtx
)
2497 if (! can_reverse_comparison_p (ctarget
, target
))
2499 codet
= reverse_condition (codet
);
2502 return (codei
== codet
2503 && rtx_renumbered_equal_p (XEXP (cinsn
, 0), XEXP (ctarget
, 0))
2504 && rtx_renumbered_equal_p (XEXP (cinsn
, 1), XEXP (ctarget
, 1)));
2507 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
2508 return non-zero if it is safe to reverse this comparison. It is if our
2509 floating-point is not IEEE, if this is an NE or EQ comparison, or if
2510 this is known to be an integer comparison. */
2513 can_reverse_comparison_p (comparison
, insn
)
2519 /* If this is not actually a comparison, we can't reverse it. */
2520 if (GET_RTX_CLASS (GET_CODE (comparison
)) != '<')
2523 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
2524 /* If this is an NE comparison, it is safe to reverse it to an EQ
2525 comparison and vice versa, even for floating point. If no operands
2526 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
2527 always false and NE is always true, so the reversal is also valid. */
2528 || GET_CODE (comparison
) == NE
2529 || GET_CODE (comparison
) == EQ
)
2532 arg0
= XEXP (comparison
, 0);
2534 /* Make sure ARG0 is one of the actual objects being compared. If we
2535 can't do this, we can't be sure the comparison can be reversed.
2537 Handle cc0 and a MODE_CC register. */
2538 if ((GET_CODE (arg0
) == REG
&& GET_MODE_CLASS (GET_MODE (arg0
)) == MODE_CC
)
2544 rtx prev
= prev_nonnote_insn (insn
);
2545 rtx set
= single_set (prev
);
2547 if (set
== 0 || SET_DEST (set
) != arg0
)
2550 arg0
= SET_SRC (set
);
2552 if (GET_CODE (arg0
) == COMPARE
)
2553 arg0
= XEXP (arg0
, 0);
2556 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
2557 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
2558 return (GET_CODE (arg0
) == CONST_INT
2559 || (GET_MODE (arg0
) != VOIDmode
2560 && GET_MODE_CLASS (GET_MODE (arg0
)) != MODE_CC
2561 && GET_MODE_CLASS (GET_MODE (arg0
)) != MODE_FLOAT
));
2564 /* Given an rtx-code for a comparison, return the code
2565 for the negated comparison.
2566 WATCH OUT! reverse_condition is not safe to use on a jump
2567 that might be acting on the results of an IEEE floating point comparison,
2568 because of the special treatment of non-signaling nans in comparisons.
2569 Use can_reverse_comparison_p to be sure. */
2572 reverse_condition (code
)
2613 /* Similar, but return the code when two operands of a comparison are swapped.
2614 This IS safe for IEEE floating-point. */
2617 swap_condition (code
)
2656 /* Given a comparison CODE, return the corresponding unsigned comparison.
2657 If CODE is an equality comparison or already an unsigned comparison,
2658 CODE is returned. */
2661 unsigned_condition (code
)
2691 /* Similarly, return the signed version of a comparison. */
2694 signed_condition (code
)
2724 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2725 truth of CODE1 implies the truth of CODE2. */
2728 comparison_dominates_p (code1
, code2
)
2729 enum rtx_code code1
, code2
;
2737 if (code2
== LE
|| code2
== LEU
|| code2
== GE
|| code2
== GEU
)
2765 /* Return 1 if INSN is an unconditional jump and nothing else. */
2771 return (GET_CODE (insn
) == JUMP_INSN
2772 && GET_CODE (PATTERN (insn
)) == SET
2773 && GET_CODE (SET_DEST (PATTERN (insn
))) == PC
2774 && GET_CODE (SET_SRC (PATTERN (insn
))) == LABEL_REF
);
2777 /* Return nonzero if INSN is a (possibly) conditional jump
2778 and nothing more. */
2784 register rtx x
= PATTERN (insn
);
2785 if (GET_CODE (x
) != SET
)
2787 if (GET_CODE (SET_DEST (x
)) != PC
)
2789 if (GET_CODE (SET_SRC (x
)) == LABEL_REF
)
2791 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
2793 if (XEXP (SET_SRC (x
), 2) == pc_rtx
2794 && (GET_CODE (XEXP (SET_SRC (x
), 1)) == LABEL_REF
2795 || GET_CODE (XEXP (SET_SRC (x
), 1)) == RETURN
))
2797 if (XEXP (SET_SRC (x
), 1) == pc_rtx
2798 && (GET_CODE (XEXP (SET_SRC (x
), 2)) == LABEL_REF
2799 || GET_CODE (XEXP (SET_SRC (x
), 2)) == RETURN
))
2804 /* Return 1 if X is an RTX that does nothing but set the condition codes
2805 and CLOBBER or USE registers.
2806 Return -1 if X does explicitly set the condition codes,
2807 but also does other things. */
2814 if (GET_CODE (x
) == SET
&& SET_DEST (x
) == cc0_rtx
)
2816 if (GET_CODE (x
) == PARALLEL
)
2820 int other_things
= 0;
2821 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
2823 if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
2824 && SET_DEST (XVECEXP (x
, 0, i
)) == cc0_rtx
)
2826 else if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
)
2829 return ! sets_cc0
? 0 : other_things
? -1 : 1;
2837 /* Follow any unconditional jump at LABEL;
2838 return the ultimate label reached by any such chain of jumps.
2839 If LABEL is not followed by a jump, return LABEL.
2840 If the chain loops or we can't find end, return LABEL,
2841 since that tells caller to avoid changing the insn.
2843 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2844 a USE or CLOBBER. */
2847 follow_jumps (label
)
2852 register rtx value
= label
;
2857 && (insn
= next_active_insn (value
)) != 0
2858 && GET_CODE (insn
) == JUMP_INSN
2859 && (JUMP_LABEL (insn
) != 0 || GET_CODE (PATTERN (insn
)) == RETURN
)
2860 && (next
= NEXT_INSN (insn
))
2861 && GET_CODE (next
) == BARRIER
);
2864 /* Don't chain through the insn that jumps into a loop
2865 from outside the loop,
2866 since that would create multiple loop entry jumps
2867 and prevent loop optimization. */
2869 if (!reload_completed
)
2870 for (tem
= value
; tem
!= insn
; tem
= NEXT_INSN (tem
))
2871 if (GET_CODE (tem
) == NOTE
2872 && NOTE_LINE_NUMBER (tem
) == NOTE_INSN_LOOP_BEG
)
2875 /* If we have found a cycle, make the insn jump to itself. */
2876 if (JUMP_LABEL (insn
) == label
)
2878 value
= JUMP_LABEL (insn
);
2885 /* Assuming that field IDX of X is a vector of label_refs,
2886 replace each of them by the ultimate label reached by it.
2887 Return nonzero if a change is made.
2888 If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG. */
2891 tension_vector_labels (x
, idx
)
2897 for (i
= XVECLEN (x
, idx
) - 1; i
>= 0; i
--)
2899 register rtx olabel
= XEXP (XVECEXP (x
, idx
, i
), 0);
2900 register rtx nlabel
= follow_jumps (olabel
);
2901 if (nlabel
&& nlabel
!= olabel
)
2903 XEXP (XVECEXP (x
, idx
, i
), 0) = nlabel
;
2904 ++LABEL_NUSES (nlabel
);
2905 if (--LABEL_NUSES (olabel
) == 0)
2906 delete_insn (olabel
);
2913 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2914 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2915 in INSN, then store one of them in JUMP_LABEL (INSN).
2916 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2917 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2918 Also, when there are consecutive labels, canonicalize on the last of them.
2920 Note that two labels separated by a loop-beginning note
2921 must be kept distinct if we have not yet done loop-optimization,
2922 because the gap between them is where loop-optimize
2923 will want to move invariant code to. CROSS_JUMP tells us
2924 that loop-optimization is done with.
2926 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2927 two labels distinct if they are separated by only USE or CLOBBER insns. */
2930 mark_jump_label (x
, insn
, cross_jump
)
2935 register RTX_CODE code
= GET_CODE (x
);
2953 /* If this is a constant-pool reference, see if it is a label. */
2954 if (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
2955 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)))
2956 mark_jump_label (get_pool_constant (XEXP (x
, 0)), insn
, cross_jump
);
2961 register rtx label
= XEXP (x
, 0);
2963 if (GET_CODE (label
) != CODE_LABEL
)
2965 /* Ignore references to labels of containing functions. */
2966 if (LABEL_REF_NONLOCAL_P (x
))
2968 /* If there are other labels following this one,
2969 replace it with the last of the consecutive labels. */
2970 for (next
= NEXT_INSN (label
); next
; next
= NEXT_INSN (next
))
2972 if (GET_CODE (next
) == CODE_LABEL
)
2974 else if (cross_jump
&& GET_CODE (next
) == INSN
2975 && (GET_CODE (PATTERN (next
)) == USE
2976 || GET_CODE (PATTERN (next
)) == CLOBBER
))
2978 else if (GET_CODE (next
) != NOTE
)
2980 else if (! cross_jump
2981 && (NOTE_LINE_NUMBER (next
) == NOTE_INSN_LOOP_BEG
2982 || NOTE_LINE_NUMBER (next
) == NOTE_INSN_FUNCTION_END
))
2985 XEXP (x
, 0) = label
;
2986 ++LABEL_NUSES (label
);
2989 if (GET_CODE (insn
) == JUMP_INSN
)
2990 JUMP_LABEL (insn
) = label
;
2991 else if (! find_reg_note (insn
, REG_LABEL
, label
))
2993 rtx next
= next_real_insn (label
);
2994 /* Don't record labels that refer to dispatch tables.
2995 This is not necessary, since the tablejump
2996 references the same label.
2997 And if we did record them, flow.c would make worse code. */
2999 || ! (GET_CODE (next
) == JUMP_INSN
3000 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
3001 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
)))
3003 REG_NOTES (insn
) = gen_rtx (EXPR_LIST
, REG_LABEL
, label
,
3005 /* Record in the note whether label is nonlocal. */
3006 LABEL_REF_NONLOCAL_P (REG_NOTES (insn
))
3007 = LABEL_REF_NONLOCAL_P (x
);
3014 /* Do walk the labels in a vector, but not the first operand of an
3015 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3019 int eltnum
= code
== ADDR_DIFF_VEC
? 1 : 0;
3021 for (i
= 0; i
< XVECLEN (x
, eltnum
); i
++)
3022 mark_jump_label (XVECEXP (x
, eltnum
, i
), NULL_RTX
, cross_jump
);
3027 fmt
= GET_RTX_FORMAT (code
);
3028 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3031 mark_jump_label (XEXP (x
, i
), insn
, cross_jump
);
3032 else if (fmt
[i
] == 'E')
3035 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3036 mark_jump_label (XVECEXP (x
, i
, j
), insn
, cross_jump
);
3041 /* If all INSN does is set the pc, delete it,
3042 and delete the insn that set the condition codes for it
3043 if that's what the previous thing was. */
3049 register rtx set
= single_set (insn
);
3051 if (set
&& GET_CODE (SET_DEST (set
)) == PC
)
3052 delete_computation (insn
);
3055 /* Delete INSN and recursively delete insns that compute values used only
3056 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3057 If we are running before flow.c, we need do nothing since flow.c will
3058 delete dead code. We also can't know if the registers being used are
3059 dead or not at this point.
3061 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3062 nothing other than set a register that dies in this insn, we can delete
3065 On machines with CC0, if CC0 is used in this insn, we may be able to
3066 delete the insn that set it. */
3069 delete_computation (insn
)
3075 if (reg_referenced_p (cc0_rtx
, PATTERN (insn
)))
3077 rtx prev
= prev_nonnote_insn (insn
);
3078 /* We assume that at this stage
3079 CC's are always set explicitly
3080 and always immediately before the jump that
3081 will use them. So if the previous insn
3082 exists to set the CC's, delete it
3083 (unless it performs auto-increments, etc.). */
3084 if (prev
&& GET_CODE (prev
) == INSN
3085 && sets_cc0_p (PATTERN (prev
)))
3087 if (sets_cc0_p (PATTERN (prev
)) > 0
3088 && !FIND_REG_INC_NOTE (prev
, NULL_RTX
))
3089 delete_computation (prev
);
3091 /* Otherwise, show that cc0 won't be used. */
3092 REG_NOTES (prev
) = gen_rtx (EXPR_LIST
, REG_UNUSED
,
3093 cc0_rtx
, REG_NOTES (prev
));
3098 for (note
= REG_NOTES (insn
); note
; note
= next
)
3102 next
= XEXP (note
, 1);
3104 if (REG_NOTE_KIND (note
) != REG_DEAD
3105 /* Verify that the REG_NOTE is legitimate. */
3106 || GET_CODE (XEXP (note
, 0)) != REG
)
3109 for (our_prev
= prev_nonnote_insn (insn
);
3110 our_prev
&& GET_CODE (our_prev
) == INSN
;
3111 our_prev
= prev_nonnote_insn (our_prev
))
3113 /* If we reach a SEQUENCE, it is too complex to try to
3114 do anything with it, so give up. */
3115 if (GET_CODE (PATTERN (our_prev
)) == SEQUENCE
)
3118 if (GET_CODE (PATTERN (our_prev
)) == USE
3119 && GET_CODE (XEXP (PATTERN (our_prev
), 0)) == INSN
)
3120 /* reorg creates USEs that look like this. We leave them
3121 alone because reorg needs them for its own purposes. */
3124 if (reg_set_p (XEXP (note
, 0), PATTERN (our_prev
)))
3126 if (FIND_REG_INC_NOTE (our_prev
, NULL_RTX
))
3129 if (GET_CODE (PATTERN (our_prev
)) == PARALLEL
)
3131 /* If we find a SET of something else, we can't
3136 for (i
= 0; i
< XVECLEN (PATTERN (our_prev
), 0); i
++)
3138 rtx part
= XVECEXP (PATTERN (our_prev
), 0, i
);
3140 if (GET_CODE (part
) == SET
3141 && SET_DEST (part
) != XEXP (note
, 0))
3145 if (i
== XVECLEN (PATTERN (our_prev
), 0))
3146 delete_computation (our_prev
);
3148 else if (GET_CODE (PATTERN (our_prev
)) == SET
3149 && SET_DEST (PATTERN (our_prev
)) == XEXP (note
, 0))
3150 delete_computation (our_prev
);
3155 /* If OUR_PREV references the register that dies here, it is an
3156 additional use. Hence any prior SET isn't dead. However, this
3157 insn becomes the new place for the REG_DEAD note. */
3158 if (reg_overlap_mentioned_p (XEXP (note
, 0),
3159 PATTERN (our_prev
)))
3161 XEXP (note
, 1) = REG_NOTES (our_prev
);
3162 REG_NOTES (our_prev
) = note
;
3171 /* Delete insn INSN from the chain of insns and update label ref counts.
3172 May delete some following insns as a consequence; may even delete
3173 a label elsewhere and insns that follow it.
3175 Returns the first insn after INSN that was not deleted. */
3181 register rtx next
= NEXT_INSN (insn
);
3182 register rtx prev
= PREV_INSN (insn
);
3183 register int was_code_label
= (GET_CODE (insn
) == CODE_LABEL
);
3184 register int dont_really_delete
= 0;
3186 while (next
&& INSN_DELETED_P (next
))
3187 next
= NEXT_INSN (next
);
3189 /* This insn is already deleted => return first following nondeleted. */
3190 if (INSN_DELETED_P (insn
))
3193 /* Don't delete user-declared labels. Convert them to special NOTEs
3195 if (was_code_label
&& LABEL_NAME (insn
) != 0
3196 && optimize
&& ! dont_really_delete
)
3198 PUT_CODE (insn
, NOTE
);
3199 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED_LABEL
;
3200 NOTE_SOURCE_FILE (insn
) = 0;
3201 dont_really_delete
= 1;
3204 /* Mark this insn as deleted. */
3205 INSN_DELETED_P (insn
) = 1;
3207 /* If this is an unconditional jump, delete it from the jump chain. */
3208 if (simplejump_p (insn
))
3209 delete_from_jump_chain (insn
);
3211 /* If instruction is followed by a barrier,
3212 delete the barrier too. */
3214 if (next
!= 0 && GET_CODE (next
) == BARRIER
)
3216 INSN_DELETED_P (next
) = 1;
3217 next
= NEXT_INSN (next
);
3220 /* Patch out INSN (and the barrier if any) */
3222 if (optimize
&& ! dont_really_delete
)
3226 NEXT_INSN (prev
) = next
;
3227 if (GET_CODE (prev
) == INSN
&& GET_CODE (PATTERN (prev
)) == SEQUENCE
)
3228 NEXT_INSN (XVECEXP (PATTERN (prev
), 0,
3229 XVECLEN (PATTERN (prev
), 0) - 1)) = next
;
3234 PREV_INSN (next
) = prev
;
3235 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == SEQUENCE
)
3236 PREV_INSN (XVECEXP (PATTERN (next
), 0, 0)) = prev
;
3239 if (prev
&& NEXT_INSN (prev
) == 0)
3240 set_last_insn (prev
);
3243 /* If deleting a jump, decrement the count of the label,
3244 and delete the label if it is now unused. */
3246 if (GET_CODE (insn
) == JUMP_INSN
&& JUMP_LABEL (insn
))
3247 if (--LABEL_NUSES (JUMP_LABEL (insn
)) == 0)
3249 /* This can delete NEXT or PREV,
3250 either directly if NEXT is JUMP_LABEL (INSN),
3251 or indirectly through more levels of jumps. */
3252 delete_insn (JUMP_LABEL (insn
));
3253 /* I feel a little doubtful about this loop,
3254 but I see no clean and sure alternative way
3255 to find the first insn after INSN that is not now deleted.
3256 I hope this works. */
3257 while (next
&& INSN_DELETED_P (next
))
3258 next
= NEXT_INSN (next
);
3262 while (prev
&& (INSN_DELETED_P (prev
) || GET_CODE (prev
) == NOTE
))
3263 prev
= PREV_INSN (prev
);
3265 /* If INSN was a label and a dispatch table follows it,
3266 delete the dispatch table. The tablejump must have gone already.
3267 It isn't useful to fall through into a table. */
3270 && NEXT_INSN (insn
) != 0
3271 && GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
3272 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
3273 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
3274 next
= delete_insn (NEXT_INSN (insn
));
3276 /* If INSN was a label, delete insns following it if now unreachable. */
3278 if (was_code_label
&& prev
&& GET_CODE (prev
) == BARRIER
)
3280 register RTX_CODE code
;
3282 && ((code
= GET_CODE (next
)) == INSN
3283 || code
== JUMP_INSN
|| code
== CALL_INSN
3285 || (code
== CODE_LABEL
&& INSN_DELETED_P (next
))))
3288 && NOTE_LINE_NUMBER (next
) != NOTE_INSN_FUNCTION_END
)
3289 next
= NEXT_INSN (next
);
3290 /* Keep going past other deleted labels to delete what follows. */
3291 else if (code
== CODE_LABEL
&& INSN_DELETED_P (next
))
3292 next
= NEXT_INSN (next
);
3294 /* Note: if this deletes a jump, it can cause more
3295 deletion of unreachable code, after a different label.
3296 As long as the value from this recursive call is correct,
3297 this invocation functions correctly. */
3298 next
= delete_insn (next
);
3305 /* Advance from INSN till reaching something not deleted
3306 then return that. May return INSN itself. */
3309 next_nondeleted_insn (insn
)
3312 while (INSN_DELETED_P (insn
))
3313 insn
= NEXT_INSN (insn
);
3317 /* Delete a range of insns from FROM to TO, inclusive.
3318 This is for the sake of peephole optimization, so assume
3319 that whatever these insns do will still be done by a new
3320 peephole insn that will replace them. */
3323 delete_for_peephole (from
, to
)
3324 register rtx from
, to
;
3326 register rtx insn
= from
;
3330 register rtx next
= NEXT_INSN (insn
);
3331 register rtx prev
= PREV_INSN (insn
);
3333 if (GET_CODE (insn
) != NOTE
)
3335 INSN_DELETED_P (insn
) = 1;
3337 /* Patch this insn out of the chain. */
3338 /* We don't do this all at once, because we
3339 must preserve all NOTEs. */
3341 NEXT_INSN (prev
) = next
;
3344 PREV_INSN (next
) = prev
;
3352 /* Note that if TO is an unconditional jump
3353 we *do not* delete the BARRIER that follows,
3354 since the peephole that replaces this sequence
3355 is also an unconditional jump in that case. */
3358 /* Invert the condition of the jump JUMP, and make it jump
3359 to label NLABEL instead of where it jumps now. */
3362 invert_jump (jump
, nlabel
)
3365 register rtx olabel
= JUMP_LABEL (jump
);
3367 /* We have to either invert the condition and change the label or
3368 do neither. Either operation could fail. We first try to invert
3369 the jump. If that succeeds, we try changing the label. If that fails,
3370 we invert the jump back to what it was. */
3372 if (! invert_exp (PATTERN (jump
), jump
))
3375 if (redirect_jump (jump
, nlabel
))
3378 if (! invert_exp (PATTERN (jump
), jump
))
3379 /* This should just be putting it back the way it was. */
3385 /* Invert the jump condition of rtx X contained in jump insn, INSN.
3387 Return 1 if we can do so, 0 if we cannot find a way to do so that
3388 matches a pattern. */
3391 invert_exp (x
, insn
)
3395 register RTX_CODE code
;
3399 code
= GET_CODE (x
);
3401 if (code
== IF_THEN_ELSE
)
3403 register rtx comp
= XEXP (x
, 0);
3406 /* We can do this in two ways: The preferable way, which can only
3407 be done if this is not an integer comparison, is to reverse
3408 the comparison code. Otherwise, swap the THEN-part and ELSE-part
3409 of the IF_THEN_ELSE. If we can't do either, fail. */
3411 if (can_reverse_comparison_p (comp
, insn
)
3412 && validate_change (insn
, &XEXP (x
, 0),
3413 gen_rtx (reverse_condition (GET_CODE (comp
)),
3414 GET_MODE (comp
), XEXP (comp
, 0),
3415 XEXP (comp
, 1)), 0))
3419 validate_change (insn
, &XEXP (x
, 1), XEXP (x
, 2), 1);
3420 validate_change (insn
, &XEXP (x
, 2), tem
, 1);
3421 return apply_change_group ();
3424 fmt
= GET_RTX_FORMAT (code
);
3425 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3428 if (! invert_exp (XEXP (x
, i
), insn
))
3433 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3434 if (!invert_exp (XVECEXP (x
, i
, j
), insn
))
3442 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
3443 If the old jump target label is unused as a result,
3444 it and the code following it may be deleted.
3446 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3449 The return value will be 1 if the change was made, 0 if it wasn't (this
3450 can only occur for NLABEL == 0). */
3453 redirect_jump (jump
, nlabel
)
3456 register rtx olabel
= JUMP_LABEL (jump
);
3458 if (nlabel
== olabel
)
3461 if (! redirect_exp (&PATTERN (jump
), olabel
, nlabel
, jump
))
3464 /* If this is an unconditional branch, delete it from the jump_chain of
3465 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3466 have UID's in range and JUMP_CHAIN is valid). */
3467 if (jump_chain
&& (simplejump_p (jump
)
3468 || GET_CODE (PATTERN (jump
)) == RETURN
))
3470 int label_index
= nlabel
? INSN_UID (nlabel
) : 0;
3472 delete_from_jump_chain (jump
);
3473 if (label_index
< max_jump_chain
3474 && INSN_UID (jump
) < max_jump_chain
)
3476 jump_chain
[INSN_UID (jump
)] = jump_chain
[label_index
];
3477 jump_chain
[label_index
] = jump
;
3481 JUMP_LABEL (jump
) = nlabel
;
3483 ++LABEL_NUSES (nlabel
);
3485 if (olabel
&& --LABEL_NUSES (olabel
) == 0)
3486 delete_insn (olabel
);
3491 /* Delete the instruction JUMP from any jump chain it might be on. */
3494 delete_from_jump_chain (jump
)
3498 rtx olabel
= JUMP_LABEL (jump
);
3500 /* Handle unconditional jumps. */
3501 if (jump_chain
&& olabel
!= 0
3502 && INSN_UID (olabel
) < max_jump_chain
3503 && simplejump_p (jump
))
3504 index
= INSN_UID (olabel
);
3505 /* Handle return insns. */
3506 else if (jump_chain
&& GET_CODE (PATTERN (jump
)) == RETURN
)
3510 if (jump_chain
[index
] == jump
)
3511 jump_chain
[index
] = jump_chain
[INSN_UID (jump
)];
3516 for (insn
= jump_chain
[index
];
3518 insn
= jump_chain
[INSN_UID (insn
)])
3519 if (jump_chain
[INSN_UID (insn
)] == jump
)
3521 jump_chain
[INSN_UID (insn
)] = jump_chain
[INSN_UID (jump
)];
3527 /* If NLABEL is nonzero, throughout the rtx at LOC,
3528 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
3529 zero, alter (RETURN) to (LABEL_REF NLABEL).
3531 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
3532 validity with validate_change. Convert (set (pc) (label_ref olabel))
3535 Return 0 if we found a change we would like to make but it is invalid.
3536 Otherwise, return 1. */
3539 redirect_exp (loc
, olabel
, nlabel
, insn
)
/* X is the expression being rewritten; CODE caches its rtx code.
   NOTE(review): declarations of i/j/fmt and several braces/returns were
   lost in extraction.  */
3544 register rtx x
= *loc
;
3545 register RTX_CODE code
= GET_CODE (x
);
/* A LABEL_REF of OLABEL either has its target swapped to NLABEL, or
   (when NLABEL is zero) the whole ref becomes a validated RETURN.  */
3549 if (code
== LABEL_REF
)
3551 if (XEXP (x
, 0) == olabel
)
3554 XEXP (x
, 0) = nlabel
;
3556 return validate_change (insn
, loc
, gen_rtx (RETURN
, VOIDmode
), 0);
/* Conversely, a bare RETURN becomes a reference to NLABEL; if it is
   the whole pattern, wrap it in (set (pc) ...) to stay well-formed.  */
3560 else if (code
== RETURN
&& olabel
== 0)
3562 x
= gen_rtx (LABEL_REF
, VOIDmode
, nlabel
);
3563 if (loc
== &PATTERN (insn
))
3564 x
= gen_rtx (SET
, VOIDmode
, pc_rtx
, x
);
3565 return validate_change (insn
, loc
, x
, 0);
/* (set (pc) (label_ref OLABEL)) with NLABEL zero turns into (return).  */
3568 if (code
== SET
&& nlabel
== 0 && SET_DEST (x
) == pc_rtx
3569 && GET_CODE (SET_SRC (x
)) == LABEL_REF
3570 && XEXP (SET_SRC (x
), 0) == olabel
)
3571 return validate_change (insn
, loc
, gen_rtx (RETURN
, VOIDmode
), 0);
/* Otherwise recurse over all sub-expressions, failing if any
   sub-rewrite fails.  */
3573 fmt
= GET_RTX_FORMAT (code
);
3574 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3577 if (! redirect_exp (&XEXP (x
, i
), olabel
, nlabel
, insn
))
/* Vector operands ('E' format) are walked element by element.  */
3582 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3583 if (! redirect_exp (&XVECEXP (x
, i
, j
), olabel
, nlabel
, insn
))
3591 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3593 If the old jump target label (before the dispatch table) becomes unused,
3594 it and the dispatch table may be deleted. In that case, find the insn
3595 before the jump references that label and delete it and logical successors
3599 redirect_tablejump (jump
, nlabel
)
3602 register rtx olabel
= JUMP_LABEL (jump
);
3604 /* Add this jump to the jump_chain of NLABEL. */
3605 if (jump_chain
&& INSN_UID (nlabel
) < max_jump_chain
3606 && INSN_UID (jump
) < max_jump_chain
)
3608 jump_chain
[INSN_UID (jump
)] = jump_chain
[INSN_UID (nlabel
)];
3609 jump_chain
[INSN_UID (nlabel
)] = jump
;
/* Replace the tablejump pattern with a simple jump to NLABEL and force
   re-recognition (INSN_CODE = -1) since the pattern has changed.  */
3612 PATTERN (jump
) = gen_jump (nlabel
);
3613 JUMP_LABEL (jump
) = nlabel
;
3614 ++LABEL_NUSES (nlabel
);
3615 INSN_CODE (jump
) = -1;
/* If the old label is now unused, delete the insn that references it
   (via delete_labelref_insn) and then the label itself.  */
3617 if (--LABEL_NUSES (olabel
) == 0)
3619 delete_labelref_insn (jump
, olabel
, 0);
3620 delete_insn (olabel
);
3624 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3625 If we found one, delete it and then delete this insn if DELETE_THIS is
3626 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3629 delete_labelref_insn (insn
, label
, delete_this
)
/* A non-NOTE insn whose pattern mentions LABEL is the reference sought
   (reg_mentioned_p is used here to search for the label rtx).
   NOTE(review): the deletion/return statements between these two tests
   were lost in extraction.  */
3636 if (GET_CODE (insn
) != NOTE
3637 && reg_mentioned_p (label
, PATTERN (insn
)))
/* Otherwise recurse through LOG_LINKS (logical predecessors), asking
   each predecessor to delete itself if it references LABEL.  */
3648 for (link
= LOG_LINKS (insn
); link
; link
= XEXP (link
, 1))
3649 if (delete_labelref_insn (XEXP (link
, 0), label
, 1))
3663 /* Like rtx_equal_p except that it considers two REGs as equal
3664 if they renumber to the same value. */
3667 rtx_renumbered_equal_p (x
, y
)
3671 register RTX_CODE code
= GET_CODE (x
);
/* REGs (and SUBREGs of REGs) compare by their renumbered (post-reload)
   register numbers rather than by rtx identity.  */
3676 if ((code
== REG
|| (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == REG
))
3677 && (GET_CODE (y
) == REG
|| (GET_CODE (y
) == SUBREG
3678 && GET_CODE (SUBREG_REG (y
)) == REG
)))
/* Modes must agree for any register match.  */
3682 if (GET_MODE (x
) != GET_MODE (y
))
3685 /* If we haven't done any renumbering, don't
3686 make any assumptions. */
3687 if (reg_renumber
== 0)
3688 return rtx_equal_p (x
, y
);
/* Compute the effective register number of X: apply the renumbering
   and fold in the SUBREG word offset when X is a SUBREG.  */
3692 i
= REGNO (SUBREG_REG (x
));
3693 if (reg_renumber
[i
] >= 0)
3694 i
= reg_renumber
[i
];
3695 i
+= SUBREG_WORD (x
);
3700 if (reg_renumber
[i
] >= 0)
3701 i
= reg_renumber
[i
];
/* Likewise compute the effective register number of Y.  */
3703 if (GET_CODE (y
) == SUBREG
)
3705 j
= REGNO (SUBREG_REG (y
));
3706 if (reg_renumber
[j
] >= 0)
3707 j
= reg_renumber
[j
];
3708 j
+= SUBREG_WORD (y
);
3713 if (reg_renumber
[j
] >= 0)
3714 j
= reg_renumber
[j
];
3718 /* Now we have disposed of all the cases
3719 in which different rtx codes can match. */
3720 if (code
!= GET_CODE (y
))
/* NOTE(review): a `switch (code)' head appears to have been lost in
   extraction; the returns below are its per-code cases.  */
3731 return XINT (x
, 0) == XINT (y
, 0);
3734 /* We can't assume nonlocal labels have their following insns yet. */
3735 if (LABEL_REF_NONLOCAL_P (x
) || LABEL_REF_NONLOCAL_P (y
))
3736 return XEXP (x
, 0) == XEXP (y
, 0);
3737 /* Two label-refs are equivalent if they point at labels
3738 in the same position in the instruction stream. */
3739 return (next_real_insn (XEXP (x
, 0))
3740 == next_real_insn (XEXP (y
, 0)));
3743 return XSTR (x
, 0) == XSTR (y
, 0);
3746 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3748 if (GET_MODE (x
) != GET_MODE (y
))
3751 /* Compare the elements. If any pair of corresponding elements
3752 fail to match, return 0 for the whole things. */
3754 fmt
= GET_RTX_FORMAT (code
);
3755 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
/* Wide-int operands must be identical.  */
3761 if (XWINT (x
, i
) != XWINT (y
, i
))
/* Plain int operands must be identical.  */
3766 if (XINT (x
, i
) != XINT (y
, i
))
/* String operands must compare equal by content.  */
3771 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
/* Expression operands compare recursively.  */
3776 if (! rtx_renumbered_equal_p (XEXP (x
, i
), XEXP (y
, i
)))
/* 'u'-format operands are pointers that must be identical.  */
3781 if (XEXP (x
, i
) != XEXP (y
, i
))
/* Vector operands must have equal length and pairwise-equal elements.  */
3788 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
3790 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
3791 if (!rtx_renumbered_equal_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
)))
3802 /* If X is a hard register or equivalent to one or a subregister of one,
3803 return the hard register number. If X is a pseudo register that was not
3804 assigned a hard register, return the pseudo register number. Otherwise,
3805 return -1. Any rtx is valid for X. */
/* NOTE(review): the function signature line (true_regnum) was lost in
   extraction; the text below begins at its first statement.  */
3811 if (GET_CODE (x
) == REG
)
/* A pseudo that was assigned a hard reg reports that hard reg number;
   presumably REGNO (x) is returned otherwise (line lost in extraction).  */
3813 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
&& reg_renumber
[REGNO (x
)] >= 0)
3814 return reg_renumber
[REGNO (x
)];
3817 if (GET_CODE (x
) == SUBREG
)
/* Recurse on the inner reg; a hard-register base gets the SUBREG word
   offset added to form the final hard register number.  */
3819 int base
= true_regnum (SUBREG_REG (x
));
3820 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
3821 return SUBREG_WORD (x
) + base
;
3826 /* Optimize code of the form:
3828 for (x = a[i]; x; ...)
3830 for (x = a[i]; x; ...)
3834 Loop optimize will change the above code into
3838 { ...; if (! (x = ...)) break; }
3841 { ...; if (! (x = ...)) break; }
3844 In general, if the first test fails, the program can branch
3845 directly to `foo' and skip the second try which is doomed to fail.
3846 We run this after loop optimization and before flow analysis. */
3848 /* When comparing the insn patterns, we track the fact that different
3849 pseudo-register numbers may have been used in each computation.
3850 The following array stores an equivalence -- same_regs[I] == J means
3851 that pseudo register I was used in the first set of tests in a context
3852 where J was used in the second set. We also count the number of such
3853 pending equivalences. If nonzero, the expressions really aren't the
/* Equivalence map between pseudo regs of the two compared sequences.  */
3856 static short *same_regs
;
/* Count of currently pending same_regs equivalences.  */
3858 static int num_same_regs
;
3860 /* Track any registers modified between the target of the first jump and
3861 the second jump. They never compare equal. */
3863 static char *modified_regs
;
3865 /* Record if memory was modified. */
3867 static int modified_mem
;
3869 /* Called via note_stores on each insn between the target of the first
3870 branch and the second branch. It marks any changed registers. */
3873 mark_modified_reg (dest
, x
)
/* Look through a SUBREG to the underlying object being stored into.  */
3879 if (GET_CODE (dest
) == SUBREG
)
3880 dest
= SUBREG_REG (dest
);
/* A store into memory presumably sets modified_mem here -- the
   assignment line was lost in extraction.  */
3882 if (GET_CODE (dest
) == MEM
)
/* Anything else that is not a register is of no interest.  */
3885 if (GET_CODE (dest
) != REG
)
/* Pseudos occupy a single table slot; a hard reg may span several
   consecutive registers, all of which are marked.  */
3888 regno
= REGNO (dest
);
3889 if (regno
>= FIRST_PSEUDO_REGISTER
)
3890 modified_regs
[regno
] = 1;
3892 for (i
= 0; i
< HARD_REGNO_NREGS (regno
, GET_MODE (dest
)); i
++)
3893 modified_regs
[regno
+ i
] = 1;
3896 /* F is the first insn in the chain of insns. */
/* Jump-threading pass entry point: scans the insn chain for pairs of
   conditional branches that test the same condition and redirects the
   first past the second when its outcome is implied.  NOTE(review):
   fragmented extraction -- braces, `continue'/`break' lines and some
   declarations (i, all_reset, new_label, changed) were dropped.  */
3899 thread_jumps (f
, max_reg
, verbose
)
3904 /* Basic algorithm is to find a conditional branch,
3905 the label it may branch to, and the branch after
3906 that label. If the two branches test the same condition,
3907 walk back from both branch paths until the insn patterns
3908 differ, or code labels are hit. If we make it back to
3909 the target of the first branch, then we know that the first branch
3910 will either always succeed or always fail depending on the relative
3911 senses of the two branches. So adjust the first branch accordingly
3914 rtx label
, b1
, b2
, t1
, t2
;
3915 enum rtx_code code1
, code2
;
3916 rtx b1op0
, b1op1
, b2op0
, b2op1
;
3921 /* Allocate register tables and quick-reset table. */
3922 modified_regs
= (char *) alloca (max_reg
* sizeof (char));
3923 same_regs
= (short *) alloca (max_reg
* sizeof (short));
3924 all_reset
= (short *) alloca (max_reg
* sizeof (short));
/* all_reset is presumably filled with -1 in this loop so same_regs can
   be reset with one bcopy per candidate; body lost in extraction.  */
3925 for (i
= 0; i
< max_reg
; i
++)
/* Outer scan over all insns, looking for the first branch B1.  */
3932 for (b1
= f
; b1
; b1
= NEXT_INSN (b1
))
3934 /* Get to a candidate branch insn. */
3935 if (GET_CODE (b1
) != JUMP_INSN
3936 || ! condjump_p (b1
) || simplejump_p (b1
)
3937 || JUMP_LABEL (b1
) == 0)
/* Reset the per-candidate modification and equivalence tables.  */
3940 bzero (modified_regs
, max_reg
* sizeof (char));
3943 bcopy (all_reset
, same_regs
, max_reg
* sizeof (short));
3946 label
= JUMP_LABEL (b1
);
3948 /* Look for a branch after the target. Record any registers and
3949 memory modified between the target and the branch. Stop when we
3950 get to a label since we can't know what was changed there. */
3951 for (b2
= NEXT_INSN (label
); b2
; b2
= NEXT_INSN (b2
))
3953 if (GET_CODE (b2
) == CODE_LABEL
)
3956 else if (GET_CODE (b2
) == JUMP_INSN
)
3958 /* If this is an unconditional jump and is the only use of
3959 its target label, we can follow it. */
3960 if (simplejump_p (b2
)
3961 && JUMP_LABEL (b2
) != 0
3962 && LABEL_NUSES (JUMP_LABEL (b2
)) == 1)
3964 b2
= JUMP_LABEL (b2
);
/* Non-INSN, non-CALL insns end the scan for this candidate.  */
3971 if (GET_CODE (b2
) != CALL_INSN
&& GET_CODE (b2
) != INSN
)
/* A call clobbers all call-used, non-fixed hard registers (except the
   special stack/frame/arg pointers).  */
3974 if (GET_CODE (b2
) == CALL_INSN
)
3977 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3978 if (call_used_regs
[i
] && ! fixed_regs
[i
]
3979 && i
!= STACK_POINTER_REGNUM
3980 && i
!= FRAME_POINTER_REGNUM
3981 && i
!= ARG_POINTER_REGNUM
)
3982 modified_regs
[i
] = 1;
/* Record every store this insn performs.  */
3985 note_stores (PATTERN (b2
), mark_modified_reg
);
3988 /* Check the next candidate branch insn from the label
3991 || GET_CODE (b2
) != JUMP_INSN
3993 || ! condjump_p (b2
)
3994 || simplejump_p (b2
))
3997 /* Get the comparison codes and operands, reversing the
3998 codes if appropriate. If we don't have comparison codes,
3999 we can't do anything. */
4000 b1op0
= XEXP (XEXP (SET_SRC (PATTERN (b1
)), 0), 0);
4001 b1op1
= XEXP (XEXP (SET_SRC (PATTERN (b1
)), 0), 1);
4002 code1
= GET_CODE (XEXP (SET_SRC (PATTERN (b1
)), 0));
/* A pc_rtx in the "then" arm means the branch falls through on true,
   so the condition is logically reversed.  */
4003 if (XEXP (SET_SRC (PATTERN (b1
)), 1) == pc_rtx
)
4004 code1
= reverse_condition (code1
);
4006 b2op0
= XEXP (XEXP (SET_SRC (PATTERN (b2
)), 0), 0);
4007 b2op1
= XEXP (XEXP (SET_SRC (PATTERN (b2
)), 0), 1);
4008 code2
= GET_CODE (XEXP (SET_SRC (PATTERN (b2
)), 0));
4009 if (XEXP (SET_SRC (PATTERN (b2
)), 1) == pc_rtx
)
4010 code2
= reverse_condition (code2
);
4012 /* If they test the same things and knowing that B1 branches
4013 tells us whether or not B2 branches, check if we
4014 can thread the branch. */
4015 if (rtx_equal_for_thread_p (b1op0
, b2op0
, b2
)
4016 && rtx_equal_for_thread_p (b1op1
, b2op1
, b2
)
4017 && (comparison_dominates_p (code1
, code2
)
4018 || comparison_dominates_p (code1
, reverse_condition (code2
))))
/* Walk backward in lockstep from both branches, comparing insns.  */
4020 t1
= prev_nonnote_insn (b1
);
4021 t2
= prev_nonnote_insn (b2
);
4023 while (t1
!= 0 && t2
!= 0)
4025 if (t1
== 0 || t2
== 0)
4030 /* We have reached the target of the first branch.
4031 If there are no pending register equivalents,
4032 we know that this branch will either always
4033 succeed (if the senses of the two branches are
4034 the same) or always fail (if not). */
4037 if (num_same_regs
!= 0)
/* Thread to B2's target when B1 taken implies B2 taken; otherwise
   to the point just after B2.  */
4040 if (comparison_dominates_p (code1
, code2
))
4041 new_label
= JUMP_LABEL (b2
);
4043 new_label
= get_label_after (b2
);
4045 if (JUMP_LABEL (b1
) != new_label
4046 && redirect_jump (b1
, new_label
))
4051 /* If either of these is not a normal insn (it might be
4052 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4053 have already been skipped above.) Similarly, fail
4054 if the insns are different. */
4055 if (GET_CODE (t1
) != INSN
|| GET_CODE (t2
) != INSN
4056 || recog_memoized (t1
) != recog_memoized (t2
)
4057 || ! rtx_equal_for_thread_p (PATTERN (t1
),
/* Step both walks back another real insn.  */
4061 t1
= prev_nonnote_insn (t1
);
4062 t2
= prev_nonnote_insn (t2
);
4069 /* This is like RTX_EQUAL_P except that it knows about our handling of
4070 possibly equivalent registers and knows to consider volatile and
4071 modified objects as not equal.
4073 YINSN is the insn containing Y. */
/* NOTE(review): the tail of this function runs past the end of this
   chunk, and the `switch (code)' head plus several braces/returns were
   lost in extraction; comments below cover only what is visible.  */
4076 rtx_equal_for_thread_p (x
, y
, yinsn
)
4082 register enum rtx_code code
;
4085 code
= GET_CODE (x
);
4086 /* Rtx's of different codes cannot be equal. */
4087 if (code
!= GET_CODE (y
))
4090 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
4091 (REG:SI x) and (REG:HI x) are NOT equivalent. */
4093 if (GET_MODE (x
) != GET_MODE (y
))
4096 /* Handle special-cases first. */
/* REG case: identical, unmodified registers match immediately.  */
4100 if (REGNO (x
) == REGNO (y
) && ! modified_regs
[REGNO (x
)])
4103 /* If neither is user variable or hard register, check for possible
/* User variables and hard registers never participate in the
   same_regs pseudo-equivalence mechanism.  */
4105 if (REG_USERVAR_P (x
) || REG_USERVAR_P (y
)
4106 || REGNO (x
) < FIRST_PSEUDO_REGISTER
4107 || REGNO (y
) < FIRST_PSEUDO_REGISTER
)
/* First sighting of pseudo X: tentatively pair it with Y.  */
4110 if (same_regs
[REGNO (x
)] == -1)
4112 same_regs
[REGNO (x
)] = REGNO (y
);
4115 /* If this is the first time we are seeing a register on the `Y'
4116 side, see if it is the last use. If not, we can't thread the
4117 jump, so mark it as not equivalent. */
4118 if (regno_last_uid
[REGNO (y
)] != INSN_UID (yinsn
))
/* Otherwise X matches only its previously recorded partner.  */
4124 return (same_regs
[REGNO (x
)] == REGNO (y
));
4129 /* If memory modified or either volatile, not equivalent.
4130 Else, check address. */
4131 if (modified_mem
|| MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
4134 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
/* Volatile ASM_OPERANDS (presumably) never compare equal.  */
4137 if (MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
4143 /* Cancel a pending `same_regs' if setting equivalenced registers.
4144 Then process source. */
4145 if (GET_CODE (SET_DEST (x
)) == REG
4146 && GET_CODE (SET_DEST (y
)) == REG
)
4148 if (same_regs
[REGNO (SET_DEST (x
))] == REGNO (SET_DEST (y
)))
4150 same_regs
[REGNO (SET_DEST (x
))] = -1;
/* Distinct, unequivalenced destinations mean the SETs differ.  */
4153 else if (REGNO (SET_DEST (x
)) != REGNO (SET_DEST (y
)))
4157 if (rtx_equal_for_thread_p (SET_DEST (x
), SET_DEST (y
), yinsn
) == 0)
4160 return rtx_equal_for_thread_p (SET_SRC (x
), SET_SRC (y
), yinsn
);
/* LABEL_REFs compare by target identity; SYMBOL_REFs by name pointer.  */
4163 return XEXP (x
, 0) == XEXP (y
, 0);
4166 return XSTR (x
, 0) == XSTR (y
, 0);
/* Generic case: walk the operand format string recursively.  */
4172 fmt
= GET_RTX_FORMAT (code
);
4173 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4178 if (XWINT (x
, i
) != XWINT (y
, i
))
4184 if (XINT (x
, i
) != XINT (y
, i
))
4190 /* Two vectors must have the same length. */
4191 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
4194 /* And the corresponding elements must match. */
4195 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4196 if (rtx_equal_for_thread_p (XVECEXP (x
, i
, j
),
4197 XVECEXP (y
, i
, j
), yinsn
) == 0)
4202 if (rtx_equal_for_thread_p (XEXP (x
, i
), XEXP (y
, i
), yinsn
) == 0)
4208 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
4213 /* These are just backpointers, so they don't matter. */
4219 /* It is believed that rtx's at this level will never
4220 contain anything but integers and other rtx's,
4221 except for within LABEL_REFs and SYMBOL_REFs. */