/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 88, 89, 91-94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This is the jump-optimization pass of the compiler.
   It is run two or three times: once before cse, sometimes once after cse,
   and once after reload (before final).

   jump_optimize deletes unreachable code and labels that are not used.
   It also deletes jumps that jump to the following insn,
   and simplifies jumps around unconditional jumps and jumps
   to unconditional jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   Optionally, cross-jumping can be done.  Currently it is done
   only the last time (when after reload and before final).
   In fact, the code for cross-jumping now assumes that register
   allocation has been done, since it uses `rtx_renumbered_equal_p'.

   Jump optimization is done after cse when cse's constant-propagation
   causes jumps to become unconditional or to be deleted.

   Unreachable loops are not detected here, because the labels
   have references and the insns appear reachable from the labels.
   find_basic_blocks in flow.c finds and deletes such loops.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */
#include "config.h"
#include "rtl.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "real.h"
/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */
/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */

static rtx *jump_chain;
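
/* An illustrative sketch of how the chain is meant to be walked (an
   addition for clarity, not code from the original sources): every
   unconditional jump to LABEL can be visited with

        rtx j;
        for (j = jump_chain[INSN_UID (label)]; j != 0;
             j = jump_chain[INSN_UID (j)])
          ;  /* ... process the JUMP_INSN j ...  */

   and the chain rooted at element 0 enumerates all return insns in
   the same way.  */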
/* List of labels referred to from initializers.
   These can never be deleted.  */
rtx forced_labels;

/* Maximum index in jump_chain.  */

static int max_jump_chain;
/* Set nonzero by jump_optimize if control can fall through
   to the end of the function.  */
int can_reach_end;

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C,
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;
static int duplicate_loop_exit_test PROTO((rtx));
static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump PROTO((rtx, rtx, rtx));
static int jump_back_p PROTO((rtx, rtx));
static int tension_vector_labels PROTO((rtx, int));
static void mark_jump_label PROTO((rtx, rtx, int));
static void delete_computation PROTO((rtx));
static void delete_from_jump_chain PROTO((rtx));
static int delete_labelref_insn PROTO((rtx, rtx, int));
static void redirect_tablejump PROTO((rtx, rtx));
/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is 1, detect matching code
   before a jump and its destination and unify them.
   If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

   If NOOP_MOVES is nonzero, delete no-op move insns.

   If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
   after regscan, and it is safe to use regno_first_uid and regno_last_uid.

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.  */
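
/* A hypothetical invocation, inferred only from the parameter
   descriptions above (not taken from the original sources): an early
   pass run right after regscan might be

        jump_optimize (get_insns (), 0, 1, 1);

   -- no cross jumping, delete no-op moves, regno_first_uid usable --
   while the last pass, after reload, would pass CROSS_JUMP as 2 so
   that death notes are taken into account.  */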
void
jump_optimize (f, cross_jump, noop_moves, after_regscan)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
{
  register rtx insn, next, note;
  int changed;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;
  cross_jump_death_matters = (cross_jump == 2);
  /* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
     notes whose labels don't occur in the insn any more.  */

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
        LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
      else if (GET_CODE (insn) == JUMP_INSN)
        JUMP_LABEL (insn) = 0;
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        for (note = REG_NOTES (insn); note; note = next)
          {
            next = XEXP (note, 1);
            if (REG_NOTE_KIND (note) == REG_LABEL
                && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
              remove_note (insn, note);
          }

      if (INSN_UID (insn) > max_uid)
        max_uid = INSN_UID (insn);
    }
  /* Delete insns following barriers, up to next label.  */

  for (insn = f; insn;)
    {
      if (GET_CODE (insn) == BARRIER)
        {
          insn = NEXT_INSN (insn);
          while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
            {
              if (GET_CODE (insn) == NOTE
                  && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
                insn = NEXT_INSN (insn);
              else
                insn = delete_insn (insn);
            }
          /* INSN is now the code_label.  */
        }
      else
        insn = NEXT_INSN (insn);
    }
  /* Leave some extra room for labels and duplicate exit test insns
     we make.  */
  max_jump_chain = max_uid * 14 / 10;
  jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
  bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));
  /* Mark the label each jump jumps to.
     Combine consecutive labels, and count uses of labels.

     For each label, make a chain (using `jump_chain')
     of all the *unconditional* jumps that jump to it;
     also make a chain of all returns.  */

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
        && ! INSN_DELETED_P (insn))
      {
        mark_jump_label (PATTERN (insn), insn, cross_jump);
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
              {
                jump_chain[INSN_UID (insn)]
                  = jump_chain[INSN_UID (JUMP_LABEL (insn))];
                jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
              }
            if (GET_CODE (PATTERN (insn)) == RETURN)
              {
                jump_chain[INSN_UID (insn)] = jump_chain[0];
                jump_chain[0] = insn;
              }
          }
      }
  /* Keep track of labels used from static data;
     they cannot ever be deleted.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;
  /* Delete all labels already not referenced.
     Also find the last insn.  */

  last_insn = 0;
  for (insn = f; insn; )
    {
      if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
        insn = delete_insn (insn);
      else
        {
          last_insn = insn;
          insn = NEXT_INSN (insn);
        }
    }
  /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
     If so record that this function can drop off the end.  */

  insn = last_insn;
  {
    int n_labels = 1;
    while (insn
           /* One label can follow the end-note: the return label.  */
           && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
               /* Ordinary insns can follow it if returning a structure.  */
               || GET_CODE (insn) == INSN
               /* If machine uses explicit RETURN insns, no epilogue,
                  then one of them follows the note.  */
               || (GET_CODE (insn) == JUMP_INSN
                   && GET_CODE (PATTERN (insn)) == RETURN)
               /* A barrier can follow the return insn.  */
               || GET_CODE (insn) == BARRIER
               /* Other kinds of notes can follow also.  */
               || (GET_CODE (insn) == NOTE
                   && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
      insn = PREV_INSN (insn);
  }
  /* Report if control can fall through at the end of the function.  */
  if (insn && GET_CODE (insn) == NOTE
      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
      && ! INSN_DELETED_P (insn))
    can_reach_end = 1;
  /* Zero the "deleted" flag of all the "deleted" insns.  */
  for (insn = f; insn; insn = NEXT_INSN (insn))
    INSN_DELETED_P (insn) = 0;
  /* If we fall through to the epilogue, see if we can insert a RETURN insn
     in front of it.  If the machine allows it at this point (we might be
     after reload for a leaf routine), it will improve optimization for it
     to be there.  */
  insn = get_last_insn ();
  while (insn && GET_CODE (insn) == NOTE)
    insn = PREV_INSN (insn);

  if (insn && GET_CODE (insn) != BARRIER)
    emit_jump_insn (gen_return ());
  for (insn = f; insn; )
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN)
        {
          register rtx body = PATTERN (insn);
          /* Combine stack_adjusts with following push_insns.  */

          if (GET_CODE (body) == SET
              && SET_DEST (body) == stack_pointer_rtx
              && GET_CODE (SET_SRC (body)) == PLUS
              && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
              && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
              && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
            {
              rtx p = insn, pbody, dest;
              rtx stack_adjust_insn = insn;
              int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
              int total_pushed = 0;
              int pushes = 0;
              /* Find all successive push insns.  */
              /* Don't convert more than three pushes;
                 that starts adding too many displaced addresses
                 and the whole thing starts becoming a losing
                 proposition.  */
              while (pushes < 3)
                {
                  p = next_nonnote_insn (p);
                  if (p == 0 || GET_CODE (p) != INSN)
                    break;
                  pbody = PATTERN (p);
                  if (GET_CODE (pbody) != SET)
                    break;
                  dest = SET_DEST (pbody);
                  /* Allow a no-op move between the adjust and the push.  */
                  if (GET_CODE (dest) == REG
                      && GET_CODE (SET_SRC (pbody)) == REG
                      && REGNO (dest) == REGNO (SET_SRC (pbody)))
                    continue;
                  if (! (GET_CODE (dest) == MEM
                         && GET_CODE (XEXP (dest, 0)) == POST_INC
                         && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
                    break;
                  pushes++;
                  if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
                      > stack_adjust_amount)
                    break;
                  total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
                }
              /* Discard the amount pushed from the stack adjust;
                 maybe eliminate it entirely.  */
              if (total_pushed >= stack_adjust_amount)
                {
                  delete_computation (stack_adjust_insn);
                  total_pushed = stack_adjust_amount;
                }
              else
                XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
                  = GEN_INT (stack_adjust_amount - total_pushed);
              /* Change the appropriate push insns to ordinary stores.  */
              p = insn;
              while (total_pushed > 0)
                {
                  p = next_nonnote_insn (p);
                  if (GET_CODE (p) != INSN)
                    break;
                  pbody = PATTERN (p);
                  if (GET_CODE (pbody) == SET)
                    {
                      dest = SET_DEST (pbody);
                      if (! (GET_CODE (dest) == MEM
                             && GET_CODE (XEXP (dest, 0)) == POST_INC
                             && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
                        continue;
                      total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
                      /* If this push doesn't fully fit in the space
                         of the stack adjust that we deleted,
                         make another stack adjust here for what we
                         didn't use up.  There should be peepholes
                         to recognize the resulting sequence of insns.  */
                      if (total_pushed < 0)
                        {
                          emit_insn_before (gen_add2_insn (stack_pointer_rtx,
                                                           GEN_INT (- total_pushed)),
                                            p);
                          break;
                        }
                      XEXP (dest, 0)
                        = plus_constant (stack_pointer_rtx, total_pushed);
                    }
                }
            }
          /* Detect and delete no-op move instructions
             resulting from not allocating a parameter in a register.  */

          if (GET_CODE (body) == SET
              && (SET_DEST (body) == SET_SRC (body)
                  || (GET_CODE (SET_DEST (body)) == MEM
                      && GET_CODE (SET_SRC (body)) == MEM
                      && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
              && ! (GET_CODE (SET_DEST (body)) == MEM
                    && MEM_VOLATILE_P (SET_DEST (body)))
              && ! (GET_CODE (SET_SRC (body)) == MEM
                    && MEM_VOLATILE_P (SET_SRC (body))))
            delete_computation (insn);
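
          /* For instance (an illustrative sketch, not text from the
             original sources): a stack-slot parameter copied onto
             itself,

                  (set (mem:SI (plus:SI (reg:SI fp) (const_int 8)))
                       (mem:SI (plus:SI (reg:SI fp) (const_int 8))))

             matches the test above and is deleted, provided neither
             MEM is volatile.  */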
          /* Detect and ignore no-op move instructions
             resulting from smart or fortuitous register allocation.  */

          else if (GET_CODE (body) == SET)
            {
              int sreg = true_regnum (SET_SRC (body));
              int dreg = true_regnum (SET_DEST (body));

              if (sreg == dreg && sreg >= 0)
                delete_insn (insn);
              else if (sreg >= 0 && dreg >= 0)
                {
                  rtx trial;
                  rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
                                            sreg, NULL_PTR, dreg,
                                            GET_MODE (SET_SRC (body)));
#ifdef PRESERVE_DEATH_INFO_REGNO_P
                  /* Deleting insn could lose a death-note for SREG or DREG
                     so don't do it if final needs accurate death-notes.  */
                  if (! PRESERVE_DEATH_INFO_REGNO_P (sreg)
                      && ! PRESERVE_DEATH_INFO_REGNO_P (dreg))
#endif
                    {
                      /* DREG may have been the target of a REG_DEAD note in
                         the insn which makes INSN redundant.  If so, reorg
                         would still think it is dead.  So search for such a
                         note and delete it if we find it.  */
                      for (trial = prev_nonnote_insn (insn);
                           trial && GET_CODE (trial) != CODE_LABEL;
                           trial = prev_nonnote_insn (trial))
                        if (find_regno_note (trial, REG_DEAD, dreg))
                          {
                            remove_death (dreg, trial);
                            break;
                          }

                      if (tem != 0
                          && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
                        delete_insn (insn);
                    }
              else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
                       && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
                                          NULL_PTR, 0,
                                          GET_MODE (SET_DEST (body))))
                {
                  /* This handles the case where we have two consecutive
                     assignments of the same constant to pseudos that didn't
                     get a hard reg.  Each SET from the constant will be
                     converted into a SET of the spill register and an
                     output reload will be made following it.  This produces
                     two loads of the same constant into the same spill
                     register.  */

                  rtx in_insn = insn;

                  /* Look back for a death note for the first reg.
                     If there is one, it is no longer accurate.  */
                  while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
                    {
                      if ((GET_CODE (in_insn) == INSN
                           || GET_CODE (in_insn) == JUMP_INSN)
                          && find_regno_note (in_insn, REG_DEAD, dreg))
                        {
                          remove_death (dreg, in_insn);
                          break;
                        }
                      in_insn = PREV_INSN (in_insn);
                    }

                  /* Delete the second load of the value.  */
                  delete_insn (insn);
                }
            }
          else if (GET_CODE (body) == PARALLEL)
            {
              /* If each part is a set between two identical registers or
                 a USE or CLOBBER, delete the insn.  */
              int i, sreg, dreg;
              rtx tem;

              for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
                {
                  tem = XVECEXP (body, 0, i);
                  if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
                    continue;

                  if (GET_CODE (tem) != SET
                      || (sreg = true_regnum (SET_SRC (tem))) < 0
                      || (dreg = true_regnum (SET_DEST (tem))) < 0
                      || dreg != sreg)
                    break;
                }

              if (i < 0)
                delete_insn (insn);
            }
          /* Also delete insns to store bit fields if they are no-ops.  */
          /* Not worth the hair to detect this in the big-endian case.  */
          else if (! BYTES_BIG_ENDIAN
                   && GET_CODE (body) == SET
                   && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
                   && XEXP (SET_DEST (body), 2) == const0_rtx
                   && XEXP (SET_DEST (body), 0) == SET_SRC (body)
                   && ! (GET_CODE (SET_SRC (body)) == MEM
                         && MEM_VOLATILE_P (SET_SRC (body))))
            delete_insn (insn);
        }
      insn = next;
    }
  /* If we haven't yet gotten to reload and we have just run regscan,
     delete any insn that sets a register that isn't used elsewhere.
     This helps some of the optimizations below by having fewer insns
     being jumped around.  */

  if (! reload_completed && after_regscan)
    for (insn = f; insn; insn = next)
      {
        rtx set = single_set (insn);

        next = NEXT_INSN (insn);

        if (set && GET_CODE (SET_DEST (set)) == REG
            && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
            && regno_first_uid[REGNO (SET_DEST (set))] == INSN_UID (insn)
            /* We use regno_last_note_uid so as not to delete the setting
               of a reg that's used in notes.  A subsequent optimization
               might arrange to use that reg for real.  */
            && regno_last_note_uid[REGNO (SET_DEST (set))] == INSN_UID (insn)
            && ! side_effects_p (SET_SRC (set))
            && ! find_reg_note (insn, REG_RETVAL, 0))
          delete_insn (insn);
      }
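
  /* For example (an illustrative sketch, not text from the original
     sources): a pseudo that is set once and never referenced again,

          (set (reg:SI 64) (plus:SI (reg:SI 60) (const_int 4)))

     has regno_first_uid and regno_last_note_uid both equal to this
     insn's uid, so the test above deletes the insn.  */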
  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  changed = 1;
  while (changed)
    {
      changed = 0;

      for (insn = f; insn; insn = next)
        {
          rtx reallabelprev;
          rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
          rtx nlabel;
          int this_is_simplejump, this_is_condjump, reversep;
          int this_is_condjump_in_parallel;
          /* If NOT the first iteration, if this is the last jump pass
             (just before final), do the special peephole optimizations.
             Avoiding the first iteration gives ordinary jump opts
             a chance to work before peephole opts.  */

          if (reload_completed && !first && !flag_no_peephole)
            if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
              peephole (insn);

          /* That could have deleted some insns after INSN, so check now
             what the following insn is.  */

          next = NEXT_INSN (insn);
          /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
             jump.  Try to optimize by duplicating the loop exit test if so.
             This is only safe immediately after regscan, because it uses
             the values of regno_first_uid and regno_last_uid.  */
          if (after_regscan && GET_CODE (insn) == NOTE
              && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
              && (temp1 = next_nonnote_insn (insn)) != 0
              && simplejump_p (temp1))
            {
              temp = PREV_INSN (insn);
              if (duplicate_loop_exit_test (insn))
                {
                  changed = 1;
                  next = NEXT_INSN (temp);
                  continue;
                }
            }
          if (GET_CODE (insn) != JUMP_INSN)
            continue;

          this_is_simplejump = simplejump_p (insn);
          this_is_condjump = condjump_p (insn);
          this_is_condjump_in_parallel = condjump_in_parallel_p (insn);
          /* Tension the labels in dispatch tables.  */

          if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
            changed |= tension_vector_labels (PATTERN (insn), 0);
          if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            changed |= tension_vector_labels (PATTERN (insn), 1);
          /* If a dispatch table always goes to the same place,
             get rid of it and replace the insn that uses it.  */

          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            {
              int i;
              rtx pat = PATTERN (insn);
              int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
              int len = XVECLEN (pat, diff_vec_p);
              rtx dispatch = prev_real_insn (insn);

              for (i = 0; i < len; i++)
                if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
                    != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
                  break;

              if (i == len
                  && dispatch != 0
                  && GET_CODE (dispatch) == JUMP_INSN
                  && JUMP_LABEL (dispatch) != 0
                  /* Don't mess with a casesi insn.  */
                  && !(GET_CODE (PATTERN (dispatch)) == SET
                       && (GET_CODE (SET_SRC (PATTERN (dispatch)))
                           == IF_THEN_ELSE))
                  && next_real_insn (JUMP_LABEL (dispatch)) == insn)
                {
                  redirect_tablejump (dispatch,
                                      XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
                  changed = 1;
                }
            }
          reallabelprev = prev_active_insn (JUMP_LABEL (insn));
          /* If a jump references the end of the function, try to turn
             it into a RETURN insn, possibly a conditional one.  */
          if (JUMP_LABEL (insn)
              && (next_active_insn (JUMP_LABEL (insn)) == 0
                  || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
                  == RETURN))
            changed |= redirect_jump (insn, NULL_RTX);
          /* Detect jump to following insn.  */
          if (reallabelprev == insn && condjump_p (insn))
            {
              next = next_real_insn (JUMP_LABEL (insn));
              delete_jump (insn);
              changed = 1;
              continue;
            }
          /* If we have an unconditional jump preceded by a USE, try to put
             the USE before the target and jump there.  This simplifies many
             of the optimizations below since we don't have to worry about
             dealing with these USE insns.  We only do this if the label
             being branched to already has the identical USE or if code
             never falls through to that label.  */

          if (this_is_simplejump
              && (temp = prev_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
              && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
              && (GET_CODE (temp1) == BARRIER
                  || (GET_CODE (temp1) == INSN
                      && rtx_equal_p (PATTERN (temp), PATTERN (temp1)))))
            {
              if (GET_CODE (temp1) == BARRIER)
                {
                  emit_insn_after (PATTERN (temp), temp1);
                  temp1 = NEXT_INSN (temp1);
                }

              delete_insn (temp);
              redirect_jump (insn, get_label_before (temp1));
              reallabelprev = prev_real_insn (temp1);
              changed = 1;
            }
          /* Simplify  if (...) x = a; else x = b;  by converting it
             to  x = b; if (...) x = a;
             if B is sufficiently simple, the test doesn't involve X,
             and nothing in the test modifies B or X.

             If we have small register classes, we also can't do this if X
             is a hard register.

             If the "x = b;" insn has any REG_NOTES, we don't do this because
             of the possibility that we are running after CSE and there is a
             REG_EQUAL note that is only valid if the branch has already been
             taken.  If we move the insn with the REG_EQUAL note, we may
             fold the comparison to always be false in a later CSE pass.
             (We could also delete the REG_NOTES when moving the insn, but it
             seems simpler to not move it.)  An exception is that we can move
             the insn if the only note is a REG_EQUAL or REG_EQUIV whose
             value is the same as "b".

             INSN is the branch over the `else' part.

             We set:

             TEMP to the jump insn preceding "x = a;"
             TEMP1 to X
             TEMP2 to the insn that sets "x = b;"
             TEMP3 to the insn that sets "x = a;"
             TEMP4 to the set of "x = b";  */
          if (this_is_simplejump
              && (temp3 = prev_active_insn (insn)) != 0
              && GET_CODE (temp3) == INSN
              && (temp4 = single_set (temp3)) != 0
              && GET_CODE (temp1 = SET_DEST (temp4)) == REG
#ifdef SMALL_REGISTER_CLASSES
              && REGNO (temp1) >= FIRST_PSEUDO_REGISTER
#endif
              && (temp2 = next_active_insn (insn)) != 0
              && GET_CODE (temp2) == INSN
              && (temp4 = single_set (temp2)) != 0
              && rtx_equal_p (SET_DEST (temp4), temp1)
              && (GET_CODE (SET_SRC (temp4)) == REG
                  || GET_CODE (SET_SRC (temp4)) == SUBREG
                  || CONSTANT_P (SET_SRC (temp4)))
              && (REG_NOTES (temp2) == 0
                  || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
                       || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
                      && XEXP (REG_NOTES (temp2), 1) == 0
                      && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
                                      SET_SRC (temp4))))
              && (temp = prev_active_insn (temp3)) != 0
              && condjump_p (temp) && ! simplejump_p (temp)
              /* TEMP must skip over the "x = a;" insn */
              && prev_real_insn (JUMP_LABEL (temp)) == insn
              && no_labels_between_p (insn, JUMP_LABEL (temp))
              /* There must be no other entries to the "x = b;" insn.  */
              && no_labels_between_p (JUMP_LABEL (temp), temp2)
              /* INSN must either branch to the insn after TEMP2 or the insn
                 after TEMP2 must branch to the same place as INSN.  */
              && (reallabelprev == temp2
                  || ((temp5 = next_active_insn (temp2)) != 0
                      && simplejump_p (temp5)
                      && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
            {
              /* The test expression, X, may be a complicated test with
                 multiple branches.  See if we can find all the uses of
                 the label that TEMP branches to without hitting a CALL_INSN
                 or a jump to somewhere else.  */
              rtx target = JUMP_LABEL (temp);
              int nuses = LABEL_NUSES (target);
              rtx p, q;

              /* Set P to the first jump insn that goes around "x = a;".  */
              for (p = temp; nuses && p; p = prev_nonnote_insn (p))
                {
                  if (GET_CODE (p) == JUMP_INSN)
                    {
                      if (condjump_p (p) && ! simplejump_p (p)
                          && JUMP_LABEL (p) == target)
                        {
                          nuses--;
                          if (nuses == 0)
                            break;
                        }
                      else
                        break;
                    }
                  else if (GET_CODE (p) == CALL_INSN)
                    break;
                }

#ifdef HAVE_cc0
              if (nuses == 0 && p)
                {
                  /* We cannot insert anything between a set of cc and its use
                     so if P uses cc0, we must back up to the previous insn.  */
                  q = prev_nonnote_insn (p);
                  if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
                      && sets_cc0_p (PATTERN (q)))
                    p = q;
                }
#endif
              /* If we found all the uses and there was no data conflict, we
                 can move the assignment unless we can branch into the middle
                 of it from somewhere.  */
              if (nuses == 0 && p
                  && no_labels_between_p (p, insn)
                  && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
                  && ! reg_set_between_p (temp1, p, temp3)
                  && (GET_CODE (SET_SRC (temp4)) == CONST_INT
                      || ! reg_set_between_p (SET_SRC (temp4), p, temp2)))
                {
                  emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
                  delete_insn (temp2);
                  /* Set NEXT to an insn that we know won't go away.  */
                  next = next_active_insn (insn);

                  /* Delete the jump around the set.  Note that we must do
                     this before we redirect the test jumps so that it won't
                     delete the code immediately following the assignment
                     we moved (which might be a jump).  */

                  delete_insn (insn);

                  /* We either have two consecutive labels or a jump to
                     a jump, so adjust all the JUMP_INSNs to branch to where
                     INSN branches to.  */
                  for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
                    if (GET_CODE (p) == JUMP_INSN)
                      redirect_jump (p, target);

                  changed = 1;
                  continue;
                }
            }
          /* If we have  if (...) x = exp;  and branches are expensive,
             EXP is a single insn, does not have any side effects, cannot
             trap, and is not too costly, convert this to
             t = exp; if (...) x = t;

             Don't do this when we have CC0 because it is unlikely to help
             and we'd need to worry about where to place the new insn and
             the potential for conflicts.  We also can't do this when we have
             notes on the insn for the same reason as above.

             We set:

             TEMP to the "x = exp;" insn.
             TEMP1 to the single set in the "x = exp;" insn.
             TEMP2 to "x".  */
#ifndef HAVE_cc0
          if (! reload_completed
              && this_is_condjump && ! this_is_simplejump
              && BRANCH_COST >= 3
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && REG_NOTES (temp) == 0
              && (reallabelprev == temp
                  || ((temp2 = next_active_insn (temp)) != 0
                      && simplejump_p (temp2)
                      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
              && (temp1 = single_set (temp)) != 0
              && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
              && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
#ifdef SMALL_REGISTER_CLASSES
              && REGNO (temp2) >= FIRST_PSEUDO_REGISTER
#endif
              && GET_CODE (SET_SRC (temp1)) != REG
              && GET_CODE (SET_SRC (temp1)) != SUBREG
              && GET_CODE (SET_SRC (temp1)) != CONST_INT
              && ! side_effects_p (SET_SRC (temp1))
              && ! may_trap_p (SET_SRC (temp1))
              && rtx_cost (SET_SRC (temp1)) < 10)
            {
              rtx new = gen_reg_rtx (GET_MODE (temp2));

              if (validate_change (temp, &SET_DEST (temp1), new, 0))
                {
                  next = emit_insn_after (gen_move_insn (temp2, new), insn);
                  emit_insn_after_with_line_notes (PATTERN (temp),
                                                   PREV_INSN (insn), temp);
                  delete_insn (temp);
                  reallabelprev = prev_active_insn (JUMP_LABEL (insn));
                }
            }
          /* Similarly, if it takes two insns to compute EXP but they
             have the same destination.  Here TEMP3 will be the second
             insn and TEMP4 the SET from that insn.  */

          if (! reload_completed
              && this_is_condjump && ! this_is_simplejump
              && BRANCH_COST >= 3
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && REG_NOTES (temp) == 0
              && (temp3 = next_nonnote_insn (temp)) != 0
              && GET_CODE (temp3) == INSN
              && REG_NOTES (temp3) == 0
              && (reallabelprev == temp3
                  || ((temp2 = next_active_insn (temp3)) != 0
                      && simplejump_p (temp2)
                      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
              && (temp1 = single_set (temp)) != 0
              && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
              && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
#ifdef SMALL_REGISTER_CLASSES
              && REGNO (temp2) >= FIRST_PSEUDO_REGISTER
#endif
              && ! side_effects_p (SET_SRC (temp1))
              && ! may_trap_p (SET_SRC (temp1))
              && rtx_cost (SET_SRC (temp1)) < 10
              && (temp4 = single_set (temp3)) != 0
              && rtx_equal_p (SET_DEST (temp4), temp2)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && rtx_cost (SET_SRC (temp4)) < 10)
            {
              rtx new = gen_reg_rtx (GET_MODE (temp2));

              if (validate_change (temp, &SET_DEST (temp1), new, 0))
                {
                  next = emit_insn_after (gen_move_insn (temp2, new), insn);
                  emit_insn_after_with_line_notes (PATTERN (temp),
                                                   PREV_INSN (insn), temp);
                  emit_insn_after_with_line_notes
                    (replace_rtx (PATTERN (temp3), temp2, new),
                     PREV_INSN (insn), temp3);
                  delete_insn (temp);
                  delete_insn (temp3);
                  reallabelprev = prev_active_insn (JUMP_LABEL (insn));
                }
            }
          /* Finally, handle the case where two insns are used to
             compute EXP but a temporary register is used.  Here we must
             ensure that the temporary register is not used anywhere else.  */

          if (! reload_completed
              && after_regscan
              && this_is_condjump && ! this_is_simplejump
              && BRANCH_COST >= 3
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && REG_NOTES (temp) == 0
              && (temp3 = next_nonnote_insn (temp)) != 0
              && GET_CODE (temp3) == INSN
              && REG_NOTES (temp3) == 0
              && (reallabelprev == temp3
                  || ((temp2 = next_active_insn (temp3)) != 0
                      && simplejump_p (temp2)
                      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
              && (temp1 = single_set (temp)) != 0
              && (temp5 = SET_DEST (temp1),
                  (GET_CODE (temp5) == REG
                   || (GET_CODE (temp5) == SUBREG
                       && (temp5 = SUBREG_REG (temp5),
                           GET_CODE (temp5) == REG))))
              && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
              && regno_first_uid[REGNO (temp5)] == INSN_UID (temp)
              && regno_last_uid[REGNO (temp5)] == INSN_UID (temp3)
              && ! side_effects_p (SET_SRC (temp1))
              && ! may_trap_p (SET_SRC (temp1))
              && rtx_cost (SET_SRC (temp1)) < 10
              && (temp4 = single_set (temp3)) != 0
              && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
              && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
#ifdef SMALL_REGISTER_CLASSES
              && REGNO (temp2) >= FIRST_PSEUDO_REGISTER
#endif
              && rtx_equal_p (SET_DEST (temp4), temp2)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && rtx_cost (SET_SRC (temp4)) < 10)
            {
              rtx new = gen_reg_rtx (GET_MODE (temp2));

              if (validate_change (temp3, &SET_DEST (temp4), new, 0))
                {
                  next = emit_insn_after (gen_move_insn (temp2, new), insn);
                  emit_insn_after_with_line_notes (PATTERN (temp),
                                                   PREV_INSN (insn), temp);
                  emit_insn_after_with_line_notes (PATTERN (temp3),
                                                   PREV_INSN (insn), temp3);
                  delete_insn (temp);
                  delete_insn (temp3);
                  reallabelprev = prev_active_insn (JUMP_LABEL (insn));
                }
            }
#endif /* HAVE_cc0 */
          /* Try to use a conditional move (if the target has them), or a
             store-flag insn.  The general case is:

             1) x = a; if (...) x = b; and
             2) if (...) x = b;

             If the jump would be faster, the machine should not have defined
             the movcc or scc insns!  These cases are often made by the
             previous optimization.

             The second case is treated as  x = x; if (...) x = b;.

             INSN here is the jump around the store.  We set:

             TEMP to the "x = b;" insn.
             TEMP1 to X.
             TEMP2 to B.
             TEMP3 to A (X in the second case).
             TEMP4 to the condition being tested.
             TEMP5 to the earliest insn used to find the condition.  */
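
          /* In effect (an illustrative note, not text from the original
             sources) the two cases above collapse to

                  x = (...) ? b : a;     and     x = (...) ? b : x;

             computed without a branch, either by the machine's
             conditional-move instruction or by the store-flag
             arithmetic further below.  */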
          if (/* We can't do this after reload has completed.  */
              ! reload_completed
              && this_is_condjump && ! this_is_simplejump
              /* Set TEMP to the "x = b;" insn.  */
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && GET_CODE (PATTERN (temp)) == SET
              && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
#ifdef SMALL_REGISTER_CLASSES
              && REGNO (temp1) >= FIRST_PSEUDO_REGISTER
#endif
              && (GET_CODE (temp2 = SET_SRC (PATTERN (temp))) == REG
                  || GET_CODE (temp2) == SUBREG
                  /* ??? How about floating point constants?  */
                  || GET_CODE (temp2) == CONST_INT)
              /* Allow either form, but prefer the former if both apply.
                 There is no point in using the old value of TEMP1 if
                 it is a register, since cse will alias them.  It can
                 lose if the old value were a hard register since CSE
                 won't replace hard registers.  */
              && (((temp3 = reg_set_last (temp1, insn)) != 0)
                  /* Make the latter case look like  x = x; if (...) x = b;  */
                  || (temp3 = temp1, 1))
              /* INSN must either branch to the insn after TEMP or the insn
                 after TEMP must branch to the same place as INSN.  */
              && (reallabelprev == temp
                  || ((temp4 = next_active_insn (temp)) != 0
                      && simplejump_p (temp4)
                      && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
              && (temp4 = get_condition (insn, &temp5)) != 0
              /* We must be comparing objects whose modes imply the size.
                 We could handle BLKmode if (1) emit_store_flag could
                 and (2) we could find the size reliably.  */
              && GET_MODE (XEXP (temp4, 0)) != BLKmode
              /* No point in doing any of this if branches are cheap or we
                 don't have conditional moves.  */
              && (BRANCH_COST >= 2
#ifdef HAVE_conditional_move
                  || 1
#endif
                  )
#ifdef HAVE_cc0
              /* If the previous insn sets CC0 and something else, we can't
                 do this since we are going to delete that insn.  */

              && ! ((temp6 = prev_nonnote_insn (insn)) != 0
                    && GET_CODE (temp6) == INSN
                    && (sets_cc0_p (PATTERN (temp6)) == -1
                        || (sets_cc0_p (PATTERN (temp6)) == 1
                            && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
#endif
              )
            {
#ifdef HAVE_conditional_move
              /* First try a conditional move.  */
              {
                enum rtx_code code = GET_CODE (temp4);
                rtx var = temp1;
                rtx cond0, cond1, aval, bval;
                rtx target;

                /* Copy the compared variables into cond0 and cond1, so that
                   any side effects performed in or after the old comparison,
                   will not affect our compare which will come later.  */
                /* ??? Is it possible to just use the comparison in the jump
                   insn?  After all, we're going to delete it.  We'd have
                   to modify emit_conditional_move to take a comparison rtx
                   instead or write a new function.  */
                cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
                /* We want the target to be able to simplify comparisons with
                   zero (and maybe other constants as well), so don't create
                   pseudos for them.  There's no need to either.  */
                if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
                    || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
                  cond1 = XEXP (temp4, 1);
                else
                  cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));
                aval = temp3;
                bval = temp2;

                start_sequence ();
                target = emit_conditional_move (var, code,
                                                cond0, cond1, VOIDmode,
                                                aval, bval, GET_MODE (var),
                                                (code == LTU || code == GEU
                                                 || code == LEU || code == GTU));
                if (target)
                  {
                    rtx seq1, seq2;

                    /* Save the conditional move sequence but don't emit it
                       yet.  On some machines, like the alpha, it is possible
                       that temp5 == insn, so next generate the sequence that
                       saves the compared values and then emit both
                       sequences ensuring seq1 occurs before seq2.  */
                    seq2 = get_insns ();
                    end_sequence ();

                    /* Now that we can't fail, generate the copy insns that
                       preserve the compared values.  */
                    start_sequence ();
                    emit_move_insn (cond0, XEXP (temp4, 0));
                    if (cond1 != XEXP (temp4, 1))
                      emit_move_insn (cond1, XEXP (temp4, 1));
                    seq1 = get_insns ();
                    end_sequence ();

                    emit_insns_before (seq1, temp5);
                    emit_insns_before (seq2, insn);

                    /* ??? We can also delete the insn that sets X to A.
                       Flow will do it too though.  */
                    delete_insn (temp);
                    next = NEXT_INSN (insn);
                    delete_jump (insn);
                    changed = 1;
                    continue;
                  }
                else
                  end_sequence ();
              }
#endif
              /* That didn't work, try a store-flag insn.

                 We further divide the cases into:

                 1) x = a; if (...) x = b; and either A or B is zero,
                 2) if (...) x = 0; and jumps are expensive,
                 3) x = a; if (...) x = b; and A and B are constants where all
                    the set bits in A are also set in B and jumps are expensive,
                 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
                    very expensive,
                 5) if (...) x = b; if jumps are even more expensive.  */
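
              /* A worked illustration (an addition, not text from the
                 original sources): let U be the value X must have when
                 the tested condition holds and C the value otherwise.
                 With NORMALIZEP == -1 the code below computes

                      t = cond ? -1 : 0;          (emit_store_flag)
                      x = (U & t) | (C & ~t);     (expand_and/expand_unop/ior)

                 and when the set bits of C are a subset of those of U,
                 (U & t) | C gives the same result; that is the CONST_INT
                 shortcut taken below.  */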
              if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
                  && ((GET_CODE (temp3) == CONST_INT)
                      /* Make the latter case look like
                         x = x; if (...) x = 0;  */
                      || (temp3 = temp1,
                          ((BRANCH_COST >= 2
                            && temp2 == const0_rtx)
                           || BRANCH_COST >= 3)))
                  /* If B is zero, OK; if A is zero, can only do (1) if we
                     can reverse the condition.  See if (3) applies possibly
                     by reversing the condition.  Prefer reversing to (4) when
                     branches are very expensive.  */
                  && ((reversep = 0, temp2 == const0_rtx)
                      || (temp3 == const0_rtx
                          && (reversep = can_reverse_comparison_p (temp4, insn)))
                      || (BRANCH_COST >= 2
                          && GET_CODE (temp2) == CONST_INT
                          && GET_CODE (temp3) == CONST_INT
                          && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
                              || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
                                  && (reversep = can_reverse_comparison_p (temp4,
                                                                           insn)))))
                      || BRANCH_COST >= 3))
                {
                  enum rtx_code code = GET_CODE (temp4);
                  rtx uval, cval, var = temp1;
                  int normalizep;
                  rtx target;

                  /* If necessary, reverse the condition.  */
                  if (reversep)
                    code = reverse_condition (code), uval = temp2, cval = temp3;
                  else
                    uval = temp3, cval = temp2;

                  /* If CVAL is non-zero, normalize to -1.  Otherwise, if UVAL
                     is the constant 1, it is best to just compute the result
                     directly.  If UVAL is constant and STORE_FLAG_VALUE
                     includes all of its bits, it is best to compute the flag
                     value unnormalized and `and' it with UVAL.  Otherwise,
                     normalize to -1 and `and' with UVAL.  */
                  normalizep = (cval != const0_rtx ? -1
                                : (uval == const1_rtx ? 1
                                   : (GET_CODE (uval) == CONST_INT
                                      && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
                                   ? 0 : -1));
                  /* We will be putting the store-flag insn immediately in
                     front of the comparison that was originally being done,
                     so we know all the variables in TEMP4 will be valid.
                     However, this might be in front of the assignment of
                     A to VAR.  If it is, it would clobber the store-flag
                     we will be emitting.

                     Therefore, emit into a temporary which will be copied to
                     VAR immediately after TEMP.  */

                  start_sequence ();
                  target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
                                            XEXP (temp4, 0), XEXP (temp4, 1),
                                            VOIDmode,
                                            (code == LTU || code == LEU
                                             || code == GEU || code == GTU),
                                            normalizep);
                  if (target)
                    {
                      rtx seq;
                      rtx before = insn;

                      seq = get_insns ();
                      end_sequence ();

                      /* Put the store-flag insns in front of the first insn
                         used to compute the condition to ensure that we
                         use the same values of them as the current
                         comparison.  However, the remainder of the insns we
                         generate will be placed directly in front of the
                         jump insn, in case any of the pseudos we use
                         are modified earlier.  */

                      emit_insns_before (seq, temp5);

                      start_sequence ();
                      /* Both CVAL and UVAL are non-zero.  */
                      if (cval != const0_rtx && uval != const0_rtx)
                        {
                          rtx tem1, tem2;

                          tem1 = expand_and (uval, target, NULL_RTX);
                          if (GET_CODE (cval) == CONST_INT
                              && GET_CODE (uval) == CONST_INT
                              && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
                            tem2 = cval;
                          else
                            {
                              tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
                                                  target, NULL_RTX, 0);
                              tem2 = expand_and (cval, tem2,
                                                 (GET_CODE (tem2) == REG
                                                  ? tem2 : NULL_RTX));
                            }
                          /* If we usually make new pseudos, do so here.  This
                             turns out to help machines that have conditional
                             move insns.  */
                          /* ??? Conditional moves have already been handled.
                             This may be obsolete.  */

                          if (flag_expensive_optimizations)
                            target = 0;

                          target = expand_binop (GET_MODE (var), ior_optab,
                                                 tem1, tem2, target,
                                                 1, OPTAB_WIDEN);
                        }
                      else if (normalizep != 1)
                        {
                          /* We know that either CVAL or UVAL is zero.  If
                             UVAL is zero, negate TARGET and `and' with CVAL.
                             Otherwise, `and' with UVAL.  */
                          if (uval == const0_rtx)
                            {
                              target = expand_unop (GET_MODE (var), one_cmpl_optab,
                                                    target, NULL_RTX, 0);
                              uval = cval;
                            }

                          target = expand_and (uval, target,
                                               (GET_CODE (target) == REG
                                                && ! preserve_subexpressions_p ()
                                                ? target : NULL_RTX));
                        }
                      emit_move_insn (var, target);
                      seq = get_insns ();
                      end_sequence ();

#ifdef HAVE_cc0
                      /* If INSN uses CC0, we must not separate it from the
                         insn that sets cc0.  */

                      if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
                        before = prev_nonnote_insn (before);
#endif

                      emit_insns_before (seq, before);

                      delete_insn (temp);
                      next = NEXT_INSN (insn);
                      delete_jump (insn);
                      changed = 1;
                      continue;
                    }
                  else
                    end_sequence ();
                }
            }
          /* If branches are expensive, convert
                if (foo) bar++;    to    bar += (foo != 0);
             and similarly for "bar--;"

             INSN is the conditional branch around the arithmetic.  We set:

             TEMP is the arithmetic insn.
             TEMP1 is the SET doing the arithmetic.
             TEMP2 is the operand being incremented or decremented.
             TEMP3 to the condition being tested.
             TEMP4 to the earliest insn used to find the condition.  */
          if ((BRANCH_COST >= 2
#ifdef HAVE_incscc
               || HAVE_incscc
#endif
#ifdef HAVE_decscc
               || HAVE_decscc
#endif
              )
              && ! reload_completed
              && this_is_condjump && ! this_is_simplejump
              && (temp = next_nonnote_insn (insn)) != 0
              && (temp1 = single_set (temp)) != 0
              && (temp2 = SET_DEST (temp1),
                  GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
              && GET_CODE (SET_SRC (temp1)) == PLUS
              && (XEXP (SET_SRC (temp1), 1) == const1_rtx
                  || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
              && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
              && ! side_effects_p (temp2)
              && ! may_trap_p (temp2)
              /* INSN must either branch to the insn after TEMP or the insn
                 after TEMP must branch to the same place as INSN.  */
              && (reallabelprev == temp
                  || ((temp3 = next_active_insn (temp)) != 0
                      && simplejump_p (temp3)
                      && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
              && (temp3 = get_condition (insn, &temp4)) != 0
              /* We must be comparing objects whose modes imply the size.
                 We could handle BLKmode if (1) emit_store_flag could
                 and (2) we could find the size reliably.  */
              && GET_MODE (XEXP (temp3, 0)) != BLKmode
              && can_reverse_comparison_p (temp3, insn))
            {
              rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
              enum rtx_code code = reverse_condition (GET_CODE (temp3));

              start_sequence ();
              /* It must be the case that TEMP2 is not modified in the range
                 [TEMP4, INSN).  The one exception we make is if the insn
                 before INSN sets TEMP2 to something which is also unchanged
                 in that range.  In that case, we can move the initialization
                 into our sequence.  */

              if ((temp5 = prev_active_insn (insn)) != 0
                  && GET_CODE (temp5) == INSN
                  && (temp6 = single_set (temp5)) != 0
                  && rtx_equal_p (temp2, SET_DEST (temp6))
                  && (CONSTANT_P (SET_SRC (temp6))
                      || GET_CODE (SET_SRC (temp6)) == REG
                      || GET_CODE (SET_SRC (temp6)) == SUBREG))
                {
                  emit_insn (PATTERN (temp5));
                  init_insn = temp5;
                  init = SET_SRC (temp6);
                }
              if (CONSTANT_P (init)
                  || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
                target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
                                          XEXP (temp3, 0), XEXP (temp3, 1),
                                          VOIDmode,
                                          (code == LTU || code == LEU
                                           || code == GTU || code == GEU), 1);
              /* If we can do the store-flag, do the addition or
                 subtraction.  */

              if (target)
                target = expand_binop (GET_MODE (temp2),
                                       (XEXP (SET_SRC (temp1), 1) == const1_rtx
                                        ? add_optab : sub_optab),
                                       temp2, target, temp2, 0, OPTAB_WIDEN);

              if (target != 0)
                {
                  /* Put the result back in temp2 in case it isn't already.
                     Then replace the jump, possibly a CC0-setting insn in
                     front of the jump, and TEMP, with the sequence we have
                     made.  */

                  if (target != temp2)
                    emit_move_insn (temp2, target);

                  seq = get_insns ();
                  end_sequence ();

                  emit_insns_before (seq, temp4);
                  delete_insn (temp);

                  if (init_insn)
                    delete_insn (init_insn);

                  next = NEXT_INSN (insn);
#ifdef HAVE_cc0
                  delete_insn (prev_nonnote_insn (insn));
#endif
                  delete_insn (insn);
                  changed = 1;
                  continue;
                }
              else
                end_sequence ();
            }
          /* Simplify   if (...) x = 1; else {...}  if (x) ...
             We recognize this case scanning backwards as well.

             TEMP is the assignment to x;
             TEMP1 is the label at the head of the second if.  */
          /* ?? This should call get_condition to find the values being
             compared, instead of looking for a COMPARE insn when HAVE_cc0
             is not defined.  This would allow it to work on the m88k.  */
          /* ?? This optimization is only safe before cse is run if HAVE_cc0
             is not defined and the condition is tested by a separate compare
             insn.  This is because the code below assumes that the result
             of the compare dies in the following branch.

             Not only that, but there might be other insns between the
             compare and branch whose results are live.  Those insns need
             to be executed.

             A way to fix this is to move the insns at JUMP_LABEL (insn)
             to before INSN.  If we are running before flow, they will
             be deleted if they aren't needed.  But this doesn't work
             well after flow.

             This is really a special-case of jump threading, anyway.  The
             right thing to do is to replace this and jump threading with
             much simpler code in cse.

             This code has been turned off in the non-cc0 case in the
             meantime.  */
          else if (this_is_simplejump
                   /* Safe to skip USE and CLOBBER insns here
                      since they will not be deleted.  */
                   && (temp = prev_active_insn (insn))
                   && no_labels_between_p (temp, insn)
                   && GET_CODE (temp) == INSN
                   && GET_CODE (PATTERN (temp)) == SET
                   && GET_CODE (SET_DEST (PATTERN (temp))) == REG
                   && CONSTANT_P (SET_SRC (PATTERN (temp)))
                   && (temp1 = next_active_insn (JUMP_LABEL (insn)))
                   /* If we find that the next value tested is `x'
                      (TEMP1 is the insn where this happens), win.  */
                   && GET_CODE (temp1) == INSN
                   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
                   /* Does temp1 `tst' the value of x?  */
                   && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
                   && SET_DEST (PATTERN (temp1)) == cc0_rtx
                   && (temp1 = next_nonnote_insn (temp1))
#else
                   /* Does temp1 compare the value of x against zero?  */
                   && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
                   && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
                   && (XEXP (SET_SRC (PATTERN (temp1)), 0)
                       == SET_DEST (PATTERN (temp)))
                   && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
                   && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
#endif
                   && condjump_p (temp1))
            {
              /* Get the if_then_else from the condjump.  */
              rtx choice = SET_SRC (PATTERN (temp1));
              if (GET_CODE (choice) == IF_THEN_ELSE)
                {
                  enum rtx_code code = GET_CODE (XEXP (choice, 0));
                  rtx val = SET_SRC (PATTERN (temp));
                  rtx cond
                    = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
                                                     val, const0_rtx);
                  rtx ultimate;

                  if (cond == const_true_rtx)
                    ultimate = XEXP (choice, 1);
                  else if (cond == const0_rtx)
                    ultimate = XEXP (choice, 2);
                  else
                    ultimate = 0;

                  if (ultimate == pc_rtx)
                    ultimate = get_label_after (temp1);
                  else if (ultimate && GET_CODE (ultimate) != RETURN)
                    ultimate = XEXP (ultimate, 0);

                  if (ultimate)
                    changed |= redirect_jump (insn, ultimate);
                }
            }
          /* @@ This needs a bit of work before it will be right.

             Any type of comparison can be accepted for the first and
             second compare.  When rewriting the first jump, we must
             compute what conditions can reach label3, and use the
             appropriate code.  We can not simply reverse/swap the code
             of the first jump.  In some cases, the second jump must be
             rewritten also.

             For example,
             < == converts to > ==
             < != converts to == >
             etc.

             If the code is written to only accept an '==' test for the second
             compare, then all that needs to be done is to swap the condition
             of the first branch.

             It is questionable whether we want this optimization anyways,
             since if the user wrote code like this because he/she knew that
             the jump to label1 is taken most of the time, then rewriting
             this gives slower code.  */
          /* @@ This should call get_condition to find the values being
             compared, instead of looking for a COMPARE insn when HAVE_cc0
             is not defined.  This would allow it to work on the m88k.  */
          /* @@ This optimization is only safe before cse is run if HAVE_cc0
             is not defined and the condition is tested by a separate compare
             insn.  This is because the code below assumes that the result
             of the compare dies in the following branch.  */
          /* Simplify  test a ~= b

             where ~= is an inequality, e.g. >, and ~~= is the swapped
             inequality (e.g. <).

             We recognize this case scanning backwards.

             TEMP is the conditional jump to `label2';
             TEMP1 is the test for `a == b';
             TEMP2 is the conditional jump to `label1';
             TEMP3 is the test for `a ~= b'.  */
          else if (this_is_simplejump
                   && (temp = prev_active_insn (insn))
                   && no_labels_between_p (temp, insn)
                   && condjump_p (temp)
                   && (temp1 = prev_active_insn (temp))
                   && no_labels_between_p (temp1, temp)
                   && GET_CODE (temp1) == INSN
                   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
                   && sets_cc0_p (PATTERN (temp1)) == 1
#else
                   && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
                   && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
                   && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
#endif
                   && (temp2 = prev_active_insn (temp1))
                   && no_labels_between_p (temp2, temp1)
                   && condjump_p (temp2)
                   && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
                   && (temp3 = prev_active_insn (temp2))
                   && no_labels_between_p (temp3, temp2)
                   && GET_CODE (PATTERN (temp3)) == SET
                   && rtx_equal_p (SET_DEST (PATTERN (temp3)),
                                   SET_DEST (PATTERN (temp1)))
                   && rtx_equal_p (SET_SRC (PATTERN (temp1)),
                                   SET_SRC (PATTERN (temp3)))
                   && ! inequality_comparisons_p (PATTERN (temp))
                   && inequality_comparisons_p (PATTERN (temp2)))
            {
              rtx fallthrough_label = JUMP_LABEL (temp2);

              ++LABEL_NUSES (fallthrough_label);
              if (swap_jump (temp2, JUMP_LABEL (insn)))
                changed = 1;

              if (--LABEL_NUSES (fallthrough_label) == 0)
                delete_insn (fallthrough_label);
            }
          /* Simplify  if (...) {... x = 1;} if (x) ...

             We recognize this case backwards.

             TEMP is the test of `x';
             TEMP1 is the assignment to `x' at the end of the
             previous statement.  */
          /* @@ This should call get_condition to find the values being
             compared, instead of looking for a COMPARE insn when HAVE_cc0
             is not defined.  This would allow it to work on the m88k.  */
          /* @@ This optimization is only safe before cse is run if HAVE_cc0
             is not defined and the condition is tested by a separate compare
             insn.  This is because the code below assumes that the result
             of the compare dies in the following branch.  */

          /* ??? This has to be turned off.  The problem is that the
             unconditional jump might indirectly end up branching to the
             label between TEMP1 and TEMP.  We can't detect this, in general,
             since it may become a jump to there after further optimizations.
             If that jump is done, it will be deleted, so we will retry
             this optimization in the next pass, thus an infinite loop.

             The present code prevents this by putting the jump after the
             label, but this is not logically correct.  */
          else if (this_is_condjump
                   /* Safe to skip USE and CLOBBER insns here
                      since they will not be deleted.  */
                   && (temp = prev_active_insn (insn))
                   && no_labels_between_p (temp, insn)
                   && GET_CODE (temp) == INSN
                   && GET_CODE (PATTERN (temp)) == SET
#ifdef HAVE_cc0
                   && sets_cc0_p (PATTERN (temp)) == 1
                   && GET_CODE (SET_SRC (PATTERN (temp))) == REG
#else
                   /* Temp must be a compare insn, we can not accept a register
                      to register move here, since it may not be simply a
                      tst insn.  */
                   && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
                   && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
                   && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
                   && GET_CODE (SET_DEST (PATTERN (temp))) == REG
                   && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
#endif
                   /* May skip USE or CLOBBER insns here
                      for checking for opportunity, since we
                      take care of them later.  */
                   && (temp1 = prev_active_insn (temp))
                   && GET_CODE (temp1) == INSN
                   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
                   && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
#else
                   && (XEXP (SET_SRC (PATTERN (temp)), 0)
                       == SET_DEST (PATTERN (temp1)))
#endif
                   && CONSTANT_P (SET_SRC (PATTERN (temp1)))
                   /* If this isn't true, cse will do the job.  */
                   && ! no_labels_between_p (temp1, temp))
            {
              /* Get the if_then_else from the condjump.  */
              rtx choice = SET_SRC (PATTERN (insn));
              if (GET_CODE (choice) == IF_THEN_ELSE
                  && (GET_CODE (XEXP (choice, 0)) == EQ
                      || GET_CODE (XEXP (choice, 0)) == NE))
                {
                  int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
                  rtx last_insn;
                  rtx ultimate;
                  rtx p;

                  /* Get the place that condjump will jump to
                     if it is reached from here.  */
                  if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
                      == want_nonzero)
                    ultimate = XEXP (choice, 1);
                  else
                    ultimate = XEXP (choice, 2);
                  /* Get it as a CODE_LABEL.  */
                  if (ultimate == pc_rtx)
                    ultimate = get_label_after (insn);
                  else
                    /* Get the label out of the LABEL_REF.  */
                    ultimate = XEXP (ultimate, 0);

                  /* Insert the jump immediately before TEMP, specifically
                     after the label that is between TEMP1 and TEMP.  */
                  last_insn = PREV_INSN (temp);

                  /* If we would be branching to the next insn, the jump
                     would immediately be deleted and then re-inserted in
                     a subsequent pass over the code.  So don't do anything
                     in that case.  */
                  if (next_active_insn (last_insn)
                      != next_active_insn (ultimate))
                    {
                      emit_barrier_after (last_insn);
                      p = emit_jump_insn_after (gen_jump (ultimate),
                                                last_insn);
                      JUMP_LABEL (p) = ultimate;
                      ++LABEL_NUSES (ultimate);
                      if (INSN_UID (ultimate) < max_jump_chain
                          && INSN_UID (p) < max_jump_chain)
                        {
                          jump_chain[INSN_UID (p)]
                            = jump_chain[INSN_UID (ultimate)];
                          jump_chain[INSN_UID (ultimate)] = p;
                        }
                      changed = 1;
                      continue;
                    }
                }
            }
          /* Detect a conditional jump going to the same place
             as an immediately following unconditional jump.  */
          else if (this_is_condjump
                   && (temp = next_active_insn (insn)) != 0
                   && simplejump_p (temp)
                   && (next_active_insn (JUMP_LABEL (insn))
                       == next_active_insn (JUMP_LABEL (temp))))
            {
              delete_jump (insn);
              changed = 1;
              continue;
            }
          /* Detect a conditional jump jumping over an unconditional jump.  */

          else if ((this_is_condjump || this_is_condjump_in_parallel)
                   && ! this_is_simplejump
                   && reallabelprev != 0
                   && GET_CODE (reallabelprev) == JUMP_INSN
                   && prev_active_insn (reallabelprev) == insn
                   && no_labels_between_p (insn, reallabelprev)
                   && simplejump_p (reallabelprev))
            {
              /* When we invert the unconditional jump, we will be
                 decrementing the usage count of its old label.
                 Make sure that we don't delete it now because that
                 might cause the following code to be deleted.  */
              rtx prev_uses = prev_nonnote_insn (reallabelprev);
              rtx prev_label = JUMP_LABEL (insn);

              if (prev_label)
                ++LABEL_NUSES (prev_label);

              if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
                {
                  /* It is very likely that if there are USE insns before
                     this jump, they hold REG_DEAD notes.  These REG_DEAD
                     notes are no longer valid due to this optimization,
                     and will cause the life-analysis that following passes
                     (notably delayed-branch scheduling) to think that
                     these registers are dead when they are not.

                     To prevent this trouble, we just remove the USE insns
                     from the insn chain.  */

                  while (prev_uses && GET_CODE (prev_uses) == INSN
                         && GET_CODE (PATTERN (prev_uses)) == USE)
                    {
                      rtx useless = prev_uses;
                      prev_uses = prev_nonnote_insn (prev_uses);
                      delete_insn (useless);
                    }

                  delete_insn (reallabelprev);
                  next = insn;
                  changed = 1;
                }

              /* We can now safely delete the label if it is unreferenced
                 since the delete_insn above has deleted the BARRIER.  */
              if (prev_label && --LABEL_NUSES (prev_label) == 0)
                delete_insn (prev_label);
            }
          /* Detect a jump to a jump.  */

          nlabel = follow_jumps (JUMP_LABEL (insn));
          if (nlabel != JUMP_LABEL (insn)
              && redirect_jump (insn, nlabel))
            changed = 1;
          /* Look for   if (foo) bar; else break;  */
          /* The insns look like this:
             insn = condjump label1;
             ...range1 (some insns)...
             jump label2;
             label1:
             ...range2 (some insns)...
             jump somewhere unconditionally
             label2:  */
          {
            rtx label1 = next_label (insn);
            rtx range1end = label1 ? prev_active_insn (label1) : 0;
            /* Don't do this optimization on the first round, so that
               jump-around-a-jump gets simplified before we ask here
               whether a jump is unconditional.

               Also don't do it when we are called after reload since
               it will confuse reorg.  */
            if (! first
                && (reload_completed ? ! flag_delayed_branch : 1)
                /* Make sure INSN is something we can invert.  */
                && condjump_p (insn)
                && label1 != 0
                && JUMP_LABEL (insn) == label1
                && LABEL_NUSES (label1) == 1
                && GET_CODE (range1end) == JUMP_INSN
                && simplejump_p (range1end))
              {
                rtx label2 = next_label (label1);
                rtx range2end = label2 ? prev_active_insn (label2) : 0;
                if (range1end != range2end
                    && JUMP_LABEL (range1end) == label2
                    && GET_CODE (range2end) == JUMP_INSN
                    && GET_CODE (NEXT_INSN (range2end)) == BARRIER
                    /* Invert the jump condition, so we
                       still execute the same insns in each case.  */
                    && invert_jump (insn, label1))
                  {
                    rtx range1beg = next_active_insn (insn);
                    rtx range2beg = next_active_insn (label1);
                    rtx range1after, range2after;
                    rtx range1before, range2before;
                    rtx rangenext;
                    /* Include in each range any notes before it, to be
                       sure that we get the line number note if any, even
                       if there are other notes here.  */
                    while (PREV_INSN (range1beg)
                           && GET_CODE (PREV_INSN (range1beg)) == NOTE)
                      range1beg = PREV_INSN (range1beg);

                    while (PREV_INSN (range2beg)
                           && GET_CODE (PREV_INSN (range2beg)) == NOTE)
                      range2beg = PREV_INSN (range2beg);

                    /* Don't move NOTEs for blocks or loops; shift them
                       outside the ranges, where they'll stay put.  */
                    range1beg = squeeze_notes (range1beg, range1end);
                    range2beg = squeeze_notes (range2beg, range2end);
1860 /* Get current surrounds of the 2 ranges. */
1861 range1before
= PREV_INSN (range1beg
);
1862 range2before
= PREV_INSN (range2beg
);
1863 range1after
= NEXT_INSN (range1end
);
1864 range2after
= NEXT_INSN (range2end
);
1866 /* Splice range2 where range1 was. */
1867 NEXT_INSN (range1before
) = range2beg
;
1868 PREV_INSN (range2beg
) = range1before
;
1869 NEXT_INSN (range2end
) = range1after
;
1870 PREV_INSN (range1after
) = range2end
;
1871 /* Splice range1 where range2 was. */
1872 NEXT_INSN (range2before
) = range1beg
;
1873 PREV_INSN (range1beg
) = range2before
;
1874 NEXT_INSN (range1end
) = range2after
;
1875 PREV_INSN (range2after
) = range1end
;
1877 /* Check for a loop end note between the end of
1878 range2, and the next code label. If there is one,
1879 then what we have really seen is
1880 if (foo) break; end_of_loop;
1881 and moved the break sequence outside the loop.
1882 We must move the LOOP_END note to where the
1883 loop really ends now, or we will confuse loop
1884 optimization. Stop if we find a LOOP_BEG note
1885 first, since we don't want to move the LOOP_END
1886 note in that case. */
1887 for (;range2after
!= label2
; range2after
= rangenext
)
1889 rangenext
= NEXT_INSN (range2after
);
1890 if (GET_CODE (range2after
) == NOTE
)
1892 if (NOTE_LINE_NUMBER (range2after
)
1893 == NOTE_INSN_LOOP_END
)
1895 NEXT_INSN (PREV_INSN (range2after
))
1897 PREV_INSN (rangenext
)
1898 = PREV_INSN (range2after
);
1899 PREV_INSN (range2after
)
1900 = PREV_INSN (range1beg
);
1901 NEXT_INSN (range2after
) = range1beg
;
1902 NEXT_INSN (PREV_INSN (range1beg
))
1904 PREV_INSN (range1beg
) = range2after
;
1906 else if (NOTE_LINE_NUMBER (range2after
)
1907 == NOTE_INSN_LOOP_BEG
)
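	  /* A schematic picture of the range swap above (hypothetical
	     labels).  Before:

		insn:  if (cond) goto L1;
		       ...range1...; goto L2;
	     L1:       ...range2...; jump elsewhere;
	     L2:

	     After inverting INSN and exchanging RANGE1 with RANGE2:

		insn:  if (! cond) goto L1;
		       ...range2...; jump elsewhere;
	     L1:       ...range1...; goto L2;
	     L2:

	     The same insns execute in either case, but for the common
	     "if (foo) bar; else break;" shape the break path no longer
	     sits in the middle of the loop.  */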
	  /* Now that the jump has been tensioned,
	     try cross jumping: check for identical code
	     before the jump and before its target label.  */

	  /* First, cross jumping of conditional jumps:  */

	  if (cross_jump && condjump_p (insn))
	    {
	      rtx newjpos, newlpos;
	      rtx x = prev_real_insn (JUMP_LABEL (insn));

	      /* A conditional jump may be crossjumped
		 only if the place it jumps to follows
		 an opposing jump that comes back here.  */

	      if (x != 0 && ! jump_back_p (x, insn))
		/* We have no opposing jump;
		   cannot cross jump this insn.  */
		x = 0;

	      newjpos = 0;
	      /* TARGET is nonzero if it is ok to cross jump
		 to code before TARGET.  If so, see if it matches.  */
	      if (x != 0)
		find_cross_jump (insn, x, 2,
				 &newjpos, &newlpos);

	      if (newjpos != 0)
		{
		  do_cross_jump (insn, newjpos, newlpos);
		  /* Make the old conditional jump
		     into an unconditional one.  */
		  SET_SRC (PATTERN (insn))
		    = gen_rtx (LABEL_REF, VOIDmode, JUMP_LABEL (insn));
		  INSN_CODE (insn) = -1;
		  emit_barrier_after (insn);
		  /* Add to jump_chain unless this is a new label
		     whose UID is too large.  */
		  if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
		    {
		      jump_chain[INSN_UID (insn)]
			= jump_chain[INSN_UID (JUMP_LABEL (insn))];
		      jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
		    }
		  changed = 1;
		  next = insn;
		}
	    }
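	  /* Cross jumping in a schematic picture (hypothetical labels):
	     when the insns just before INSN match the insns just before
	     its target,

		A; B; C; goto L		==>	goto L2
		...				...
		A; B; C;  L:		==>  L2: A; B; C;  L:

	     one copy of the matching tail is deleted and replaced by a
	     jump to a label placed in front of the surviving copy.  */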
	  /* Cross jumping of unconditional jumps:
	     a few differences.  */

	  if (cross_jump && simplejump_p (insn))
	    {
	      rtx newjpos, newlpos;
	      rtx target;

	      newjpos = 0;

	      /* TARGET is nonzero if it is ok to cross jump
		 to code before TARGET.  If so, see if it matches.  */
	      find_cross_jump (insn, JUMP_LABEL (insn), 1,
			       &newjpos, &newlpos);

	      /* If we cannot cross jump to code before the label,
		 see if we can cross jump to another jump to
		 the same label.  */
	      /* Try each other jump to this label.  */
	      if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
		for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
		     target != 0 && newjpos == 0;
		     target = jump_chain[INSN_UID (target)])
		  if (target != insn
		      && JUMP_LABEL (target) == JUMP_LABEL (insn)
		      /* Ignore TARGET if it's deleted.  */
		      && ! INSN_DELETED_P (target))
		    find_cross_jump (insn, target, 2,
				     &newjpos, &newlpos);

	      if (newjpos != 0)
		{
		  do_cross_jump (insn, newjpos, newlpos);
		  changed = 1;
		  next = insn;
		}
	    }
	  /* This code was dead in the previous jump.c!  */
	  if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
	    {
	      /* Return insns all "jump to the same place"
		 so we can cross-jump between any two of them.  */

	      rtx newjpos, newlpos, target;

	      newjpos = 0;

	      /* If we cannot cross jump to code before the label,
		 see if we can cross jump to another jump to
		 the same label.  */
	      /* Try each other jump to this label.  */
	      for (target = jump_chain[0];
		   target != 0 && newjpos == 0;
		   target = jump_chain[INSN_UID (target)])
		if (target != insn
		    && ! INSN_DELETED_P (target)
		    && GET_CODE (PATTERN (target)) == RETURN)
		  find_cross_jump (insn, target, 2,
				   &newjpos, &newlpos);

	      if (newjpos != 0)
		{
		  do_cross_jump (insn, newjpos, newlpos);
		  changed = 1;
		  next = insn;
		}
	    }
	}
    }

      first = 0;
    }
  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  {
    rtx last_note = 0;

    for (insn = f; insn; insn = NEXT_INSN (insn))
      if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
	{
	  /* Delete this note if it is identical to the previous note.  */
	  if (last_note
	      && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
	      && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
	    {
	      delete_insn (insn);
	      continue;
	    }

	  last_note = insn;
	}
  }
#ifdef HAVE_return
  if (HAVE_return)
    {
      /* If we fall through to the epilogue, see if we can insert a RETURN insn
	 in front of it.  If the machine allows it at this point (we might be
	 after reload for a leaf routine), it will improve optimization for it
	 to be there.  We do this both here and at the start of this pass since
	 the RETURN might have been deleted by some of our optimizations.  */
      insn = get_last_insn ();
      while (insn && GET_CODE (insn) == NOTE)
	insn = PREV_INSN (insn);

      if (insn && GET_CODE (insn) != BARRIER)
	{
	  emit_jump_insn (gen_return ());
	  emit_barrier ();
	}
    }
#endif
  /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
     If so, delete it, and record that this function can drop off the end.  */

  insn = get_last_insn ();
  {
    int n_labels = 1;
    while (insn
	   /* One label can follow the end-note: the return label.  */
	   && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
	       /* Ordinary insns can follow it if returning a structure.  */
	       || GET_CODE (insn) == INSN
	       /* If machine uses explicit RETURN insns, no epilogue,
		  then one of them follows the note.  */
	       || (GET_CODE (insn) == JUMP_INSN
		   && GET_CODE (PATTERN (insn)) == RETURN)
	       /* A barrier can follow the return insn.  */
	       || GET_CODE (insn) == BARRIER
	       /* Other kinds of notes can follow also.  */
	       || (GET_CODE (insn) == NOTE
		   && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
      insn = PREV_INSN (insn);
  }

  /* Report if control can fall through at the end of the function.  */
  if (insn && GET_CODE (insn) == NOTE
      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
    {
      can_reach_end = 1;
      delete_insn (insn);
    }

  /* Show JUMP_CHAIN no longer valid.  */
  jump_chain = 0;
}
/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
   jump.  Assume that this unconditional jump is to the exit test code.  If
   the code is sufficiently simple, make a copy of it before INSN,
   followed by a jump to the exit of the loop.  Then delete the unconditional
   jump after INSN.

   Note that it is possible we can get confused here if the jump immediately
   after the loop start branches outside the loop but within an outer loop.
   If we are near the exit of that loop, we will copy its exit test.  This
   will not generate incorrect code, but could suppress some optimizations.
   However, such cases are degenerate loops anyway.

   Return 1 if we made the change, else 0.

   This is only safe immediately after a regscan pass because it uses the
   values of regno_first_uid and regno_last_uid.  */

static int
duplicate_loop_exit_test (loop_start)
     rtx loop_start;
{
  rtx insn, set, reg, p, link;
  rtx copy = 0;
  int num_insns = 0;
  rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
  rtx lastexit;
  int max_reg = max_reg_num ();
  rtx *reg_map = 0;

  /* Scan the exit code.  We do not perform this optimization if any insn:

	 is a CALL_INSN
	 is a CODE_LABEL
	 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
	 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
	 is a NOTE_INSN_BLOCK_{BEG,END} because duplicated block notes
	      are not valid

     Also, don't do this if the exit code is more than 20 insns.  */

  for (insn = exitcode;
       insn
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	case CALL_INSN:
	  return 0;

	case NOTE:
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    return 0;
	  break;

	case JUMP_INSN:
	case INSN:
	  if (++num_insns > 20
	      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
	      || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    return 0;
	  break;
	}
    }

  /* Unless INSN is zero, we can do the optimization.  */
  if (insn == 0)
    return 0;

  lastexit = insn;

  /* See if any insn sets a register only used in the loop exit code and
     not a user variable.  If so, replace it with a new register.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
	&& (set = single_set (insn)) != 0
	&& ((reg = SET_DEST (set), GET_CODE (reg) == REG)
	    || (GET_CODE (reg) == SUBREG
		&& (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
	&& REGNO (reg) >= FIRST_PSEUDO_REGISTER
	&& regno_first_uid[REGNO (reg)] == INSN_UID (insn))
      {
	for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
	  if (regno_last_uid[REGNO (reg)] == INSN_UID (p))
	    break;

	if (p != lastexit)
	  continue;

	/* We can do the replacement.  Allocate reg_map if this is the
	   first replacement we found.  */
	if (reg_map == 0)
	  {
	    reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
	    bzero ((char *) reg_map, max_reg * sizeof (rtx));
	  }

	REG_LOOP_TEST_P (reg) = 1;

	reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
      }

  /* Now copy each insn.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    switch (GET_CODE (insn))
      {
      case BARRIER:
	copy = emit_barrier_before (loop_start);
	break;

      case NOTE:
	/* Only copy line-number notes.  */
	if (NOTE_LINE_NUMBER (insn) >= 0)
	  {
	    copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
	    NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	  }
	break;

      case INSN:
	copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
	if (reg_map)
	  replace_regs (PATTERN (copy), reg_map, max_reg, 1);

	mark_jump_label (PATTERN (copy), copy, 0);

	/* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
	   make them.  */
	for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	  if (REG_NOTE_KIND (link) != REG_LABEL)
	    REG_NOTES (copy)
	      = copy_rtx (gen_rtx (EXPR_LIST, REG_NOTE_KIND (link),
				   XEXP (link, 0), REG_NOTES (copy)));
	if (reg_map && REG_NOTES (copy))
	  replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	break;

      case JUMP_INSN:
	copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
	if (reg_map)
	  replace_regs (PATTERN (copy), reg_map, max_reg, 1);
	mark_jump_label (PATTERN (copy), copy, 0);
	if (REG_NOTES (insn))
	  {
	    REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
	    if (reg_map)
	      replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	  }

	/* If this is a simple jump, add it to the jump chain.  */

	if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
	    && simplejump_p (copy))
	  {
	    jump_chain[INSN_UID (copy)]
	      = jump_chain[INSN_UID (JUMP_LABEL (copy))];
	    jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
	  }
	break;

      default:
	abort ();
      }

  /* Now clean up by emitting a jump to the end label and deleting the jump
     at the start of the loop.  */
  if (! copy || GET_CODE (copy) != BARRIER)
    {
      copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
				    loop_start);
      mark_jump_label (PATTERN (copy), copy, 0);
      if (INSN_UID (copy) < max_jump_chain
	  && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
	{
	  jump_chain[INSN_UID (copy)]
	    = jump_chain[INSN_UID (JUMP_LABEL (copy))];
	  jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
	}
      emit_barrier_before (loop_start);
    }

  /* Mark the exit code as the virtual top of the converted loop.  */
  emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);

  delete_insn (next_nonnote_insn (loop_start));

  return 1;
}
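/* Rough sketch of the transformation (schematic; labels hypothetical).
   A loop emitted as

	NOTE_INSN_LOOP_BEG
	jump L_test
     L_body:
	...body...
     L_test:
	...exit-test insns...; if (cond) goto L_body
	NOTE_INSN_LOOP_END
     L_exit:

   becomes, after the copying above,

	...copy of exit-test insns...; if (cond) goto L_body
	jump L_exit
	NOTE_INSN_LOOP_BEG
     L_body:
	...body...
     L_test:	(marked with NOTE_INSN_LOOP_VTOP)
	...

   so the body is entered only if the test succeeds once, and each
   iteration performs a single test at the bottom.  */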
/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
   loop-end notes between START and END out before START.  Assume that
   END is not such a note.  START may be such a note.  Returns the value
   of the new starting insn, which may be different if the original start
   was such a note.  */

rtx
squeeze_notes (start, end)
     rtx start, end;
{
  rtx insn;
  rtx next;

  for (insn = start; insn != end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
	{
	  if (insn == start)
	    start = next;
	  else
	    {
	      rtx prev = PREV_INSN (insn);
	      PREV_INSN (insn) = PREV_INSN (start);
	      NEXT_INSN (insn) = start;
	      NEXT_INSN (PREV_INSN (insn)) = insn;
	      PREV_INSN (NEXT_INSN (insn)) = insn;
	      NEXT_INSN (prev) = next;
	      PREV_INSN (next) = prev;
	    }
	}
    }

  return start;
}
/* Compare the instructions before insn E1 with those before E2
   to find an opportunity for cross jumping.
   (This means detecting identical sequences of insns followed by
   jumps to the same place, or followed by a label and a jump
   to that label, and replacing one with a jump to the other.)

   Assume E1 is a jump that jumps to label E2
   (that is not always true but it might as well be).
   Find the longest possible equivalent sequences
   and store the first insns of those sequences into *F1 and *F2.
   Store zero there if no equivalent preceding instructions are found.

   We give up if we find a label in stream 1.
   Actually we could transfer that label into stream 2.  */

static void
find_cross_jump (e1, e2, minimum, f1, f2)
     rtx e1, e2;
     int minimum;
     rtx *f1, *f2;
{
  register rtx i1 = e1, i2 = e2;
  register rtx p1, p2;
  int lose = 0;

  rtx last1 = 0, last2 = 0;
  rtx afterlast1 = 0, afterlast2 = 0;

  *f1 = 0;
  *f2 = 0;

  while (1)
    {
      i1 = prev_nonnote_insn (i1);

      i2 = PREV_INSN (i2);
      while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
	i2 = PREV_INSN (i2);

      if (i1 == 0)
	break;

      /* Don't allow the range of insns preceding E1 or E2
	 to include the other (E2 or E1).  */
      if (i2 == e1 || i1 == e2)
	break;

      /* If we will get to this code by jumping, those jumps will be
	 tensioned to go directly to the new label (before I2),
	 so this cross-jumping won't cost extra.  So reduce the minimum.  */
      if (GET_CODE (i1) == CODE_LABEL)
	{
	  --minimum;
	  break;
	}

      if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
	break;

      p1 = PATTERN (i1);
      p2 = PATTERN (i2);

      /* If this is a CALL_INSN, compare register usage information.
	 If we don't check this on stack register machines, the two
	 CALL_INSNs might be merged leaving reg-stack.c with mismatching
	 numbers of stack registers in the same basic block.
	 If we don't check this on machines with delay slots, a delay slot may
	 be filled that clobbers a parameter expected by the subroutine.

	 ??? We take the simple route for now and assume that if they're
	 equal, they were constructed identically.  */

      if (GET_CODE (i1) == CALL_INSN
	  && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
			    CALL_INSN_FUNCTION_USAGE (i2)))
	lose = 1;

#ifdef STACK_REGS
      /* If cross_jump_death_matters is not 0, the insn's mode
	 indicates whether or not the insn contains any stack-like
	 regs.  */

      if (!lose && cross_jump_death_matters && GET_MODE (i1) == QImode)
	{
	  /* If register stack conversion has already been done, then
	     death notes must also be compared before it is certain that
	     the two instruction streams match.  */

	  rtx note;
	  HARD_REG_SET i1_regset, i2_regset;

	  CLEAR_HARD_REG_SET (i1_regset);
	  CLEAR_HARD_REG_SET (i2_regset);

	  for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

	  for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

	  GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);

	  lose = 1;

	done:
	  ;
	}
#endif

      if (lose || GET_CODE (p1) != GET_CODE (p2)
	  || ! rtx_renumbered_equal_p (p1, p2))
	{
	  /* The following code helps take care of G++ cleanups.  */
	  rtx equiv1;
	  rtx equiv2;

	  if (!lose && GET_CODE (p1) == GET_CODE (p2)
	      && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
		  || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
	      && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
		  || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
	      /* If the equivalences are not to a constant, they may
		 reference pseudos that no longer exist, so we can't
		 use them.  */
	      && CONSTANT_P (XEXP (equiv1, 0))
	      && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
	    {
	      rtx s1 = single_set (i1);
	      rtx s2 = single_set (i2);
	      if (s1 != 0 && s2 != 0
		  && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
		{
		  validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
		  validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
		  if (! rtx_renumbered_equal_p (p1, p2))
		    cancel_changes (0);
		  else if (apply_change_group ())
		    goto win;
		}
	    }

	  /* Insns fail to match; cross jumping is limited to the following
	     insns.  */

#ifdef HAVE_cc0
	  /* Don't allow the insn after a compare to be shared by
	     cross-jumping unless the compare is also shared.
	     Here, if either of these non-matching insns is a compare,
	     exclude the following insn from possible cross-jumping.  */
	  if (sets_cc0_p (p1) || sets_cc0_p (p2))
	    last1 = afterlast1, last2 = afterlast2, ++minimum;
#endif

	  /* If cross-jumping here will feed a jump-around-jump
	     optimization, this jump won't cost extra, so reduce
	     the minimum.  */
	  if (GET_CODE (i1) == JUMP_INSN
	      && JUMP_LABEL (i1)
	      && prev_real_insn (JUMP_LABEL (i1)) == e1)
	    --minimum;
	  break;
	}

    win:
      if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
	{
	  /* Ok, this insn is potentially includable in a cross-jump here.  */
	  afterlast1 = last1, afterlast2 = last2;
	  last1 = i1, last2 = i2, --minimum;
	}
    }

  if (minimum <= 0 && last1 != 0 && last1 != e1)
    *f1 = last1, *f2 = last2;
}
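/* Minimal usage sketch (mirrors the call sites earlier in this file):

	rtx newjpos, newlpos;
	newjpos = 0;
	find_cross_jump (insn, JUMP_LABEL (insn), 1, &newjpos, &newlpos);
	if (newjpos != 0)
	  do_cross_jump (insn, newjpos, newlpos);

   MINIMUM is the number of matched insns required before the
   replacement jump pays for itself; labels and imminent
   jump-around-jump opportunities reduce it because the jump then
   comes free.  */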
static void
do_cross_jump (insn, newjpos, newlpos)
     rtx insn, newjpos, newlpos;
{
  /* Find an existing label at this point
     or make a new one if there is none.  */
  register rtx label = get_label_before (newlpos);

  /* Make the same jump insn jump to the new point.  */
  if (GET_CODE (PATTERN (insn)) == RETURN)
    {
      /* Remove from jump chain of returns.  */
      delete_from_jump_chain (insn);
      /* Change the insn.  */
      PATTERN (insn) = gen_jump (label);
      INSN_CODE (insn) = -1;
      JUMP_LABEL (insn) = label;
      LABEL_NUSES (label)++;
      /* Add to the new jump chain.  */
      if (INSN_UID (label) < max_jump_chain
	  && INSN_UID (insn) < max_jump_chain)
	{
	  jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
	  jump_chain[INSN_UID (label)] = insn;
	}
    }
  else
    redirect_jump (insn, label);

  /* Delete the matching insns before the jump.  Also, remove any REG_EQUAL
     or REG_EQUIV note in the NEWLPOS stream that isn't also present in
     the NEWJPOS stream.  */

  while (newjpos != insn)
    {
      rtx lnote;

      for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
	if ((REG_NOTE_KIND (lnote) == REG_EQUAL
	     || REG_NOTE_KIND (lnote) == REG_EQUIV)
	    && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
	    && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
	  remove_note (newlpos, lnote);

      delete_insn (newjpos);
      newjpos = next_real_insn (newjpos);
      newlpos = next_real_insn (newlpos);
    }
}
/* Return the label before INSN, or put a new label there.  */

rtx
get_label_before (insn)
     rtx insn;
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      rtx prev = PREV_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
    }
  return label;
}

/* Return the label after INSN, or put a new label there.  */

rtx
get_label_after (insn)
     rtx insn;
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = next_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      label = gen_label_rtx ();
      emit_label_after (label, insn);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
/* Return 1 if INSN is a jump that jumps to right after TARGET
   only on the condition that TARGET itself would drop through.
   Assumes that TARGET is a conditional jump.  */

static int
jump_back_p (insn, target)
     rtx insn, target;
{
  rtx cinsn, ctarget;
  enum rtx_code codei, codet;

  if (simplejump_p (insn) || ! condjump_p (insn)
      || simplejump_p (target)
      || target != prev_real_insn (JUMP_LABEL (insn)))
    return 0;

  cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
  ctarget = XEXP (SET_SRC (PATTERN (target)), 0);

  codei = GET_CODE (cinsn);
  codet = GET_CODE (ctarget);

  if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
    {
      if (! can_reverse_comparison_p (cinsn, insn))
	return 0;
      codei = reverse_condition (codei);
    }

  if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
    {
      if (! can_reverse_comparison_p (ctarget, target))
	return 0;
      codet = reverse_condition (codet);
    }

  return (codei == codet
	  && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
	  && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
}
/* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
   return non-zero if it is safe to reverse this comparison.  It is if our
   floating-point is not IEEE, if this is an NE or EQ comparison, or if
   this is known to be an integer comparison.  */

int
can_reverse_comparison_p (comparison, insn)
     rtx comparison;
     rtx insn;
{
  rtx arg0;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
    return 0;

  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      /* If this is an NE comparison, it is safe to reverse it to an EQ
	 comparison and vice versa, even for floating point.  If no operands
	 are NaNs, the reversal is valid.  If some operand is a NaN, EQ is
	 always false and NE is always true, so the reversal is also valid.  */
      || GET_CODE (comparison) == NE
      || GET_CODE (comparison) == EQ)
    return 1;

  arg0 = XEXP (comparison, 0);

  /* Make sure ARG0 is one of the actual objects being compared.  If we
     can't do this, we can't be sure the comparison can be reversed.

     Handle cc0 and a MODE_CC register.  */
  if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
#ifdef HAVE_cc0
      || arg0 == cc0_rtx
#endif
      )
    {
      rtx prev = prev_nonnote_insn (insn);
      rtx set = single_set (prev);

      if (set == 0 || SET_DEST (set) != arg0)
	return 0;

      arg0 = SET_SRC (set);

      if (GET_CODE (arg0) == COMPARE)
	arg0 = XEXP (arg0, 0);
    }

  /* We can reverse this if ARG0 is a CONST_INT or if its mode is
     not VOIDmode and neither a MODE_CC nor MODE_FLOAT type.  */
  return (GET_CODE (arg0) == CONST_INT
	  || (GET_MODE (arg0) != VOIDmode
	      && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
	      && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
}
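/* Example of the IEEE hazard this test guards against: with NaNs,
   (lt x y) is not the inverse of (ge x y), since both are false when
   either operand is a NaN.  Only EQ/NE reverse safely for floating
   point; integer comparisons always reverse.  */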
/* Given an rtx-code for a comparison, return the code
   for the negated comparison.
   WATCH OUT!  reverse_condition is not safe to use on a jump
   that might be acting on the results of an IEEE floating point comparison,
   because of the special treatment of non-signaling NaNs in comparisons.
   Use can_reverse_comparison_p to be sure.  */

enum rtx_code
reverse_condition (code)
     enum rtx_code code;

/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (code)
     enum rtx_code code;

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (code)
     enum rtx_code code;

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (code)
     enum rtx_code code;
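/* For reference, a sketch of the standard code mappings implemented by
   the four functions above:

	reverse_condition:   EQ<->NE, LT<->GE, GT<->LE, LTU<->GEU, GTU<->LEU
	swap_condition:      LT<->GT, LE<->GE, LTU<->GTU, LEU<->GEU;
			     EQ and NE are unchanged
	unsigned_condition:  LT->LTU, LE->LEU, GT->GTU, GE->GEU
	signed_condition:    LTU->LT, LEU->LE, GTU->GT, GEU->GE  */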
/* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (code1, code2)
     enum rtx_code code1, code2;
{
  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;
    }

  return 0;
}
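/* Example: comparison_dominates_p (EQ, LE) is 1, because x == y implies
   x <= y; comparison_dominates_p (LT, NE) is 1 for the same reason; but
   comparison_dominates_p (LE, LT) is 0, since x <= y allows x == y.  */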
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (insn)
     rtx insn;
{
  return (GET_CODE (insn) == JUMP_INSN
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}
/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.  */

int
condjump_p (insn)
     rtx insn;
{
  register rtx x = PATTERN (insn);
  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}
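/* The RTL shapes recognized above, for reference:

	simplejump_p:	(set (pc) (label_ref L))
	condjump_p:	(set (pc) (if_then_else COND (label_ref L) (pc)))
		    or	(set (pc) (if_then_else COND (pc) (label_ref L)))

   (a RETURN may appear in place of the label_ref).  */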
/* Return nonzero if INSN is a (possibly) conditional jump
   within a PARALLEL and nothing more.  */

int
condjump_in_parallel_p (insn)
     rtx insn;
{
  register rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (x)
     rtx x;
{
#ifdef HAVE_cc0
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
#else
  abort ();
#endif
}
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find the end, return LABEL,
   since that tells the caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */

rtx
follow_jumps (label)
     rtx label;
{
  register rtx insn;
  register rtx next;
  register rtx value = label;
  register int depth;

  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& GET_CODE (insn) == JUMP_INSN
	&& (JUMP_LABEL (insn) != 0 || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (GET_CODE (tem) == NOTE
	      && NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG)
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  if (depth == 10)
    return label;
  return value;
}
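/* Example of jump tensioning with follow_jumps (hypothetical labels):
   given

     L1:	goto L2
	...
     L2:	goto L3

   follow_jumps (L1) returns L3, so a jump to L1 can be redirected
   straight to L3, subject to the loop-entry and cycle checks above.  */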
/* Assuming that field IDX of X is a vector of label_refs,
   replace each of them by the ultimate label reached by it.
   Return nonzero if a change is made.
   If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG.  */

static int
tension_vector_labels (x, idx)
     register rtx x;
     register int idx;
{
  int changed = 0;
  register int i;
  for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
    {
      register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
      register rtx nlabel = follow_jumps (olabel);
      if (nlabel && nlabel != olabel)
	{
	  XEXP (XVECEXP (x, idx, i), 0) = nlabel;
	  ++LABEL_NUSES (nlabel);
	  if (--LABEL_NUSES (olabel) == 0)
	    delete_insn (olabel);
	  changed = 1;
	}
    }
  return changed;
}
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization has been done.

   Once reload has completed (CROSS_JUMP non-zero), we need not consider
   two labels distinct if they are separated by only USE or CLOBBER insns.  */

static void
mark_jump_label (x, insn, cross_jump)
     register rtx x;
     rtx insn;
     int cross_jump;
{
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case SUBREG:
    case CONST_INT:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* If this is a constant-pool reference, see if it is a label.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);
	rtx olabel = label;
	rtx note;
	rtx next;

	if (GET_CODE (label) != CODE_LABEL)
	  abort ();

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	/* If there are other labels following this one,
	   replace it with the last of the consecutive labels.  */
	for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
	  {
	    if (GET_CODE (next) == CODE_LABEL)
	      label = next;
	    else if (cross_jump && GET_CODE (next) == INSN
		     && (GET_CODE (PATTERN (next)) == USE
			 || GET_CODE (PATTERN (next)) == CLOBBER))
	      continue;
	    else if (GET_CODE (next) != NOTE)
	      break;
	    else if (! cross_jump
		     && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
			 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END))
	      break;
	  }

	XEXP (x, 0) = label;
	++LABEL_NUSES (label);

	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;

	    /* If we've changed OLABEL and we had a REG_LABEL note
	       for it, update it as well.  */
	    else if (label != olabel
		     && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
	      XEXP (note, 0) = label;

	    /* Otherwise, add a REG_LABEL note for LABEL unless there already
	       is one.  */
	    else if (! find_reg_note (insn, REG_LABEL, label))
	      {
		rtx next = next_real_insn (label);
		/* Don't record labels that refer to dispatch tables.
		   This is not necessary, since the tablejump
		   references the same label.
		   And if we did record them, flow.c would make worse code.  */
		if (next == 0
		    || ! (GET_CODE (next) == JUMP_INSN
			  && (GET_CODE (PATTERN (next)) == ADDR_VEC
			      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)))
		  REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_LABEL, label,
					      REG_NOTES (insn));
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      {
	int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	for (i = 0; i < XVECLEN (x, eltnum); i++)
	  mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
	return;
      }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, cross_jump);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
	}
    }
}
/* If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.  */

void
delete_jump (insn)
     rtx insn;
{
  register rtx set = single_set (insn);

  if (set && GET_CODE (SET_DEST (set)) == PC)
    delete_computation (insn);
}
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (insn)
     rtx insn;
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && !FIND_REG_INC_NOTE (prev, NULL_RTX))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_UNUSED,
					cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  for (note = REG_NOTES (insn); note; note = next)
    {
      rtx our_prev;

      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG)
	continue;

      for (our_prev = prev_nonnote_insn (insn);
	   our_prev && GET_CODE (our_prev) == INSN;
	   our_prev = prev_nonnote_insn (our_prev))
	{
	  /* If we reach a SEQUENCE, it is too complex to try to
	     do anything with it, so give up.  */
	  if (GET_CODE (PATTERN (our_prev)) == SEQUENCE)
	    break;

	  if (GET_CODE (PATTERN (our_prev)) == USE
	      && GET_CODE (XEXP (PATTERN (our_prev), 0)) == INSN)
	    /* reorg creates USEs that look like this.  We leave them
	       alone because reorg needs them for its own purposes.  */
	    break;

	  if (reg_set_p (XEXP (note, 0), PATTERN (our_prev)))
	    {
	      if (FIND_REG_INC_NOTE (our_prev, NULL_RTX))
		break;

	      if (GET_CODE (PATTERN (our_prev)) == PARALLEL)
		{
		  /* If we find a SET of something else, we can't
		     delete the insn.  */

		  int i;

		  for (i = 0; i < XVECLEN (PATTERN (our_prev), 0); i++)
		    {
		      rtx part = XVECEXP (PATTERN (our_prev), 0, i);

		      if (GET_CODE (part) == SET
			  && SET_DEST (part) != XEXP (note, 0))
			break;
		    }

		  if (i == XVECLEN (PATTERN (our_prev), 0))
		    delete_computation (our_prev);
		}
	      else if (GET_CODE (PATTERN (our_prev)) == SET
		       && SET_DEST (PATTERN (our_prev)) == XEXP (note, 0))
		delete_computation (our_prev);

	      break;
	    }

	  /* If OUR_PREV references the register that dies here, it is an
	     additional use.  Hence any prior SET isn't dead.  However, this
	     insn becomes the new place for the REG_DEAD note.  */
	  if (reg_overlap_mentioned_p (XEXP (note, 0),
				       PATTERN (our_prev)))
	    {
	      XEXP (note, 1) = REG_NOTES (our_prev);
	      REG_NOTES (our_prev) = note;
	      break;
	    }
	}
    }

  delete_insn (insn);
}
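/* Example of the recursive deletion performed above (schematic):
   if a jump being deleted carries (REG_DEAD (reg 100)) and the
   preceding insn is

	(set (reg 100) (plus ...))	;; reg 100 dies in the jump

   then that set is deleted too, since the register served no other
   purpose, and the process repeats on the inputs of the set.  */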
/* Delete insn INSN from the chain of insns and update label ref counts.
   May delete some following insns as a consequence; may even delete
   a label elsewhere and insns that follow it.

   Returns the first insn after INSN that was not deleted.  */

rtx
delete_insn (insn)
     register rtx insn;
{
  register rtx next = NEXT_INSN (insn);
  register rtx prev = PREV_INSN (insn);
  register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  register int dont_really_delete = 0;

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  /* Don't delete user-declared labels.  Convert them to special NOTEs
     instead.  */
  if (was_code_label && LABEL_NAME (insn) != 0
      && optimize && ! dont_really_delete)
    {
      PUT_CODE (insn, NOTE);
      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
      NOTE_SOURCE_FILE (insn) = 0;
      dont_really_delete = 1;
    }

  /* Mark this insn as deleted.  */
  INSN_DELETED_P (insn) = 1;

  /* If this is an unconditional jump, delete it from the jump chain.  */
  if (simplejump_p (insn))
    delete_from_jump_chain (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    {
      INSN_DELETED_P (next) = 1;
      next = NEXT_INSN (next);
    }

  /* Patch out INSN (and the barrier if any).  */

  if (optimize && ! dont_really_delete)
    {
      if (prev)
	{
	  NEXT_INSN (prev) = next;
	  if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
	    NEXT_INSN (XVECEXP (PATTERN (prev), 0,
				XVECLEN (PATTERN (prev), 0) - 1)) = next;
	}

      if (next)
	{
	  PREV_INSN (next) = prev;
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
	    PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
	}

      if (prev && NEXT_INSN (prev) == 0)
	set_last_insn (prev);
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    if (--LABEL_NUSES (JUMP_LABEL (insn)) == 0)
      {
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_insn (JUMP_LABEL (insn));
	/* I feel a little doubtful about this loop,
	   but I see no clean and sure alternative way
	   to find the first insn after INSN that is not now deleted.
	   I hope this works.  */
	while (next && INSN_DELETED_P (next))
	  next = NEXT_INSN (next);
	return next;
      }

  /* Likewise if we're deleting a dispatch table.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
	if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_insn (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      register RTX_CODE code;
      while (next != 0
	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
		 || code == NOTE || code == BARRIER
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_insn (next);
	}
    }

  return next;
}
/* Advance from INSN till reaching something not deleted
   then return that.  May return INSN itself.  */

rtx
next_nondeleted_insn (insn)
     rtx insn;
{
  while (INSN_DELETED_P (insn))
    insn = NEXT_INSN (insn);
  return insn;
}
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (from, to)
     register rtx from, to;
{
  register rtx insn = from;

  while (1)
    {
      register rtx next = NEXT_INSN (insn);
      register rtx prev = PREV_INSN (insn);

      if (GET_CODE (insn) != NOTE)
	{
	  INSN_DELETED_P (insn) = 1;

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    NEXT_INSN (prev) = next;

	  if (next)
	    PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
/* Invert the condition of the jump JUMP, and make it jump
   to label NLABEL instead of where it jumps now.  */

int
invert_jump (jump, nlabel)
     rtx jump, nlabel;
{
  /* We have to either invert the condition and change the label or
     do neither.  Either operation could fail.  We first try to invert
     the jump.  If that succeeds, we try changing the label.  If that fails,
     we invert the jump back to what it was.  */

  if (! invert_exp (PATTERN (jump), jump))
    return 0;

  if (redirect_jump (jump, nlabel))
    return 1;

  if (! invert_exp (PATTERN (jump), jump))
    /* This should just be putting it back the way it was.  */
    abort ();

  return 0;
}
/* Invert the jump condition of rtx X contained in jump insn, INSN.

   Return 1 if we can do so, 0 if we cannot find a way to do so that
   matches a pattern.  */

int
invert_exp (x, insn)
     register rtx x;
     rtx insn;
{
  register RTX_CODE code;
  register int i;
  register char *fmt;

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      register rtx comp = XEXP (x, 0);
      register rtx tem;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      if (can_reverse_comparison_p (comp, insn)
	  && validate_change (insn, &XEXP (x, 0),
			      gen_rtx (reverse_condition (GET_CODE (comp)),
				       GET_MODE (comp), XEXP (comp, 0),
				       XEXP (comp, 1)), 0))
	return 1;

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return apply_change_group ();
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	if (! invert_exp (XEXP (x, i), insn))
	  return 0;
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (!invert_exp (XVECEXP (x, i, j), insn))
	      return 0;
	}
    }

  return 1;
}
/* Make jump JUMP jump to label NLABEL instead of where it jumps now.
   If the old jump target label is unused as a result,
   it and the code following it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't (this
   can only occur for NLABEL == 0).  */

int
redirect_jump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
    return 0;

  /* If this is an unconditional branch, delete it from the jump_chain of
     OLABEL and add it to the jump_chain of NLABEL (assuming both labels
     have UID's in range and JUMP_CHAIN is valid).  */
  if (jump_chain && (simplejump_p (jump)
		     || GET_CODE (PATTERN (jump)) == RETURN))
    {
      int label_index = nlabel ? INSN_UID (nlabel) : 0;

      delete_from_jump_chain (jump);
      if (label_index < max_jump_chain
	  && INSN_UID (jump) < max_jump_chain)
	{
	  jump_chain[INSN_UID (jump)] = jump_chain[label_index];
	  jump_chain[label_index] = jump;
	}
    }

  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0)
    delete_insn (olabel);

  return 1;
}
/* Delete the instruction JUMP from any jump chain it might be on.  */

static void
delete_from_jump_chain (jump)
     rtx jump;
{
  int index;
  rtx olabel = JUMP_LABEL (jump);

  /* Handle unconditional jumps.  */
  if (jump_chain && olabel != 0
      && INSN_UID (olabel) < max_jump_chain
      && simplejump_p (jump))
    index = INSN_UID (olabel);
  /* Handle return insns.  */
  else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
    index = 0;
  else
    return;

  if (jump_chain[index] == jump)
    jump_chain[index] = jump_chain[INSN_UID (jump)];
  else
    {
      rtx insn;

      for (insn = jump_chain[index];
	   insn != 0;
	   insn = jump_chain[INSN_UID (insn)])
	if (jump_chain[INSN_UID (insn)] == jump)
	  {
	    jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
	    break;
	  }
    }
}
/* If NLABEL is nonzero, throughout the rtx at LOC,
   alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL).  If OLABEL is
   zero, alter (RETURN) to (LABEL_REF NLABEL).

   If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
   validity with validate_change.  Convert (set (pc) (label_ref olabel))
   to (return).

   Return 0 if we found a change we would like to make but it is invalid.
   Otherwise, return 1.  */

static int
redirect_exp (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  register rtx x = *loc;
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  if (nlabel)
	    XEXP (x, 0) = nlabel;
	  else
	    return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
	  return 1;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      x = gen_rtx (LABEL_REF, VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx (SET, VOIDmode, pc_rtx, x);
      return validate_change (insn, loc, x, 0);
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
	  return 0;
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
	      return 0;
	}
    }

  return 1;
}
/* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.

   If the old jump target label (before the dispatch table) becomes unused,
   it and the dispatch table may be deleted.  In that case, find the insn
   before the jump references that label and delete it and logical successors
   too.  */

static void
redirect_tablejump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  /* Add this jump to the jump_chain of NLABEL.  */
  if (jump_chain && INSN_UID (nlabel) < max_jump_chain
      && INSN_UID (jump) < max_jump_chain)
    {
      jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
      jump_chain[INSN_UID (nlabel)] = jump;
    }

  PATTERN (jump) = gen_jump (nlabel);
  JUMP_LABEL (jump) = nlabel;
  ++LABEL_NUSES (nlabel);
  INSN_CODE (jump) = -1;

  if (--LABEL_NUSES (olabel) == 0)
    {
      delete_labelref_insn (jump, olabel, 0);
      delete_insn (olabel);
    }
}
/* Find the insn referencing LABEL that is a logical predecessor of INSN.
   If we found one, delete it and then delete this insn if DELETE_THIS is
   non-zero.  Return non-zero if INSN or a predecessor references LABEL.  */

static int
delete_labelref_insn (insn, label, delete_this)
     rtx insn, label;
     int delete_this;
{
  int deleted = 0;
  rtx link;

  if (GET_CODE (insn) != NOTE
      && reg_mentioned_p (label, PATTERN (insn)))
    {
      if (delete_this)
	{
	  delete_insn (insn);
	  deleted = 1;
	}
      else
	return 1;
    }

  for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
    if (delete_labelref_insn (XEXP (link, 0), label, 1))
      {
	if (delete_this)
	  {
	    delete_insn (insn);
	    deleted = 1;
	  }
	else
	  return 1;
      }

  return deleted;
}
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (x, y)
     rtx x, y;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
      && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
				  && GET_CODE (SUBREG_REG (y)) == REG)))
    {
      int reg_x = -1, reg_y = -1;
      int word_x = 0, word_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  word_x = SUBREG_WORD (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      reg_x = reg_renumber[reg_x] + word_x;
	      word_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  word_y = SUBREG_WORD (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      reg_y = reg_renumber[reg_y];
	      word_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     any order.  Also handle the simple binary and unary cases without a
     loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      register int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  abort ();
	}
    }
  return 1;
}
/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (x)
     rtx x;
{
  if (GET_CODE (x) == REG)
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
	return SUBREG_WORD (x) + base;
    }
  return -1;
}
/* Optimize code of the form:

	for (x = a[i]; x; ...)
	  ...
	for (x = a[i]; x; ...)
	  ...
      foo:

   Loop optimize will change the above code into

	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
      foo:

   In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try which is doomed to fail.
   We run this after loop optimization and before flow analysis.  */

/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

static int *same_regs;

static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;
/* Called via note_stores on each insn between the target of the first
   branch and the second branch.  It marks any changed registers.  */

static void
mark_modified_reg (dest, x)
     rtx dest;
     rtx x;
{
  int regno, i;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == MEM)
    modified_mem = 1;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);
  if (regno >= FIRST_PSEUDO_REGISTER)
    modified_regs[regno] = 1;
  else
    for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
      modified_regs[regno + i] = 1;
}
4154 /* F is the first insn in the chain of insns. */
4157 thread_jumps (f
, max_reg
, flag_before_loop
)
4160 int flag_before_loop
;
4162 /* Basic algorithm is to find a conditional branch,
4163 the label it may branch to, and the branch after
4164 that label. If the two branches test the same condition,
4165 walk back from both branch paths until the insn patterns
4166 differ, or code labels are hit. If we make it back to
4167 the target of the first branch, then we know that the first branch
4168 will either always succeed or always fail depending on the relative
4169 senses of the two branches. So adjust the first branch accordingly
4172 rtx label
, b1
, b2
, t1
, t2
;
4173 enum rtx_code code1
, code2
;
4174 rtx b1op0
, b1op1
, b2op0
, b2op1
;
4179 /* Allocate register tables and quick-reset table. */
4180 modified_regs
= (char *) alloca (max_reg
* sizeof (char));
4181 same_regs
= (int *) alloca (max_reg
* sizeof (int));
4182 all_reset
= (int *) alloca (max_reg
* sizeof (int));
4183 for (i
= 0; i
< max_reg
; i
++)
4190 for (b1
= f
; b1
; b1
= NEXT_INSN (b1
))
4192 /* Get to a candidate branch insn. */
4193 if (GET_CODE (b1
) != JUMP_INSN
4194 || ! condjump_p (b1
) || simplejump_p (b1
)
4195 || JUMP_LABEL (b1
) == 0)
4198 bzero (modified_regs
, max_reg
* sizeof (char));
4201 bcopy ((char *) all_reset
, (char *) same_regs
,
4202 max_reg
* sizeof (int));
4205 label
= JUMP_LABEL (b1
);
4207 /* Look for a branch after the target. Record any registers and
4208 memory modified between the target and the branch. Stop when we
4209 get to a label since we can't know what was changed there. */
4210 for (b2
= NEXT_INSN (label
); b2
; b2
= NEXT_INSN (b2
))
4212 if (GET_CODE (b2
) == CODE_LABEL
)
4215 else if (GET_CODE (b2
) == JUMP_INSN
)
4217 /* If this is an unconditional jump and is the only use of
4218 its target label, we can follow it. */
4219 if (simplejump_p (b2
)
4220 && JUMP_LABEL (b2
) != 0
4221 && LABEL_NUSES (JUMP_LABEL (b2
)) == 1)
4223 b2
= JUMP_LABEL (b2
);
4230 if (GET_CODE (b2
) != CALL_INSN
&& GET_CODE (b2
) != INSN
)
4233 if (GET_CODE (b2
) == CALL_INSN
)
4236 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4237 if (call_used_regs
[i
] && ! fixed_regs
[i
]
4238 && i
!= STACK_POINTER_REGNUM
4239 && i
!= FRAME_POINTER_REGNUM
4240 && i
!= HARD_FRAME_POINTER_REGNUM
4241 && i
!= ARG_POINTER_REGNUM
)
4242 modified_regs
[i
] = 1;
4245 note_stores (PATTERN (b2
), mark_modified_reg
);
4248 /* Check the next candidate branch insn from the label
4251 || GET_CODE (b2
) != JUMP_INSN
4253 || ! condjump_p (b2
)
4254 || simplejump_p (b2
))

	  /* Get the comparison codes and operands, reversing the
	     codes if appropriate.  If we don't have comparison codes,
	     we can't do anything.  */
	  b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
	  b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
	  code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
	  if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
	    code1 = reverse_condition (code1);

	  b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
	  b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
	  code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
	  if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
	    code2 = reverse_condition (code2);
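
	  /* A conditional jump's pattern is roughly
		 (set (pc) (if_then_else (COND op0 op1) (label_ref L) (pc)))
	     and branches when COND holds.  If the arms are swapped, so
	     that the "then" arm is (pc), the branch is taken when COND
	     fails; hence the reverse_condition calls above.  */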

	  /* If they test the same things and knowing that B1 branches
	     tells us whether or not B2 branches, check if we
	     can thread the branch.  */
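	  /* comparison_dominates_p (CODE1, CODE2) holds when CODE1 being
	     true implies CODE2 is true; e.g. LT dominates LE, so if B1
	     tests a < b and B2 tests a <= b, B1 branching forces B2 to
	     branch.  The reverse_condition alternative catches the
	     opposite sense, e.g. CODE1 == LT with CODE2 == GE, where B1
	     branching forces B2 to fall through.  */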
	  if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
	      && rtx_equal_for_thread_p (b1op1, b2op1, b2)
	      && (comparison_dominates_p (code1, code2)
		  || comparison_dominates_p (code1,
					     reverse_condition (code2))))
	    {
	      t1 = prev_nonnote_insn (b1);
	      t2 = prev_nonnote_insn (b2);

	      while (t1 != 0 && t2 != 0)
		{
		  if (t2 == label)
		    {
		      /* We have reached the target of the first branch.
			 If there are no pending register equivalents,
			 we know that this branch will either always
			 succeed (if the senses of the two branches are
			 the same) or always fail (if not).  */
		      rtx new_label;

		      if (num_same_regs != 0)
			break;

		      if (comparison_dominates_p (code1, code2))
			new_label = JUMP_LABEL (b2);
		      else
			new_label = get_label_after (b2);

		      if (JUMP_LABEL (b1) != new_label)
			{
			  rtx prev = PREV_INSN (new_label);

			  if (flag_before_loop
			      && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
			    {
			      /* Don't thread to the loop label.  If a loop
				 label is reused, loop optimization will
				 be disabled for that loop.  */
			      new_label = gen_label_rtx ();
			      emit_label_after (new_label, PREV_INSN (prev));
			    }

			  changed |= redirect_jump (b1, new_label);
			}
		      break;
		    }

		  /* If either of these is not a normal insn (it might be
		     a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail.  (NOTEs
		     have already been skipped above.)  Similarly, fail
		     if the insns are different.  */
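		  /* recog_memoized returns the insn code number of the
		     recognized pattern, so comparing the two numbers is a
		     cheap filter before the full pattern comparison.  */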
		  if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
		      || recog_memoized (t1) != recog_memoized (t2)
		      || ! rtx_equal_for_thread_p (PATTERN (t1),
						   PATTERN (t2), t2))
		    break;

		  t1 = prev_nonnote_insn (t1);
		  t2 = prev_nonnote_insn (t2);
		}
	    }
	}
    }
}

/* This is like RTX_EQUAL_P except that it knows about our handling of
   possibly equivalent registers and knows to consider volatile and
   modified objects as not equal.

   YINSN is the insn containing Y.  */

int
rtx_equal_for_thread_p (x, y, yinsn)
     rtx x, y;
     rtx yinsn;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register char *fmt;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     any order.  Also handle the simple binary and unary cases without a
     loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	     && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
	    || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
		&& rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	    && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
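
  /* E.g. (plus:SI (reg 65) (reg 66)) matches (plus:SI (reg 66) (reg 65))
     by the commutative rule.  EQ and NE are in class '<', not 'c', but
     are symmetric in their operands, which is why they are listed
     explicitly above.  */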

  /* Handle special-cases first.  */
  switch (code)
    {
    case REG:
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
	return 1;

      /* If neither is user variable or hard register, check for possible
	 equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  || REGNO (y) < FIRST_PSEUDO_REGISTER)
	return 0;

      if (same_regs[REGNO (x)] == -1)
	{
	  same_regs[REGNO (x)] = REGNO (y);
	  num_same_regs++;

	  /* If this is the first time we are seeing a register on the `Y'
	     side, see if it is the last use.  If not, we can't thread the
	     jump, so mark it as not equivalent.  */
	  if (regno_last_uid[REGNO (y)] != INSN_UID (yinsn))
	    return 0;

	  return 1;
	}
      else
	return (same_regs[REGNO (x)] == REGNO (y));
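
      /* For instance (register numbers made up): if an insn before B1
	 uses (reg 70) where the matching insn before B2 uses (reg 80),
	 we record same_regs[70] = 80 and bump num_same_regs.  The SET
	 case below retires the pairing when both registers are set
	 together, and thread_jumps only redirects the branch once
	 num_same_regs has dropped back to zero.  */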
      break;

    case MEM:
      /* If memory modified or either volatile, not equivalent.
	 Else, check address.  */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

    case ASM_INPUT:
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' if setting equivalenced registers.
	 Then process source.  */
      if (GET_CODE (SET_DEST (x)) == REG
	  && GET_CODE (SET_DEST (y)) == REG)
	{
	  if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
	    {
	      same_regs[REGNO (SET_DEST (x))] = -1;
	      num_same_regs--;
	    }
	  else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
	    return 0;
	}
      else
	{
	  if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
	    return 0;
	}

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);
    }
4454 fmt
= GET_RTX_FORMAT (code
);
4455 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4460 if (XWINT (x
, i
) != XWINT (y
, i
))
4466 if (XINT (x
, i
) != XINT (y
, i
))
4472 /* Two vectors must have the same length. */
4473 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
4476 /* And the corresponding elements must match. */
4477 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4478 if (rtx_equal_for_thread_p (XVECEXP (x
, i
, j
),
4479 XVECEXP (y
, i
, j
), yinsn
) == 0)
4484 if (rtx_equal_for_thread_p (XEXP (x
, i
), XEXP (y
, i
), yinsn
) == 0)
4490 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
4495 /* These are just backpointers, so they don't matter. */
4501 /* It is believed that rtx's at this level will never
4502 contain anything but integers and other rtx's,
4503 except for within LABEL_REFs and SYMBOL_REFs. */