1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option) any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked at by later passes.
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
59 #include "hard-reg-set.h"
61 #include "insn-config.h"
62 #include "insn-flags.h"
63 #include "insn-attr.h"
71 /* ??? Eventually must record somehow the labels used by jumps
72 from nested functions. */
73 /* Pre-record the next or previous real insn for each label?
74 No, this pass is very fast anyway. */
75 /* Condense consecutive labels?
76 This would make life analysis faster, maybe. */
77 /* Optimize jump y; x: ... y: jumpif... x?
78 Don't know if it is worth bothering with. */
79 /* Optimize two cases of conditional jump to conditional jump?
80 This can never delete any instruction or make anything dead,
81 or even change what is live at any point.
82 So perhaps let combiner do it. */
84 /* Vector indexed by uid.
85 For each CODE_LABEL, index by its uid to get first unconditional jump
86 that jumps to the label.
87 For each JUMP_INSN, index by its uid to get the next unconditional jump
88 that jumps to the same label.
89 Element 0 is the start of a chain of all return insns.
90 (It is safe to use element 0 because insn uid 0 is not used.) */
92 static rtx
*jump_chain
;
94 /* Maximum index in jump_chain. */
96 static int max_jump_chain
;
98 /* Indicates whether death notes are significant in cross jump analysis.
99 Normally they are not significant, because if A and B jump to C,
100 and R dies in A, it must die in B. But this might not be true after
101 stack register conversion, and we must compare death notes in that case. */
104 static int cross_jump_death_matters
= 0;
106 static int init_label_info
PARAMS ((rtx
));
107 static void delete_barrier_successors
PARAMS ((rtx
));
108 static void mark_all_labels
PARAMS ((rtx
, int));
109 static rtx delete_unreferenced_labels
PARAMS ((rtx
));
110 static void delete_noop_moves
PARAMS ((rtx
));
111 static int duplicate_loop_exit_test
PARAMS ((rtx
));
112 static void find_cross_jump
PARAMS ((rtx
, rtx
, int, rtx
*, rtx
*));
113 static void do_cross_jump
PARAMS ((rtx
, rtx
, rtx
));
114 static int jump_back_p
PARAMS ((rtx
, rtx
));
115 static int tension_vector_labels
PARAMS ((rtx
, int));
116 static void delete_computation
PARAMS ((rtx
));
117 static void redirect_exp_1
PARAMS ((rtx
*, rtx
, rtx
, rtx
));
118 static int redirect_exp
PARAMS ((rtx
, rtx
, rtx
));
119 static void invert_exp_1
PARAMS ((rtx
));
120 static int invert_exp
PARAMS ((rtx
));
121 static void delete_from_jump_chain
PARAMS ((rtx
));
122 static int delete_labelref_insn
PARAMS ((rtx
, rtx
, int));
123 static void mark_modified_reg
PARAMS ((rtx
, rtx
, void *));
124 static void redirect_tablejump
PARAMS ((rtx
, rtx
));
125 static void jump_optimize_1
PARAMS ((rtx
, int, int, int, int, int));
126 static int returnjump_p_1
PARAMS ((rtx
*, void *));
127 static void delete_prior_computation
PARAMS ((rtx
, rtx
));
129 /* Main external entry point into the jump optimizer. See comments before
130 jump_optimize_1 for descriptions of the arguments. */
132 jump_optimize (f
, cross_jump
, noop_moves
, after_regscan
)
138 jump_optimize_1 (f
, cross_jump
, noop_moves
, after_regscan
, 0, 0);
141 /* Alternate entry into the jump optimizer. This entry point only rebuilds
142 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
145 rebuild_jump_labels (f
)
148 jump_optimize_1 (f
, 0, 0, 0, 1, 0);
151 /* Alternate entry into the jump optimizer. Do only trivial optimizations. */
154 jump_optimize_minimal (f
)
157 jump_optimize_1 (f
, 0, 0, 0, 0, 1);
160 /* Delete no-op jumps and optimize jumps to jumps
161 and jumps around jumps.
162 Delete unused labels and unreachable code.
164 If CROSS_JUMP is 1, detect matching code
165 before a jump and its destination and unify them.
166 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
168 If NOOP_MOVES is nonzero, delete no-op move insns.
170 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
171 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
173 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
174 and JUMP_LABEL field for jumping insns.
176 If `optimize' is zero, don't change any code,
177 just determine whether control drops off the end of the function.
178 This case occurs when we have -W and not -O.
179 It works because `delete_insn' checks the value of `optimize'
180 and refrains from actually deleting when that is 0.
182 If MINIMAL is nonzero, then we only perform trivial optimizations:
184 * Removal of unreachable code after BARRIERs.
185 * Removal of unreferenced CODE_LABELs.
186 * Removal of a jump to the next instruction.
187 * Removal of a conditional jump followed by an unconditional jump
188 to the same target as the conditional jump.
189 * Simplify a conditional jump around an unconditional jump.
190 * Simplify a jump to a jump.
191 * Delete extraneous line number notes. */
195 jump_optimize_1 (f
, cross_jump
, noop_moves
, after_regscan
,
196 mark_labels_only
, minimal
)
201 int mark_labels_only
;
204 register rtx insn
, next
;
211 enum rtx_code reversed_code
;
214 cross_jump_death_matters
= (cross_jump
== 2);
215 max_uid
= init_label_info (f
) + 1;
217 /* If we are performing cross jump optimizations, then initialize
218 tables mapping UIDs to EH regions to avoid incorrect movement
219 of insns from one EH region to another. */
220 if (flag_exceptions
&& cross_jump
)
221 init_insn_eh_region (f
, max_uid
);
223 if (! mark_labels_only
)
224 delete_barrier_successors (f
);
226 /* Leave some extra room for labels and duplicate exit test insns
228 max_jump_chain
= max_uid
* 14 / 10;
229 jump_chain
= (rtx
*) xcalloc (max_jump_chain
, sizeof (rtx
));
231 mark_all_labels (f
, cross_jump
);
233 /* Keep track of labels used from static data; we don't track them
234 closely enough to delete them here, so make sure their reference
235 count doesn't drop to zero. */
237 for (insn
= forced_labels
; insn
; insn
= XEXP (insn
, 1))
238 if (GET_CODE (XEXP (insn
, 0)) == CODE_LABEL
)
239 LABEL_NUSES (XEXP (insn
, 0))++;
241 check_exception_handler_labels ();
243 /* Keep track of labels used for marking handlers for exception
244 regions; they cannot usually be deleted. */
246 for (insn
= exception_handler_labels
; insn
; insn
= XEXP (insn
, 1))
247 if (GET_CODE (XEXP (insn
, 0)) == CODE_LABEL
)
248 LABEL_NUSES (XEXP (insn
, 0))++;
250 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
251 notes and recompute LABEL_NUSES. */
252 if (mark_labels_only
)
256 exception_optimize ();
258 last_insn
= delete_unreferenced_labels (f
);
261 delete_noop_moves (f
);
263 /* If we haven't yet gotten to reload and we have just run regscan,
264 delete any insn that sets a register that isn't used elsewhere.
265 This helps some of the optimizations below by having less insns
266 being jumped around. */
268 if (optimize
&& ! reload_completed
&& after_regscan
)
269 for (insn
= f
; insn
; insn
= next
)
271 rtx set
= single_set (insn
);
273 next
= NEXT_INSN (insn
);
275 if (set
&& GET_CODE (SET_DEST (set
)) == REG
276 && REGNO (SET_DEST (set
)) >= FIRST_PSEUDO_REGISTER
277 && REGNO_FIRST_UID (REGNO (SET_DEST (set
))) == INSN_UID (insn
)
278 /* We use regno_last_note_uid so as not to delete the setting
279 of a reg that's used in notes. A subsequent optimization
280 might arrange to use that reg for real. */
281 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set
))) == INSN_UID (insn
)
282 && ! side_effects_p (SET_SRC (set
))
283 && ! find_reg_note (insn
, REG_RETVAL
, 0)
284 /* An ADDRESSOF expression can turn into a use of the internal arg
285 pointer, so do not delete the initialization of the internal
286 arg pointer yet. If it is truly dead, flow will delete the
287 initializing insn. */
288 && SET_DEST (set
) != current_function_internal_arg_pointer
)
292 /* Now iterate optimizing jumps until nothing changes over one pass. */
294 old_max_reg
= max_reg_num ();
299 for (insn
= f
; insn
; insn
= next
)
302 rtx temp
, temp1
, temp2
= NULL_RTX
;
303 rtx temp4 ATTRIBUTE_UNUSED
;
305 int this_is_any_uncondjump
;
306 int this_is_any_condjump
;
307 int this_is_onlyjump
;
309 next
= NEXT_INSN (insn
);
311 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
312 jump. Try to optimize by duplicating the loop exit test if so.
313 This is only safe immediately after regscan, because it uses
314 the values of regno_first_uid and regno_last_uid. */
315 if (after_regscan
&& GET_CODE (insn
) == NOTE
316 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
317 && (temp1
= next_nonnote_insn (insn
)) != 0
318 && any_uncondjump_p (temp1
)
319 && onlyjump_p (temp1
))
321 temp
= PREV_INSN (insn
);
322 if (duplicate_loop_exit_test (insn
))
325 next
= NEXT_INSN (temp
);
330 if (GET_CODE (insn
) != JUMP_INSN
)
333 this_is_any_condjump
= any_condjump_p (insn
);
334 this_is_any_uncondjump
= any_uncondjump_p (insn
);
335 this_is_onlyjump
= onlyjump_p (insn
);
337 /* Tension the labels in dispatch tables. */
339 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
)
340 changed
|= tension_vector_labels (PATTERN (insn
), 0);
341 if (GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
342 changed
|= tension_vector_labels (PATTERN (insn
), 1);
344 /* See if this jump goes to another jump and redirect if so. */
345 nlabel
= follow_jumps (JUMP_LABEL (insn
));
346 if (nlabel
!= JUMP_LABEL (insn
))
347 changed
|= redirect_jump (insn
, nlabel
, 1);
349 if (! optimize
|| minimal
)
352 /* If a dispatch table always goes to the same place,
353 get rid of it and replace the insn that uses it. */
355 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
356 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
359 rtx pat
= PATTERN (insn
);
360 int diff_vec_p
= GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
;
361 int len
= XVECLEN (pat
, diff_vec_p
);
362 rtx dispatch
= prev_real_insn (insn
);
365 for (i
= 0; i
< len
; i
++)
366 if (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)
367 != XEXP (XVECEXP (pat
, diff_vec_p
, 0), 0))
372 && GET_CODE (dispatch
) == JUMP_INSN
373 && JUMP_LABEL (dispatch
) != 0
374 /* Don't mess with a casesi insn.
375 XXX according to the comment before computed_jump_p(),
376 all casesi insns should be a parallel of the jump
377 and a USE of a LABEL_REF. */
378 && ! ((set
= single_set (dispatch
)) != NULL
379 && (GET_CODE (SET_SRC (set
)) == IF_THEN_ELSE
))
380 && next_real_insn (JUMP_LABEL (dispatch
)) == insn
)
382 redirect_tablejump (dispatch
,
383 XEXP (XVECEXP (pat
, diff_vec_p
, 0), 0));
388 reallabelprev
= prev_active_insn (JUMP_LABEL (insn
));
390 /* Detect jump to following insn. */
391 if (reallabelprev
== insn
392 && (this_is_any_condjump
|| this_is_any_uncondjump
)
395 next
= next_real_insn (JUMP_LABEL (insn
));
398 /* Remove the "inactive" but "real" insns (i.e. uses and
399 clobbers) in between here and there. */
401 while ((temp
= next_real_insn (temp
)) != next
)
408 /* Detect a conditional jump going to the same place
409 as an immediately following unconditional jump. */
410 else if (this_is_any_condjump
&& this_is_onlyjump
411 && (temp
= next_active_insn (insn
)) != 0
412 && simplejump_p (temp
)
413 && (next_active_insn (JUMP_LABEL (insn
))
414 == next_active_insn (JUMP_LABEL (temp
))))
416 /* Don't mess up test coverage analysis. */
418 if (flag_test_coverage
&& !reload_completed
)
419 for (temp2
= insn
; temp2
!= temp
; temp2
= NEXT_INSN (temp2
))
420 if (GET_CODE (temp2
) == NOTE
&& NOTE_LINE_NUMBER (temp2
) > 0)
425 /* Ensure that we jump to the later of the two labels.
436 If we leave the goto L1, we'll incorrectly leave
437 return-reg dead for TEST true. */
439 temp2
= next_active_insn (JUMP_LABEL (insn
));
441 temp2
= get_last_insn ();
442 if (GET_CODE (temp2
) != CODE_LABEL
)
443 temp2
= prev_label (temp2
);
444 if (temp2
!= JUMP_LABEL (temp
))
445 redirect_jump (temp
, temp2
, 1);
453 /* Detect a conditional jump jumping over an unconditional jump. */
455 else if (this_is_any_condjump
456 && reallabelprev
!= 0
457 && GET_CODE (reallabelprev
) == JUMP_INSN
458 && prev_active_insn (reallabelprev
) == insn
459 && no_labels_between_p (insn
, reallabelprev
)
460 && any_uncondjump_p (reallabelprev
)
461 && onlyjump_p (reallabelprev
))
463 /* When we invert the unconditional jump, we will be
464 decrementing the usage count of its old label.
465 Make sure that we don't delete it now because that
466 might cause the following code to be deleted. */
467 rtx prev_uses
= prev_nonnote_insn (reallabelprev
);
468 rtx prev_label
= JUMP_LABEL (insn
);
471 ++LABEL_NUSES (prev_label
);
473 if (invert_jump (insn
, JUMP_LABEL (reallabelprev
), 1))
475 /* It is very likely that if there are USE insns before
476 this jump, they hold REG_DEAD notes. These REG_DEAD
477 notes are no longer valid due to this optimization,
478 and will cause the life-analysis that following passes
479 (notably delayed-branch scheduling) to think that
480 these registers are dead when they are not.
482 To prevent this trouble, we just remove the USE insns
483 from the insn chain. */
485 while (prev_uses
&& GET_CODE (prev_uses
) == INSN
486 && GET_CODE (PATTERN (prev_uses
)) == USE
)
488 rtx useless
= prev_uses
;
489 prev_uses
= prev_nonnote_insn (prev_uses
);
490 delete_insn (useless
);
493 delete_insn (reallabelprev
);
497 /* We can now safely delete the label if it is unreferenced
498 since the delete_insn above has deleted the BARRIER. */
499 if (prev_label
&& --LABEL_NUSES (prev_label
) == 0)
500 delete_insn (prev_label
);
502 next
= NEXT_INSN (insn
);
505 /* If we have an unconditional jump preceded by a USE, try to put
506 the USE before the target and jump there. This simplifies many
507 of the optimizations below since we don't have to worry about
508 dealing with these USE insns. We only do this if the label
509 being branch to already has the identical USE or if code
510 never falls through to that label. */
512 else if (this_is_any_uncondjump
513 && (temp
= prev_nonnote_insn (insn
)) != 0
514 && GET_CODE (temp
) == INSN
515 && GET_CODE (PATTERN (temp
)) == USE
516 && (temp1
= prev_nonnote_insn (JUMP_LABEL (insn
))) != 0
517 && (GET_CODE (temp1
) == BARRIER
518 || (GET_CODE (temp1
) == INSN
519 && rtx_equal_p (PATTERN (temp
), PATTERN (temp1
))))
520 /* Don't do this optimization if we have a loop containing
521 only the USE instruction, and the loop start label has
522 a usage count of 1. This is because we will redo this
523 optimization every time through the outer loop, and jump
524 opt will never exit. */
525 && ! ((temp2
= prev_nonnote_insn (temp
)) != 0
526 && temp2
== JUMP_LABEL (insn
)
527 && LABEL_NUSES (temp2
) == 1))
529 if (GET_CODE (temp1
) == BARRIER
)
531 emit_insn_after (PATTERN (temp
), temp1
);
532 temp1
= NEXT_INSN (temp1
);
536 redirect_jump (insn
, get_label_before (temp1
), 1);
537 reallabelprev
= prev_real_insn (temp1
);
539 next
= NEXT_INSN (insn
);
543 /* Detect a conditional jump jumping over an unconditional trap. */
545 && this_is_any_condjump
&& this_is_onlyjump
546 && reallabelprev
!= 0
547 && GET_CODE (reallabelprev
) == INSN
548 && GET_CODE (PATTERN (reallabelprev
)) == TRAP_IF
549 && TRAP_CONDITION (PATTERN (reallabelprev
)) == const_true_rtx
550 && prev_active_insn (reallabelprev
) == insn
551 && no_labels_between_p (insn
, reallabelprev
)
552 && (temp2
= get_condition (insn
, &temp4
))
553 && ((reversed_code
= reversed_comparison_code (temp2
, insn
))
556 rtx
new = gen_cond_trap (reversed_code
,
557 XEXP (temp2
, 0), XEXP (temp2
, 1),
558 TRAP_CODE (PATTERN (reallabelprev
)));
562 emit_insn_before (new, temp4
);
563 delete_insn (reallabelprev
);
569 /* Detect a jump jumping to an unconditional trap. */
570 else if (HAVE_trap
&& this_is_onlyjump
571 && (temp
= next_active_insn (JUMP_LABEL (insn
)))
572 && GET_CODE (temp
) == INSN
573 && GET_CODE (PATTERN (temp
)) == TRAP_IF
574 && (this_is_any_uncondjump
575 || (this_is_any_condjump
576 && (temp2
= get_condition (insn
, &temp4
)))))
578 rtx tc
= TRAP_CONDITION (PATTERN (temp
));
580 if (tc
== const_true_rtx
581 || (! this_is_any_uncondjump
&& rtx_equal_p (temp2
, tc
)))
584 /* Replace an unconditional jump to a trap with a trap. */
585 if (this_is_any_uncondjump
)
587 emit_barrier_after (emit_insn_before (gen_trap (), insn
));
592 new = gen_cond_trap (GET_CODE (temp2
), XEXP (temp2
, 0),
594 TRAP_CODE (PATTERN (temp
)));
597 emit_insn_before (new, temp4
);
603 /* If the trap condition and jump condition are mutually
604 exclusive, redirect the jump to the following insn. */
605 else if (GET_RTX_CLASS (GET_CODE (tc
)) == '<'
606 && this_is_any_condjump
607 && swap_condition (GET_CODE (temp2
)) == GET_CODE (tc
)
608 && rtx_equal_p (XEXP (tc
, 0), XEXP (temp2
, 0))
609 && rtx_equal_p (XEXP (tc
, 1), XEXP (temp2
, 1))
610 && redirect_jump (insn
, get_label_after (temp
), 1))
619 /* Now that the jump has been tensioned,
620 try cross jumping: check for identical code
621 before the jump and before its target label. */
623 /* First, cross jumping of conditional jumps: */
625 if (cross_jump
&& condjump_p (insn
))
627 rtx newjpos
, newlpos
;
628 rtx x
= prev_real_insn (JUMP_LABEL (insn
));
630 /* A conditional jump may be crossjumped
631 only if the place it jumps to follows
632 an opposing jump that comes back here. */
634 if (x
!= 0 && ! jump_back_p (x
, insn
))
635 /* We have no opposing jump;
636 cannot cross jump this insn. */
640 /* TARGET is nonzero if it is ok to cross jump
641 to code before TARGET. If so, see if matches. */
643 find_cross_jump (insn
, x
, 2,
648 do_cross_jump (insn
, newjpos
, newlpos
);
649 /* Make the old conditional jump
650 into an unconditional one. */
651 SET_SRC (PATTERN (insn
))
652 = gen_rtx_LABEL_REF (VOIDmode
, JUMP_LABEL (insn
));
653 INSN_CODE (insn
) = -1;
654 emit_barrier_after (insn
);
655 /* Add to jump_chain unless this is a new label
656 whose UID is too large. */
657 if (INSN_UID (JUMP_LABEL (insn
)) < max_jump_chain
)
659 jump_chain
[INSN_UID (insn
)]
660 = jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
661 jump_chain
[INSN_UID (JUMP_LABEL (insn
))] = insn
;
668 /* Cross jumping of unconditional jumps:
669 a few differences. */
671 if (cross_jump
&& simplejump_p (insn
))
673 rtx newjpos
, newlpos
;
678 /* TARGET is nonzero if it is ok to cross jump
679 to code before TARGET. If so, see if matches. */
680 find_cross_jump (insn
, JUMP_LABEL (insn
), 1,
683 /* If cannot cross jump to code before the label,
684 see if we can cross jump to another jump to the same label. */
686 /* Try each other jump to this label. */
687 if (INSN_UID (JUMP_LABEL (insn
)) < max_uid
)
688 for (target
= jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
689 target
!= 0 && newjpos
== 0;
690 target
= jump_chain
[INSN_UID (target
)])
692 && JUMP_LABEL (target
) == JUMP_LABEL (insn
)
693 /* Ignore TARGET if it's deleted. */
694 && ! INSN_DELETED_P (target
))
695 find_cross_jump (insn
, target
, 2,
700 do_cross_jump (insn
, newjpos
, newlpos
);
706 /* This code was dead in the previous jump.c! */
707 if (cross_jump
&& GET_CODE (PATTERN (insn
)) == RETURN
)
709 /* Return insns all "jump to the same place"
710 so we can cross-jump between any two of them. */
712 rtx newjpos
, newlpos
, target
;
716 /* If cannot cross jump to code before the label,
717 see if we can cross jump to another jump to the same label. */
719 /* Try each other jump to this label. */
720 for (target
= jump_chain
[0];
721 target
!= 0 && newjpos
== 0;
722 target
= jump_chain
[INSN_UID (target
)])
724 && ! INSN_DELETED_P (target
)
725 && GET_CODE (PATTERN (target
)) == RETURN
)
726 find_cross_jump (insn
, target
, 2,
731 do_cross_jump (insn
, newjpos
, newlpos
);
742 /* Delete extraneous line number notes.
743 Note that two consecutive notes for different lines are not really
744 extraneous. There should be some indication where that line belonged,
745 even if it became empty. */
750 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
751 if (GET_CODE (insn
) == NOTE
)
753 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_BEG
)
754 /* Any previous line note was for the prologue; gdb wants a new
755 note after the prologue even if it is for the same line. */
756 last_note
= NULL_RTX
;
757 else if (NOTE_LINE_NUMBER (insn
) >= 0)
759 /* Delete this note if it is identical to previous note. */
761 && NOTE_SOURCE_FILE (insn
) == NOTE_SOURCE_FILE (last_note
)
762 && NOTE_LINE_NUMBER (insn
) == NOTE_LINE_NUMBER (last_note
))
779 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
780 notes whose labels don't occur in the insn any more. Returns the
781 largest INSN_UID found. */
789 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
791 if (GET_CODE (insn
) == CODE_LABEL
)
792 LABEL_NUSES (insn
) = (LABEL_PRESERVE_P (insn
) != 0);
793 else if (GET_CODE (insn
) == JUMP_INSN
)
794 JUMP_LABEL (insn
) = 0;
795 else if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
799 for (note
= REG_NOTES (insn
); note
; note
= next
)
801 next
= XEXP (note
, 1);
802 if (REG_NOTE_KIND (note
) == REG_LABEL
803 && ! reg_mentioned_p (XEXP (note
, 0), PATTERN (insn
)))
804 remove_note (insn
, note
);
807 if (INSN_UID (insn
) > largest_uid
)
808 largest_uid
= INSN_UID (insn
);
814 /* Delete insns following barriers, up to next label.
816 Also delete no-op jumps created by gcse. */
819 delete_barrier_successors (f
)
825 for (insn
= f
; insn
;)
827 if (GET_CODE (insn
) == BARRIER
)
829 insn
= NEXT_INSN (insn
);
831 never_reached_warning (insn
);
833 while (insn
!= 0 && GET_CODE (insn
) != CODE_LABEL
)
835 if (GET_CODE (insn
) == NOTE
836 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
)
837 insn
= NEXT_INSN (insn
);
839 insn
= delete_insn (insn
);
841 /* INSN is now the code_label. */
844 /* Also remove (set (pc) (pc)) insns which can be created by
845 gcse. We eliminate such insns now to avoid having them
846 cause problems later. */
847 else if (GET_CODE (insn
) == JUMP_INSN
848 && (set
= pc_set (insn
)) != NULL
849 && SET_SRC (set
) == pc_rtx
850 && SET_DEST (set
) == pc_rtx
851 && onlyjump_p (insn
))
852 insn
= delete_insn (insn
);
855 insn
= NEXT_INSN (insn
);
859 /* Mark the label each jump jumps to.
860 Combine consecutive labels, and count uses of labels.
862 For each label, make a chain (using `jump_chain')
863 of all the *unconditional* jumps that jump to it;
864 also make a chain of all returns.
866 CROSS_JUMP indicates whether we are doing cross jumping
867 and if we are whether we will be paying attention to
868 death notes or not. */
871 mark_all_labels (f
, cross_jump
)
877 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
880 if (GET_CODE (insn
) == CALL_INSN
881 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
883 mark_all_labels (XEXP (PATTERN (insn
), 0), cross_jump
);
884 mark_all_labels (XEXP (PATTERN (insn
), 1), cross_jump
);
885 mark_all_labels (XEXP (PATTERN (insn
), 2), cross_jump
);
889 mark_jump_label (PATTERN (insn
), insn
, cross_jump
, 0);
890 if (! INSN_DELETED_P (insn
) && GET_CODE (insn
) == JUMP_INSN
)
892 /* When we know the LABEL_REF contained in a REG used in
893 an indirect jump, we'll have a REG_LABEL note so that
894 flow can tell where it's going. */
895 if (JUMP_LABEL (insn
) == 0)
897 rtx label_note
= find_reg_note (insn
, REG_LABEL
, NULL_RTX
);
900 /* Put a LABEL_REF around the REG_LABEL note, so
901 that we can canonicalize it. */
902 rtx label_ref
= gen_rtx_LABEL_REF (VOIDmode
,
903 XEXP (label_note
, 0));
905 mark_jump_label (label_ref
, insn
, cross_jump
, 0);
906 XEXP (label_note
, 0) = XEXP (label_ref
, 0);
907 JUMP_LABEL (insn
) = XEXP (label_note
, 0);
910 if (JUMP_LABEL (insn
) != 0 && simplejump_p (insn
))
912 jump_chain
[INSN_UID (insn
)]
913 = jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
914 jump_chain
[INSN_UID (JUMP_LABEL (insn
))] = insn
;
916 if (GET_CODE (PATTERN (insn
)) == RETURN
)
918 jump_chain
[INSN_UID (insn
)] = jump_chain
[0];
919 jump_chain
[0] = insn
;
925 /* Delete all labels already not referenced.
926 Also find and return the last insn. */
929 delete_unreferenced_labels (f
)
932 rtx final
= NULL_RTX
;
935 for (insn
= f
; insn
;)
937 if (GET_CODE (insn
) == CODE_LABEL
938 && LABEL_NUSES (insn
) == 0
939 && LABEL_ALTERNATE_NAME (insn
) == NULL
)
940 insn
= delete_insn (insn
);
944 insn
= NEXT_INSN (insn
);
951 /* Delete various simple forms of moves which have no necessary effect. */
955 delete_noop_moves (f
)
960 for (insn
= f
; insn
;)
962 next
= NEXT_INSN (insn
);
964 if (GET_CODE (insn
) == INSN
)
966 register rtx body
= PATTERN (insn
);
968 /* Detect and delete no-op move instructions
969 resulting from not allocating a parameter in a register. */
971 if (GET_CODE (body
) == SET
972 && (SET_DEST (body
) == SET_SRC (body
)
973 || (GET_CODE (SET_DEST (body
)) == MEM
974 && GET_CODE (SET_SRC (body
)) == MEM
975 && rtx_equal_p (SET_SRC (body
), SET_DEST (body
))))
976 && ! (GET_CODE (SET_DEST (body
)) == MEM
977 && MEM_VOLATILE_P (SET_DEST (body
)))
978 && ! (GET_CODE (SET_SRC (body
)) == MEM
979 && MEM_VOLATILE_P (SET_SRC (body
))))
980 delete_computation (insn
);
982 /* Detect and ignore no-op move instructions
983 resulting from smart or fortuitous register allocation. */
985 else if (GET_CODE (body
) == SET
)
987 int sreg
= true_regnum (SET_SRC (body
));
988 int dreg
= true_regnum (SET_DEST (body
));
990 if (sreg
== dreg
&& sreg
>= 0)
992 else if (sreg
>= 0 && dreg
>= 0)
995 rtx tem
= find_equiv_reg (NULL_RTX
, insn
, 0,
996 sreg
, NULL_PTR
, dreg
,
997 GET_MODE (SET_SRC (body
)));
1000 && GET_MODE (tem
) == GET_MODE (SET_DEST (body
)))
1002 /* DREG may have been the target of a REG_DEAD note in
1003 the insn which makes INSN redundant. If so, reorg
1004 would still think it is dead. So search for such a
1005 note and delete it if we find it. */
1006 if (! find_regno_note (insn
, REG_UNUSED
, dreg
))
1007 for (trial
= prev_nonnote_insn (insn
);
1008 trial
&& GET_CODE (trial
) != CODE_LABEL
;
1009 trial
= prev_nonnote_insn (trial
))
1010 if (find_regno_note (trial
, REG_DEAD
, dreg
))
1012 remove_death (dreg
, trial
);
1016 /* Deleting insn could lose a death-note for SREG. */
1017 if ((trial
= find_regno_note (insn
, REG_DEAD
, sreg
)))
1019 /* Change this into a USE so that we won't emit
1020 code for it, but still can keep the note. */
1022 = gen_rtx_USE (VOIDmode
, XEXP (trial
, 0));
1023 INSN_CODE (insn
) = -1;
1024 /* Remove all reg notes but the REG_DEAD one. */
1025 REG_NOTES (insn
) = trial
;
1026 XEXP (trial
, 1) = NULL_RTX
;
1032 else if (dreg
>= 0 && CONSTANT_P (SET_SRC (body
))
1033 && find_equiv_reg (SET_SRC (body
), insn
, 0, dreg
,
1035 GET_MODE (SET_DEST (body
))))
1037 /* This handles the case where we have two consecutive
1038 assignments of the same constant to pseudos that didn't
1039 get a hard reg. Each SET from the constant will be
1040 converted into a SET of the spill register and an
1041 output reload will be made following it. This produces
1042 two loads of the same constant into the same spill register. */
1047 /* Look back for a death note for the first reg.
1048 If there is one, it is no longer accurate. */
1049 while (in_insn
&& GET_CODE (in_insn
) != CODE_LABEL
)
1051 if ((GET_CODE (in_insn
) == INSN
1052 || GET_CODE (in_insn
) == JUMP_INSN
)
1053 && find_regno_note (in_insn
, REG_DEAD
, dreg
))
1055 remove_death (dreg
, in_insn
);
1058 in_insn
= PREV_INSN (in_insn
);
1061 /* Delete the second load of the value. */
1065 else if (GET_CODE (body
) == PARALLEL
)
1067 /* If each part is a set between two identical registers or
1068 a USE or CLOBBER, delete the insn. */
1072 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1074 tem
= XVECEXP (body
, 0, i
);
1075 if (GET_CODE (tem
) == USE
|| GET_CODE (tem
) == CLOBBER
)
1078 if (GET_CODE (tem
) != SET
1079 || (sreg
= true_regnum (SET_SRC (tem
))) < 0
1080 || (dreg
= true_regnum (SET_DEST (tem
))) < 0
1088 /* Also delete insns to store bit fields if they are no-ops. */
1089 /* Not worth the hair to detect this in the big-endian case. */
1090 else if (! BYTES_BIG_ENDIAN
1091 && GET_CODE (body
) == SET
1092 && GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
1093 && XEXP (SET_DEST (body
), 2) == const0_rtx
1094 && XEXP (SET_DEST (body
), 0) == SET_SRC (body
)
1095 && ! (GET_CODE (SET_SRC (body
)) == MEM
1096 && MEM_VOLATILE_P (SET_SRC (body
))))
1103 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
1104 jump. Assume that this unconditional jump is to the exit test code. If
1105 the code is sufficiently simple, make a copy of it before INSN,
1106 followed by a jump to the exit of the loop. Then delete the unconditional jump after INSN.
1109 Return 1 if we made the change, else 0.
1111 This is only safe immediately after a regscan pass because it uses the
1112 values of regno_first_uid and regno_last_uid. */
1115 duplicate_loop_exit_test (loop_start
)
1118 rtx insn
, set
, reg
, p
, link
;
1119 rtx copy
= 0, first_copy
= 0;
1121 rtx exitcode
= NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start
)));
1123 int max_reg
= max_reg_num ();
1126 /* Scan the exit code. We do not perform this optimization if any insn:
1130 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
1131 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
1132 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
1135 We also do not do this if we find an insn with ASM_OPERANDS. While
1136 this restriction should not be necessary, copying an insn with
1137 ASM_OPERANDS can confuse asm_noperands in some cases.
1139 Also, don't do this if the exit code is more than 20 insns. */
1141 for (insn
= exitcode
;
1143 && ! (GET_CODE (insn
) == NOTE
1144 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
);
1145 insn
= NEXT_INSN (insn
))
1147 switch (GET_CODE (insn
))
1153 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
1154 a jump immediately after the loop start that branches outside
1155 the loop but within an outer loop, near the exit test.
1156 If we copied this exit test and created a phony
1157 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
1158 before the exit test look like these could be safely moved
1159 out of the loop even if they actually may be never executed.
1160 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
1162 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
1163 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
)
1167 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
1168 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
))
1169 /* If we were to duplicate this code, we would not move
1170 the BLOCK notes, and so debugging the moved code would
1171 be difficult. Thus, we only move the code with -O2 or
1178 /* The code below would grossly mishandle REG_WAS_0 notes,
1179 so get rid of them here. */
1180 while ((p
= find_reg_note (insn
, REG_WAS_0
, NULL_RTX
)) != 0)
1181 remove_note (insn
, p
);
1182 if (++num_insns
> 20
1183 || find_reg_note (insn
, REG_RETVAL
, NULL_RTX
)
1184 || find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
))
1192 /* Unless INSN is zero, we can do the optimization. */
1198 /* See if any insn sets a register only used in the loop exit code and
1199 not a user variable. If so, replace it with a new register. */
1200 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
1201 if (GET_CODE (insn
) == INSN
1202 && (set
= single_set (insn
)) != 0
1203 && ((reg
= SET_DEST (set
), GET_CODE (reg
) == REG
)
1204 || (GET_CODE (reg
) == SUBREG
1205 && (reg
= SUBREG_REG (reg
), GET_CODE (reg
) == REG
)))
1206 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
1207 && REGNO_FIRST_UID (REGNO (reg
)) == INSN_UID (insn
))
1209 for (p
= NEXT_INSN (insn
); p
!= lastexit
; p
= NEXT_INSN (p
))
1210 if (REGNO_LAST_UID (REGNO (reg
)) == INSN_UID (p
))
1215 /* We can do the replacement. Allocate reg_map if this is the
1216 first replacement we found. */
1218 reg_map
= (rtx
*) xcalloc (max_reg
, sizeof (rtx
));
1220 REG_LOOP_TEST_P (reg
) = 1;
1222 reg_map
[REGNO (reg
)] = gen_reg_rtx (GET_MODE (reg
));
1226 /* Now copy each insn. */
1227 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
1229 switch (GET_CODE (insn
))
1232 copy
= emit_barrier_before (loop_start
);
1235 /* Only copy line-number notes. */
1236 if (NOTE_LINE_NUMBER (insn
) >= 0)
1238 copy
= emit_note_before (NOTE_LINE_NUMBER (insn
), loop_start
);
1239 NOTE_SOURCE_FILE (copy
) = NOTE_SOURCE_FILE (insn
);
1244 copy
= emit_insn_before (copy_insn (PATTERN (insn
)), loop_start
);
1246 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
1248 mark_jump_label (PATTERN (copy
), copy
, 0, 0);
1250 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
1252 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1253 if (REG_NOTE_KIND (link
) != REG_LABEL
)
1255 if (GET_CODE (link
) == EXPR_LIST
)
1257 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link
),
1262 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link
),
1267 if (reg_map
&& REG_NOTES (copy
))
1268 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
1272 copy
= emit_jump_insn_before (copy_insn (PATTERN (insn
)),
1275 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
1276 mark_jump_label (PATTERN (copy
), copy
, 0, 0);
1277 if (REG_NOTES (insn
))
1279 REG_NOTES (copy
) = copy_insn_1 (REG_NOTES (insn
));
1281 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
1284 /* If this is a simple jump, add it to the jump chain. */
1286 if (INSN_UID (copy
) < max_jump_chain
&& JUMP_LABEL (copy
)
1287 && simplejump_p (copy
))
1289 jump_chain
[INSN_UID (copy
)]
1290 = jump_chain
[INSN_UID (JUMP_LABEL (copy
))];
1291 jump_chain
[INSN_UID (JUMP_LABEL (copy
))] = copy
;
1299 /* Record the first insn we copied. We need it so that we can
1300 scan the copied insns for new pseudo registers. */
1305 /* Now clean up by emitting a jump to the end label and deleting the jump
1306 at the start of the loop. */
1307 if (! copy
|| GET_CODE (copy
) != BARRIER
)
1309 copy
= emit_jump_insn_before (gen_jump (get_label_after (insn
)),
1312 /* Record the first insn we copied. We need it so that we can
1313 scan the copied insns for new pseudo registers. This may not
1314 be strictly necessary since we should have copied at least one
1315 insn above. But I am going to be safe. */
1319 mark_jump_label (PATTERN (copy
), copy
, 0, 0);
1320 if (INSN_UID (copy
) < max_jump_chain
1321 && INSN_UID (JUMP_LABEL (copy
)) < max_jump_chain
)
1323 jump_chain
[INSN_UID (copy
)]
1324 = jump_chain
[INSN_UID (JUMP_LABEL (copy
))];
1325 jump_chain
[INSN_UID (JUMP_LABEL (copy
))] = copy
;
1327 emit_barrier_before (loop_start
);
1330 /* Now scan from the first insn we copied to the last insn we copied
1331 (copy) for new pseudo registers. Do this after the code to jump to
1332 the end label since that might create a new pseudo too. */
1333 reg_scan_update (first_copy
, copy
, max_reg
);
1335 /* Mark the exit code as the virtual top of the converted loop. */
1336 emit_note_before (NOTE_INSN_LOOP_VTOP
, exitcode
);
1338 delete_insn (next_nonnote_insn (loop_start
));
1347 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
1348 notes between START and END out before START. Assume that END is not
1349 such a note. START may be such a note. Returns the value of the new
1350 starting insn, which may be different if the original start was such a
1354 squeeze_notes (start
, end
)
1360 for (insn
= start
; insn
!= end
; insn
= next
)
1362 next
= NEXT_INSN (insn
);
1363 if (GET_CODE (insn
) == NOTE
1364 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
1365 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
1366 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
1367 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
1368 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
1369 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_VTOP
))
1375 rtx prev
= PREV_INSN (insn
);
1376 PREV_INSN (insn
) = PREV_INSN (start
);
1377 NEXT_INSN (insn
) = start
;
1378 NEXT_INSN (PREV_INSN (insn
)) = insn
;
1379 PREV_INSN (NEXT_INSN (insn
)) = insn
;
1380 NEXT_INSN (prev
) = next
;
1381 PREV_INSN (next
) = prev
;
1389 /* Compare the instructions before insn E1 with those before E2
1390 to find an opportunity for cross jumping.
1391 (This means detecting identical sequences of insns followed by
1392 jumps to the same place, or followed by a label and a jump
1393 to that label, and replacing one with a jump to the other.)
1395 Assume E1 is a jump that jumps to label E2
1396 (that is not always true but it might as well be).
1397 Find the longest possible equivalent sequences
1398 and store the first insns of those sequences into *F1 and *F2.
1399 Store zero there if no equivalent preceding instructions are found.
1401 We give up if we find a label in stream 1.
1402 Actually we could transfer that label into stream 2. */
1405 find_cross_jump (e1
, e2
, minimum
, f1
, f2
)
1410 register rtx i1
= e1
, i2
= e2
;
1411 register rtx p1
, p2
;
1414 rtx last1
= 0, last2
= 0;
1415 rtx afterlast1
= 0, afterlast2
= 0;
1422 i1
= prev_nonnote_insn (i1
);
1424 i2
= PREV_INSN (i2
);
1425 while (i2
&& (GET_CODE (i2
) == NOTE
|| GET_CODE (i2
) == CODE_LABEL
))
1426 i2
= PREV_INSN (i2
);
1431 /* Don't allow the range of insns preceding E1 or E2
1432 to include the other (E2 or E1). */
1433 if (i2
== e1
|| i1
== e2
)
1436 /* If we will get to this code by jumping, those jumps will be
1437 tensioned to go directly to the new label (before I2),
1438 so this cross-jumping won't cost extra. So reduce the minimum. */
1439 if (GET_CODE (i1
) == CODE_LABEL
)
1445 if (i2
== 0 || GET_CODE (i1
) != GET_CODE (i2
))
1448 /* Avoid moving insns across EH regions if either of the insns
1451 && (asynchronous_exceptions
|| GET_CODE (i1
) == CALL_INSN
)
1452 && !in_same_eh_region (i1
, i2
))
1458 /* If this is a CALL_INSN, compare register usage information.
1459 If we don't check this on stack register machines, the two
1460 CALL_INSNs might be merged leaving reg-stack.c with mismatching
1461 numbers of stack registers in the same basic block.
1462 If we don't check this on machines with delay slots, a delay slot may
1463 be filled that clobbers a parameter expected by the subroutine.
1465 ??? We take the simple route for now and assume that if they're
1466 equal, they were constructed identically. */
1468 if (GET_CODE (i1
) == CALL_INSN
1469 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1
),
1470 CALL_INSN_FUNCTION_USAGE (i2
)))
1474 /* If cross_jump_death_matters is not 0, the insn's mode
1475 indicates whether or not the insn contains any stack-like
1478 if (!lose
&& cross_jump_death_matters
&& stack_regs_mentioned (i1
))
1480 /* If register stack conversion has already been done, then
1481 death notes must also be compared before it is certain that
1482 the two instruction streams match. */
1485 HARD_REG_SET i1_regset
, i2_regset
;
1487 CLEAR_HARD_REG_SET (i1_regset
);
1488 CLEAR_HARD_REG_SET (i2_regset
);
1490 for (note
= REG_NOTES (i1
); note
; note
= XEXP (note
, 1))
1491 if (REG_NOTE_KIND (note
) == REG_DEAD
1492 && STACK_REG_P (XEXP (note
, 0)))
1493 SET_HARD_REG_BIT (i1_regset
, REGNO (XEXP (note
, 0)));
1495 for (note
= REG_NOTES (i2
); note
; note
= XEXP (note
, 1))
1496 if (REG_NOTE_KIND (note
) == REG_DEAD
1497 && STACK_REG_P (XEXP (note
, 0)))
1498 SET_HARD_REG_BIT (i2_regset
, REGNO (XEXP (note
, 0)));
1500 GO_IF_HARD_REG_EQUAL (i1_regset
, i2_regset
, done
);
1509 /* Don't allow old-style asm or volatile extended asms to be accepted
1510 for cross jumping purposes. It is conceptually correct to allow
1511 them, since cross-jumping preserves the dynamic instruction order
1512 even though it is changing the static instruction order. However,
1513 if an asm is being used to emit an assembler pseudo-op, such as
1514 the MIPS `.set reorder' pseudo-op, then the static instruction order
1515 matters and it must be preserved. */
1516 if (GET_CODE (p1
) == ASM_INPUT
|| GET_CODE (p2
) == ASM_INPUT
1517 || (GET_CODE (p1
) == ASM_OPERANDS
&& MEM_VOLATILE_P (p1
))
1518 || (GET_CODE (p2
) == ASM_OPERANDS
&& MEM_VOLATILE_P (p2
)))
1521 if (lose
|| GET_CODE (p1
) != GET_CODE (p2
)
1522 || ! rtx_renumbered_equal_p (p1
, p2
))
1524 /* The following code helps take care of G++ cleanups. */
1528 if (!lose
&& GET_CODE (p1
) == GET_CODE (p2
)
1529 && ((equiv1
= find_reg_note (i1
, REG_EQUAL
, NULL_RTX
)) != 0
1530 || (equiv1
= find_reg_note (i1
, REG_EQUIV
, NULL_RTX
)) != 0)
1531 && ((equiv2
= find_reg_note (i2
, REG_EQUAL
, NULL_RTX
)) != 0
1532 || (equiv2
= find_reg_note (i2
, REG_EQUIV
, NULL_RTX
)) != 0)
1533 /* If the equivalences are not to a constant, they may
1534 reference pseudos that no longer exist, so we can't
1536 && CONSTANT_P (XEXP (equiv1
, 0))
1537 && rtx_equal_p (XEXP (equiv1
, 0), XEXP (equiv2
, 0)))
1539 rtx s1
= single_set (i1
);
1540 rtx s2
= single_set (i2
);
1541 if (s1
!= 0 && s2
!= 0
1542 && rtx_renumbered_equal_p (SET_DEST (s1
), SET_DEST (s2
)))
1544 validate_change (i1
, &SET_SRC (s1
), XEXP (equiv1
, 0), 1);
1545 validate_change (i2
, &SET_SRC (s2
), XEXP (equiv2
, 0), 1);
1546 if (! rtx_renumbered_equal_p (p1
, p2
))
1548 else if (apply_change_group ())
1553 /* Insns fail to match; cross jumping is limited to the following
1557 /* Don't allow the insn after a compare to be shared by
1558 cross-jumping unless the compare is also shared.
1559 Here, if either of these non-matching insns is a compare,
1560 exclude the following insn from possible cross-jumping. */
1561 if (sets_cc0_p (p1
) || sets_cc0_p (p2
))
1562 last1
= afterlast1
, last2
= afterlast2
, ++minimum
;
1565 /* If cross-jumping here will feed a jump-around-jump
1566 optimization, this jump won't cost extra, so reduce
1568 if (GET_CODE (i1
) == JUMP_INSN
1570 && prev_real_insn (JUMP_LABEL (i1
)) == e1
)
1576 if (GET_CODE (p1
) != USE
&& GET_CODE (p1
) != CLOBBER
)
1578 /* Ok, this insn is potentially includable in a cross-jump here. */
1579 afterlast1
= last1
, afterlast2
= last2
;
1580 last1
= i1
, last2
= i2
, --minimum
;
1584 if (minimum
<= 0 && last1
!= 0 && last1
!= e1
)
1585 *f1
= last1
, *f2
= last2
;
1589 do_cross_jump (insn
, newjpos
, newlpos
)
1590 rtx insn
, newjpos
, newlpos
;
1592 /* Find an existing label at this point
1593 or make a new one if there is none. */
1594 register rtx label
= get_label_before (newlpos
);
1596 /* Make the same jump insn jump to the new point. */
1597 if (GET_CODE (PATTERN (insn
)) == RETURN
)
1599 /* Remove from jump chain of returns. */
1600 delete_from_jump_chain (insn
);
1601 /* Change the insn. */
1602 PATTERN (insn
) = gen_jump (label
);
1603 INSN_CODE (insn
) = -1;
1604 JUMP_LABEL (insn
) = label
;
1605 LABEL_NUSES (label
)++;
1606 /* Add to new the jump chain. */
1607 if (INSN_UID (label
) < max_jump_chain
1608 && INSN_UID (insn
) < max_jump_chain
)
1610 jump_chain
[INSN_UID (insn
)] = jump_chain
[INSN_UID (label
)];
1611 jump_chain
[INSN_UID (label
)] = insn
;
1615 redirect_jump (insn
, label
, 1);
1617 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
1618 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
1619 the NEWJPOS stream. */
1621 while (newjpos
!= insn
)
1625 for (lnote
= REG_NOTES (newlpos
); lnote
; lnote
= XEXP (lnote
, 1))
1626 if ((REG_NOTE_KIND (lnote
) == REG_EQUAL
1627 || REG_NOTE_KIND (lnote
) == REG_EQUIV
)
1628 && ! find_reg_note (newjpos
, REG_EQUAL
, XEXP (lnote
, 0))
1629 && ! find_reg_note (newjpos
, REG_EQUIV
, XEXP (lnote
, 0)))
1630 remove_note (newlpos
, lnote
);
1632 delete_insn (newjpos
);
1633 newjpos
= next_real_insn (newjpos
);
1634 newlpos
= next_real_insn (newlpos
);
1638 /* Return the label before INSN, or put a new label there. */
1641 get_label_before (insn
)
1646 /* Find an existing label at this point
1647 or make a new one if there is none. */
1648 label
= prev_nonnote_insn (insn
);
1650 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
1652 rtx prev
= PREV_INSN (insn
);
1654 label
= gen_label_rtx ();
1655 emit_label_after (label
, prev
);
1656 LABEL_NUSES (label
) = 0;
1661 /* Return the label after INSN, or put a new label there. */
1664 get_label_after (insn
)
1669 /* Find an existing label at this point
1670 or make a new one if there is none. */
1671 label
= next_nonnote_insn (insn
);
1673 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
1675 label
= gen_label_rtx ();
1676 emit_label_after (label
, insn
);
1677 LABEL_NUSES (label
) = 0;
1682 /* Return 1 if INSN is a jump that jumps to right after TARGET
1683 only on the condition that TARGET itself would drop through.
1684 Assumes that TARGET is a conditional jump. */
1687 jump_back_p (insn
, target
)
1691 enum rtx_code codei
, codet
;
1694 if (! any_condjump_p (insn
)
1695 || any_uncondjump_p (target
)
1696 || target
!= prev_real_insn (JUMP_LABEL (insn
)))
1698 set
= pc_set (insn
);
1699 tset
= pc_set (target
);
1701 cinsn
= XEXP (SET_SRC (set
), 0);
1702 ctarget
= XEXP (SET_SRC (tset
), 0);
1704 codei
= GET_CODE (cinsn
);
1705 codet
= GET_CODE (ctarget
);
1707 if (XEXP (SET_SRC (set
), 1) == pc_rtx
)
1709 codei
= reversed_comparison_code (cinsn
, insn
);
1710 if (codei
== UNKNOWN
)
1714 if (XEXP (SET_SRC (tset
), 2) == pc_rtx
)
1716 codet
= reversed_comparison_code (ctarget
, target
);
1717 if (codei
== UNKNOWN
)
1721 return (codei
== codet
1722 && rtx_renumbered_equal_p (XEXP (cinsn
, 0), XEXP (ctarget
, 0))
1723 && rtx_renumbered_equal_p (XEXP (cinsn
, 1), XEXP (ctarget
, 1)));
1726 /* Given a comparison (CODE ARG0 ARG1), inside a insn, INSN, return an code
1727 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
1728 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
1729 know whether it's source is floating point or integer comparison. Machine
1730 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
1731 to help this function avoid overhead in these cases. */
1733 reversed_comparison_code_parts (code
, arg0
, arg1
, insn
)
1734 rtx insn
, arg0
, arg1
;
1737 enum machine_mode mode
;
1739 /* If this is not actually a comparison, we can't reverse it. */
1740 if (GET_RTX_CLASS (code
) != '<')
1743 mode
= GET_MODE (arg0
);
1744 if (mode
== VOIDmode
)
1745 mode
= GET_MODE (arg1
);
1747 /* First see if machine description supply us way to reverse the comparison.
1748 Give it priority over everything else to allow machine description to do
1750 #ifdef REVERSIBLE_CC_MODE
1751 if (GET_MODE_CLASS (mode
) == MODE_CC
1752 && REVERSIBLE_CC_MODE (mode
))
1754 #ifdef REVERSE_CONDITION
1755 return REVERSE_CONDITION (code
, mode
);
1757 return reverse_condition (code
);
1761 /* Try few special cases based on the comparison code. */
1770 /* It is always safe to reverse EQ and NE, even for the floating
1771 point. Similary the unsigned comparisons are never used for
1772 floating point so we can reverse them in the default way. */
1773 return reverse_condition (code
);
1778 /* In case we already see unordered comparison, we can be sure to
1779 be dealing with floating point so we don't need any more tests. */
1780 return reverse_condition_maybe_unordered (code
);
1785 /* We don't have safe way to reverse these yet. */
1791 /* In case we give up IEEE compatibility, all comparisons are reversible. */
1792 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
1793 || flag_unsafe_math_optimizations
)
1794 return reverse_condition (code
);
1796 if (GET_MODE_CLASS (mode
) == MODE_CC
1803 /* Try to search for the comparison to determine the real mode.
1804 This code is expensive, but with sane machine description it
1805 will be never used, since REVERSIBLE_CC_MODE will return true
1810 for (prev
= prev_nonnote_insn (insn
);
1811 prev
!= 0 && GET_CODE (prev
) != CODE_LABEL
;
1812 prev
= prev_nonnote_insn (prev
))
1814 rtx set
= set_of (arg0
, prev
);
1815 if (set
&& GET_CODE (set
) == SET
1816 && rtx_equal_p (SET_DEST (set
), arg0
))
1818 rtx src
= SET_SRC (set
);
1820 if (GET_CODE (src
) == COMPARE
)
1822 rtx comparison
= src
;
1823 arg0
= XEXP (src
, 0);
1824 mode
= GET_MODE (arg0
);
1825 if (mode
== VOIDmode
)
1826 mode
= GET_MODE (XEXP (comparison
, 1));
1829 /* We can get past reg-reg moves. This may be usefull for model
1830 of i387 comparisons that first move flag registers around. */
1837 /* If register is clobbered in some ununderstandable way,
1844 /* An integer condition. */
1845 if (GET_CODE (arg0
) == CONST_INT
1846 || (GET_MODE (arg0
) != VOIDmode
1847 && GET_MODE_CLASS (mode
) != MODE_CC
1848 && ! FLOAT_MODE_P (mode
)))
1849 return reverse_condition (code
);
1854 /* An wrapper around the previous function to take COMPARISON as rtx
1855 expression. This simplifies many callers. */
1857 reversed_comparison_code (comparison
, insn
)
1858 rtx comparison
, insn
;
1860 if (GET_RTX_CLASS (GET_CODE (comparison
)) != '<')
1862 return reversed_comparison_code_parts (GET_CODE (comparison
),
1863 XEXP (comparison
, 0),
1864 XEXP (comparison
, 1), insn
);
1867 /* Given an rtx-code for a comparison, return the code for the negated
1868 comparison. If no such code exists, return UNKNOWN.
1870 WATCH OUT! reverse_condition is not safe to use on a jump that might
1871 be acting on the results of an IEEE floating point comparison, because
1872 of the special treatment of non-signaling nans in comparisons.
1873 Use reversed_comparison_code instead. */
1876 reverse_condition (code
)
1919 /* Similar, but we're allowed to generate unordered comparisons, which
1920 makes it safe for IEEE floating-point. Of course, we have to recognize
1921 that the target will support them too... */
1924 reverse_condition_maybe_unordered (code
)
1927 /* Non-IEEE formats don't have unordered conditions. */
1928 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
)
1929 return reverse_condition (code
);
1967 /* Similar, but return the code when two operands of a comparison are swapped.
1968 This IS safe for IEEE floating-point. */
1971 swap_condition (code
)
2014 /* Given a comparison CODE, return the corresponding unsigned comparison.
2015 If CODE is an equality comparison or already an unsigned comparison,
2016 CODE is returned. */
2019 unsigned_condition (code
)
2046 /* Similarly, return the signed version of a comparison. */
2049 signed_condition (code
)
2076 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2077 truth of CODE1 implies the truth of CODE2. */
2080 comparison_dominates_p (code1
, code2
)
2081 enum rtx_code code1
, code2
;
2083 /* UNKNOWN comparison codes can happen as a result of trying to revert
2085 They can't match anything, so we have to reject them here. */
2086 if (code1
== UNKNOWN
|| code2
== UNKNOWN
)
2095 if (code2
== UNLE
|| code2
== UNGE
)
2100 if (code2
== LE
|| code2
== LEU
|| code2
== GE
|| code2
== GEU
2101 || code2
== ORDERED
)
2106 if (code2
== UNLE
|| code2
== NE
)
2111 if (code2
== LE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
2116 if (code2
== UNGE
|| code2
== NE
)
2121 if (code2
== GE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
2127 if (code2
== ORDERED
)
2132 if (code2
== NE
|| code2
== ORDERED
)
2137 if (code2
== LEU
|| code2
== NE
)
2142 if (code2
== GEU
|| code2
== NE
)
2147 if (code2
== NE
|| code2
== UNEQ
|| code2
== UNLE
|| code2
== UNLT
2148 || code2
== UNGE
|| code2
== UNGT
)
2159 /* Return 1 if INSN is an unconditional jump and nothing else. */
2165 return (GET_CODE (insn
) == JUMP_INSN
2166 && GET_CODE (PATTERN (insn
)) == SET
2167 && GET_CODE (SET_DEST (PATTERN (insn
))) == PC
2168 && GET_CODE (SET_SRC (PATTERN (insn
))) == LABEL_REF
);
2171 /* Return nonzero if INSN is a (possibly) conditional jump
2174 Use this function is deprecated, since we need to support combined
2175 branch and compare insns. Use any_condjump_p instead whenever possible. */
2181 register rtx x
= PATTERN (insn
);
2183 if (GET_CODE (x
) != SET
2184 || GET_CODE (SET_DEST (x
)) != PC
)
2188 if (GET_CODE (x
) == LABEL_REF
)
2191 return (GET_CODE (x
) == IF_THEN_ELSE
2192 && ((GET_CODE (XEXP (x
, 2)) == PC
2193 && (GET_CODE (XEXP (x
, 1)) == LABEL_REF
2194 || GET_CODE (XEXP (x
, 1)) == RETURN
))
2195 || (GET_CODE (XEXP (x
, 1)) == PC
2196 && (GET_CODE (XEXP (x
, 2)) == LABEL_REF
2197 || GET_CODE (XEXP (x
, 2)) == RETURN
))));
2202 /* Return nonzero if INSN is a (possibly) conditional jump inside a
2205 Use this function is deprecated, since we need to support combined
2206 branch and compare insns. Use any_condjump_p instead whenever possible. */
2209 condjump_in_parallel_p (insn
)
2212 register rtx x
= PATTERN (insn
);
2214 if (GET_CODE (x
) != PARALLEL
)
2217 x
= XVECEXP (x
, 0, 0);
2219 if (GET_CODE (x
) != SET
)
2221 if (GET_CODE (SET_DEST (x
)) != PC
)
2223 if (GET_CODE (SET_SRC (x
)) == LABEL_REF
)
2225 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
2227 if (XEXP (SET_SRC (x
), 2) == pc_rtx
2228 && (GET_CODE (XEXP (SET_SRC (x
), 1)) == LABEL_REF
2229 || GET_CODE (XEXP (SET_SRC (x
), 1)) == RETURN
))
2231 if (XEXP (SET_SRC (x
), 1) == pc_rtx
2232 && (GET_CODE (XEXP (SET_SRC (x
), 2)) == LABEL_REF
2233 || GET_CODE (XEXP (SET_SRC (x
), 2)) == RETURN
))
2238 /* Return set of PC, otherwise NULL. */
2245 if (GET_CODE (insn
) != JUMP_INSN
)
2247 pat
= PATTERN (insn
);
2249 /* The set is allowed to appear either as the insn pattern or
2250 the first set in a PARALLEL. */
2251 if (GET_CODE (pat
) == PARALLEL
)
2252 pat
= XVECEXP (pat
, 0, 0);
2253 if (GET_CODE (pat
) == SET
&& GET_CODE (SET_DEST (pat
)) == PC
)
2259 /* Return true when insn is an unconditional direct jump,
2260 possibly bundled inside a PARALLEL. */
2263 any_uncondjump_p (insn
)
2266 rtx x
= pc_set (insn
);
2269 if (GET_CODE (SET_SRC (x
)) != LABEL_REF
)
2274 /* Return true when insn is a conditional jump. This function works for
2275 instructions containing PC sets in PARALLELs. The instruction may have
2276 various other effects so before removing the jump you must verify
2279 Note that unlike condjump_p it returns false for unconditional jumps. */
2282 any_condjump_p (insn
)
2285 rtx x
= pc_set (insn
);
2290 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
2293 a
= GET_CODE (XEXP (SET_SRC (x
), 1));
2294 b
= GET_CODE (XEXP (SET_SRC (x
), 2));
2296 return ((b
== PC
&& (a
== LABEL_REF
|| a
== RETURN
))
2297 || (a
== PC
&& (b
== LABEL_REF
|| b
== RETURN
)));
2300 /* Return the label of a conditional jump. */
2303 condjump_label (insn
)
2306 rtx x
= pc_set (insn
);
2311 if (GET_CODE (x
) == LABEL_REF
)
2313 if (GET_CODE (x
) != IF_THEN_ELSE
)
2315 if (XEXP (x
, 2) == pc_rtx
&& GET_CODE (XEXP (x
, 1)) == LABEL_REF
)
2317 if (XEXP (x
, 1) == pc_rtx
&& GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
2322 /* Return true if INSN is a (possibly conditional) return insn. */
2325 returnjump_p_1 (loc
, data
)
2327 void *data ATTRIBUTE_UNUSED
;
2330 return x
&& GET_CODE (x
) == RETURN
;
2337 if (GET_CODE (insn
) != JUMP_INSN
)
2339 return for_each_rtx (&PATTERN (insn
), returnjump_p_1
, NULL
);
2342 /* Return true if INSN is a jump that only transfers control and
2351 if (GET_CODE (insn
) != JUMP_INSN
)
2354 set
= single_set (insn
);
2357 if (GET_CODE (SET_DEST (set
)) != PC
)
2359 if (side_effects_p (SET_SRC (set
)))
2367 /* Return 1 if X is an RTX that does nothing but set the condition codes
2368 and CLOBBER or USE registers.
2369 Return -1 if X does explicitly set the condition codes,
2370 but also does other things. */
2374 rtx x ATTRIBUTE_UNUSED
;
2376 if (GET_CODE (x
) == SET
&& SET_DEST (x
) == cc0_rtx
)
2378 if (GET_CODE (x
) == PARALLEL
)
2382 int other_things
= 0;
2383 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
2385 if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
2386 && SET_DEST (XVECEXP (x
, 0, i
)) == cc0_rtx
)
2388 else if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
)
2391 return ! sets_cc0
? 0 : other_things
? -1 : 1;
2397 /* Follow any unconditional jump at LABEL;
2398 return the ultimate label reached by any such chain of jumps.
2399 If LABEL is not followed by a jump, return LABEL.
2400 If the chain loops or we can't find end, return LABEL,
2401 since that tells caller to avoid changing the insn.
2403 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2404 a USE or CLOBBER. */
2407 follow_jumps (label
)
2412 register rtx value
= label
;
2417 && (insn
= next_active_insn (value
)) != 0
2418 && GET_CODE (insn
) == JUMP_INSN
2419 && ((JUMP_LABEL (insn
) != 0 && any_uncondjump_p (insn
)
2420 && onlyjump_p (insn
))
2421 || GET_CODE (PATTERN (insn
)) == RETURN
)
2422 && (next
= NEXT_INSN (insn
))
2423 && GET_CODE (next
) == BARRIER
);
2426 /* Don't chain through the insn that jumps into a loop
2427 from outside the loop,
2428 since that would create multiple loop entry jumps
2429 and prevent loop optimization. */
2431 if (!reload_completed
)
2432 for (tem
= value
; tem
!= insn
; tem
= NEXT_INSN (tem
))
2433 if (GET_CODE (tem
) == NOTE
2434 && (NOTE_LINE_NUMBER (tem
) == NOTE_INSN_LOOP_BEG
2435 /* ??? Optional. Disables some optimizations, but makes
2436 gcov output more accurate with -O. */
2437 || (flag_test_coverage
&& NOTE_LINE_NUMBER (tem
) > 0)))
2440 /* If we have found a cycle, make the insn jump to itself. */
2441 if (JUMP_LABEL (insn
) == label
)
2444 tem
= next_active_insn (JUMP_LABEL (insn
));
2445 if (tem
&& (GET_CODE (PATTERN (tem
)) == ADDR_VEC
2446 || GET_CODE (PATTERN (tem
)) == ADDR_DIFF_VEC
))
2449 value
= JUMP_LABEL (insn
);
2456 /* Assuming that field IDX of X is a vector of label_refs,
2457 replace each of them by the ultimate label reached by it.
2458 Return nonzero if a change is made.
2459 If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG. */
2462 tension_vector_labels (x
, idx
)
2468 for (i
= XVECLEN (x
, idx
) - 1; i
>= 0; i
--)
2470 register rtx olabel
= XEXP (XVECEXP (x
, idx
, i
), 0);
2471 register rtx nlabel
= follow_jumps (olabel
);
2472 if (nlabel
&& nlabel
!= olabel
)
2474 XEXP (XVECEXP (x
, idx
, i
), 0) = nlabel
;
2475 ++LABEL_NUSES (nlabel
);
2476 if (--LABEL_NUSES (olabel
) == 0)
2477 delete_insn (olabel
);
2484 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2485 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2486 in INSN, then store one of them in JUMP_LABEL (INSN).
2487 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2488 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2489 Also, when there are consecutive labels, canonicalize on the last of them.
2491 Note that two labels separated by a loop-beginning note
2492 must be kept distinct if we have not yet done loop-optimization,
2493 because the gap between them is where loop-optimize
2494 will want to move invariant code to. CROSS_JUMP tells us
2495 that loop-optimization is done with.
2497 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2498 two labels distinct if they are separated by only USE or CLOBBER insns. */
2501 mark_jump_label (x
, insn
, cross_jump
, in_mem
)
2507 register RTX_CODE code
= GET_CODE (x
);
2509 register const char *fmt
;
2531 /* If this is a constant-pool reference, see if it is a label. */
2532 if (CONSTANT_POOL_ADDRESS_P (x
))
2533 mark_jump_label (get_pool_constant (x
), insn
, cross_jump
, in_mem
);
2538 rtx label
= XEXP (x
, 0);
2543 /* Ignore remaining references to unreachable labels that
2544 have been deleted. */
2545 if (GET_CODE (label
) == NOTE
2546 && NOTE_LINE_NUMBER (label
) == NOTE_INSN_DELETED_LABEL
)
2549 if (GET_CODE (label
) != CODE_LABEL
)
2552 /* Ignore references to labels of containing functions. */
2553 if (LABEL_REF_NONLOCAL_P (x
))
2556 /* If there are other labels following this one,
2557 replace it with the last of the consecutive labels. */
2558 for (next
= NEXT_INSN (label
); next
; next
= NEXT_INSN (next
))
2560 if (GET_CODE (next
) == CODE_LABEL
)
2562 else if (cross_jump
&& GET_CODE (next
) == INSN
2563 && (GET_CODE (PATTERN (next
)) == USE
2564 || GET_CODE (PATTERN (next
)) == CLOBBER
))
2566 else if (GET_CODE (next
) != NOTE
)
2568 else if (! cross_jump
2569 && (NOTE_LINE_NUMBER (next
) == NOTE_INSN_LOOP_BEG
2570 || NOTE_LINE_NUMBER (next
) == NOTE_INSN_FUNCTION_END
2571 /* ??? Optional. Disables some optimizations, but
2572 makes gcov output more accurate with -O. */
2573 || (flag_test_coverage
2574 && NOTE_LINE_NUMBER (next
) > 0)))
2578 XEXP (x
, 0) = label
;
2579 if (! insn
|| ! INSN_DELETED_P (insn
))
2580 ++LABEL_NUSES (label
);
2584 if (GET_CODE (insn
) == JUMP_INSN
)
2585 JUMP_LABEL (insn
) = label
;
2587 /* If we've changed OLABEL and we had a REG_LABEL note
2588 for it, update it as well. */
2589 else if (label
!= olabel
2590 && (note
= find_reg_note (insn
, REG_LABEL
, olabel
)) != 0)
2591 XEXP (note
, 0) = label
;
2593 /* Otherwise, add a REG_LABEL note for LABEL unless there already
2595 else if (! find_reg_note (insn
, REG_LABEL
, label
))
2597 /* This code used to ignore labels which refered to dispatch
2598 tables to avoid flow.c generating worse code.
2600 However, in the presense of global optimizations like
2601 gcse which call find_basic_blocks without calling
2602 life_analysis, not recording such labels will lead
2603 to compiler aborts because of inconsistencies in the
2604 flow graph. So we go ahead and record the label.
2606 It may also be the case that the optimization argument
2607 is no longer valid because of the more accurate cfg
2608 we build in find_basic_blocks -- it no longer pessimizes
2609 code when it finds a REG_LABEL note. */
2610 REG_NOTES (insn
) = gen_rtx_INSN_LIST (REG_LABEL
, label
,
2617 /* Do walk the labels in a vector, but not the first operand of an
2618 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
2621 if (! INSN_DELETED_P (insn
))
2623 int eltnum
= code
== ADDR_DIFF_VEC
? 1 : 0;
2625 for (i
= 0; i
< XVECLEN (x
, eltnum
); i
++)
2626 mark_jump_label (XVECEXP (x
, eltnum
, i
), NULL_RTX
,
2627 cross_jump
, in_mem
);
2635 fmt
= GET_RTX_FORMAT (code
);
2636 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2639 mark_jump_label (XEXP (x
, i
), insn
, cross_jump
, in_mem
);
2640 else if (fmt
[i
] == 'E')
2643 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2644 mark_jump_label (XVECEXP (x
, i
, j
), insn
, cross_jump
, in_mem
);
2649 /* If all INSN does is set the pc, delete it,
2650 and delete the insn that set the condition codes for it
2651 if that's what the previous thing was. */
2657 register rtx set
= single_set (insn
);
2659 if (set
&& GET_CODE (SET_DEST (set
)) == PC
)
2660 delete_computation (insn
);
2663 /* Verify INSN is a BARRIER and delete it. */
2666 delete_barrier (insn
)
2669 if (GET_CODE (insn
) != BARRIER
)
2675 /* Recursively delete prior insns that compute the value (used only by INSN
2676 which the caller is deleting) stored in the register mentioned by NOTE
2677 which is a REG_DEAD note associated with INSN. */
2680 delete_prior_computation (note
, insn
)
2685 rtx reg
= XEXP (note
, 0);
2687 for (our_prev
= prev_nonnote_insn (insn
);
2688 our_prev
&& (GET_CODE (our_prev
) == INSN
2689 || GET_CODE (our_prev
) == CALL_INSN
);
2690 our_prev
= prev_nonnote_insn (our_prev
))
2692 rtx pat
= PATTERN (our_prev
);
2694 /* If we reach a CALL which is not calling a const function
2695 or the callee pops the arguments, then give up. */
2696 if (GET_CODE (our_prev
) == CALL_INSN
2697 && (! CONST_CALL_P (our_prev
)
2698 || GET_CODE (pat
) != SET
|| GET_CODE (SET_SRC (pat
)) != CALL
))
2701 /* If we reach a SEQUENCE, it is too complex to try to
2702 do anything with it, so give up. */
2703 if (GET_CODE (pat
) == SEQUENCE
)
2706 if (GET_CODE (pat
) == USE
2707 && GET_CODE (XEXP (pat
, 0)) == INSN
)
2708 /* reorg creates USEs that look like this. We leave them
2709 alone because reorg needs them for its own purposes. */
2712 if (reg_set_p (reg
, pat
))
2714 if (side_effects_p (pat
) && GET_CODE (our_prev
) != CALL_INSN
)
2717 if (GET_CODE (pat
) == PARALLEL
)
2719 /* If we find a SET of something else, we can't
2724 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
2726 rtx part
= XVECEXP (pat
, 0, i
);
2728 if (GET_CODE (part
) == SET
2729 && SET_DEST (part
) != reg
)
2733 if (i
== XVECLEN (pat
, 0))
2734 delete_computation (our_prev
);
2736 else if (GET_CODE (pat
) == SET
2737 && GET_CODE (SET_DEST (pat
)) == REG
)
2739 int dest_regno
= REGNO (SET_DEST (pat
));
2742 + (dest_regno
< FIRST_PSEUDO_REGISTER
2743 ? HARD_REGNO_NREGS (dest_regno
,
2744 GET_MODE (SET_DEST (pat
))) : 1));
2745 int regno
= REGNO (reg
);
2748 + (regno
< FIRST_PSEUDO_REGISTER
2749 ? HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) : 1));
2751 if (dest_regno
>= regno
2752 && dest_endregno
<= endregno
)
2753 delete_computation (our_prev
);
2755 /* We may have a multi-word hard register and some, but not
2756 all, of the words of the register are needed in subsequent
2757 insns. Write REG_UNUSED notes for those parts that were not
2759 else if (dest_regno
<= regno
2760 && dest_endregno
>= endregno
)
2764 REG_NOTES (our_prev
)
2765 = gen_rtx_EXPR_LIST (REG_UNUSED
, reg
,
2766 REG_NOTES (our_prev
));
2768 for (i
= dest_regno
; i
< dest_endregno
; i
++)
2769 if (! find_regno_note (our_prev
, REG_UNUSED
, i
))
2772 if (i
== dest_endregno
)
2773 delete_computation (our_prev
);
2780 /* If PAT references the register that dies here, it is an
2781 additional use. Hence any prior SET isn't dead. However, this
2782 insn becomes the new place for the REG_DEAD note. */
2783 if (reg_overlap_mentioned_p (reg
, pat
))
2785 XEXP (note
, 1) = REG_NOTES (our_prev
);
2786 REG_NOTES (our_prev
) = note
;
2792 /* Delete INSN and recursively delete insns that compute values used only
2793 by INSN. This uses the REG_DEAD notes computed during flow analysis.
2794 If we are running before flow.c, we need do nothing since flow.c will
2795 delete dead code. We also can't know if the registers being used are
2796 dead or not at this point.
2798 Otherwise, look at all our REG_DEAD notes. If a previous insn does
2799 nothing other than set a register that dies in this insn, we can delete
2802 On machines with CC0, if CC0 is used in this insn, we may be able to
2803 delete the insn that set it. */
2806 delete_computation (insn
)
2812 if (reg_referenced_p (cc0_rtx
, PATTERN (insn
)))
2814 rtx prev
= prev_nonnote_insn (insn
);
2815 /* We assume that at this stage
2816 CC's are always set explicitly
2817 and always immediately before the jump that
2818 will use them. So if the previous insn
2819 exists to set the CC's, delete it
2820 (unless it performs auto-increments, etc.). */
2821 if (prev
&& GET_CODE (prev
) == INSN
2822 && sets_cc0_p (PATTERN (prev
)))
2824 if (sets_cc0_p (PATTERN (prev
)) > 0
2825 && ! side_effects_p (PATTERN (prev
)))
2826 delete_computation (prev
);
2828 /* Otherwise, show that cc0 won't be used. */
2829 REG_NOTES (prev
) = gen_rtx_EXPR_LIST (REG_UNUSED
,
2830 cc0_rtx
, REG_NOTES (prev
));
2835 for (note
= REG_NOTES (insn
); note
; note
= next
)
2837 next
= XEXP (note
, 1);
2839 if (REG_NOTE_KIND (note
) != REG_DEAD
2840 /* Verify that the REG_NOTE is legitimate. */
2841 || GET_CODE (XEXP (note
, 0)) != REG
)
2844 delete_prior_computation (note
, insn
);
2850 /* Delete insn INSN from the chain of insns and update label ref counts.
2851 May delete some following insns as a consequence; may even delete
2852 a label elsewhere and insns that follow it.
2854 Returns the first insn after INSN that was not deleted. */
2860 register rtx next
= NEXT_INSN (insn
);
2861 register rtx prev
= PREV_INSN (insn
);
2862 register int was_code_label
= (GET_CODE (insn
) == CODE_LABEL
);
2863 register int dont_really_delete
= 0;
2866 while (next
&& INSN_DELETED_P (next
))
2867 next
= NEXT_INSN (next
);
2869 /* This insn is already deleted => return first following nondeleted. */
2870 if (INSN_DELETED_P (insn
))
2874 remove_node_from_expr_list (insn
, &nonlocal_goto_handler_labels
);
2876 /* Don't delete user-declared labels. When optimizing, convert them
2877 to special NOTEs instead. When not optimizing, leave them alone. */
2878 if (was_code_label
&& LABEL_NAME (insn
) != 0)
2881 dont_really_delete
= 1;
2882 else if (! dont_really_delete
)
2884 const char *name
= LABEL_NAME (insn
);
2885 PUT_CODE (insn
, NOTE
);
2886 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED_LABEL
;
2887 NOTE_SOURCE_FILE (insn
) = name
;
2888 dont_really_delete
= 1;
2892 /* Mark this insn as deleted. */
2893 INSN_DELETED_P (insn
) = 1;
2895 /* If this is an unconditional jump, delete it from the jump chain. */
2896 if (simplejump_p (insn
))
2897 delete_from_jump_chain (insn
);
2899 /* If instruction is followed by a barrier,
2900 delete the barrier too. */
2902 if (next
!= 0 && GET_CODE (next
) == BARRIER
)
2904 INSN_DELETED_P (next
) = 1;
2905 next
= NEXT_INSN (next
);
2908 /* Patch out INSN (and the barrier if any) */
2910 if (! dont_really_delete
)
2914 NEXT_INSN (prev
) = next
;
2915 if (GET_CODE (prev
) == INSN
&& GET_CODE (PATTERN (prev
)) == SEQUENCE
)
2916 NEXT_INSN (XVECEXP (PATTERN (prev
), 0,
2917 XVECLEN (PATTERN (prev
), 0) - 1)) = next
;
2922 PREV_INSN (next
) = prev
;
2923 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == SEQUENCE
)
2924 PREV_INSN (XVECEXP (PATTERN (next
), 0, 0)) = prev
;
2927 if (prev
&& NEXT_INSN (prev
) == 0)
2928 set_last_insn (prev
);
2931 /* If deleting a jump, decrement the count of the label,
2932 and delete the label if it is now unused. */
2934 if (GET_CODE (insn
) == JUMP_INSN
&& JUMP_LABEL (insn
))
2936 rtx lab
= JUMP_LABEL (insn
), lab_next
;
2938 if (--LABEL_NUSES (lab
) == 0)
2940 /* This can delete NEXT or PREV,
2941 either directly if NEXT is JUMP_LABEL (INSN),
2942 or indirectly through more levels of jumps. */
2945 /* I feel a little doubtful about this loop,
2946 but I see no clean and sure alternative way
2947 to find the first insn after INSN that is not now deleted.
2948 I hope this works. */
2949 while (next
&& INSN_DELETED_P (next
))
2950 next
= NEXT_INSN (next
);
2953 else if ((lab_next
= next_nonnote_insn (lab
)) != NULL
2954 && GET_CODE (lab_next
) == JUMP_INSN
2955 && (GET_CODE (PATTERN (lab_next
)) == ADDR_VEC
2956 || GET_CODE (PATTERN (lab_next
)) == ADDR_DIFF_VEC
))
2958 /* If we're deleting the tablejump, delete the dispatch table.
2959 We may not be able to kill the label immediately preceding
2960 just yet, as it might be referenced in code leading up to
2962 delete_insn (lab_next
);
2966 /* Likewise if we're deleting a dispatch table. */
2968 if (GET_CODE (insn
) == JUMP_INSN
2969 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
2970 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
2972 rtx pat
= PATTERN (insn
);
2973 int i
, diff_vec_p
= GET_CODE (pat
) == ADDR_DIFF_VEC
;
2974 int len
= XVECLEN (pat
, diff_vec_p
);
2976 for (i
= 0; i
< len
; i
++)
2977 if (--LABEL_NUSES (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)) == 0)
2978 delete_insn (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0));
2979 while (next
&& INSN_DELETED_P (next
))
2980 next
= NEXT_INSN (next
);
2984 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
2985 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
2986 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
2987 if (REG_NOTE_KIND (note
) == REG_LABEL
2988 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
2989 && GET_CODE (XEXP (note
, 0)) == CODE_LABEL
)
2990 if (--LABEL_NUSES (XEXP (note
, 0)) == 0)
2991 delete_insn (XEXP (note
, 0));
2993 while (prev
&& (INSN_DELETED_P (prev
) || GET_CODE (prev
) == NOTE
))
2994 prev
= PREV_INSN (prev
);
2996 /* If INSN was a label and a dispatch table follows it,
2997 delete the dispatch table. The tablejump must have gone already.
2998 It isn't useful to fall through into a table. */
3001 && NEXT_INSN (insn
) != 0
3002 && GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
3003 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
3004 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
3005 next
= delete_insn (NEXT_INSN (insn
));
3007 /* If INSN was a label, delete insns following it if now unreachable. */
3009 if (was_code_label
&& prev
&& GET_CODE (prev
) == BARRIER
)
3011 register RTX_CODE code
;
3013 && (GET_RTX_CLASS (code
= GET_CODE (next
)) == 'i'
3014 || code
== NOTE
|| code
== BARRIER
3015 || (code
== CODE_LABEL
&& INSN_DELETED_P (next
))))
3018 && NOTE_LINE_NUMBER (next
) != NOTE_INSN_FUNCTION_END
)
3019 next
= NEXT_INSN (next
);
3020 /* Keep going past other deleted labels to delete what follows. */
3021 else if (code
== CODE_LABEL
&& INSN_DELETED_P (next
))
3022 next
= NEXT_INSN (next
);
3024 /* Note: if this deletes a jump, it can cause more
3025 deletion of unreachable code, after a different label.
3026 As long as the value from this recursive call is correct,
3027 this invocation functions correctly. */
3028 next
= delete_insn (next
);
3035 /* Advance from INSN till reaching something not deleted
3036 then return that. May return INSN itself. */
3039 next_nondeleted_insn (insn
)
3042 while (INSN_DELETED_P (insn
))
3043 insn
= NEXT_INSN (insn
);
3047 /* Delete a range of insns from FROM to TO, inclusive.
3048 This is for the sake of peephole optimization, so assume
3049 that whatever these insns do will still be done by a new
3050 peephole insn that will replace them. */
3053 delete_for_peephole (from
, to
)
3054 register rtx from
, to
;
3056 register rtx insn
= from
;
3060 register rtx next
= NEXT_INSN (insn
);
3061 register rtx prev
= PREV_INSN (insn
);
3063 if (GET_CODE (insn
) != NOTE
)
3065 INSN_DELETED_P (insn
) = 1;
3067 /* Patch this insn out of the chain. */
3068 /* We don't do this all at once, because we
3069 must preserve all NOTEs. */
3071 NEXT_INSN (prev
) = next
;
3074 PREV_INSN (next
) = prev
;
3082 /* Note that if TO is an unconditional jump
3083 we *do not* delete the BARRIER that follows,
3084 since the peephole that replaces this sequence
3085 is also an unconditional jump in that case. */
3088 /* We have determined that INSN is never reached, and are about to
3089 delete it. Print a warning if the user asked for one.
3091 To try to make this warning more useful, this should only be called
3092 once per basic block not reached, and it only warns when the basic
3093 block contains more than one line from the current function, and
3094 contains at least one operation. CSE and inlining can duplicate insns,
3095 so it's possible to get spurious warnings from this. */
3098 never_reached_warning (avoided_insn
)
3102 rtx a_line_note
= NULL
;
3103 int two_avoided_lines
= 0;
3104 int contains_insn
= 0;
3106 if (! warn_notreached
)
3109 /* Scan forwards, looking at LINE_NUMBER notes, until
3110 we hit a LABEL or we run out of insns. */
3112 for (insn
= avoided_insn
; insn
!= NULL
; insn
= NEXT_INSN (insn
))
3114 if (GET_CODE (insn
) == CODE_LABEL
)
3116 else if (GET_CODE (insn
) == NOTE
/* A line number note? */
3117 && NOTE_LINE_NUMBER (insn
) >= 0)
3119 if (a_line_note
== NULL
)
3122 two_avoided_lines
|= (NOTE_LINE_NUMBER (a_line_note
)
3123 != NOTE_LINE_NUMBER (insn
));
3125 else if (INSN_P (insn
))
3128 if (two_avoided_lines
&& contains_insn
)
3129 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note
),
3130 NOTE_LINE_NUMBER (a_line_note
),
3131 "will never be executed");
3134 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
3135 NLABEL as a return. Accrue modifications into the change group. */
3138 redirect_exp_1 (loc
, olabel
, nlabel
, insn
)
3143 register rtx x
= *loc
;
3144 register RTX_CODE code
= GET_CODE (x
);
3146 register const char *fmt
;
3148 if (code
== LABEL_REF
)
3150 if (XEXP (x
, 0) == olabel
)
3154 n
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
3156 n
= gen_rtx_RETURN (VOIDmode
);
3158 validate_change (insn
, loc
, n
, 1);
3162 else if (code
== RETURN
&& olabel
== 0)
3164 x
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
3165 if (loc
== &PATTERN (insn
))
3166 x
= gen_rtx_SET (VOIDmode
, pc_rtx
, x
);
3167 validate_change (insn
, loc
, x
, 1);
3171 if (code
== SET
&& nlabel
== 0 && SET_DEST (x
) == pc_rtx
3172 && GET_CODE (SET_SRC (x
)) == LABEL_REF
3173 && XEXP (SET_SRC (x
), 0) == olabel
)
3175 validate_change (insn
, loc
, gen_rtx_RETURN (VOIDmode
), 1);
3179 fmt
= GET_RTX_FORMAT (code
);
3180 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3183 redirect_exp_1 (&XEXP (x
, i
), olabel
, nlabel
, insn
);
3184 else if (fmt
[i
] == 'E')
3187 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3188 redirect_exp_1 (&XVECEXP (x
, i
, j
), olabel
, nlabel
, insn
);
3193 /* Similar, but apply the change group and report success or failure. */
3196 redirect_exp (olabel
, nlabel
, insn
)
3202 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
3203 loc
= &XVECEXP (PATTERN (insn
), 0, 0);
3205 loc
= &PATTERN (insn
);
3207 redirect_exp_1 (loc
, olabel
, nlabel
, insn
);
3208 if (num_validated_changes () == 0)
3211 return apply_change_group ();
3214 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
3215 the modifications into the change group. Return false if we did
3216 not see how to do that. */
3219 redirect_jump_1 (jump
, nlabel
)
3222 int ochanges
= num_validated_changes ();
3225 if (GET_CODE (PATTERN (jump
)) == PARALLEL
)
3226 loc
= &XVECEXP (PATTERN (jump
), 0, 0);
3228 loc
= &PATTERN (jump
);
3230 redirect_exp_1 (loc
, JUMP_LABEL (jump
), nlabel
, jump
);
3231 return num_validated_changes () > ochanges
;
3234 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
3235 jump target label is unused as a result, it and the code following
3238 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3241 The return value will be 1 if the change was made, 0 if it wasn't
3242 (this can only occur for NLABEL == 0). */
3245 redirect_jump (jump
, nlabel
, delete_unused
)
3249 register rtx olabel
= JUMP_LABEL (jump
);
3251 if (nlabel
== olabel
)
3254 if (! redirect_exp (olabel
, nlabel
, jump
))
3257 /* If this is an unconditional branch, delete it from the jump_chain of
3258 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3259 have UID's in range and JUMP_CHAIN is valid). */
3260 if (jump_chain
&& (simplejump_p (jump
)
3261 || GET_CODE (PATTERN (jump
)) == RETURN
))
3263 int label_index
= nlabel
? INSN_UID (nlabel
) : 0;
3265 delete_from_jump_chain (jump
);
3266 if (label_index
< max_jump_chain
3267 && INSN_UID (jump
) < max_jump_chain
)
3269 jump_chain
[INSN_UID (jump
)] = jump_chain
[label_index
];
3270 jump_chain
[label_index
] = jump
;
3274 JUMP_LABEL (jump
) = nlabel
;
3276 ++LABEL_NUSES (nlabel
);
3278 /* If we're eliding the jump over exception cleanups at the end of a
3279 function, move the function end note so that -Wreturn-type works. */
3280 if (olabel
&& nlabel
3281 && NEXT_INSN (olabel
)
3282 && GET_CODE (NEXT_INSN (olabel
)) == NOTE
3283 && NOTE_LINE_NUMBER (NEXT_INSN (olabel
)) == NOTE_INSN_FUNCTION_END
)
3284 emit_note_after (NOTE_INSN_FUNCTION_END
, nlabel
);
3286 if (olabel
&& --LABEL_NUSES (olabel
) == 0 && delete_unused
)
3287 delete_insn (olabel
);
3292 /* Invert the jump condition of rtx X contained in jump insn, INSN.
3293 Accrue the modifications into the change group. */
3299 register RTX_CODE code
;
3300 rtx x
= pc_set (insn
);
3306 code
= GET_CODE (x
);
3308 if (code
== IF_THEN_ELSE
)
3310 register rtx comp
= XEXP (x
, 0);
3312 enum rtx_code reversed_code
;
3314 /* We can do this in two ways: The preferable way, which can only
3315 be done if this is not an integer comparison, is to reverse
3316 the comparison code. Otherwise, swap the THEN-part and ELSE-part
3317 of the IF_THEN_ELSE. If we can't do either, fail. */
3319 reversed_code
= reversed_comparison_code (comp
, insn
);
3321 if (reversed_code
!= UNKNOWN
)
3323 validate_change (insn
, &XEXP (x
, 0),
3324 gen_rtx_fmt_ee (reversed_code
,
3325 GET_MODE (comp
), XEXP (comp
, 0),
3332 validate_change (insn
, &XEXP (x
, 1), XEXP (x
, 2), 1);
3333 validate_change (insn
, &XEXP (x
, 2), tem
, 1);
3339 /* Invert the jump condition of conditional jump insn, INSN.
3341 Return 1 if we can do so, 0 if we cannot find a way to do so that
3342 matches a pattern. */
3348 invert_exp_1 (insn
);
3349 if (num_validated_changes () == 0)
3352 return apply_change_group ();
3355 /* Invert the condition of the jump JUMP, and make it jump to label
3356 NLABEL instead of where it jumps now. Accrue changes into the
3357 change group. Return false if we didn't see how to perform the
3358 inversion and redirection. */
3361 invert_jump_1 (jump
, nlabel
)
3366 ochanges
= num_validated_changes ();
3367 invert_exp_1 (jump
);
3368 if (num_validated_changes () == ochanges
)
3371 return redirect_jump_1 (jump
, nlabel
);
3374 /* Invert the condition of the jump JUMP, and make it jump to label
3375 NLABEL instead of where it jumps now. Return true if successful. */
3378 invert_jump (jump
, nlabel
, delete_unused
)
3382 /* We have to either invert the condition and change the label or
3383 do neither. Either operation could fail. We first try to invert
3384 the jump. If that succeeds, we try changing the label. If that fails,
3385 we invert the jump back to what it was. */
3387 if (! invert_exp (jump
))
3390 if (redirect_jump (jump
, nlabel
, delete_unused
))
3392 /* An inverted jump means that a probability taken becomes a
3393 probability not taken. Subtract the branch probability from the
3394 probability base to convert it back to a taken probability. */
3396 rtx note
= find_reg_note (jump
, REG_BR_PROB
, NULL_RTX
);
3398 XEXP (note
, 0) = GEN_INT (REG_BR_PROB_BASE
- INTVAL (XEXP (note
, 0)));
3403 if (! invert_exp (jump
))
3404 /* This should just be putting it back the way it was. */
3410 /* Delete the instruction JUMP from any jump chain it might be on. */
3413 delete_from_jump_chain (jump
)
3417 rtx olabel
= JUMP_LABEL (jump
);
3419 /* Handle unconditional jumps. */
3420 if (jump_chain
&& olabel
!= 0
3421 && INSN_UID (olabel
) < max_jump_chain
3422 && simplejump_p (jump
))
3423 index
= INSN_UID (olabel
);
3424 /* Handle return insns. */
3425 else if (jump_chain
&& GET_CODE (PATTERN (jump
)) == RETURN
)
3430 if (jump_chain
[index
] == jump
)
3431 jump_chain
[index
] = jump_chain
[INSN_UID (jump
)];
3436 for (insn
= jump_chain
[index
];
3438 insn
= jump_chain
[INSN_UID (insn
)])
3439 if (jump_chain
[INSN_UID (insn
)] == jump
)
3441 jump_chain
[INSN_UID (insn
)] = jump_chain
[INSN_UID (jump
)];
3447 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3449 If the old jump target label (before the dispatch table) becomes unused,
3450 it and the dispatch table may be deleted. In that case, find the insn
3451 before the jump references that label and delete it and logical successors
3455 redirect_tablejump (jump
, nlabel
)
3458 register rtx olabel
= JUMP_LABEL (jump
);
3459 rtx
*notep
, note
, next
;
3461 /* Add this jump to the jump_chain of NLABEL. */
3462 if (jump_chain
&& INSN_UID (nlabel
) < max_jump_chain
3463 && INSN_UID (jump
) < max_jump_chain
)
3465 jump_chain
[INSN_UID (jump
)] = jump_chain
[INSN_UID (nlabel
)];
3466 jump_chain
[INSN_UID (nlabel
)] = jump
;
3469 for (notep
= ®_NOTES (jump
), note
= *notep
; note
; note
= next
)
3471 next
= XEXP (note
, 1);
3473 if (REG_NOTE_KIND (note
) != REG_DEAD
3474 /* Verify that the REG_NOTE is legitimate. */
3475 || GET_CODE (XEXP (note
, 0)) != REG
3476 || ! reg_mentioned_p (XEXP (note
, 0), PATTERN (jump
)))
3477 notep
= &XEXP (note
, 1);
3480 delete_prior_computation (note
, jump
);
3485 PATTERN (jump
) = gen_jump (nlabel
);
3486 JUMP_LABEL (jump
) = nlabel
;
3487 ++LABEL_NUSES (nlabel
);
3488 INSN_CODE (jump
) = -1;
3490 if (--LABEL_NUSES (olabel
) == 0)
3492 delete_labelref_insn (jump
, olabel
, 0);
3493 delete_insn (olabel
);
3497 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3498 If we found one, delete it and then delete this insn if DELETE_THIS is
3499 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3502 delete_labelref_insn (insn
, label
, delete_this
)
3509 if (GET_CODE (insn
) != NOTE
3510 && reg_mentioned_p (label
, PATTERN (insn
)))
3521 for (link
= LOG_LINKS (insn
); link
; link
= XEXP (link
, 1))
3522 if (delete_labelref_insn (XEXP (link
, 0), label
, 1))
3536 /* Like rtx_equal_p except that it considers two REGs as equal
3537 if they renumber to the same value and considers two commutative
3538 operations to be the same if the order of the operands has been
3541 ??? Addition is not commutative on the PA due to the weird implicit
3542 space register selection rules for memory addresses. Therefore, we
3543 don't consider a + b == b + a.
3545 We could/should make this test a little tighter. Possibly only
3546 disabling it on the PA via some backend macro or only disabling this
3547 case when the PLUS is inside a MEM. */
3550 rtx_renumbered_equal_p (x
, y
)
3554 register RTX_CODE code
= GET_CODE (x
);
3555 register const char *fmt
;
3560 if ((code
== REG
|| (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == REG
))
3561 && (GET_CODE (y
) == REG
|| (GET_CODE (y
) == SUBREG
3562 && GET_CODE (SUBREG_REG (y
)) == REG
)))
3564 int reg_x
= -1, reg_y
= -1;
3565 int word_x
= 0, word_y
= 0;
3567 if (GET_MODE (x
) != GET_MODE (y
))
3570 /* If we haven't done any renumbering, don't
3571 make any assumptions. */
3572 if (reg_renumber
== 0)
3573 return rtx_equal_p (x
, y
);
3577 reg_x
= REGNO (SUBREG_REG (x
));
3578 word_x
= SUBREG_WORD (x
);
3580 if (reg_renumber
[reg_x
] >= 0)
3582 reg_x
= reg_renumber
[reg_x
] + word_x
;
3590 if (reg_renumber
[reg_x
] >= 0)
3591 reg_x
= reg_renumber
[reg_x
];
3594 if (GET_CODE (y
) == SUBREG
)
3596 reg_y
= REGNO (SUBREG_REG (y
));
3597 word_y
= SUBREG_WORD (y
);
3599 if (reg_renumber
[reg_y
] >= 0)
3601 reg_y
= reg_renumber
[reg_y
];
3609 if (reg_renumber
[reg_y
] >= 0)
3610 reg_y
= reg_renumber
[reg_y
];
3613 return reg_x
>= 0 && reg_x
== reg_y
&& word_x
== word_y
;
3616 /* Now we have disposed of all the cases
3617 in which different rtx codes can match. */
3618 if (code
!= GET_CODE (y
))
3630 return INTVAL (x
) == INTVAL (y
);
3633 /* We can't assume nonlocal labels have their following insns yet. */
3634 if (LABEL_REF_NONLOCAL_P (x
) || LABEL_REF_NONLOCAL_P (y
))
3635 return XEXP (x
, 0) == XEXP (y
, 0);
3637 /* Two label-refs are equivalent if they point at labels
3638 in the same position in the instruction stream. */
3639 return (next_real_insn (XEXP (x
, 0))
3640 == next_real_insn (XEXP (y
, 0)));
3643 return XSTR (x
, 0) == XSTR (y
, 0);
3646 /* If we didn't match EQ equality above, they aren't the same. */
3653 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3655 if (GET_MODE (x
) != GET_MODE (y
))
3658 /* For commutative operations, the RTXs match if the operands match in any
3659 order. Also handle the simple binary and unary cases without a loop.
3661 ??? Don't consider PLUS a commutative operator; see comments above. */
3662 if ((code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
3664 return ((rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
3665 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)))
3666 || (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 1))
3667 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 0))));
3668 else if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == '2')
3669 return (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
3670 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)));
3671 else if (GET_RTX_CLASS (code
) == '1')
3672 return rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0));
3674 /* Compare the elements. If any pair of corresponding elements
3675 fail to match, return 0 for the whole thing. */
3677 fmt
= GET_RTX_FORMAT (code
);
3678 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3684 if (XWINT (x
, i
) != XWINT (y
, i
))
3689 if (XINT (x
, i
) != XINT (y
, i
))
3694 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
3699 if (! rtx_renumbered_equal_p (XEXP (x
, i
), XEXP (y
, i
)))
3704 if (XEXP (x
, i
) != XEXP (y
, i
))
3711 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
3713 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
3714 if (!rtx_renumbered_equal_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
)))
3725 /* If X is a hard register or equivalent to one or a subregister of one,
3726 return the hard register number. If X is a pseudo register that was not
3727 assigned a hard register, return the pseudo register number. Otherwise,
3728 return -1. Any rtx is valid for X. */
3734 if (GET_CODE (x
) == REG
)
3736 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
&& reg_renumber
[REGNO (x
)] >= 0)
3737 return reg_renumber
[REGNO (x
)];
3740 if (GET_CODE (x
) == SUBREG
)
3742 int base
= true_regnum (SUBREG_REG (x
));
3743 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
3744 return SUBREG_WORD (x
) + base
;
3749 /* Optimize code of the form:
3751 for (x = a[i]; x; ...)
3753 for (x = a[i]; x; ...)
3757 Loop optimize will change the above code into
3761 { ...; if (! (x = ...)) break; }
3764 { ...; if (! (x = ...)) break; }
3767 In general, if the first test fails, the program can branch
3768 directly to `foo' and skip the second try which is doomed to fail.
3769 We run this after loop optimization and before flow analysis. */
3771 /* When comparing the insn patterns, we track the fact that different
3772 pseudo-register numbers may have been used in each computation.
3773 The following array stores an equivalence -- same_regs[I] == J means
3774 that pseudo register I was used in the first set of tests in a context
3775 where J was used in the second set. We also count the number of such
3776 pending equivalences. If nonzero, the expressions really aren't the
3779 static int *same_regs
;
3781 static int num_same_regs
;
3783 /* Track any registers modified between the target of the first jump and
3784 the second jump. They never compare equal. */
3786 static char *modified_regs
;
3788 /* Record if memory was modified. */
3790 static int modified_mem
;
3792 /* Called via note_stores on each insn between the target of the first
3793 branch and the second branch. It marks any changed registers. */
3796 mark_modified_reg (dest
, x
, data
)
3798 rtx x ATTRIBUTE_UNUSED
;
3799 void *data ATTRIBUTE_UNUSED
;
3804 if (GET_CODE (dest
) == SUBREG
)
3805 dest
= SUBREG_REG (dest
);
3807 if (GET_CODE (dest
) == MEM
)
3810 if (GET_CODE (dest
) != REG
)
3813 regno
= REGNO (dest
);
3814 if (regno
>= FIRST_PSEUDO_REGISTER
)
3815 modified_regs
[regno
] = 1;
3817 for (i
= 0; i
< HARD_REGNO_NREGS (regno
, GET_MODE (dest
)); i
++)
3818 modified_regs
[regno
+ i
] = 1;
3821 /* F is the first insn in the chain of insns. */
3824 thread_jumps (f
, max_reg
, flag_before_loop
)
3827 int flag_before_loop
;
3829 /* Basic algorithm is to find a conditional branch,
3830 the label it may branch to, and the branch after
3831 that label. If the two branches test the same condition,
3832 walk back from both branch paths until the insn patterns
3833 differ, or code labels are hit. If we make it back to
3834 the target of the first branch, then we know that the first branch
3835 will either always succeed or always fail depending on the relative
3836 senses of the two branches. So adjust the first branch accordingly
3839 rtx label
, b1
, b2
, t1
, t2
;
3840 enum rtx_code code1
, code2
;
3841 rtx b1op0
, b1op1
, b2op0
, b2op1
;
3845 enum rtx_code reversed_code1
, reversed_code2
;
3847 /* Allocate register tables and quick-reset table. */
3848 modified_regs
= (char *) xmalloc (max_reg
* sizeof (char));
3849 same_regs
= (int *) xmalloc (max_reg
* sizeof (int));
3850 all_reset
= (int *) xmalloc (max_reg
* sizeof (int));
3851 for (i
= 0; i
< max_reg
; i
++)
3858 for (b1
= f
; b1
; b1
= NEXT_INSN (b1
))
3863 /* Get to a candidate branch insn. */
3864 if (GET_CODE (b1
) != JUMP_INSN
3865 || ! any_condjump_p (b1
) || JUMP_LABEL (b1
) == 0)
3868 memset (modified_regs
, 0, max_reg
* sizeof (char));
3871 memcpy (same_regs
, all_reset
, max_reg
* sizeof (int));
3874 label
= JUMP_LABEL (b1
);
3876 /* Look for a branch after the target. Record any registers and
3877 memory modified between the target and the branch. Stop when we
3878 get to a label since we can't know what was changed there. */
3879 for (b2
= NEXT_INSN (label
); b2
; b2
= NEXT_INSN (b2
))
3881 if (GET_CODE (b2
) == CODE_LABEL
)
3884 else if (GET_CODE (b2
) == JUMP_INSN
)
3886 /* If this is an unconditional jump and is the only use of
3887 its target label, we can follow it. */
3888 if (any_uncondjump_p (b2
)
3890 && JUMP_LABEL (b2
) != 0
3891 && LABEL_NUSES (JUMP_LABEL (b2
)) == 1)
3893 b2
= JUMP_LABEL (b2
);
3900 if (GET_CODE (b2
) != CALL_INSN
&& GET_CODE (b2
) != INSN
)
3903 if (GET_CODE (b2
) == CALL_INSN
)
3906 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3907 if (call_used_regs
[i
] && ! fixed_regs
[i
]
3908 && i
!= STACK_POINTER_REGNUM
3909 && i
!= FRAME_POINTER_REGNUM
3910 && i
!= HARD_FRAME_POINTER_REGNUM
3911 && i
!= ARG_POINTER_REGNUM
)
3912 modified_regs
[i
] = 1;
3915 note_stores (PATTERN (b2
), mark_modified_reg
, NULL
);
3918 /* Check the next candidate branch insn from the label
3921 || GET_CODE (b2
) != JUMP_INSN
3923 || !any_condjump_p (b2
)
3924 || !onlyjump_p (b2
))
3929 /* Get the comparison codes and operands, reversing the
3930 codes if appropriate. If we don't have comparison codes,
3931 we can't do anything. */
3932 b1op0
= XEXP (XEXP (SET_SRC (set
), 0), 0);
3933 b1op1
= XEXP (XEXP (SET_SRC (set
), 0), 1);
3934 code1
= GET_CODE (XEXP (SET_SRC (set
), 0));
3935 reversed_code1
= code1
;
3936 if (XEXP (SET_SRC (set
), 1) == pc_rtx
)
3937 code1
= reversed_comparison_code (XEXP (SET_SRC (set
), 0), b1
);
3939 reversed_code1
= reversed_comparison_code (XEXP (SET_SRC (set
), 0), b1
);
3941 b2op0
= XEXP (XEXP (SET_SRC (set2
), 0), 0);
3942 b2op1
= XEXP (XEXP (SET_SRC (set2
), 0), 1);
3943 code2
= GET_CODE (XEXP (SET_SRC (set2
), 0));
3944 reversed_code2
= code2
;
3945 if (XEXP (SET_SRC (set2
), 1) == pc_rtx
)
3946 code2
= reversed_comparison_code (XEXP (SET_SRC (set2
), 0), b2
);
3948 reversed_code2
= reversed_comparison_code (XEXP (SET_SRC (set2
), 0), b2
);
3950 /* If they test the same things and knowing that B1 branches
3951 tells us whether or not B2 branches, check if we
3952 can thread the branch. */
3953 if (rtx_equal_for_thread_p (b1op0
, b2op0
, b2
)
3954 && rtx_equal_for_thread_p (b1op1
, b2op1
, b2
)
3955 && (comparison_dominates_p (code1
, code2
)
3956 || comparison_dominates_p (code1
, reversed_code2
)))
3959 t1
= prev_nonnote_insn (b1
);
3960 t2
= prev_nonnote_insn (b2
);
3962 while (t1
!= 0 && t2
!= 0)
3966 /* We have reached the target of the first branch.
3967 If there are no pending register equivalents,
3968 we know that this branch will either always
3969 succeed (if the senses of the two branches are
3970 the same) or always fail (if not). */
3973 if (num_same_regs
!= 0)
3976 if (comparison_dominates_p (code1
, code2
))
3977 new_label
= JUMP_LABEL (b2
);
3979 new_label
= get_label_after (b2
);
3981 if (JUMP_LABEL (b1
) != new_label
)
3983 rtx prev
= PREV_INSN (new_label
);
3985 if (flag_before_loop
3986 && GET_CODE (prev
) == NOTE
3987 && NOTE_LINE_NUMBER (prev
) == NOTE_INSN_LOOP_BEG
)
3989 /* Don't thread to the loop label. If a loop
3990 label is reused, loop optimization will
3991 be disabled for that loop. */
3992 new_label
= gen_label_rtx ();
3993 emit_label_after (new_label
, PREV_INSN (prev
));
3995 changed
|= redirect_jump (b1
, new_label
, 1);
4000 /* If either of these is not a normal insn (it might be
4001 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4002 have already been skipped above.) Similarly, fail
4003 if the insns are different. */
4004 if (GET_CODE (t1
) != INSN
|| GET_CODE (t2
) != INSN
4005 || recog_memoized (t1
) != recog_memoized (t2
)
4006 || ! rtx_equal_for_thread_p (PATTERN (t1
),
4010 t1
= prev_nonnote_insn (t1
);
4011 t2
= prev_nonnote_insn (t2
);
4018 free (modified_regs
);
4023 /* This is like RTX_EQUAL_P except that it knows about our handling of
4024 possibly equivalent registers and knows to consider volatile and
4025 modified objects as not equal.
4027 YINSN is the insn containing Y. */
4030 rtx_equal_for_thread_p (x
, y
, yinsn
)
4036 register enum rtx_code code
;
4037 register const char *fmt
;
4039 code
= GET_CODE (x
);
4040 /* Rtx's of different codes cannot be equal. */
4041 if (code
!= GET_CODE (y
))
4044 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
4045 (REG:SI x) and (REG:HI x) are NOT equivalent. */
4047 if (GET_MODE (x
) != GET_MODE (y
))
4050 /* For floating-point, consider everything unequal. This is a bit
4051 pessimistic, but this pass would only rarely do anything for FP
4053 if (TARGET_FLOAT_FORMAT
== IEEE_FLOAT_FORMAT
4054 && FLOAT_MODE_P (GET_MODE (x
)) && ! flag_unsafe_math_optimizations
)
4057 /* For commutative operations, the RTX match if the operand match in any
4058 order. Also handle the simple binary and unary cases without a loop. */
4059 if (code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
4060 return ((rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
)
4061 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 1), yinsn
))
4062 || (rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 1), yinsn
)
4063 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 0), yinsn
)));
4064 else if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == '2')
4065 return (rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
)
4066 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 1), yinsn
));
4067 else if (GET_RTX_CLASS (code
) == '1')
4068 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
4070 /* Handle special-cases first. */
4074 if (REGNO (x
) == REGNO (y
) && ! modified_regs
[REGNO (x
)])
4077 /* If neither is user variable or hard register, check for possible
4079 if (REG_USERVAR_P (x
) || REG_USERVAR_P (y
)
4080 || REGNO (x
) < FIRST_PSEUDO_REGISTER
4081 || REGNO (y
) < FIRST_PSEUDO_REGISTER
)
4084 if (same_regs
[REGNO (x
)] == -1)
4086 same_regs
[REGNO (x
)] = REGNO (y
);
4089 /* If this is the first time we are seeing a register on the `Y'
4090 side, see if it is the last use. If not, we can't thread the
4091 jump, so mark it as not equivalent. */
4092 if (REGNO_LAST_UID (REGNO (y
)) != INSN_UID (yinsn
))
4098 return (same_regs
[REGNO (x
)] == (int) REGNO (y
));
4103 /* If memory modified or either volatile, not equivalent.
4104 Else, check address. */
4105 if (modified_mem
|| MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
4108 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
4111 if (MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
4117 /* Cancel a pending `same_regs' if setting equivalenced registers.
4118 Then process source. */
4119 if (GET_CODE (SET_DEST (x
)) == REG
4120 && GET_CODE (SET_DEST (y
)) == REG
)
4122 if (same_regs
[REGNO (SET_DEST (x
))] == (int) REGNO (SET_DEST (y
)))
4124 same_regs
[REGNO (SET_DEST (x
))] = -1;
4127 else if (REGNO (SET_DEST (x
)) != REGNO (SET_DEST (y
)))
4132 if (rtx_equal_for_thread_p (SET_DEST (x
), SET_DEST (y
), yinsn
) == 0)
4136 return rtx_equal_for_thread_p (SET_SRC (x
), SET_SRC (y
), yinsn
);
4139 return XEXP (x
, 0) == XEXP (y
, 0);
4142 return XSTR (x
, 0) == XSTR (y
, 0);
4151 fmt
= GET_RTX_FORMAT (code
);
4152 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4157 if (XWINT (x
, i
) != XWINT (y
, i
))
4163 if (XINT (x
, i
) != XINT (y
, i
))
4169 /* Two vectors must have the same length. */
4170 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
4173 /* And the corresponding elements must match. */
4174 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4175 if (rtx_equal_for_thread_p (XVECEXP (x
, i
, j
),
4176 XVECEXP (y
, i
, j
), yinsn
) == 0)
4181 if (rtx_equal_for_thread_p (XEXP (x
, i
), XEXP (y
, i
), yinsn
) == 0)
4187 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
4192 /* These are just backpointers, so they don't matter. */
4199 /* It is believed that rtx's at this level will never
4200 contain anything but integers and other rtx's,
4201 except for within LABEL_REFs and SYMBOL_REFs. */