1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 /* This is the jump-optimization pass of the compiler.
22 It is run two or three times: once before cse, sometimes once after cse,
23 and once after reload (before final).
24
25 jump_optimize deletes unreachable code and labels that are not used.
26 It also deletes jumps that jump to the following insn,
27 and simplifies jumps around unconditional jumps and jumps
28 to unconditional jumps.
29
30 Each CODE_LABEL has a count of the times it is used
31 stored in the LABEL_NUSES internal field, and each JUMP_INSN
32 has one label that it refers to stored in the
33 JUMP_LABEL internal field. With this we can detect labels that
34 become unused because of the deletion of all the jumps that
35 formerly used them. The JUMP_LABEL info is sometimes looked
36 at by later passes.
37
38 Optionally, cross-jumping can be done. Currently it is done
39 only the last time (when after reload and before final).
40 In fact, the code for cross-jumping now assumes that register
41 allocation has been done, since it uses `rtx_renumbered_equal_p'.
42
43 Jump optimization is done after cse when cse's constant-propagation
44 causes jumps to become unconditional or to be deleted.
45
46 Unreachable loops are not detected here, because the labels
47 have references and the insns appear reachable from the labels.
48 find_basic_blocks in flow.c finds and deletes such loops.
49
50 The subroutines delete_insn, redirect_jump, and invert_jump are used
51 from other passes as well. */
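/* For intuition only, a source-level sketch of the two simplest
   transformations (the pass itself works on RTL, not source):

	if (a) goto L1; goto L2; L1: ...

   has its jump around a jump simplified to

	if (!a) goto L2; L1: ...

   and a jump whose target is the very next insn is deleted outright.  */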
52
53 #include "config.h"
54 #include "rtl.h"
55 #include "flags.h"
56 #include "hard-reg-set.h"
57 #include "regs.h"
58 #include "expr.h"
59 #include "insn-config.h"
60 #include "insn-flags.h"
61 #include "real.h"
62
63 /* ??? Eventually must record somehow the labels used by jumps
64 from nested functions. */
65 /* Pre-record the next or previous real insn for each label?
66 No, this pass is very fast anyway. */
67 /* Condense consecutive labels?
68 This would make life analysis faster, maybe. */
69 /* Optimize jump y; x: ... y: jumpif... x?
70 Don't know if it is worth bothering with. */
71 /* Optimize two cases of conditional jump to conditional jump?
72 This can never delete any instruction or make anything dead,
73 or even change what is live at any point.
74 So perhaps let combiner do it. */
75
76 /* Vector indexed by uid.
77 For each CODE_LABEL, index by its uid to get first unconditional jump
78 that jumps to the label.
79 For each JUMP_INSN, index by its uid to get the next unconditional jump
80 that jumps to the same label.
81 Element 0 is the start of a chain of all return insns.
82 (It is safe to use element 0 because insn uid 0 is not used.)  */
83
84 static rtx *jump_chain;
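/* Illustrative sketch, kept out of the build: walking the chain of
   unconditional jumps that target LABEL, in the same style as the
   cross-jumping code further down.  */
#if 0
static void
walk_jumps_to (label)
     rtx label;
{
  register rtx j;

  for (j = jump_chain[INSN_UID (label)]; j != 0;
       j = jump_chain[INSN_UID (j)])
    ;				/* Each J jumps unconditionally to LABEL.  */
}
#endif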
85
86 /* List of labels referred to from initializers.
87 These can never be deleted. */
88 rtx forced_labels;
89
90 /* Maximum index in jump_chain. */
91
92 static int max_jump_chain;
93
94 /* Set nonzero by jump_optimize if control can fall through
95 to the end of the function. */
96 int can_reach_end;
97
98 /* Indicates whether death notes are significant in cross jump analysis.
99 Normally they are not significant, because if A and B jump to C,
100 and R dies in A, it must die in B. But this might not be true after
101 stack register conversion, and we must compare death notes in that
102 case. */
103
104 static int cross_jump_death_matters = 0;
105
106 static int duplicate_loop_exit_test ();
107 void redirect_tablejump ();
108 static int delete_labelref_insn ();
109 static void mark_jump_label ();
110 void delete_jump ();
111 void delete_computation ();
112 static void delete_from_jump_chain ();
113 static int tension_vector_labels ();
114 static void find_cross_jump ();
115 static void do_cross_jump ();
116 static int jump_back_p ();
117
118 extern rtx gen_jump ();
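/* Illustrative only (hypothetical arguments): a caller holding the
   first insn F would invoke this pass as, e.g.,

	jump_optimize (f, 1, 1, 1);

   requesting cross-jumping, deletion of no-op moves, and the
   regscan-dependent optimizations.  See jump_optimize below.  */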
119 \f
120 /* Delete no-op jumps and optimize jumps to jumps
121 and jumps around jumps.
122 Delete unused labels and unreachable code.
123
124 If CROSS_JUMP is 1, detect matching code
125 before a jump and its destination and unify them.
126 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
127
128 If NOOP_MOVES is nonzero, delete no-op move insns.
129
130 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
131 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
132
133 If `optimize' is zero, don't change any code,
134 just determine whether control drops off the end of the function.
135 This case occurs when we have -W and not -O.
136 It works because `delete_insn' checks the value of `optimize'
137 and refrains from actually deleting when that is 0. */
138
139 void
140 jump_optimize (f, cross_jump, noop_moves, after_regscan)
141 rtx f;
142 int cross_jump;
143 int noop_moves;
144 int after_regscan;
145 {
146 register rtx insn;
147 int changed;
148 int first = 1;
149 int max_uid = 0;
150 rtx last_insn;
151
152 cross_jump_death_matters = (cross_jump == 2);
153
154 /* Initialize LABEL_NUSES and JUMP_LABEL fields. */
155
156 for (insn = f; insn; insn = NEXT_INSN (insn))
157 {
158 if (GET_CODE (insn) == CODE_LABEL)
159 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
160 else if (GET_CODE (insn) == JUMP_INSN)
161 JUMP_LABEL (insn) = 0;
162 if (INSN_UID (insn) > max_uid)
163 max_uid = INSN_UID (insn);
164 }
165
166 max_uid++;
167
168 /* Delete insns following barriers, up to next label. */
169
170 for (insn = f; insn;)
171 {
172 if (GET_CODE (insn) == BARRIER)
173 {
174 insn = NEXT_INSN (insn);
175 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
176 {
177 if (GET_CODE (insn) == NOTE
178 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
179 insn = NEXT_INSN (insn);
180 else
181 insn = delete_insn (insn);
182 }
183 /* INSN is now the code_label. */
184 }
185 else
186 insn = NEXT_INSN (insn);
187 }
188
189 /* Leave some extra room for labels and duplicate exit test insns
190 we make. */
191 max_jump_chain = max_uid * 14 / 10;
192 jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
193 bzero (jump_chain, max_jump_chain * sizeof (rtx));
194
195 /* Mark the label each jump jumps to.
196 Combine consecutive labels, and count uses of labels.
197
198 For each label, make a chain (using `jump_chain')
199 of all the *unconditional* jumps that jump to it;
200 also make a chain of all returns. */
201
202 for (insn = f; insn; insn = NEXT_INSN (insn))
203 if ((GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN
204 || GET_CODE (insn) == CALL_INSN)
205 && ! INSN_DELETED_P (insn))
206 {
207 mark_jump_label (PATTERN (insn), insn, cross_jump);
208 if (GET_CODE (insn) == JUMP_INSN)
209 {
210 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
211 {
212 jump_chain[INSN_UID (insn)]
213 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
214 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
215 }
216 if (GET_CODE (PATTERN (insn)) == RETURN)
217 {
218 jump_chain[INSN_UID (insn)] = jump_chain[0];
219 jump_chain[0] = insn;
220 }
221 }
222 }
223
224 /* Keep track of labels used from static data;
225 they cannot ever be deleted. */
226
227 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
228 LABEL_NUSES (XEXP (insn, 0))++;
229
230 /* Delete all labels already not referenced.
231 Also find the last insn. */
232
233 last_insn = 0;
234 for (insn = f; insn; )
235 {
236 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
237 insn = delete_insn (insn);
238 else
239 {
240 last_insn = insn;
241 insn = NEXT_INSN (insn);
242 }
243 }
244
245 if (!optimize)
246 {
247 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
248 If so record that this function can drop off the end. */
249
250 insn = last_insn;
251 {
252 int n_labels = 1;
253 while (insn
254 /* One label can follow the end-note: the return label. */
255 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
256 /* Ordinary insns can follow it if returning a structure. */
257 || GET_CODE (insn) == INSN
258 /* If machine uses explicit RETURN insns, no epilogue,
259 then one of them follows the note. */
260 || (GET_CODE (insn) == JUMP_INSN
261 && GET_CODE (PATTERN (insn)) == RETURN)
262 /* Other kinds of notes can follow also. */
263 || (GET_CODE (insn) == NOTE
264 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
265 insn = PREV_INSN (insn);
266 }
267
268 /* Report if control can fall through at the end of the function. */
269 if (insn && GET_CODE (insn) == NOTE
270 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
271 && ! INSN_DELETED_P (insn))
272 can_reach_end = 1;
273
274 /* Zero the "deleted" flag of all the "deleted" insns. */
275 for (insn = f; insn; insn = NEXT_INSN (insn))
276 INSN_DELETED_P (insn) = 0;
277 return;
278 }
279
280 #ifdef HAVE_return
281 if (HAVE_return)
282 {
283 /* If we fall through to the epilogue, see if we can insert a RETURN insn
284 in front of it. If the machine allows it at this point (we might be
285 after reload for a leaf routine), it will improve optimization for it
286 to be there. */
287 insn = get_last_insn ();
288 while (insn && GET_CODE (insn) == NOTE)
289 insn = PREV_INSN (insn);
290
291 if (insn && GET_CODE (insn) != BARRIER)
292 {
293 emit_jump_insn (gen_return ());
294 emit_barrier ();
295 }
296 }
297 #endif
298
299 if (noop_moves)
300 for (insn = f; insn; )
301 {
302 register rtx next = NEXT_INSN (insn);
303
304 if (GET_CODE (insn) == INSN)
305 {
306 register rtx body = PATTERN (insn);
307
308 /* Combine stack_adjusts with following push_insns. */
309 #ifdef PUSH_ROUNDING
310 if (GET_CODE (body) == SET
311 && SET_DEST (body) == stack_pointer_rtx
312 && GET_CODE (SET_SRC (body)) == PLUS
313 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
314 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
315 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
316 {
317 rtx p;
318 rtx stack_adjust_insn = insn;
319 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
320 int total_pushed = 0;
321 int pushes = 0;
322
323 /* Find all successive push insns. */
324 p = insn;
325 /* Don't convert more than three pushes;
326 that starts adding too many displaced addresses
327 and the whole thing starts becoming a losing
328 proposition. */
329 while (pushes < 3)
330 {
331 rtx pbody, dest;
332 p = next_nonnote_insn (p);
333 if (p == 0 || GET_CODE (p) != INSN)
334 break;
335 pbody = PATTERN (p);
336 if (GET_CODE (pbody) != SET)
337 break;
338 dest = SET_DEST (pbody);
339 /* Allow a no-op move between the adjust and the push. */
340 if (GET_CODE (dest) == REG
341 && GET_CODE (SET_SRC (pbody)) == REG
342 && REGNO (dest) == REGNO (SET_SRC (pbody)))
343 continue;
344 if (! (GET_CODE (dest) == MEM
345 && GET_CODE (XEXP (dest, 0)) == POST_INC
346 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
347 break;
348 pushes++;
349 		      if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
350 > stack_adjust_amount)
351 break;
352 		      total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
353 }
354
355 /* Discard the amount pushed from the stack adjust;
356 maybe eliminate it entirely. */
357 if (total_pushed >= stack_adjust_amount)
358 {
359 delete_insn (stack_adjust_insn);
360 total_pushed = stack_adjust_amount;
361 }
362 else
363 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
364 = GEN_INT (stack_adjust_amount - total_pushed);
365
366 /* Change the appropriate push insns to ordinary stores. */
367 p = insn;
368 while (total_pushed > 0)
369 {
370 rtx pbody, dest;
371 p = next_nonnote_insn (p);
372 if (GET_CODE (p) != INSN)
373 break;
374 pbody = PATTERN (p);
375 		      if (GET_CODE (pbody) != SET)
376 break;
377 dest = SET_DEST (pbody);
378 if (! (GET_CODE (dest) == MEM
379 && GET_CODE (XEXP (dest, 0)) == POST_INC
380 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
381 break;
382 		      total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
383 /* If this push doesn't fully fit in the space
384 of the stack adjust that we deleted,
385 make another stack adjust here for what we
386 didn't use up. There should be peepholes
387 to recognize the resulting sequence of insns. */
388 if (total_pushed < 0)
389 {
390 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
391 GEN_INT (- total_pushed)),
392 p);
393 break;
394 }
395 XEXP (dest, 0)
396 = plus_constant (stack_pointer_rtx, total_pushed);
397 }
398 }
399 #endif
400
401 /* Detect and delete no-op move instructions
402 resulting from not allocating a parameter in a register. */
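	  /* Illustrative example: a parameter left in its stack slot can
	     produce RTL like

		(set (mem:SI (plus:SI (reg:SI fp) (const_int 8)))
		     (mem:SI (plus:SI (reg:SI fp) (const_int 8))))

	     which the rtx_equal_p test below recognizes as a no-op.  */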
403
404 if (GET_CODE (body) == SET
405 && (SET_DEST (body) == SET_SRC (body)
406 || (GET_CODE (SET_DEST (body)) == MEM
407 && GET_CODE (SET_SRC (body)) == MEM
408 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
409 && ! (GET_CODE (SET_DEST (body)) == MEM
410 && MEM_VOLATILE_P (SET_DEST (body)))
411 && ! (GET_CODE (SET_SRC (body)) == MEM
412 && MEM_VOLATILE_P (SET_SRC (body))))
413 delete_insn (insn);
414
415 /* Detect and ignore no-op move instructions
416 resulting from smart or fortuitous register allocation. */
417
418 else if (GET_CODE (body) == SET)
419 {
420 int sreg = true_regnum (SET_SRC (body));
421 int dreg = true_regnum (SET_DEST (body));
422
423 if (sreg == dreg && sreg >= 0)
424 delete_insn (insn);
425 else if (sreg >= 0 && dreg >= 0)
426 {
427 rtx trial;
428 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
429 sreg, NULL_PTR, dreg,
430 GET_MODE (SET_SRC (body)));
431
432 #ifdef PRESERVE_DEATH_INFO_REGNO_P
433 /* Deleting insn could lose a death-note for SREG or DREG
434 so don't do it if final needs accurate death-notes. */
435 if (! PRESERVE_DEATH_INFO_REGNO_P (sreg)
436 && ! PRESERVE_DEATH_INFO_REGNO_P (dreg))
437 #endif
438 {
439 /* DREG may have been the target of a REG_DEAD note in
440 the insn which makes INSN redundant. If so, reorg
441 would still think it is dead. So search for such a
442 note and delete it if we find it. */
443 for (trial = prev_nonnote_insn (insn);
444 trial && GET_CODE (trial) != CODE_LABEL;
445 trial = prev_nonnote_insn (trial))
446 if (find_regno_note (trial, REG_DEAD, dreg))
447 {
448 remove_death (dreg, trial);
449 break;
450 }
451
452 if (tem != 0
453 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
454 delete_insn (insn);
455 }
456 }
457 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
458 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
459 NULL_PTR, 0,
460 GET_MODE (SET_DEST (body))))
461 {
462 /* This handles the case where we have two consecutive
463 assignments of the same constant to pseudos that didn't
464 get a hard reg. Each SET from the constant will be
465 converted into a SET of the spill register and an
466 output reload will be made following it. This produces
467 two loads of the same constant into the same spill
468 register. */
469
470 rtx in_insn = insn;
471
472 /* Look back for a death note for the first reg.
473 If there is one, it is no longer accurate. */
474 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
475 {
476 if ((GET_CODE (in_insn) == INSN
477 || GET_CODE (in_insn) == JUMP_INSN)
478 && find_regno_note (in_insn, REG_DEAD, dreg))
479 {
480 remove_death (dreg, in_insn);
481 break;
482 }
483 in_insn = PREV_INSN (in_insn);
484 }
485
486 /* Delete the second load of the value. */
487 delete_insn (insn);
488 }
489 }
490 else if (GET_CODE (body) == PARALLEL)
491 {
492 /* If each part is a set between two identical registers or
493 a USE or CLOBBER, delete the insn. */
494 int i, sreg, dreg;
495 rtx tem;
496
497 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
498 {
499 tem = XVECEXP (body, 0, i);
500 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
501 continue;
502
503 if (GET_CODE (tem) != SET
504 || (sreg = true_regnum (SET_SRC (tem))) < 0
505 || (dreg = true_regnum (SET_DEST (tem))) < 0
506 || dreg != sreg)
507 break;
508 }
509
510 if (i < 0)
511 delete_insn (insn);
512 }
513 #if !BYTES_BIG_ENDIAN /* Not worth the hair to detect this
514 in the big-endian case. */
515 /* Also delete insns to store bit fields if they are no-ops. */
516 else if (GET_CODE (body) == SET
517 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
518 && XEXP (SET_DEST (body), 2) == const0_rtx
519 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
520 && ! (GET_CODE (SET_SRC (body)) == MEM
521 && MEM_VOLATILE_P (SET_SRC (body))))
522 delete_insn (insn);
523 #endif /* not BYTES_BIG_ENDIAN */
524 }
525 insn = next;
526 }
527
528 /* Now iterate optimizing jumps until nothing changes over one pass. */
529 changed = 1;
530 while (changed)
531 {
532 register rtx next;
533 changed = 0;
534
535 for (insn = f; insn; insn = next)
536 {
537 rtx reallabelprev;
538 rtx temp, temp1, temp2, temp3, temp4, temp5;
539 rtx nlabel;
540 int this_is_simplejump, this_is_condjump, reversep;
541 #if 0
542 /* If NOT the first iteration, if this is the last jump pass
543 (just before final), do the special peephole optimizations.
544 Avoiding the first iteration gives ordinary jump opts
545 a chance to work before peephole opts. */
546
547 if (reload_completed && !first && !flag_no_peephole)
548 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
549 peephole (insn);
550 #endif
551
552 /* That could have deleted some insns after INSN, so check now
553 what the following insn is. */
554
555 next = NEXT_INSN (insn);
556
557 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
558 jump. Try to optimize by duplicating the loop exit test if so.
559 This is only safe immediately after regscan, because it uses
560 the values of regno_first_uid and regno_last_uid. */
561 if (after_regscan && GET_CODE (insn) == NOTE
562 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
563 && (temp1 = next_nonnote_insn (insn)) != 0
564 && simplejump_p (temp1))
565 {
566 temp = PREV_INSN (insn);
567 if (duplicate_loop_exit_test (insn))
568 {
569 changed = 1;
570 next = NEXT_INSN (temp);
571 continue;
572 }
573 }
574
575 if (GET_CODE (insn) != JUMP_INSN)
576 continue;
577
578 this_is_simplejump = simplejump_p (insn);
579 this_is_condjump = condjump_p (insn);
580
581 /* Tension the labels in dispatch tables. */
582
583 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
584 changed |= tension_vector_labels (PATTERN (insn), 0);
585 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
586 changed |= tension_vector_labels (PATTERN (insn), 1);
587
588 /* If a dispatch table always goes to the same place,
589 get rid of it and replace the insn that uses it. */
590
591 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
592 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
593 {
594 int i;
595 rtx pat = PATTERN (insn);
596 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
597 int len = XVECLEN (pat, diff_vec_p);
598 rtx dispatch = prev_real_insn (insn);
599
600 for (i = 0; i < len; i++)
601 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
602 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
603 break;
604 if (i == len
605 && GET_CODE (dispatch) == JUMP_INSN
606 && JUMP_LABEL (dispatch) != 0
607 /* Don't mess with a casesi insn. */
608 && !(GET_CODE (PATTERN (dispatch)) == SET
609 && (GET_CODE (SET_SRC (PATTERN (dispatch)))
610 == IF_THEN_ELSE))
611 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
612 {
613 redirect_tablejump (dispatch,
614 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
615 changed = 1;
616 }
617 }
618
619 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
620
621 /* If a jump references the end of the function, try to turn
622 it into a RETURN insn, possibly a conditional one. */
623 if (JUMP_LABEL (insn)
624 && next_active_insn (JUMP_LABEL (insn)) == 0)
625 changed |= redirect_jump (insn, NULL_RTX);
626
627 /* Detect jump to following insn. */
628 if (reallabelprev == insn && condjump_p (insn))
629 {
630 delete_jump (insn);
631 changed = 1;
632 continue;
633 }
634
635 /* If we have an unconditional jump preceded by a USE, try to put
636 the USE before the target and jump there. This simplifies many
637 of the optimizations below since we don't have to worry about
638 dealing with these USE insns. We only do this if the label
639 	     being branched to already has the identical USE or if code
640 never falls through to that label. */
641
642 if (this_is_simplejump
643 && (temp = prev_nonnote_insn (insn)) != 0
644 && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
645 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
646 && (GET_CODE (temp1) == BARRIER
647 || (GET_CODE (temp1) == INSN
648 && rtx_equal_p (PATTERN (temp), PATTERN (temp1)))))
649 {
650 if (GET_CODE (temp1) == BARRIER)
651 {
652 reorder_insns (temp, temp, temp1);
653 temp1 = NEXT_INSN (temp1);
654 }
655 else
656 delete_insn (temp);
657
658 redirect_jump (insn, get_label_before (temp1));
659 reallabelprev = prev_real_insn (temp1);
660 changed = 1;
661 }
662
663 /* Simplify if (...) x = a; else x = b; by converting it
664 to x = b; if (...) x = a;
665 if B is sufficiently simple, the test doesn't involve X,
666 and nothing in the test modifies B or X.
667
668 If we have small register classes, we also can't do this if X
669 is a hard register.
670
671 If the "x = b;" insn has any REG_NOTES, we don't do this because
672 of the possibility that we are running after CSE and there is a
673 REG_EQUAL note that is only valid if the branch has already been
674 taken. If we move the insn with the REG_EQUAL note, we may
675 fold the comparison to always be false in a later CSE pass.
676 (We could also delete the REG_NOTES when moving the insn, but it
677 seems simpler to not move it.) An exception is that we can move
678 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
679 value is the same as "b".
680
681 INSN is the branch over the `else' part.
682
683 We set:
684
685 TEMP to the jump insn preceding "x = a;"
686 TEMP1 to X
687 TEMP2 to the insn that sets "x = b;"
688 TEMP3 to the insn that sets "x = a;" */
689
690 if (this_is_simplejump
691 && (temp3 = prev_active_insn (insn)) != 0
692 && GET_CODE (temp3) == INSN
693 && GET_CODE (PATTERN (temp3)) == SET
694 && GET_CODE (temp1 = SET_DEST (PATTERN (temp3))) == REG
695 #ifdef SMALL_REGISTER_CLASSES
696 && REGNO (temp1) >= FIRST_PSEUDO_REGISTER
697 #endif
698 && (temp2 = next_active_insn (insn)) != 0
699 && GET_CODE (temp2) == INSN
700 && GET_CODE (PATTERN (temp2)) == SET
701 && rtx_equal_p (SET_DEST (PATTERN (temp2)), temp1)
702 && (GET_CODE (SET_SRC (PATTERN (temp2))) == REG
703 || GET_CODE (SET_SRC (PATTERN (temp2))) == SUBREG
704 || CONSTANT_P (SET_SRC (PATTERN (temp2))))
705 && (REG_NOTES (temp2) == 0
706 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
707 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
708 && XEXP (REG_NOTES (temp2), 1) == 0
709 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
710 SET_SRC (PATTERN (temp2)))))
711 && (temp = prev_active_insn (temp3)) != 0
712 && condjump_p (temp) && ! simplejump_p (temp)
713 /* TEMP must skip over the "x = a;" insn */
714 && prev_real_insn (JUMP_LABEL (temp)) == insn
715 && no_labels_between_p (insn, JUMP_LABEL (temp))
716 /* There must be no other entries to the "x = b;" insn. */
717 && no_labels_between_p (JUMP_LABEL (temp), temp2)
718 /* INSN must either branch to the insn after TEMP2 or the insn
719 after TEMP2 must branch to the same place as INSN. */
720 && (reallabelprev == temp2
721 || ((temp4 = next_active_insn (temp2)) != 0
722 && simplejump_p (temp4)
723 && JUMP_LABEL (temp4) == JUMP_LABEL (insn))))
724 {
725 /* The test expression, X, may be a complicated test with
726 multiple branches. See if we can find all the uses of
727 the label that TEMP branches to without hitting a CALL_INSN
728 or a jump to somewhere else. */
729 rtx target = JUMP_LABEL (temp);
730 int nuses = LABEL_NUSES (target);
731 rtx p, q;
732
733 /* Set P to the first jump insn that goes around "x = a;". */
734 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
735 {
736 if (GET_CODE (p) == JUMP_INSN)
737 {
738 if (condjump_p (p) && ! simplejump_p (p)
739 && JUMP_LABEL (p) == target)
740 {
741 nuses--;
742 if (nuses == 0)
743 break;
744 }
745 else
746 break;
747 }
748 else if (GET_CODE (p) == CALL_INSN)
749 break;
750 }
751
752 #ifdef HAVE_cc0
753 /* We cannot insert anything between a set of cc and its use
754 so if P uses cc0, we must back up to the previous insn. */
755 q = prev_nonnote_insn (p);
756 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
757 && sets_cc0_p (PATTERN (q)))
758 p = q;
759 #endif
760
761 if (p)
762 p = PREV_INSN (p);
763
764 /* If we found all the uses and there was no data conflict, we
765 can move the assignment unless we can branch into the middle
766 from somewhere. */
767 if (nuses == 0 && p
768 && no_labels_between_p (p, insn)
769 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
770 && ! reg_set_between_p (temp1, p, temp3)
771 && (GET_CODE (SET_SRC (PATTERN (temp2))) == CONST_INT
772 || ! reg_set_between_p (SET_SRC (PATTERN (temp2)),
773 p, temp2)))
774 {
775 reorder_insns_with_line_notes (temp2, temp2, p);
776
777 /* Set NEXT to an insn that we know won't go away. */
778 next = next_active_insn (insn);
779
780 /* Delete the jump around the set. Note that we must do
781 this before we redirect the test jumps so that it won't
782 delete the code immediately following the assignment
783 we moved (which might be a jump). */
784
785 delete_insn (insn);
786
787 /* We either have two consecutive labels or a jump to
788 a jump, so adjust all the JUMP_INSNs to branch to where
789 INSN branches to. */
790 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
791 if (GET_CODE (p) == JUMP_INSN)
792 redirect_jump (p, target);
793
794 changed = 1;
795 continue;
796 }
797 }
798
799 /* We deal with four cases:
800
801 1) x = a; if (...) x = b; and either A or B is zero,
802 2) if (...) x = 0; and jumps are expensive,
803 3) x = a; if (...) x = b; and A and B are constants where all the
804 set bits in A are also set in B and jumps are expensive, and
805 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
806 	        very expensive.
807
808 In each of these try to use a store-flag insn to avoid the jump.
809 (If the jump would be faster, the machine should not have
810 defined the scc insns!). These cases are often made by the
811 previous optimization.
812
813 INSN here is the jump around the store. We set:
814
815 TEMP to the "x = b;" insn.
816 TEMP1 to X.
817 TEMP2 to B (const0_rtx in the second case).
818 TEMP3 to A (X in the second case).
819 TEMP4 to the condition being tested.
820 TEMP5 to the earliest insn used to find the condition. */
821
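	  /* For intuition (illustrative, source level): case 2 above
	     amounts to rewriting

		if (cond) x = 0;

	     as the branch-free

		x &= -(cond == 0);

	     where the comparison is computed by a store-flag (scc)
	     insn; the other cases `and'/`ior' the flag value with A
	     and B analogously.  */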
822 if (/* We can't do this after reload has completed. */
823 ! reload_completed
824 && this_is_condjump && ! this_is_simplejump
825 /* Set TEMP to the "x = b;" insn. */
826 && (temp = next_nonnote_insn (insn)) != 0
827 && GET_CODE (temp) == INSN
828 && GET_CODE (PATTERN (temp)) == SET
829 && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
830 #ifdef SMALL_REGISTER_CLASSES
831 && REGNO (temp1) >= FIRST_PSEUDO_REGISTER
832 #endif
833 && GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
834 && (GET_CODE (temp2 = SET_SRC (PATTERN (temp))) == REG
835 || GET_CODE (temp2) == SUBREG
836 || GET_CODE (temp2) == CONST_INT)
837 /* Allow either form, but prefer the former if both apply. */
838 && (((temp3 = reg_set_last (temp1, insn)) != 0
839 && ((GET_CODE (temp3) == REG
840 #ifdef SMALL_REGISTER_CLASSES
841 && REGNO (temp3) >= FIRST_PSEUDO_REGISTER
842 #endif
843 )
844 || GET_CODE (temp3) == SUBREG
845 || GET_CODE (temp3) == CONST_INT))
846 /* Make the latter case look like x = x; if (...) x = 0; */
847 || ((temp3 = temp1, BRANCH_COST >= 2)
848 && temp2 == const0_rtx))
849 /* INSN must either branch to the insn after TEMP or the insn
850 after TEMP must branch to the same place as INSN. */
851 && (reallabelprev == temp
852 || ((temp4 = next_active_insn (temp)) != 0
853 && simplejump_p (temp4)
854 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
855 && (temp4 = get_condition (insn, &temp5)) != 0
856
857 /* If B is zero, OK; if A is zero, can only do (1) if we
858 can reverse the condition. See if (3) applies possibly
859 by reversing the condition. Prefer reversing to (4) when
860 branches are very expensive. */
861 && ((reversep = 0, temp2 == const0_rtx)
862 || (temp3 == const0_rtx
863 && (reversep = can_reverse_comparison_p (temp4, insn)))
864 || (BRANCH_COST >= 2
865 && GET_CODE (temp2) == CONST_INT
866 && GET_CODE (temp3) == CONST_INT
867 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
868 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
869 && (reversep = can_reverse_comparison_p (temp4,
870 insn)))))
871 || BRANCH_COST >= 3))
872 {
873 enum rtx_code code = GET_CODE (temp4);
874 rtx uval, cval, var = temp1;
875 int normalizep;
876 rtx target;
877
878 /* If necessary, reverse the condition. */
879 if (reversep)
880 code = reverse_condition (code), uval = temp2, cval = temp3;
881 else
882 uval = temp3, cval = temp2;
883
884 /* See if we can do this with a store-flag insn. */
885 start_sequence ();
886
887 /* If CVAL is non-zero, normalize to -1. Otherwise,
888 if UVAL is the constant 1, it is best to just compute
889 the result directly. If UVAL is constant and STORE_FLAG_VALUE
890 includes all of its bits, it is best to compute the flag
891 value unnormalized and `and' it with UVAL. Otherwise,
892 normalize to -1 and `and' with UVAL. */
893 normalizep = (cval != const0_rtx ? -1
894 : (uval == const1_rtx ? 1
895 : (GET_CODE (uval) == CONST_INT
896 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
897 ? 0 : -1));
898
899 /* We will be putting the store-flag insn immediately in
900 front of the comparison that was originally being done,
901 so we know all the variables in TEMP4 will be valid.
902 However, this might be in front of the assignment of
903 A to VAR. If it is, it would clobber the store-flag
904 we will be emitting.
905
906 Therefore, emit into a temporary which will be copied to
907 VAR immediately after TEMP. */
908
909 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
910 XEXP (temp4, 0), XEXP (temp4, 1),
911 VOIDmode,
912 (code == LTU || code == LEU
913 || code == GEU || code == GTU),
914 normalizep);
915 if (target)
916 {
917 rtx seq;
918
919 /* Both CVAL and UVAL are non-zero. */
920 if (cval != const0_rtx && uval != const0_rtx)
921 {
922 rtx tem1, tem2;
923
924 tem1 = expand_and (uval, target, NULL_RTX);
925 if (GET_CODE (cval) == CONST_INT
926 && GET_CODE (uval) == CONST_INT
927 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
928 tem2 = cval;
929 else
930 {
931 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
932 target, NULL_RTX, 0);
933 tem2 = expand_and (cval, tem2, tem2);
934 }
935
936 /* If we usually make new pseudos, do so here. This
937 turns out to help machines that have conditional
938 move insns. */
939
940 if (flag_expensive_optimizations)
941 target = 0;
942
943 target = expand_binop (GET_MODE (var), ior_optab,
944 tem1, tem2, target,
945 1, OPTAB_WIDEN);
946 }
947 else if (normalizep != 1)
948 target = expand_and (uval, target,
949 (GET_CODE (target) == REG
950 && ! preserve_subexpressions_p ()
951 ? target : NULL_RTX));
952
953 seq = gen_sequence ();
954 end_sequence ();
955 emit_insn_before (seq, temp5);
956 emit_insn_after (gen_move_insn (var, target), insn);
957 delete_insn (temp);
958 next = NEXT_INSN (insn);
959 #ifdef HAVE_cc0
960 delete_insn (prev_nonnote_insn (insn));
961 #endif
962 delete_insn (insn);
963 changed = 1;
964 continue;
965 }
966 else
967 end_sequence ();
968 }
969
970 /* If branches are expensive, convert
971 if (foo) bar++; to bar += (foo != 0);
972 and similarly for "bar--;"
973
974 INSN is the conditional branch around the arithmetic. We set:
975
976 TEMP is the arithmetic insn.
977 TEMP1 is the SET doing the arithmetic.
978 TEMP2 is the operand being incremented or decremented.
979 TEMP3 to the condition being tested.
980 TEMP4 to the earliest insn used to find the condition. */
981
982 if (BRANCH_COST >= 2
983 && ! reload_completed
984 && this_is_condjump && ! this_is_simplejump
985 && (temp = next_nonnote_insn (insn)) != 0
986 && (temp1 = single_set (temp)) != 0
987 && (temp2 = SET_DEST (temp1),
988 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
989 && GET_CODE (SET_SRC (temp1)) == PLUS
990 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
991 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
992 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
993 /* INSN must either branch to the insn after TEMP or the insn
994 after TEMP must branch to the same place as INSN. */
995 && (reallabelprev == temp
996 || ((temp3 = next_active_insn (temp)) != 0
997 && simplejump_p (temp3)
998 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
999 && (temp3 = get_condition (insn, &temp4)) != 0
1000 && can_reverse_comparison_p (temp3, insn))
1001 {
1002 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
1003 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1004
1005 start_sequence ();
1006
1007 /* It must be the case that TEMP2 is not modified in the range
1008 [TEMP4, INSN). The one exception we make is if the insn
1009 before INSN sets TEMP2 to something which is also unchanged
1010 in that range. In that case, we can move the initialization
1011 into our sequence. */
1012
1013 if ((temp5 = prev_active_insn (insn)) != 0
1014 && GET_CODE (temp5) == INSN
1015 && (temp6 = single_set (temp5)) != 0
1016 && rtx_equal_p (temp2, SET_DEST (temp6))
1017 && (CONSTANT_P (SET_SRC (temp6))
1018 || GET_CODE (SET_SRC (temp6)) == REG
1019 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1020 {
1021 emit_insn (PATTERN (temp5));
1022 init_insn = temp5;
1023 init = SET_SRC (temp6);
1024 }
1025
1026 if (CONSTANT_P (init)
1027 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1028 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1029 XEXP (temp3, 0), XEXP (temp3, 1),
1030 VOIDmode,
1031 (code == LTU || code == LEU
1032 || code == GTU || code == GEU), 1);
1033
1034 /* If we can do the store-flag, do the addition or
1035 subtraction. */
1036
1037 if (target)
1038 target = expand_binop (GET_MODE (temp2),
1039 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1040 ? add_optab : sub_optab),
1041 temp2, target, temp2, OPTAB_WIDEN);
1042
1043 if (target != 0)
1044 {
1045 /* Put the result back in temp2 in case it isn't already.
1046 		 Then replace the jump, possibly a CC0-setting insn in
1047 front of the jump, and TEMP, with the sequence we have
1048 made. */
1049
1050 if (target != temp2)
1051 emit_move_insn (temp2, target);
1052
1053 seq = get_insns ();
1054 end_sequence ();
1055
1056 emit_insns_before (seq, temp4);
1057 delete_insn (temp);
1058
1059 if (init_insn)
1060 delete_insn (init_insn);
1061
1062 next = NEXT_INSN (insn);
1063 #ifdef HAVE_cc0
1064 delete_insn (prev_nonnote_insn (insn));
1065 #endif
1066 delete_insn (insn);
1067 changed = 1;
1068 continue;
1069 }
1070 else
1071 end_sequence ();
1072 }
1073
1074 /* Simplify if (...) x = 1; else {...} if (x) ...
1075 We recognize this case scanning backwards as well.
1076
1077 TEMP is the assignment to x;
1078 TEMP1 is the label at the head of the second if. */
1079 /* ?? This should call get_condition to find the values being
1080 compared, instead of looking for a COMPARE insn when HAVE_cc0
1081 is not defined. This would allow it to work on the m88k. */
1082 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1083 is not defined and the condition is tested by a separate compare
1084 insn. This is because the code below assumes that the result
1085 of the compare dies in the following branch.
1086
1087 Not only that, but there might be other insns between the
1088 compare and branch whose results are live. Those insns need
1089 to be executed.
1090
1091 A way to fix this is to move the insns at JUMP_LABEL (insn)
1092 to before INSN. If we are running before flow, they will
1093 be deleted if they aren't needed. But this doesn't work
1094 well after flow.
1095
1096 This is really a special-case of jump threading, anyway. The
1097 right thing to do is to replace this and jump threading with
1098 much simpler code in cse.
1099
1100 This code has been turned off in the non-cc0 case in the
1101 meantime. */
1102
1103 #ifdef HAVE_cc0
1104 else if (this_is_simplejump
1105 /* Safe to skip USE and CLOBBER insns here
1106 since they will not be deleted. */
1107 && (temp = prev_active_insn (insn))
1108 && no_labels_between_p (temp, insn)
1109 && GET_CODE (temp) == INSN
1110 && GET_CODE (PATTERN (temp)) == SET
1111 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1112 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1113 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1114 /* If we find that the next value tested is `x'
1115 (TEMP1 is the insn where this happens), win. */
1116 && GET_CODE (temp1) == INSN
1117 && GET_CODE (PATTERN (temp1)) == SET
1118 #ifdef HAVE_cc0
1119 /* Does temp1 `tst' the value of x? */
1120 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1121 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1122 && (temp1 = next_nonnote_insn (temp1))
1123 #else
1124 /* Does temp1 compare the value of x against zero? */
1125 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1126 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1127 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1128 == SET_DEST (PATTERN (temp)))
1129 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1130 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1131 #endif
1132 && condjump_p (temp1))
1133 {
1134 /* Get the if_then_else from the condjump. */
1135 rtx choice = SET_SRC (PATTERN (temp1));
1136 if (GET_CODE (choice) == IF_THEN_ELSE)
1137 {
1138 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1139 rtx val = SET_SRC (PATTERN (temp));
1140 rtx cond
1141 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1142 val, const0_rtx);
1143 rtx ultimate;
1144
1145 if (cond == const_true_rtx)
1146 ultimate = XEXP (choice, 1);
1147 else if (cond == const0_rtx)
1148 ultimate = XEXP (choice, 2);
1149 else
1150 ultimate = 0;
1151
1152 if (ultimate == pc_rtx)
1153 ultimate = get_label_after (temp1);
1154 else if (ultimate && GET_CODE (ultimate) != RETURN)
1155 ultimate = XEXP (ultimate, 0);
1156
1157 if (ultimate)
1158 changed |= redirect_jump (insn, ultimate);
1159 }
1160 }
1161 #endif
1162
1163 #if 0
1164 /* @@ This needs a bit of work before it will be right.
1165
1166 Any type of comparison can be accepted for the first and
1167 second compare. When rewriting the first jump, we must
1168 	     compute what conditions can reach label3, and use the
1169 appropriate code. We can not simply reverse/swap the code
1170 of the first jump. In some cases, the second jump must be
1171 rewritten also.
1172
1173 For example,
1174 < == converts to > ==
1175 < != converts to == >
1176 etc.
1177
1178 If the code is written to only accept an '==' test for the second
1179 compare, then all that needs to be done is to swap the condition
1180 of the first branch.
1181
1182 	     It is questionable whether we want this optimization anyway,
1183 since if the user wrote code like this because he/she knew that
1184 the jump to label1 is taken most of the time, then rewriting
1185 this gives slower code. */
1186 /* @@ This should call get_condition to find the values being
1187 compared, instead of looking for a COMPARE insn when HAVE_cc0
1188 is not defined. This would allow it to work on the m88k. */
1189 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1190 is not defined and the condition is tested by a separate compare
1191 insn. This is because the code below assumes that the result
1192 of the compare dies in the following branch. */
1193
1194 /* Simplify test a ~= b
1195 condjump label1;
1196 test a == b
1197 condjump label2;
1198 jump label3;
1199 label1:
1200
1201 rewriting as
1202 test a ~~= b
1203 condjump label3
1204 test a == b
1205 condjump label2
1206 label1:
1207
1208 where ~= is an inequality, e.g. >, and ~~= is the swapped
1209 inequality, e.g. <.
1210
1211 We recognize this case scanning backwards.
1212
1213 TEMP is the conditional jump to `label2';
1214 TEMP1 is the test for `a == b';
1215 TEMP2 is the conditional jump to `label1';
1216 TEMP3 is the test for `a ~= b'. */
1217 else if (this_is_simplejump
1218 && (temp = prev_active_insn (insn))
1219 && no_labels_between_p (temp, insn)
1220 && condjump_p (temp)
1221 && (temp1 = prev_active_insn (temp))
1222 && no_labels_between_p (temp1, temp)
1223 && GET_CODE (temp1) == INSN
1224 && GET_CODE (PATTERN (temp1)) == SET
1225 #ifdef HAVE_cc0
1226 && sets_cc0_p (PATTERN (temp1)) == 1
1227 #else
1228 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1229 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1230 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1231 #endif
1232 && (temp2 = prev_active_insn (temp1))
1233 && no_labels_between_p (temp2, temp1)
1234 && condjump_p (temp2)
1235 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1236 && (temp3 = prev_active_insn (temp2))
1237 && no_labels_between_p (temp3, temp2)
1238 && GET_CODE (PATTERN (temp3)) == SET
1239 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1240 SET_DEST (PATTERN (temp1)))
1241 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1242 SET_SRC (PATTERN (temp3)))
1243 && ! inequality_comparisons_p (PATTERN (temp))
1244 && inequality_comparisons_p (PATTERN (temp2)))
1245 {
1246 rtx fallthrough_label = JUMP_LABEL (temp2);
1247
1248 ++LABEL_NUSES (fallthrough_label);
1249 if (swap_jump (temp2, JUMP_LABEL (insn)))
1250 {
1251 delete_insn (insn);
1252 changed = 1;
1253 }
1254
1255 if (--LABEL_NUSES (fallthrough_label) == 0)
1256 delete_insn (fallthrough_label);
1257 }
1258 #endif
1259 /* Simplify if (...) {... x = 1;} if (x) ...
1260
1261 We recognize this case backwards.
1262
1263 TEMP is the test of `x';
1264 TEMP1 is the assignment to `x' at the end of the
1265 previous statement. */
1266 /* @@ This should call get_condition to find the values being
1267 compared, instead of looking for a COMPARE insn when HAVE_cc0
1268 is not defined. This would allow it to work on the m88k. */
1269 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1270 is not defined and the condition is tested by a separate compare
1271 insn. This is because the code below assumes that the result
1272 of the compare dies in the following branch. */
1273
1274 /* ??? This has to be turned off. The problem is that the
1275 unconditional jump might indirectly end up branching to the
1276 label between TEMP1 and TEMP. We can't detect this, in general,
1277 since it may become a jump to there after further optimizations.
1278 If that jump is done, it will be deleted, so we will retry
1279 this optimization in the next pass, thus an infinite loop.
1280
1281 The present code prevents this by putting the jump after the
1282 label, but this is not logically correct. */
1283 #if 0
1284 else if (this_is_condjump
1285 /* Safe to skip USE and CLOBBER insns here
1286 since they will not be deleted. */
1287 && (temp = prev_active_insn (insn))
1288 && no_labels_between_p (temp, insn)
1289 && GET_CODE (temp) == INSN
1290 && GET_CODE (PATTERN (temp)) == SET
1291 #ifdef HAVE_cc0
1292 && sets_cc0_p (PATTERN (temp)) == 1
1293 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1294 #else
1295 		   /* Temp must be a compare insn; we cannot accept a register
1296 to register move here, since it may not be simply a
1297 tst insn. */
1298 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1299 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1300 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1301 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1302 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1303 #endif
1304 /* May skip USE or CLOBBER insns here
1305 for checking for opportunity, since we
1306 take care of them later. */
1307 && (temp1 = prev_active_insn (temp))
1308 && GET_CODE (temp1) == INSN
1309 && GET_CODE (PATTERN (temp1)) == SET
1310 #ifdef HAVE_cc0
1311 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1312 #else
1313 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1314 == SET_DEST (PATTERN (temp1)))
1315 #endif
1316 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1317 /* If this isn't true, cse will do the job. */
1318 && ! no_labels_between_p (temp1, temp))
1319 {
1320 /* Get the if_then_else from the condjump. */
1321 rtx choice = SET_SRC (PATTERN (insn));
1322 if (GET_CODE (choice) == IF_THEN_ELSE
1323 && (GET_CODE (XEXP (choice, 0)) == EQ
1324 || GET_CODE (XEXP (choice, 0)) == NE))
1325 {
1326 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1327 rtx last_insn;
1328 rtx ultimate;
1329 rtx p;
1330
1331 /* Get the place that condjump will jump to
1332 if it is reached from here. */
1333 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1334 == want_nonzero)
1335 ultimate = XEXP (choice, 1);
1336 else
1337 ultimate = XEXP (choice, 2);
1338 /* Get it as a CODE_LABEL. */
1339 if (ultimate == pc_rtx)
1340 ultimate = get_label_after (insn);
1341 else
1342 /* Get the label out of the LABEL_REF. */
1343 ultimate = XEXP (ultimate, 0);
1344
1345 /* Insert the jump immediately before TEMP, specifically
1346 after the label that is between TEMP1 and TEMP. */
1347 last_insn = PREV_INSN (temp);
1348
1349 /* If we would be branching to the next insn, the jump
1350 		     would immediately be deleted and then re-inserted in
1351 a subsequent pass over the code. So don't do anything
1352 in that case. */
1353 if (next_active_insn (last_insn)
1354 != next_active_insn (ultimate))
1355 {
1356 emit_barrier_after (last_insn);
1357 p = emit_jump_insn_after (gen_jump (ultimate),
1358 last_insn);
1359 JUMP_LABEL (p) = ultimate;
1360 ++LABEL_NUSES (ultimate);
1361 if (INSN_UID (ultimate) < max_jump_chain
1362 			  && INSN_UID (p) < max_jump_chain)
1363 {
1364 jump_chain[INSN_UID (p)]
1365 = jump_chain[INSN_UID (ultimate)];
1366 jump_chain[INSN_UID (ultimate)] = p;
1367 }
1368 changed = 1;
1369 continue;
1370 }
1371 }
1372 }
1373 #endif
1374 /* Detect a conditional jump going to the same place
1375 as an immediately following unconditional jump. */
1376 else if (this_is_condjump
1377 && (temp = next_active_insn (insn)) != 0
1378 && simplejump_p (temp)
1379 && (next_active_insn (JUMP_LABEL (insn))
1380 == next_active_insn (JUMP_LABEL (temp))))
1381 {
1382 delete_jump (insn);
1383 changed = 1;
1384 continue;
1385 }
1386 /* Detect a conditional jump jumping over an unconditional jump. */
1387
1388 else if (this_is_condjump && ! this_is_simplejump
1389 && reallabelprev != 0
1390 && GET_CODE (reallabelprev) == JUMP_INSN
1391 && prev_active_insn (reallabelprev) == insn
1392 && no_labels_between_p (insn, reallabelprev)
1393 && simplejump_p (reallabelprev))
1394 {
1395 /* When we invert the unconditional jump, we will be
1396 decrementing the usage count of its old label.
1397 Make sure that we don't delete it now because that
1398 might cause the following code to be deleted. */
1399 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1400 rtx prev_label = JUMP_LABEL (insn);
1401
1402 ++LABEL_NUSES (prev_label);
1403
1404 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1405 {
1406 /* It is very likely that if there are USE insns before
1407 this jump, they hold REG_DEAD notes. These REG_DEAD
1408 notes are no longer valid due to this optimization,
1409 and will cause the life-analysis that following passes
1410 (notably delayed-branch scheduling) to think that
1411 these registers are dead when they are not.
1412
1413 To prevent this trouble, we just remove the USE insns
1414 from the insn chain. */
1415
1416 while (prev_uses && GET_CODE (prev_uses) == INSN
1417 && GET_CODE (PATTERN (prev_uses)) == USE)
1418 {
1419 rtx useless = prev_uses;
1420 prev_uses = prev_nonnote_insn (prev_uses);
1421 delete_insn (useless);
1422 }
1423
1424 delete_insn (reallabelprev);
1425 next = insn;
1426 changed = 1;
1427 }
1428
1429 /* We can now safely delete the label if it is unreferenced
1430 since the delete_insn above has deleted the BARRIER. */
1431 if (--LABEL_NUSES (prev_label) == 0)
1432 delete_insn (prev_label);
1433 continue;
1434 }
1435 else
1436 {
1437 /* Detect a jump to a jump. */
1438
1439 nlabel = follow_jumps (JUMP_LABEL (insn));
1440 if (nlabel != JUMP_LABEL (insn)
1441 && redirect_jump (insn, nlabel))
1442 {
1443 changed = 1;
1444 next = insn;
1445 }
1446
1447 /* Look for if (foo) bar; else break; */
1448 /* The insns look like this:
1449 insn = condjump label1;
1450 ...range1 (some insns)...
1451 jump label2;
1452 label1:
1453 ...range2 (some insns)...
1454 jump somewhere unconditionally
1455 label2: */
1456 {
1457 rtx label1 = next_label (insn);
1458 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1459 /* Don't do this optimization on the first round, so that
1460 jump-around-a-jump gets simplified before we ask here
1461 whether a jump is unconditional.
1462
1463 Also don't do it when we are called after reload since
1464 it will confuse reorg. */
1465 if (! first
1466 && (reload_completed ? ! flag_delayed_branch : 1)
1467 /* Make sure INSN is something we can invert. */
1468 && condjump_p (insn)
1469 && label1 != 0
1470 && JUMP_LABEL (insn) == label1
1471 && LABEL_NUSES (label1) == 1
1472 && GET_CODE (range1end) == JUMP_INSN
1473 && simplejump_p (range1end))
1474 {
1475 rtx label2 = next_label (label1);
1476 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1477 if (range1end != range2end
1478 && JUMP_LABEL (range1end) == label2
1479 && GET_CODE (range2end) == JUMP_INSN
1480 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1481 /* Invert the jump condition, so we
1482 still execute the same insns in each case. */
1483 && invert_jump (insn, label1))
1484 {
1485 rtx range1beg = next_active_insn (insn);
1486 rtx range2beg = next_active_insn (label1);
1487 rtx range1after, range2after;
1488 rtx range1before, range2before;
1489
1490 /* Include in each range any line number before it. */
1491 while (PREV_INSN (range1beg)
1492 && GET_CODE (PREV_INSN (range1beg)) == NOTE
1493 && NOTE_LINE_NUMBER (PREV_INSN (range1beg)) > 0)
1494 range1beg = PREV_INSN (range1beg);
1495
1496 while (PREV_INSN (range2beg)
1497 && GET_CODE (PREV_INSN (range2beg)) == NOTE
1498 && NOTE_LINE_NUMBER (PREV_INSN (range2beg)) > 0)
1499 range2beg = PREV_INSN (range2beg);
1500
1501 /* Don't move NOTEs for blocks or loops; shift them
1502 outside the ranges, where they'll stay put. */
1503 range1beg = squeeze_notes (range1beg, range1end);
1504 range2beg = squeeze_notes (range2beg, range2end);
1505
1506 /* Get current surrounds of the 2 ranges. */
1507 range1before = PREV_INSN (range1beg);
1508 range2before = PREV_INSN (range2beg);
1509 range1after = NEXT_INSN (range1end);
1510 range2after = NEXT_INSN (range2end);
1511
1512 /* Splice range2 where range1 was. */
1513 NEXT_INSN (range1before) = range2beg;
1514 PREV_INSN (range2beg) = range1before;
1515 NEXT_INSN (range2end) = range1after;
1516 PREV_INSN (range1after) = range2end;
1517 /* Splice range1 where range2 was. */
1518 NEXT_INSN (range2before) = range1beg;
1519 PREV_INSN (range1beg) = range2before;
1520 NEXT_INSN (range1end) = range2after;
1521 PREV_INSN (range2after) = range1end;
1522 changed = 1;
1523 continue;
1524 }
1525 }
1526 }
1527
1528 /* Now that the jump has been tensioned,
1529 try cross jumping: check for identical code
1530 before the jump and before its target label. */
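	  /* For intuition (illustrative, source level): given

		A; B; goto L;	...	A; B; L: ...

	     the copy of `A; B' before the jump matches the copy before
	     the label, so the jump can be retargeted to just before the
	     label's copy and its own copy deleted.  */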
1531
1532 /* First, cross jumping of conditional jumps: */
1533
1534 if (cross_jump && condjump_p (insn))
1535 {
1536 rtx newjpos, newlpos;
1537 rtx x = prev_real_insn (JUMP_LABEL (insn));
1538
1539 /* A conditional jump may be crossjumped
1540 only if the place it jumps to follows
1541 an opposing jump that comes back here. */
1542
1543 if (x != 0 && ! jump_back_p (x, insn))
1544 /* We have no opposing jump;
1545 cannot cross jump this insn. */
1546 x = 0;
1547
1548 newjpos = 0;
1549 /* TARGET is nonzero if it is ok to cross jump
1550 		 to code before TARGET.  If so, see if it matches.  */
1551 if (x != 0)
1552 find_cross_jump (insn, x, 2,
1553 &newjpos, &newlpos);
1554
1555 if (newjpos != 0)
1556 {
1557 do_cross_jump (insn, newjpos, newlpos);
1558 /* Make the old conditional jump
1559 into an unconditional one. */
1560 SET_SRC (PATTERN (insn))
1561 = gen_rtx (LABEL_REF, VOIDmode, JUMP_LABEL (insn));
1562 INSN_CODE (insn) = -1;
1563 emit_barrier_after (insn);
1564 /* Add to jump_chain unless this is a new label
1565 whose UID is too large. */
1566 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
1567 {
1568 jump_chain[INSN_UID (insn)]
1569 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1570 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
1571 }
1572 changed = 1;
1573 next = insn;
1574 }
1575 }
1576
1577 /* Cross jumping of unconditional jumps:
1578 a few differences. */
1579
1580 if (cross_jump && simplejump_p (insn))
1581 {
1582 rtx newjpos, newlpos;
1583 rtx target;
1584
1585 newjpos = 0;
1586
1587 /* TARGET is nonzero if it is ok to cross jump
1588 		 to code before TARGET.  If so, see if it matches.  */
1589 find_cross_jump (insn, JUMP_LABEL (insn), 1,
1590 &newjpos, &newlpos);
1591
1592 	      /* If we cannot cross jump to code before the label,
1593 see if we can cross jump to another jump to
1594 the same label. */
1595 /* Try each other jump to this label. */
1596 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
1597 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1598 target != 0 && newjpos == 0;
1599 target = jump_chain[INSN_UID (target)])
1600 if (target != insn
1601 && JUMP_LABEL (target) == JUMP_LABEL (insn)
1602 /* Ignore TARGET if it's deleted. */
1603 && ! INSN_DELETED_P (target))
1604 find_cross_jump (insn, target, 2,
1605 &newjpos, &newlpos);
1606
1607 if (newjpos != 0)
1608 {
1609 do_cross_jump (insn, newjpos, newlpos);
1610 changed = 1;
1611 next = insn;
1612 }
1613 }
1614
1615 /* This code was dead in the previous jump.c! */
1616 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
1617 {
1618 /* Return insns all "jump to the same place"
1619 so we can cross-jump between any two of them. */
1620
1621 rtx newjpos, newlpos, target;
1622
1623 newjpos = 0;
1624
1625 	      /* If we cannot cross jump to code before the label,
1626 see if we can cross jump to another jump to
1627 the same label. */
1628 /* Try each other jump to this label. */
1629 for (target = jump_chain[0];
1630 target != 0 && newjpos == 0;
1631 target = jump_chain[INSN_UID (target)])
1632 if (target != insn
1633 && ! INSN_DELETED_P (target)
1634 && GET_CODE (PATTERN (target)) == RETURN)
1635 find_cross_jump (insn, target, 2,
1636 &newjpos, &newlpos);
1637
1638 if (newjpos != 0)
1639 {
1640 do_cross_jump (insn, newjpos, newlpos);
1641 changed = 1;
1642 next = insn;
1643 }
1644 }
1645 }
1646 }
1647
1648 first = 0;
1649 }
1650
1651 /* Delete extraneous line number notes.
1652 Note that two consecutive notes for different lines are not really
1653 extraneous. There should be some indication where that line belonged,
1654 even if it became empty. */
1655
1656 {
1657 rtx last_note = 0;
1658
1659 for (insn = f; insn; insn = NEXT_INSN (insn))
1660 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
1661 {
1662 /* Delete this note if it is identical to previous note. */
1663 if (last_note
1664 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
1665 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
1666 {
1667 delete_insn (insn);
1668 continue;
1669 }
1670
1671 last_note = insn;
1672 }
1673 }
1674
1675 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
1676 If so, delete it, and record that this function can drop off the end. */
1677
1678 insn = last_insn;
1679 {
1680 int n_labels = 1;
1681 while (insn
1682 /* One label can follow the end-note: the return label. */
1683 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
1684 /* Ordinary insns can follow it if returning a structure. */
1685 || GET_CODE (insn) == INSN
1686 /* If machine uses explicit RETURN insns, no epilogue,
1687 then one of them follows the note. */
1688 || (GET_CODE (insn) == JUMP_INSN
1689 && GET_CODE (PATTERN (insn)) == RETURN)
1690 /* Other kinds of notes can follow also. */
1691 || (GET_CODE (insn) == NOTE
1692 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
1693 insn = PREV_INSN (insn);
1694 }
1695
1696 /* Report if control can fall through at the end of the function. */
1697 if (insn && GET_CODE (insn) == NOTE
1698 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
1699 {
1700 can_reach_end = 1;
1701 delete_insn (insn);
1702 }
1703
1704 /* Show JUMP_CHAIN no longer valid. */
1705 jump_chain = 0;
1706 }
1707 \f
1708 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
1709 jump. Assume that this unconditional jump is to the exit test code. If
1710 the code is sufficiently simple, make a copy of it before INSN,
1711 followed by a jump to the exit of the loop. Then delete the unconditional
1712 jump after INSN.
1713
1714 Note that it is possible we can get confused here if the jump immediately
1715 after the loop start branches outside the loop but within an outer loop.
1716 If we are near the exit of that loop, we will copy its exit test. This
1717 will not generate incorrect code, but could suppress some optimizations.
1718 However, such cases are degenerate loops anyway.
1719
1720 Return 1 if we made the change, else 0.
1721
1722 This is only safe immediately after a regscan pass because it uses the
1723 values of regno_first_uid and regno_last_uid. */
1724
1725 static int
1726 duplicate_loop_exit_test (loop_start)
1727 rtx loop_start;
1728 {
1729 rtx insn, set, p;
1730 rtx copy, link;
1731 int num_insns = 0;
1732 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
1733 rtx lastexit;
1734 int max_reg = max_reg_num ();
1735 rtx *reg_map = 0;
1736
1737 /* Scan the exit code. We do not perform this optimization if any insn:
1738
1739 is a CALL_INSN
1740 is a CODE_LABEL
1741 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
1742 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
1743 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
1744 is not valid
1745
1746 Also, don't do this if the exit code is more than 20 insns. */
1747
1748 for (insn = exitcode;
1749 insn
1750 && ! (GET_CODE (insn) == NOTE
1751 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
1752 insn = NEXT_INSN (insn))
1753 {
1754 switch (GET_CODE (insn))
1755 {
1756 case CODE_LABEL:
1757 case CALL_INSN:
1758 return 0;
1759 case NOTE:
1760 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1761 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1762 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
1763 return 0;
1764 break;
1765 case JUMP_INSN:
1766 case INSN:
1767 if (++num_insns > 20
1768 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1769 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
1770 return 0;
1771 break;
1772 }
1773 }
1774
1775 /* If INSN is zero, we never found the loop end note, so give up. */
1776 if (insn == 0)
1777 return 0;
1778
1779 lastexit = insn;
1780
1781 /* See if any insn sets a register only used in the loop exit code and
1782 not a user variable. If so, replace it with a new register. */
1783 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1784 if (GET_CODE (insn) == INSN
1785 && (set = single_set (insn)) != 0
1786 && GET_CODE (SET_DEST (set)) == REG
1787 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1788 && regno_first_uid[REGNO (SET_DEST (set))] == INSN_UID (insn))
1789 {
1790 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
1791 if (regno_last_uid[REGNO (SET_DEST (set))] == INSN_UID (p))
1792 break;
1793
1794 if (p != lastexit)
1795 {
1796 /* We can do the replacement. Allocate reg_map if this is the
1797 first replacement we found. */
1798 if (reg_map == 0)
1799 {
1800 reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
1801 bzero (reg_map, max_reg * sizeof (rtx));
1802 }
1803
1804 REG_LOOP_TEST_P (SET_DEST (set)) = 1;
1805
1806 reg_map[REGNO (SET_DEST (set))]
1807 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
1808 }
1809 }
1810
1811 /* Now copy each insn. */
1812 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1813 switch (GET_CODE (insn))
1814 {
1815 case BARRIER:
1816 copy = emit_barrier_before (loop_start);
1817 break;
1818 case NOTE:
1819 /* Only copy line-number notes. */
1820 if (NOTE_LINE_NUMBER (insn) >= 0)
1821 {
1822 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
1823 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
1824 }
1825 break;
1826
1827 case INSN:
1828 copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
1829 if (reg_map)
1830 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1831
1832 mark_jump_label (PATTERN (copy), copy, 0);
1833
1834 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
1835 make them. */
1836 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1837 if (REG_NOTE_KIND (link) != REG_LABEL)
1838 REG_NOTES (copy)
1839 = copy_rtx (gen_rtx (EXPR_LIST, REG_NOTE_KIND (link),
1840 XEXP (link, 0), REG_NOTES (copy)));
1841 if (reg_map && REG_NOTES (copy))
1842 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1843 break;
1844
1845 case JUMP_INSN:
1846 copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
1847 if (reg_map)
1848 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1849 mark_jump_label (PATTERN (copy), copy, 0);
1850 if (REG_NOTES (insn))
1851 {
1852 REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
1853 if (reg_map)
1854 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1855 }
1856
1857 /* If this is a simple jump, add it to the jump chain. */
1858
1859 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
1860 && simplejump_p (copy))
1861 {
1862 jump_chain[INSN_UID (copy)]
1863 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1864 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1865 }
1866 break;
1867
1868 default:
1869 abort ();
1870 }
1871
1872 /* Now clean up by emitting a jump to the end label and deleting the jump
1873 at the start of the loop. */
1874 if (GET_CODE (copy) != BARRIER)
1875 {
1876 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
1877 loop_start);
1878 mark_jump_label (PATTERN (copy), copy, 0);
1879 if (INSN_UID (copy) < max_jump_chain
1880 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
1881 {
1882 jump_chain[INSN_UID (copy)]
1883 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1884 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1885 }
1886 emit_barrier_before (loop_start);
1887 }
1888
1889 delete_insn (next_nonnote_insn (loop_start));
1890
1891 /* Mark the exit code as the virtual top of the converted loop. */
1892 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
1893
1894 return 1;
1895 }
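/* An illustrative sketch (not part of the pass itself): at the source
   level, the transformation above is roughly the classic while-to-
   do-while rotation.  A loop emitted as

       beg:   jump test		-- the unconditional jump deleted above
       body:  ...
       test:  if (cond) jump body
       end:

   becomes

       beg:   if (! cond) jump end	-- copied exit test
       body:  ...
       test:  if (cond) jump body
       end:

   so the body is entered only when the test is already known to hold
   and the loop has a single fall-through entry, which later loop
   optimizations handle much better.  */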
1896 \f
1897 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
1898 loop-end notes between START and END out before START. Assume that
1899 END is not such a note. START may be such a note. Returns the value
1900 of the new starting insn, which may be different if the original start
1901 was such a note. */
1902
1903 rtx
1904 squeeze_notes (start, end)
1905 rtx start, end;
1906 {
1907 rtx insn;
1908 rtx next;
1909
1910 for (insn = start; insn != end; insn = next)
1911 {
1912 next = NEXT_INSN (insn);
1913 if (GET_CODE (insn) == NOTE
1914 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
1915 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1916 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1917 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
1918 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
1919 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
1920 {
1921 if (insn == start)
1922 start = next;
1923 else
1924 {
1925 rtx prev = PREV_INSN (insn);
1926 PREV_INSN (insn) = PREV_INSN (start);
1927 NEXT_INSN (insn) = start;
1928 NEXT_INSN (PREV_INSN (insn)) = insn;
1929 PREV_INSN (NEXT_INSN (insn)) = insn;
1930 NEXT_INSN (prev) = next;
1931 PREV_INSN (next) = prev;
1932 }
1933 }
1934 }
1935
1936 return start;
1937 }
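/* A minimal, self-contained sketch (illustrative only; the names here
   are hypothetical) of the doubly-linked-list splice performed above:
   unlink NODE and relink it just before BEFORE.  The real code does
   exactly this with the PREV_INSN/NEXT_INSN fields, assuming all the
   neighbors involved exist.  */
#if 0
struct node { struct node *prev, *next; };

static void
move_before (node, before)
     struct node *node, *before;
{
  struct node *prev = node->prev;
  struct node *next = node->next;

  /* Unlink NODE from its current position.  */
  prev->next = next;
  next->prev = prev;

  /* Relink NODE immediately before BEFORE.  */
  node->prev = before->prev;
  node->next = before;
  node->prev->next = node;
  before->prev = node;
}
#endif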
1938 \f
1939 /* Compare the instructions before insn E1 with those before E2
1940 to find an opportunity for cross jumping.
1941 (This means detecting identical sequences of insns followed by
1942 jumps to the same place, or followed by a label and a jump
1943 to that label, and replacing one with a jump to the other.)
1944
1945 Assume E1 is a jump that jumps to label E2
1946 (that is not always true but it might as well be).
1947 Find the longest possible equivalent sequences
1948 and store the first insns of those sequences into *F1 and *F2.
1949 Store zero there if no equivalent preceding instructions are found.
1950
1951 We give up if we find a label in stream 1.
1952 Actually we could transfer that label into stream 2. */
1953
1954 static void
1955 find_cross_jump (e1, e2, minimum, f1, f2)
1956 rtx e1, e2;
1957 int minimum;
1958 rtx *f1, *f2;
1959 {
1960 register rtx i1 = e1, i2 = e2;
1961 register rtx p1, p2;
1962 int lose = 0;
1963
1964 rtx last1 = 0, last2 = 0;
1965 rtx afterlast1 = 0, afterlast2 = 0;
1966 rtx prev1;
1967
1968 *f1 = 0;
1969 *f2 = 0;
1970
1971 while (1)
1972 {
1973 i1 = prev_nonnote_insn (i1);
1974
1975 i2 = PREV_INSN (i2);
1976 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
1977 i2 = PREV_INSN (i2);
1978
1979 if (i1 == 0)
1980 break;
1981
1982 /* Don't allow the range of insns preceding E1 or E2
1983 to include the other (E2 or E1). */
1984 if (i2 == e1 || i1 == e2)
1985 break;
1986
1987 /* If we will get to this code by jumping, those jumps will be
1988 tensioned to go directly to the new label (before I2),
1989 so this cross-jumping won't cost extra. So reduce the minimum. */
1990 if (GET_CODE (i1) == CODE_LABEL)
1991 {
1992 --minimum;
1993 break;
1994 }
1995
1996 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
1997 break;
1998
1999 p1 = PATTERN (i1);
2000 p2 = PATTERN (i2);
2001
2002 #ifdef STACK_REGS
2003 /* If cross_jump_death_matters is not 0, the insn's mode
2004 indicates whether or not the insn contains any stack-like
2005 regs. */
2006
2007 if (cross_jump_death_matters && GET_MODE (i1) == QImode)
2008 {
2009 /* If register stack conversion has already been done, then
2010 death notes must also be compared before it is certain that
2011 the two instruction streams match. */
2012
2013 rtx note;
2014 HARD_REG_SET i1_regset, i2_regset;
2015
2016 CLEAR_HARD_REG_SET (i1_regset);
2017 CLEAR_HARD_REG_SET (i2_regset);
2018
2019 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2020 if (REG_NOTE_KIND (note) == REG_DEAD
2021 && STACK_REG_P (XEXP (note, 0)))
2022 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2023
2024 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2025 if (REG_NOTE_KIND (note) == REG_DEAD
2026 && STACK_REG_P (XEXP (note, 0)))
2027 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2028
2029 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2030
2031 lose = 1;
2032
2033 done:
2034 ;
2035 }
2036 #endif
2037
2038 if (lose || GET_CODE (p1) != GET_CODE (p2)
2039 || ! rtx_renumbered_equal_p (p1, p2))
2040 {
2041 /* The following code helps take care of G++ cleanups. */
2042 rtx equiv1;
2043 rtx equiv2;
2044
2045 if (!lose && GET_CODE (p1) == GET_CODE (p2)
2046 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2047 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2048 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2049 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
2050 /* If the equivalences are not to a constant, they may
2051 reference pseudos that no longer exist, so we can't
2052 use them. */
2053 && CONSTANT_P (XEXP (equiv1, 0))
2054 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2055 {
2056 rtx s1 = single_set (i1);
2057 rtx s2 = single_set (i2);
2058 if (s1 != 0 && s2 != 0
2059 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
2060 {
2061 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
2062 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
2063 if (! rtx_renumbered_equal_p (p1, p2))
2064 cancel_changes (0);
2065 else if (apply_change_group ())
2066 goto win;
2067 }
2068 }
2069
2070 /* Insns fail to match; cross jumping is limited to the following
2071 insns. */
2072
2073 #ifdef HAVE_cc0
2074 /* Don't allow the insn after a compare to be shared by
2075 cross-jumping unless the compare is also shared.
2076 Here, if either of these non-matching insns is a compare,
2077 exclude the following insn from possible cross-jumping. */
2078 if (sets_cc0_p (p1) || sets_cc0_p (p2))
2079 last1 = afterlast1, last2 = afterlast2, ++minimum;
2080 #endif
2081
2082 /* If cross-jumping here will feed a jump-around-jump
2083 optimization, this jump won't cost extra, so reduce
2084 the minimum. */
2085 if (GET_CODE (i1) == JUMP_INSN
2086 && JUMP_LABEL (i1)
2087 && prev_real_insn (JUMP_LABEL (i1)) == e1)
2088 --minimum;
2089 break;
2090 }
2091
2092 win:
2093 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
2094 {
2095 /* Ok, this insn is potentially includable in a cross-jump here. */
2096 afterlast1 = last1, afterlast2 = last2;
2097 last1 = i1, last2 = i2, --minimum;
2098 }
2099 }
2100
2101 /* We have to be careful that we do not cross-jump into the middle of
2102 a USE-CALL_INSN-CLOBBER sequence. This sequence is used instead of
2103 putting the USE and CLOBBERs inside the CALL_INSN. The delay slot
2104 scheduler needs to know what registers are used and modified by the
2105 CALL_INSN and needs the adjacent USE and CLOBBERs to do so.
2106
2107 ??? At some point we should probably change this so that these are
2108 part of the CALL_INSN. The way we are doing it now is a kludge that
2109 is now causing trouble. */
2110
2111 if (last1 != 0 && GET_CODE (last1) == CALL_INSN
2112 && (prev1 = prev_nonnote_insn (last1))
2113 && GET_CODE (prev1) == INSN
2114 && GET_CODE (PATTERN (prev1)) == USE)
2115 {
2116 /* Remove this CALL_INSN from the range we can cross-jump. */
2117 last1 = next_real_insn (last1);
2118 last2 = next_real_insn (last2);
2119
2120 minimum++;
2121 }
2122
2123 /* Skip past CLOBBERS since they may be right after a CALL_INSN. It
2124 isn't worth checking for the CALL_INSN. */
2125 while (last1 != 0 && GET_CODE (PATTERN (last1)) == CLOBBER)
2126 last1 = next_real_insn (last1), last2 = next_real_insn (last2);
2127
2128 if (minimum <= 0 && last1 != 0 && last1 != e1)
2129 *f1 = last1, *f2 = last2;
2130 }
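/* An illustrative sketch (not part of the pass itself): cross jumping
   merges identical tails.  Given two code sequences whose final insns
   match and that end up at the same label L,

       a = x + y;  b = a * 2;  jump L;
       ...
       a = x + y;  b = a * 2;  jump L;

   one copy of the shared tail is kept, a label M is placed before it,
   and the other jump is redirected there:

       jump M;
       ...
       M:  a = x + y;  b = a * 2;  jump L;

   find_cross_jump only locates the matching region (*F1 and *F2); the
   actual rewriting is done by do_cross_jump, below.  */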
2131
2132 static void
2133 do_cross_jump (insn, newjpos, newlpos)
2134 rtx insn, newjpos, newlpos;
2135 {
2136 /* Find an existing label at this point
2137 or make a new one if there is none. */
2138 register rtx label = get_label_before (newlpos);
2139
2140 /* Make the same jump insn jump to the new point. */
2141 if (GET_CODE (PATTERN (insn)) == RETURN)
2142 {
2143 /* Remove from jump chain of returns. */
2144 delete_from_jump_chain (insn);
2145 /* Change the insn. */
2146 PATTERN (insn) = gen_jump (label);
2147 INSN_CODE (insn) = -1;
2148 JUMP_LABEL (insn) = label;
2149 LABEL_NUSES (label)++;
2150 /* Add it to the new label's jump chain. */
2151 if (INSN_UID (label) < max_jump_chain
2152 && INSN_UID (insn) < max_jump_chain)
2153 {
2154 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
2155 jump_chain[INSN_UID (label)] = insn;
2156 }
2157 }
2158 else
2159 redirect_jump (insn, label);
2160
2161 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
2162 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
2163 the NEWJPOS stream. */
2164
2165 while (newjpos != insn)
2166 {
2167 rtx lnote;
2168
2169 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
2170 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
2171 || REG_NOTE_KIND (lnote) == REG_EQUIV)
2172 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
2173 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
2174 remove_note (newlpos, lnote);
2175
2176 delete_insn (newjpos);
2177 newjpos = next_real_insn (newjpos);
2178 newlpos = next_real_insn (newlpos);
2179 }
2180 }
2181 \f
2182 /* Return the label before INSN, or put a new label there. */
2183
2184 rtx
2185 get_label_before (insn)
2186 rtx insn;
2187 {
2188 rtx label;
2189
2190 /* Find an existing label at this point
2191 or make a new one if there is none. */
2192 label = prev_nonnote_insn (insn);
2193
2194 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2195 {
2196 rtx prev = PREV_INSN (insn);
2197
2198 /* Don't put a label between a CALL_INSN and USE insns that precede
2199 it. */
2200
2201 if (GET_CODE (insn) == CALL_INSN
2202 || (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE
2203 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN))
2204 while (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == USE)
2205 prev = PREV_INSN (prev);
2206
2207 label = gen_label_rtx ();
2208 emit_label_after (label, prev);
2209 LABEL_NUSES (label) = 0;
2210 }
2211 return label;
2212 }
2213
2214 /* Return the label after INSN, or put a new label there. */
2215
2216 rtx
2217 get_label_after (insn)
2218 rtx insn;
2219 {
2220 rtx label;
2221
2222 /* Find an existing label at this point
2223 or make a new one if there is none. */
2224 label = next_nonnote_insn (insn);
2225
2226 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2227 {
2228 /* Don't put a label between a CALL_INSN and CLOBBER insns
2229 following it. */
2230
2231 if (GET_CODE (insn) == CALL_INSN
2232 || (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE
2233 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN))
2234 while (GET_CODE (NEXT_INSN (insn)) == INSN
2235 && GET_CODE (PATTERN (NEXT_INSN (insn))) == CLOBBER)
2236 insn = NEXT_INSN (insn);
2237
2238 label = gen_label_rtx ();
2239 emit_label_after (label, insn);
2240 LABEL_NUSES (label) = 0;
2241 }
2242 return label;
2243 }
2244 \f
2245 /* Return 1 if INSN is a jump that jumps to right after TARGET
2246 only on the condition that TARGET itself would drop through.
2247 Assumes that TARGET is a conditional jump. */
2248
2249 static int
2250 jump_back_p (insn, target)
2251 rtx insn, target;
2252 {
2253 rtx cinsn, ctarget;
2254 enum rtx_code codei, codet;
2255
2256 if (simplejump_p (insn) || ! condjump_p (insn)
2257 || simplejump_p (target)
2258 || target != prev_real_insn (JUMP_LABEL (insn)))
2259 return 0;
2260
2261 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
2262 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
2263
2264 codei = GET_CODE (cinsn);
2265 codet = GET_CODE (ctarget);
2266
2267 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
2268 {
2269 if (! can_reverse_comparison_p (cinsn, insn))
2270 return 0;
2271 codei = reverse_condition (codei);
2272 }
2273
2274 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
2275 {
2276 if (! can_reverse_comparison_p (ctarget, target))
2277 return 0;
2278 codet = reverse_condition (codet);
2279 }
2280
2281 return (codei == codet
2282 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
2283 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
2284 }
2285 \f
2286 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
2287 return non-zero if it is safe to reverse this comparison. It is safe if
2288 our floating-point format is not IEEE, if this is an NE or EQ comparison,
2289 or if this is known to be an integer comparison. */
2290
2291 int
2292 can_reverse_comparison_p (comparison, insn)
2293 rtx comparison;
2294 rtx insn;
2295 {
2296 rtx arg0;
2297
2298 /* If this is not actually a comparison, we can't reverse it. */
2299 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
2300 return 0;
2301
2302 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
2303 /* If this is an NE comparison, it is safe to reverse it to an EQ
2304 comparison and vice versa, even for floating point. If no operands
2305 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
2306 always false and NE is always true, so the reversal is also valid. */
2307 || GET_CODE (comparison) == NE
2308 || GET_CODE (comparison) == EQ)
2309 return 1;
2310
2311 arg0 = XEXP (comparison, 0);
2312
2313 /* Make sure ARG0 is one of the actual objects being compared. If we
2314 can't do this, we can't be sure the comparison can be reversed.
2315
2316 Handle cc0 and a MODE_CC register. */
2317 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
2318 #ifdef HAVE_cc0
2319 || arg0 == cc0_rtx
2320 #endif
2321 )
2322 {
2323 rtx prev = prev_nonnote_insn (insn);
2324 rtx set = single_set (prev);
2325
2326 if (set == 0 || SET_DEST (set) != arg0)
2327 return 0;
2328
2329 arg0 = SET_SRC (set);
2330
2331 if (GET_CODE (arg0) == COMPARE)
2332 arg0 = XEXP (arg0, 0);
2333 }
2334
2335 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
2336 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
2337 return (GET_CODE (arg0) == CONST_INT
2338 || (GET_MODE (arg0) != VOIDmode
2339 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
2340 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
2341 }
2342
2343 /* Given an rtx-code for a comparison, return the code
2344 for the negated comparison.
2345 WATCH OUT! reverse_condition is not safe to use on a jump
2346 that might be acting on the results of an IEEE floating point comparison,
2347 because of the special treatment of non-signaling NaNs in comparisons.
2348 Use can_reverse_comparison_p to be sure. */
2349
2350 enum rtx_code
2351 reverse_condition (code)
2352 enum rtx_code code;
2353 {
2354 switch (code)
2355 {
2356 case EQ:
2357 return NE;
2358
2359 case NE:
2360 return EQ;
2361
2362 case GT:
2363 return LE;
2364
2365 case GE:
2366 return LT;
2367
2368 case LT:
2369 return GE;
2370
2371 case LE:
2372 return GT;
2373
2374 case GTU:
2375 return LEU;
2376
2377 case GEU:
2378 return LTU;
2379
2380 case LTU:
2381 return GEU;
2382
2383 case LEU:
2384 return GTU;
2385
2386 default:
2387 abort ();
2388 return UNKNOWN;
2389 }
2390 }
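/* A self-contained demonstration of the warning above (illustrative
   only, not part of this file): under IEEE arithmetic a comparison
   and its "reverse" can both be false, so rewriting `if (a < b)' as
   `if (! (a >= b))' changes behavior when a NaN is involved.  */
#if 0
#include <stdio.h>

int
main ()
{
  volatile double zero = 0.0;
  double a = zero / zero;	/* a quiet NaN, under IEEE arithmetic */
  double b = 0.0;

  /* Prints "0 0": (a < b) is false, but so is (a >= b).  */
  printf ("%d %d\n", a < b, a >= b);
  return 0;
}
#endif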
2391
2392 /* Similar, but return the code when two operands of a comparison are swapped.
2393 This IS safe for IEEE floating-point. */
2394
2395 enum rtx_code
2396 swap_condition (code)
2397 enum rtx_code code;
2398 {
2399 switch (code)
2400 {
2401 case EQ:
2402 case NE:
2403 return code;
2404
2405 case GT:
2406 return LT;
2407
2408 case GE:
2409 return LE;
2410
2411 case LT:
2412 return GT;
2413
2414 case LE:
2415 return GE;
2416
2417 case GTU:
2418 return LTU;
2419
2420 case GEU:
2421 return LEU;
2422
2423 case LTU:
2424 return GTU;
2425
2426 case LEU:
2427 return GEU;
2428
2429 default:
2430 abort ();
2431 return UNKNOWN;
2432 }
2433 }
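/* By contrast with reversing, swapping is exact even for IEEE values:
   `a < b' and `b > a' agree for every pair of operands, NaNs included
   (both are false when either operand is a NaN).  An illustrative
   check (not part of this file):  */
#if 0
#include <assert.h>

static void
check_swap (a, b)
     double a, b;
{
  assert ((a < b) == (b > a));
  assert ((a <= b) == (b >= a));
}
#endif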
2434
2435 /* Given a comparison CODE, return the corresponding unsigned comparison.
2436 If CODE is an equality comparison or already an unsigned comparison,
2437 CODE is returned. */
2438
2439 enum rtx_code
2440 unsigned_condition (code)
2441 enum rtx_code code;
2442 {
2443 switch (code)
2444 {
2445 case EQ:
2446 case NE:
2447 case GTU:
2448 case GEU:
2449 case LTU:
2450 case LEU:
2451 return code;
2452
2453 case GT:
2454 return GTU;
2455
2456 case GE:
2457 return GEU;
2458
2459 case LT:
2460 return LTU;
2461
2462 case LE:
2463 return LEU;
2464
2465 default:
2466 abort ();
2467 }
2468 }
2469
2470 /* Similarly, return the signed version of a comparison. */
2471
2472 enum rtx_code
2473 signed_condition (code)
2474 enum rtx_code code;
2475 {
2476 switch (code)
2477 {
2478 case EQ:
2479 case NE:
2480 case GT:
2481 case GE:
2482 case LT:
2483 case LE:
2484 return code;
2485
2486 case GTU:
2487 return GT;
2488
2489 case GEU:
2490 return GE;
2491
2492 case LTU:
2493 return LT;
2494
2495 case LEU:
2496 return LE;
2497
2498 default:
2499 abort ();
2500 }
2501 }
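/* GT and GTU (and the other signed/unsigned pairs) must remain
   distinct codes because signed and unsigned comparisons of the same
   bits can disagree.  A self-contained demonstration (illustrative
   only, not part of this file):  */
#if 0
#include <stdio.h>

int
main ()
{
  int si = -1;
  unsigned int ui = (unsigned int) -1;	/* the same bit pattern */

  /* Prints "1 0": signed, -1 < 1 is true; unsigned, the same bits
     are the maximum value, so the comparison is false.  */
  printf ("%d %d\n", si < 1, ui < 1u);
  return 0;
}
#endif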
2502 \f
2503 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2504 truth of CODE1 implies the truth of CODE2. */
2505
2506 int
2507 comparison_dominates_p (code1, code2)
2508 enum rtx_code code1, code2;
2509 {
2510 if (code1 == code2)
2511 return 1;
2512
2513 switch (code1)
2514 {
2515 case EQ:
2516 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
2517 return 1;
2518 break;
2519
2520 case LT:
2521 if (code2 == LE)
2522 return 1;
2523 break;
2524
2525 case GT:
2526 if (code2 == GE)
2527 return 1;
2528 break;
2529
2530 case LTU:
2531 if (code2 == LEU)
2532 return 1;
2533 break;
2534
2535 case GTU:
2536 if (code2 == GEU)
2537 return 1;
2538 break;
2539 }
2540
2541 return 0;
2542 }
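/* For example, the truth of `a == b' implies the truth of `a <= b',
   so comparison_dominates_p (EQ, LE) is 1, while the converse call
   comparison_dominates_p (LE, EQ) is 0.  thread_jumps, below, relies
   on this to tell when the outcome of one conditional branch settles
   the outcome of a later one.  */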
2543 \f
2544 /* Return 1 if INSN is an unconditional jump and nothing else. */
2545
2546 int
2547 simplejump_p (insn)
2548 rtx insn;
2549 {
2550 return (GET_CODE (insn) == JUMP_INSN
2551 && GET_CODE (PATTERN (insn)) == SET
2552 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2553 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2554 }
2555
2556 /* Return nonzero if INSN is a (possibly) conditional jump
2557 and nothing more. */
2558
2559 int
2560 condjump_p (insn)
2561 rtx insn;
2562 {
2563 register rtx x = PATTERN (insn);
2564 if (GET_CODE (x) != SET)
2565 return 0;
2566 if (GET_CODE (SET_DEST (x)) != PC)
2567 return 0;
2568 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2569 return 1;
2570 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2571 return 0;
2572 if (XEXP (SET_SRC (x), 2) == pc_rtx
2573 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2574 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2575 return 1;
2576 if (XEXP (SET_SRC (x), 1) == pc_rtx
2577 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2578 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2579 return 1;
2580 return 0;
2581 }
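/* The jump patterns accepted above, written out (illustrative RTL):

       (set (pc) (label_ref L))				    unconditional
       (set (pc) (if_then_else COND (label_ref L) (pc)))   branch if COND
       (set (pc) (if_then_else COND (pc) (label_ref L)))   branch unless COND

   with (return) also allowed in place of a label_ref arm.  A PARALLEL
   containing such a SET is rejected, since that insn would do more
   than jump.  */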
2582
2583 /* Return 1 if X is an RTX that does nothing but set the condition codes
2584 and CLOBBER or USE registers.
2585 Return -1 if X does explicitly set the condition codes,
2586 but also does other things. */
2587
2588 int
2589 sets_cc0_p (x)
2590 rtx x;
2591 {
2592 #ifdef HAVE_cc0
2593 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
2594 return 1;
2595 if (GET_CODE (x) == PARALLEL)
2596 {
2597 int i;
2598 int sets_cc0 = 0;
2599 int other_things = 0;
2600 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2601 {
2602 if (GET_CODE (XVECEXP (x, 0, i)) == SET
2603 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
2604 sets_cc0 = 1;
2605 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
2606 other_things = 1;
2607 }
2608 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
2609 }
2610 return 0;
2611 #else
2612 abort ();
2613 #endif
2614 }
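/* Examples (illustrative RTL):

       (set (cc0) (compare (reg:SI 1) (reg:SI 2)))

   returns 1;

       (parallel [(set (cc0) (compare (reg:SI 1) (reg:SI 2)))
		  (set (reg:SI 3) (reg:SI 4))])

   returns -1, since the insn sets cc0 but also does other work; and

       (set (reg:SI 3) (reg:SI 4))

   returns 0.  */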
2615 \f
2616 /* Follow any unconditional jump at LABEL;
2617 return the ultimate label reached by any such chain of jumps.
2618 If LABEL is not followed by a jump, return LABEL.
2619 If the chain loops or we can't find the end, return LABEL,
2620 since that tells caller to avoid changing the insn.
2621
2622 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2623 a USE or CLOBBER. */
2624
2625 rtx
2626 follow_jumps (label)
2627 rtx label;
2628 {
2629 register rtx insn;
2630 register rtx next;
2631 register rtx value = label;
2632 register int depth;
2633
2634 for (depth = 0;
2635 (depth < 10
2636 && (insn = next_active_insn (value)) != 0
2637 && GET_CODE (insn) == JUMP_INSN
2638 && (JUMP_LABEL (insn) != 0 || GET_CODE (PATTERN (insn)) == RETURN)
2639 && (next = NEXT_INSN (insn))
2640 && GET_CODE (next) == BARRIER);
2641 depth++)
2642 {
2643 /* Don't chain through the insn that jumps into a loop
2644 from outside the loop,
2645 since that would create multiple loop entry jumps
2646 and prevent loop optimization. */
2647 rtx tem;
2648 if (!reload_completed)
2649 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
2650 if (GET_CODE (tem) == NOTE
2651 && NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG)
2652 return value;
2653
2654 /* If we have found a cycle, make the insn jump to itself. */
2655 if (JUMP_LABEL (insn) == label)
2656 return label;
2657 value = JUMP_LABEL (insn);
2658 }
2659 if (depth == 10)
2660 return label;
2661 return value;
2662 }
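/* An illustrative sketch: given the chain

       L1:  jump L2
       L2:  jump L3
       L3:  ...

   follow_jumps (L1) returns L3, so a reference to L1 can be
   "tensioned" to point straight at L3.  Only unconditional jumps
   (those followed by a BARRIER) are chained through, and the depth
   limit of 10 together with the self-reference check guards against
   cycles such as `L1: jump L2;  L2: jump L1'.  */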
2663
2664 /* Assuming that field IDX of X is a vector of label_refs,
2665 replace each of them by the ultimate label reached by it.
2666 Return nonzero if a change is made.
2667 Before reload, follow_jumps will not chain across a NOTE_INSN_LOOP_BEG. */
2668
2669 static int
2670 tension_vector_labels (x, idx)
2671 register rtx x;
2672 register int idx;
2673 {
2674 int changed = 0;
2675 register int i;
2676 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
2677 {
2678 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
2679 register rtx nlabel = follow_jumps (olabel);
2680 if (nlabel && nlabel != olabel)
2681 {
2682 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
2683 ++LABEL_NUSES (nlabel);
2684 if (--LABEL_NUSES (olabel) == 0)
2685 delete_insn (olabel);
2686 changed = 1;
2687 }
2688 }
2689 return changed;
2690 }
2691 \f
2692 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2693 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2694 in INSN, then store one of them in JUMP_LABEL (INSN).
2695 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2696 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2697 Also, when there are consecutive labels, canonicalize on the last of them.
2698
2699 Note that two labels separated by a loop-beginning note
2700 must be kept distinct if we have not yet done loop-optimization,
2701 because the gap between them is where loop-optimize
2702 will want to move invariant code to. CROSS_JUMP tells us
2703 that loop-optimization has already been done.
2704
2705 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2706 two labels distinct if they are separated by only USE or CLOBBER insns. */
2707
2708 static void
2709 mark_jump_label (x, insn, cross_jump)
2710 register rtx x;
2711 rtx insn;
2712 int cross_jump;
2713 {
2714 register RTX_CODE code = GET_CODE (x);
2715 register int i;
2716 register char *fmt;
2717
2718 switch (code)
2719 {
2720 case PC:
2721 case CC0:
2722 case REG:
2723 case SUBREG:
2724 case CONST_INT:
2725 case SYMBOL_REF:
2726 case CONST_DOUBLE:
2727 case CLOBBER:
2728 case CALL:
2729 return;
2730
2731 case MEM:
2732 /* If this is a constant-pool reference, see if it is a label. */
2733 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2734 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
2735 mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
2736 break;
2737
2738 case LABEL_REF:
2739 {
2740 register rtx label = XEXP (x, 0);
2741 register rtx next;
2742 if (GET_CODE (label) != CODE_LABEL)
2743 abort ();
2744 /* Ignore references to labels of containing functions. */
2745 if (LABEL_REF_NONLOCAL_P (x))
2746 break;
2747 /* If there are other labels following this one,
2748 replace it with the last of the consecutive labels. */
2749 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
2750 {
2751 if (GET_CODE (next) == CODE_LABEL)
2752 label = next;
2753 else if (cross_jump && GET_CODE (next) == INSN
2754 && (GET_CODE (PATTERN (next)) == USE
2755 || GET_CODE (PATTERN (next)) == CLOBBER))
2756 continue;
2757 else if (GET_CODE (next) != NOTE)
2758 break;
2759 else if (! cross_jump
2760 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
2761 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END))
2762 break;
2763 }
2764 XEXP (x, 0) = label;
2765 ++LABEL_NUSES (label);
2766 if (insn)
2767 {
2768 if (GET_CODE (insn) == JUMP_INSN)
2769 JUMP_LABEL (insn) = label;
2770 else if (! find_reg_note (insn, REG_LABEL, label))
2771 {
2772 rtx next = next_real_insn (label);
2773 /* Don't record labels that refer to dispatch tables.
2774 This is not necessary, since the tablejump
2775 references the same label.
2776 And if we did record them, flow.c would make worse code. */
2777 if (next == 0
2778 || ! (GET_CODE (next) == JUMP_INSN
2779 && (GET_CODE (PATTERN (next)) == ADDR_VEC
2780 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)))
2781 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_LABEL, label,
2782 REG_NOTES (insn));
2783 }
2784 }
2785 return;
2786 }
2787
2788 /* Do walk the labels in a vector, but not the first operand of an
2789 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
2790 case ADDR_VEC:
2791 case ADDR_DIFF_VEC:
2792 {
2793 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
2794
2795 for (i = 0; i < XVECLEN (x, eltnum); i++)
2796 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
2797 return;
2798 }
2799 }
2800
2801 fmt = GET_RTX_FORMAT (code);
2802 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2803 {
2804 if (fmt[i] == 'e')
2805 mark_jump_label (XEXP (x, i), insn, cross_jump);
2806 else if (fmt[i] == 'E')
2807 {
2808 register int j;
2809 for (j = 0; j < XVECLEN (x, i); j++)
2810 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
2811 }
2812 }
2813 }
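/* An illustrative sketch: given the consecutive labels

       L1:
       L2:
	    insn ...

   a (label_ref L1) is rewritten above into (label_ref L2), so every
   reference canonicalizes on the last label of the run and L1 can
   later be deleted once its use count drops to zero.  */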
2814
2815 /* If all INSN does is set the pc, delete it,
2816 and delete the insn that set the condition codes for it
2817 if that's what the previous insn did. */
2818
2819 void
2820 delete_jump (insn)
2821 rtx insn;
2822 {
2823 register rtx x = PATTERN (insn);
2824
2825 if (GET_CODE (x) == SET
2826 && GET_CODE (SET_DEST (x)) == PC)
2827 {
2828 #ifdef HAVE_cc0
2829 rtx prev = prev_nonnote_insn (insn);
2830 /* We assume that at this stage
2831 CC's are always set explicitly
2832 and always immediately before the jump that
2833 will use them. So if the previous insn
2834 exists to set the CC's, delete it
2835 (unless it performs auto-increments, etc.). */
2836 if (prev && GET_CODE (prev) == INSN
2837 && sets_cc0_p (PATTERN (prev)))
2838 {
2839 if (sets_cc0_p (PATTERN (prev)) > 0
2840 && !FIND_REG_INC_NOTE (prev, NULL_RTX))
2841 delete_insn (prev);
2842 else
2843 /* Otherwise, show that cc0 won't be used. */
2844 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_UNUSED,
2845 cc0_rtx, REG_NOTES (prev));
2846 }
2847 #endif
2848 /* Now delete the jump insn itself. */
2849 delete_computation (insn);
2850 }
2851 }
2852
2853 /* Delete INSN and recursively delete insns that compute values used only
2854 by INSN. This uses the REG_DEAD notes computed during flow analysis.
2855 If we are running before flow.c, we need do nothing since flow.c will
2856 delete dead code. We also can't know if the registers being used are
2857 dead or not at this point.
2858
2859 Otherwise, look at all our REG_DEAD notes. If a previous insn does
2860 nothing other than set a register that dies in this insn, we can delete
2861 that insn as well. */
2862
2863 void
2864 delete_computation (insn)
2865 rtx insn;
2866 {
2867 #ifndef HAVE_cc0
2868 rtx note, next;
2869
2870 for (note = REG_NOTES (insn); note; note = next)
2871 {
2872 rtx our_prev;
2873
2874 next = XEXP (note, 1);
2875
2876 if (REG_NOTE_KIND (note) != REG_DEAD
2877 /* Verify that the REG_NOTE is legitimate. */
2878 || GET_CODE (XEXP (note, 0)) != REG)
2879 continue;
2880
2881 for (our_prev = prev_nonnote_insn (insn);
2882 our_prev && GET_CODE (our_prev) == INSN;
2883 our_prev = prev_nonnote_insn (our_prev))
2884 {
2885 /* If we reach a SEQUENCE, it is too complex to try to
2886 do anything with it, so give up. */
2887 if (GET_CODE (PATTERN (our_prev)) == SEQUENCE)
2888 break;
2889
2890 if (GET_CODE (PATTERN (our_prev)) == USE
2891 && GET_CODE (XEXP (PATTERN (our_prev), 0)) == INSN)
2892 /* reorg creates USEs that look like this. We leave them
2893 alone because reorg needs them for its own purposes. */
2894 break;
2895
2896 if (reg_set_p (XEXP (note, 0), PATTERN (our_prev)))
2897 {
2898 if (FIND_REG_INC_NOTE (our_prev, NULL_RTX))
2899 break;
2900
2901 if (GET_CODE (PATTERN (our_prev)) == PARALLEL)
2902 {
2903 /* If we find a SET of something else, we can't
2904 delete the insn. */
2905
2906 int i;
2907
2908 for (i = 0; i < XVECLEN (PATTERN (our_prev), 0); i++)
2909 {
2910 rtx part = XVECEXP (PATTERN (our_prev), 0, i);
2911
2912 if (GET_CODE (part) == SET
2913 && SET_DEST (part) != XEXP (note, 0))
2914 break;
2915 }
2916
2917 if (i == XVECLEN (PATTERN (our_prev), 0))
2918 delete_computation (our_prev);
2919 }
2920 else if (GET_CODE (PATTERN (our_prev)) == SET
2921 && SET_DEST (PATTERN (our_prev)) == XEXP (note, 0))
2922 delete_computation (our_prev);
2923
2924 break;
2925 }
2926
2927 /* If OUR_PREV references the register that dies here, it is an
2928 additional use. Hence any prior SET isn't dead. However, this
2929 insn becomes the new place for the REG_DEAD note. */
2930 if (reg_overlap_mentioned_p (XEXP (note, 0),
2931 PATTERN (our_prev)))
2932 {
2933 XEXP (note, 1) = REG_NOTES (our_prev);
2934 REG_NOTES (our_prev) = note;
2935 break;
2936 }
2937 }
2938 }
2939 #endif /* Don't HAVE_cc0 */
2940 delete_insn (insn);
2941 }
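/* An illustrative sketch: suppose flow analysis has attached REG_DEAD
   notes and we delete the conditional jump in

       r1 = a + b		; r1 dies in the next insn
       r2 = r1 * 2		; r2 dies in the jump
       if (r2 == 0) goto L

   The REG_DEAD note for r2 leads us back to r2's setter; that insn
   does nothing else, so it is deleted too, and the recursion then
   removes r1's setter the same way.  */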
2942 \f
2943 /* Delete insn INSN from the chain of insns and update label ref counts.
2944 May delete some following insns as a consequence; may even delete
2945 a label elsewhere and insns that follow it.
2946
2947 Returns the first insn after INSN that was not deleted. */
2948
2949 rtx
2950 delete_insn (insn)
2951 register rtx insn;
2952 {
2953 register rtx next = NEXT_INSN (insn);
2954 register rtx prev = PREV_INSN (insn);
2955 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
2956 register int dont_really_delete = 0;
2957
2958 while (next && INSN_DELETED_P (next))
2959 next = NEXT_INSN (next);
2960
2961 /* This insn is already deleted => return first following nondeleted. */
2962 if (INSN_DELETED_P (insn))
2963 return next;
2964
2965 /* Don't delete user-declared labels. Convert them to special NOTEs
2966 instead. */
2967 if (was_code_label && LABEL_NAME (insn) != 0)
2968 {
2969 PUT_CODE (insn, NOTE);
2970 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
2971 NOTE_SOURCE_FILE (insn) = 0;
2972 dont_really_delete = 1;
2973 }
2974 else
2975 /* Mark this insn as deleted. */
2976 INSN_DELETED_P (insn) = 1;
2977
2978 /* If this is an unconditional jump, delete it from the jump chain. */
2979 if (simplejump_p (insn))
2980 delete_from_jump_chain (insn);
2981
2982 /* If instruction is followed by a barrier,
2983 delete the barrier too. */
2984
2985 if (next != 0 && GET_CODE (next) == BARRIER)
2986 {
2987 INSN_DELETED_P (next) = 1;
2988 next = NEXT_INSN (next);
2989 }
2990
2991 /* Patch out INSN (and the barrier, if any). */
2992
2993 if (optimize && ! dont_really_delete)
2994 {
2995 if (prev)
2996 {
2997 NEXT_INSN (prev) = next;
2998 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2999 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
3000 XVECLEN (PATTERN (prev), 0) - 1)) = next;
3001 }
3002
3003 if (next)
3004 {
3005 PREV_INSN (next) = prev;
3006 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3007 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3008 }
3009
3010 if (prev && NEXT_INSN (prev) == 0)
3011 set_last_insn (prev);
3012 }
3013
3014 /* If deleting a jump, decrement the count of the label,
3015 and delete the label if it is now unused. */
3016
3017 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
3018 if (--LABEL_NUSES (JUMP_LABEL (insn)) == 0)
3019 {
3020 /* This can delete NEXT or PREV,
3021 either directly if NEXT is JUMP_LABEL (INSN),
3022 or indirectly through more levels of jumps. */
3023 delete_insn (JUMP_LABEL (insn));
3024 /* I feel a little doubtful about this loop,
3025 but I see no clean and sure alternative way
3026 to find the first insn after INSN that is not now deleted.
3027 I hope this works. */
3028 while (next && INSN_DELETED_P (next))
3029 next = NEXT_INSN (next);
3030 return next;
3031 }
3032
3033 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
3034 prev = PREV_INSN (prev);
3035
3036 /* If INSN was a label and a dispatch table follows it,
3037 delete the dispatch table. The tablejump must have gone already.
3038 It isn't useful to fall through into a table. */
3039
3040 if (was_code_label
3041 && NEXT_INSN (insn) != 0
3042 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
3043 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
3044 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
3045 next = delete_insn (NEXT_INSN (insn));
3046
3047 /* If INSN was a label, delete insns following it if now unreachable. */
3048
3049 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
3050 {
3051 register RTX_CODE code;
3052 while (next != 0
3053 && ((code = GET_CODE (next)) == INSN
3054 || code == JUMP_INSN || code == CALL_INSN
3055 || code == NOTE
3056 || (code == CODE_LABEL && INSN_DELETED_P (next))))
3057 {
3058 if (code == NOTE
3059 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
3060 next = NEXT_INSN (next);
3061 /* Keep going past other deleted labels to delete what follows. */
3062 else if (code == CODE_LABEL && INSN_DELETED_P (next))
3063 next = NEXT_INSN (next);
3064 else
3065 /* Note: if this deletes a jump, it can cause more
3066 deletion of unreachable code, after a different label.
3067 As long as the value from this recursive call is correct,
3068 this invocation functions correctly. */
3069 next = delete_insn (next);
3070 }
3071 }
3072
3073 return next;
3074 }
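/* Because deleting one insn may delete labels, barriers and whole
   runs of unreachable code after it, callers that delete while
   walking the chain should advance with the value delete_insn
   returns, never with NEXT_INSN.  An illustrative sketch, where
   `unwanted_p' is a hypothetical predicate:  */
#if 0
static void
delete_unwanted (first)
     rtx first;
{
  rtx insn;

  for (insn = first; insn != 0; )
    if (unwanted_p (insn))
      insn = delete_insn (insn);	/* skips everything it deleted */
    else
      insn = NEXT_INSN (insn);
}
#endif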
3075
3076 /* Advance from INSN till reaching something not deleted
3077 then return that. May return INSN itself. */
3078
3079 rtx
3080 next_nondeleted_insn (insn)
3081 rtx insn;
3082 {
3083 while (INSN_DELETED_P (insn))
3084 insn = NEXT_INSN (insn);
3085 return insn;
3086 }
3087 \f
3088 /* Delete a range of insns from FROM to TO, inclusive.
3089 This is for the sake of peephole optimization, so assume
3090 that whatever these insns do will still be done by a new
3091 peephole insn that will replace them. */
3092
3093 void
3094 delete_for_peephole (from, to)
3095 register rtx from, to;
3096 {
3097 register rtx insn = from;
3098
3099 while (1)
3100 {
3101 register rtx next = NEXT_INSN (insn);
3102 register rtx prev = PREV_INSN (insn);
3103
3104 if (GET_CODE (insn) != NOTE)
3105 {
3106 INSN_DELETED_P (insn) = 1;
3107
3108 /* Patch this insn out of the chain. */
3109 /* We don't do this all at once, because we
3110 must preserve all NOTEs. */
3111 if (prev)
3112 NEXT_INSN (prev) = next;
3113
3114 if (next)
3115 PREV_INSN (next) = prev;
3116 }
3117
3118 if (insn == to)
3119 break;
3120 insn = next;
3121 }
3122
3123 /* Note that if TO is an unconditional jump
3124 we *do not* delete the BARRIER that follows,
3125 since the peephole that replaces this sequence
3126 is also an unconditional jump in that case. */
3127 }
3128 \f
3129 /* Invert the condition of the jump JUMP, and make it jump
3130 to label NLABEL instead of where it jumps now. */
3131
3132 int
3133 invert_jump (jump, nlabel)
3134 rtx jump, nlabel;
3135 {
3136 register rtx olabel = JUMP_LABEL (jump);
3137
3138 /* We have to either invert the condition and change the label or
3139 do neither. Either operation could fail. We first try to invert
3140 the jump. If that succeeds, we try changing the label. If that fails,
3141 we invert the jump back to what it was. */
3142
3143 if (! invert_exp (PATTERN (jump), jump))
3144 return 0;
3145
3146 if (redirect_jump (jump, nlabel))
3147 return 1;
3148
3149 if (! invert_exp (PATTERN (jump), jump))
3150 /* This should just be putting it back the way it was. */
3151 abort ();
3152
3153 return 0;
3154 }
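/* An illustrative sketch: with NLABEL = L2, the branch

       (set (pc) (if_then_else (lt (reg) (const_int 0))
			       (label_ref L1) (pc)))

   becomes

       (set (pc) (if_then_else (ge (reg) (const_int 0))
			       (label_ref L2) (pc)))

   i.e. `if (r < 0) goto L1' turns into `if (r >= 0) goto L2'.  When
   the comparison cannot safely be reversed, invert_exp instead swaps
   the THEN and ELSE arms of the IF_THEN_ELSE.  */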
3155
3156 /* Invert the jump condition of rtx X contained in jump insn, INSN.
3157
3158 Return 1 if we can do so, 0 if we cannot find a way to do so that
3159 matches a pattern. */
3160
3161 int
3162 invert_exp (x, insn)
3163 rtx x;
3164 rtx insn;
3165 {
3166 register RTX_CODE code;
3167 register int i;
3168 register char *fmt;
3169
3170 code = GET_CODE (x);
3171
3172 if (code == IF_THEN_ELSE)
3173 {
3174 register rtx comp = XEXP (x, 0);
3175 register rtx tem;
3176
3177 /* We can do this in two ways: The preferable way, which can only
3178 be done if this is not an integer comparison, is to reverse
3179 the comparison code. Otherwise, swap the THEN-part and ELSE-part
3180 of the IF_THEN_ELSE. If we can't do either, fail. */
3181
3182 if (can_reverse_comparison_p (comp, insn)
3183 && validate_change (insn, &XEXP (x, 0),
3184 gen_rtx (reverse_condition (GET_CODE (comp)),
3185 GET_MODE (comp), XEXP (comp, 0),
3186 XEXP (comp, 1)), 0))
3187 return 1;
3188
3189 tem = XEXP (x, 1);
3190 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
3191 validate_change (insn, &XEXP (x, 2), tem, 1);
3192 return apply_change_group ();
3193 }
3194
3195 fmt = GET_RTX_FORMAT (code);
3196 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3197 {
3198 if (fmt[i] == 'e')
3199 if (! invert_exp (XEXP (x, i), insn))
3200 return 0;
3201 if (fmt[i] == 'E')
3202 {
3203 register int j;
3204 for (j = 0; j < XVECLEN (x, i); j++)
3205 if (!invert_exp (XVECEXP (x, i, j), insn))
3206 return 0;
3207 }
3208 }
3209
3210 return 1;
3211 }
3212 \f
3213 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
3214 If the old jump target label is unused as a result,
3215 it and the code following it may be deleted.
3216
3217 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3218 RETURN insn.
3219
3220 The return value will be 1 if the change was made, 0 if it wasn't (this
3221 can only occur for NLABEL == 0). */
3222
3223 int
3224 redirect_jump (jump, nlabel)
3225 rtx jump, nlabel;
3226 {
3227 register rtx olabel = JUMP_LABEL (jump);
3228
3229 if (nlabel == olabel)
3230 return 1;
3231
3232 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
3233 return 0;
3234
3235 /* If this is an unconditional branch, delete it from the jump_chain of
3236 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3237 have UID's in range and JUMP_CHAIN is valid). */
3238 if (jump_chain && (simplejump_p (jump)
3239 || GET_CODE (PATTERN (jump)) == RETURN))
3240 {
3241 int label_index = nlabel ? INSN_UID (nlabel) : 0;
3242
3243 delete_from_jump_chain (jump);
3244 if (label_index < max_jump_chain
3245 && INSN_UID (jump) < max_jump_chain)
3246 {
3247 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
3248 jump_chain[label_index] = jump;
3249 }
3250 }
3251
3252 JUMP_LABEL (jump) = nlabel;
3253 if (nlabel)
3254 ++LABEL_NUSES (nlabel);
3255
3256 if (olabel && --LABEL_NUSES (olabel) == 0)
3257 delete_insn (olabel);
3258
3259 return 1;
3260 }
3261
3262 /* Delete the instruction JUMP from any jump chain it might be on. */
3263
3264 static void
3265 delete_from_jump_chain (jump)
3266 rtx jump;
3267 {
3268 int index;
3269 rtx olabel = JUMP_LABEL (jump);
3270
3271 /* Handle unconditional jumps. */
3272 if (jump_chain && olabel != 0
3273 && INSN_UID (olabel) < max_jump_chain
3274 && simplejump_p (jump))
3275 index = INSN_UID (olabel);
3276 /* Handle return insns. */
3277 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3278 index = 0;
3279 else return;
3280
3281 if (jump_chain[index] == jump)
3282 jump_chain[index] = jump_chain[INSN_UID (jump)];
3283 else
3284 {
3285 rtx insn;
3286
3287 for (insn = jump_chain[index];
3288 insn != 0;
3289 insn = jump_chain[INSN_UID (insn)])
3290 if (jump_chain[INSN_UID (insn)] == jump)
3291 {
3292 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3293 break;
3294 }
3295 }
3296 }
3297
3298 /* If NLABEL is nonzero, throughout the rtx at LOC,
3299 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
3300 zero, alter (RETURN) to (LABEL_REF NLABEL).
3301
3302 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
3303 validity with validate_change. Convert (set (pc) (label_ref olabel))
3304 to (return).
3305
3306 Return 0 if we found a change we would like to make but it is invalid.
3307 Otherwise, return 1. */
3308
3309 int
3310 redirect_exp (loc, olabel, nlabel, insn)
3311 rtx *loc;
3312 rtx olabel, nlabel;
3313 rtx insn;
3314 {
3315 register rtx x = *loc;
3316 register RTX_CODE code = GET_CODE (x);
3317 register int i;
3318 register char *fmt;
3319
3320 if (code == LABEL_REF)
3321 {
3322 if (XEXP (x, 0) == olabel)
3323 {
3324 if (nlabel)
3325 XEXP (x, 0) = nlabel;
3326 else
3327 return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
3328 return 1;
3329 }
3330 }
3331 else if (code == RETURN && olabel == 0)
3332 {
3333 x = gen_rtx (LABEL_REF, VOIDmode, nlabel);
3334 if (loc == &PATTERN (insn))
3335 x = gen_rtx (SET, VOIDmode, pc_rtx, x);
3336 return validate_change (insn, loc, x, 0);
3337 }
3338
3339 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
3340 && GET_CODE (SET_SRC (x)) == LABEL_REF
3341 && XEXP (SET_SRC (x), 0) == olabel)
3342 return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
3343
3344 fmt = GET_RTX_FORMAT (code);
3345 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3346 {
3347 if (fmt[i] == 'e')
3348 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
3349 return 0;
3350 if (fmt[i] == 'E')
3351 {
3352 register int j;
3353 for (j = 0; j < XVECLEN (x, i); j++)
3354 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
3355 return 0;
3356 }
3357 }
3358
3359 return 1;
3360 }
3361 \f
3362 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3363
3364 If the old jump target label (before the dispatch table) becomes unused,
3365 it and the dispatch table may be deleted. In that case, find the insn
3366 before the jump that references that label, and delete it and its
3367 logical successors too. */
3368
3369 void
3370 redirect_tablejump (jump, nlabel)
3371 rtx jump, nlabel;
3372 {
3373 register rtx olabel = JUMP_LABEL (jump);
3374
3375 /* Add this jump to the jump_chain of NLABEL. */
3376 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
3377 && INSN_UID (jump) < max_jump_chain)
3378 {
3379 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
3380 jump_chain[INSN_UID (nlabel)] = jump;
3381 }
3382
3383 PATTERN (jump) = gen_jump (nlabel);
3384 JUMP_LABEL (jump) = nlabel;
3385 ++LABEL_NUSES (nlabel);
3386 INSN_CODE (jump) = -1;
3387
3388 if (--LABEL_NUSES (olabel) == 0)
3389 {
3390 delete_labelref_insn (jump, olabel, 0);
3391 delete_insn (olabel);
3392 }
3393 }
3394
3395 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3396 If we found one, delete it and then delete this insn if DELETE_THIS is
3397 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3398
3399 static int
3400 delete_labelref_insn (insn, label, delete_this)
3401 rtx insn, label;
3402 int delete_this;
3403 {
3404 int deleted = 0;
3405 rtx link;
3406
3407 if (GET_CODE (insn) != NOTE
3408 && reg_mentioned_p (label, PATTERN (insn)))
3409 {
3410 if (delete_this)
3411 {
3412 delete_insn (insn);
3413 deleted = 1;
3414 }
3415 else
3416 return 1;
3417 }
3418
3419 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
3420 if (delete_labelref_insn (XEXP (link, 0), label, 1))
3421 {
3422 if (delete_this)
3423 {
3424 delete_insn (insn);
3425 deleted = 1;
3426 }
3427 else
3428 return 1;
3429 }
3430
3431 return deleted;
3432 }
3433 \f
3434 /* Like rtx_equal_p except that it considers two REGs as equal
3435 if they renumber to the same value. */
3436
3437 int
3438 rtx_renumbered_equal_p (x, y)
3439 rtx x, y;
3440 {
3441 register int i;
3442 register RTX_CODE code = GET_CODE (x);
3443 register char *fmt;
3444
3445 if (x == y)
3446 return 1;
3447 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3448 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3449 && GET_CODE (SUBREG_REG (y)) == REG)))
3450 {
3451 register int j;
3452
3453 if (GET_MODE (x) != GET_MODE (y))
3454 return 0;
3455
3456 /* If we haven't done any renumbering, don't
3457 make any assumptions. */
3458 if (reg_renumber == 0)
3459 return rtx_equal_p (x, y);
3460
3461 if (code == SUBREG)
3462 {
3463 i = REGNO (SUBREG_REG (x));
3464 if (reg_renumber[i] >= 0)
3465 i = reg_renumber[i];
3466 i += SUBREG_WORD (x);
3467 }
3468 else
3469 {
3470 i = REGNO (x);
3471 if (reg_renumber[i] >= 0)
3472 i = reg_renumber[i];
3473 }
3474 if (GET_CODE (y) == SUBREG)
3475 {
3476 j = REGNO (SUBREG_REG (y));
3477 if (reg_renumber[j] >= 0)
3478 j = reg_renumber[j];
3479 j += SUBREG_WORD (y);
3480 }
3481 else
3482 {
3483 j = REGNO (y);
3484 if (reg_renumber[j] >= 0)
3485 j = reg_renumber[j];
3486 }
3487 return i == j;
3488 }
3489 /* Now we have disposed of all the cases
3490 in which different rtx codes can match. */
3491 if (code != GET_CODE (y))
3492 return 0;
3493 switch (code)
3494 {
3495 case PC:
3496 case CC0:
3497 case ADDR_VEC:
3498 case ADDR_DIFF_VEC:
3499 return 0;
3500
3501 case CONST_INT:
3502 return XINT (x, 0) == XINT (y, 0);
3503
3504 case LABEL_REF:
3505 /* We can't assume nonlocal labels have their following insns yet. */
3506 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3507 return XEXP (x, 0) == XEXP (y, 0);
3508 /* Two label-refs are equivalent if they point at labels
3509 in the same position in the instruction stream. */
3510 return (next_real_insn (XEXP (x, 0))
3511 == next_real_insn (XEXP (y, 0)));
3512
3513 case SYMBOL_REF:
3514 return XSTR (x, 0) == XSTR (y, 0);
3515 }
3516
3517 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3518
3519 if (GET_MODE (x) != GET_MODE (y))
3520 return 0;
3521
3522 /* Compare the elements. If any pair of corresponding elements
3523 fail to match, return 0 for the whole thing. */
3524
3525 fmt = GET_RTX_FORMAT (code);
3526 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3527 {
3528 register int j;
3529 switch (fmt[i])
3530 {
3531 case 'w':
3532 if (XWINT (x, i) != XWINT (y, i))
3533 return 0;
3534 break;
3535
3536 case 'i':
3537 if (XINT (x, i) != XINT (y, i))
3538 return 0;
3539 break;
3540
3541 case 's':
3542 if (strcmp (XSTR (x, i), XSTR (y, i)))
3543 return 0;
3544 break;
3545
3546 case 'e':
3547 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
3548 return 0;
3549 break;
3550
3551 case 'u':
3552 if (XEXP (x, i) != XEXP (y, i))
3553 return 0;
3554 /* fall through. */
3555 case '0':
3556 break;
3557
3558 case 'E':
3559 if (XVECLEN (x, i) != XVECLEN (y, i))
3560 return 0;
3561 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3562 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
3563 return 0;
3564 break;
3565
3566 default:
3567 abort ();
3568 }
3569 }
3570 return 1;
3571 }
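/* For example, if register allocation put pseudo 100 into hard
   register 3, then (reg:SI 100) and (reg:SI 3) compare equal here,
   though rtx_equal_p would distinguish them.  This is what makes it
   safe for cross jumping to compare insn patterns after reload.  */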
3572 \f
3573 /* If X is a hard register or equivalent to one or a subregister of one,
3574 return the hard register number. If X is a pseudo register that was not
3575 assigned a hard register, return the pseudo register number. Otherwise,
3576 return -1. Any rtx is valid for X. */
3577
3578 int
3579 true_regnum (x)
3580 rtx x;
3581 {
3582 if (GET_CODE (x) == REG)
3583 {
3584 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3585 return reg_renumber[REGNO (x)];
3586 return REGNO (x);
3587 }
3588 if (GET_CODE (x) == SUBREG)
3589 {
3590 int base = true_regnum (SUBREG_REG (x));
3591 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3592 return SUBREG_WORD (x) + base;
3593 }
3594 return -1;
3595 }
3596 \f
3597 /* Optimize code of the form:
3598
3599 for (x = a[i]; x; ...)
3600 ...
3601 for (x = a[i]; x; ...)
3602 ...
3603 foo:
3604
3605 Loop optimize will change the above code into
3606
3607 if (x = a[i])
3608 for (;;)
3609 { ...; if (! (x = ...)) break; }
3610 if (x = a[i])
3611 for (;;)
3612 { ...; if (! (x = ...)) break; }
3613 foo:
3614
3615 In general, if the first test fails, the program can branch
3616 directly to `foo' and skip the second try which is doomed to fail.
3617 We run this after loop optimization and before flow analysis. */
3618
3619 /* When comparing the insn patterns, we track the fact that different
3620 pseudo-register numbers may have been used in each computation.
3621 The following array stores an equivalence -- same_regs[I] == J means
3622 that pseudo register I was used in the first set of tests in a context
3623 where J was used in the second set. We also count the number of such
3624 pending equivalences. If nonzero, the expressions really aren't the
3625 same. */
3626
3627 static short *same_regs;
3628
3629 static int num_same_regs;
3630
3631 /* Track any registers modified between the target of the first jump and
3632 the second jump. They never compare equal. */
3633
3634 static char *modified_regs;
3635
3636 /* Record if memory was modified. */
3637
3638 static int modified_mem;
3639
3640 /* Called via note_stores on each insn between the target of the first
3641 branch and the second branch. It marks any changed registers. */
3642
3643 static void
3644 mark_modified_reg (dest, x)
3645 rtx dest;
3646 rtx x;
3647 {
3648 int regno, i;
3649
3650 if (GET_CODE (dest) == SUBREG)
3651 dest = SUBREG_REG (dest);
3652
3653 if (GET_CODE (dest) == MEM)
3654 modified_mem = 1;
3655
3656 if (GET_CODE (dest) != REG)
3657 return;
3658
3659 regno = REGNO (dest);
3660 if (regno >= FIRST_PSEUDO_REGISTER)
3661 modified_regs[regno] = 1;
3662 else
3663 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
3664 modified_regs[regno + i] = 1;
3665 }
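/* This function is used below as the callback of note_stores, which
   scans an insn pattern and calls it once for each SET or CLOBBER it
   finds, passing the destination as DEST; the second argument is not
   needed here.  See the call note_stores (PATTERN (b2),
   mark_modified_reg) in thread_jumps.  */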
3666
3667 /* F is the first insn in the chain of insns. */
3668
3669 void
3670 thread_jumps (f, max_reg, verbose)
3671 rtx f;
3672 int max_reg;
3673 int verbose;
3674 {
3675 /* Basic algorithm is to find a conditional branch,
3676 the label it may branch to, and the branch after
3677 that label. If the two branches test the same condition,
3678 walk back from both branch paths until the insn patterns
3679 differ, or code labels are hit. If we make it back to
3680 the target of the first branch, then we know that the first branch
3681 will either always succeed or always fail depending on the relative
3682 senses of the two branches. So adjust the first branch accordingly
3683 in this case. */
3684
3685 rtx label, b1, b2, t1, t2;
3686 enum rtx_code code1, code2;
3687 rtx b1op0, b1op1, b2op0, b2op1;
3688 int changed = 1;
3689 int i;
3690 short *all_reset;
3691
3692 /* Allocate register tables and quick-reset table. */
3693 modified_regs = (char *) alloca (max_reg * sizeof (char));
3694 same_regs = (short *) alloca (max_reg * sizeof (short));
3695 all_reset = (short *) alloca (max_reg * sizeof (short));
3696 for (i = 0; i < max_reg; i++)
3697 all_reset[i] = -1;
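  /* ALL_RESET stays filled with -1 so that SAME_REGS can be reset for
     each candidate branch with a single bcopy instead of a loop.  */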
3698
3699 while (changed)
3700 {
3701 changed = 0;
3702
3703 for (b1 = f; b1; b1 = NEXT_INSN (b1))
3704 {
3705 /* Get to a candidate branch insn. */
3706 if (GET_CODE (b1) != JUMP_INSN
3707 || ! condjump_p (b1) || simplejump_p (b1)
3708 || JUMP_LABEL (b1) == 0)
3709 continue;
3710
3711 bzero (modified_regs, max_reg * sizeof (char));
3712 modified_mem = 0;
3713
3714 bcopy (all_reset, same_regs, max_reg * sizeof (short));
3715 num_same_regs = 0;
3716
3717 label = JUMP_LABEL (b1);
3718
3719 /* Look for a branch after the target. Record any registers and
3720 memory modified between the target and the branch. Stop when we
3721 get to a label since we can't know what was changed there. */
3722 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
3723 {
3724 if (GET_CODE (b2) == CODE_LABEL)
3725 break;
3726
3727 else if (GET_CODE (b2) == JUMP_INSN)
3728 {
3729 /* If this is an unconditional jump and is the only use of
3730 its target label, we can follow it. */
3731 if (simplejump_p (b2)
3732 && JUMP_LABEL (b2) != 0
3733 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
3734 {
3735 b2 = JUMP_LABEL (b2);
3736 continue;
3737 }
3738 else
3739 break;
3740 }
3741
3742 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
3743 continue;
3744
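	  /* A call clobbers memory and every call-used register that
	     is not fixed; the stack, frame and argument pointers keep
	     their values across the call.  */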
3745 if (GET_CODE (b2) == CALL_INSN)
3746 {
3747 modified_mem = 1;
3748 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3749 if (call_used_regs[i] && ! fixed_regs[i]
3750 && i != STACK_POINTER_REGNUM
3751 && i != FRAME_POINTER_REGNUM
3752 && i != ARG_POINTER_REGNUM)
3753 modified_regs[i] = 1;
3754 }
3755
3756 note_stores (PATTERN (b2), mark_modified_reg);
3757 }
3758
3759 /* Check the next candidate branch insn from the label
3760 of the first. */
3761 if (b2 == 0
3762 || GET_CODE (b2) != JUMP_INSN
3763 || b2 == b1
3764 || ! condjump_p (b2)
3765 || simplejump_p (b2))
3766 continue;
3767
3768 /* Get the comparison codes and operands, reversing the
3769 codes if appropriate. If we don't have comparison codes,
3770 we can't do anything. */
3771 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
3772 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
3773 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
3774 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
3775 code1 = reverse_condition (code1);
3776
3777 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
3778 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
3779 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
3780 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
3781 code2 = reverse_condition (code2);
3782
3783 /* If they test the same things and knowing that B1 branches
3784 tells us whether or not B2 branches, check if we
3785 can thread the branch. */
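	  /* (For instance, knowing that `a == b' branched tells us that
	     an `a <= b' test at the target must branch too, while an
	     `a != b' test there must fall through.)  */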
3786 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
3787 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
3788 && (comparison_dominates_p (code1, code2)
3789 || comparison_dominates_p (code1, reverse_condition (code2))))
3790 {
3791 t1 = prev_nonnote_insn (b1);
3792 t2 = prev_nonnote_insn (b2);
3793
3794 while (t1 != 0 && t2 != 0)
3795 {
3799 if (t2 == label)
3800 {
3801 /* We have reached the target of the first branch.
3802 If there are no pending register equivalents,
3803 we know that this branch will either always
3804 succeed (if the senses of the two branches are
3805 the same) or always fail (if not). */
3806 rtx new_label;
3807
3808 if (num_same_regs != 0)
3809 break;
3810
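		  /* If B1 branching guarantees that B2 branches, send
		     B1 straight to B2's target; otherwise B1 branching
		     guarantees that B2 falls through, so send B1 to the
		     insn just after B2.  */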
3811 if (comparison_dominates_p (code1, code2))
3812 new_label = JUMP_LABEL (b2);
3813 else
3814 new_label = get_label_after (b2);
3815
3816 if (JUMP_LABEL (b1) != new_label
3817 && redirect_jump (b1, new_label))
3818 changed = 1;
3819 break;
3820 }
3821
3822 /* If either of these is not a normal insn (it might be
3823 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
3824 have already been skipped above.) Similarly, fail
3825 if the insns are different. */
3826 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
3827 || recog_memoized (t1) != recog_memoized (t2)
3828 || ! rtx_equal_for_thread_p (PATTERN (t1),
3829 PATTERN (t2), t2))
3830 break;
3831
3832 t1 = prev_nonnote_insn (t1);
3833 t2 = prev_nonnote_insn (t2);
3834 }
3835 }
3836 }
3837 }
3838 }
3839 \f
/* This is like `rtx_equal_p' except that it knows about our handling
   of possibly equivalent registers and knows to consider volatile and
   modified objects as not equal.

   YINSN is the insn containing Y.  */
3845
3846 int
3847 rtx_equal_for_thread_p (x, y, yinsn)
3848 rtx x, y;
3849 rtx yinsn;
3850 {
3851 register int i;
3852 register int j;
3853 register enum rtx_code code;
3854 register char *fmt;
3855
3856 code = GET_CODE (x);
3857 /* Rtx's of different codes cannot be equal. */
3858 if (code != GET_CODE (y))
3859 return 0;
3860
3861 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
3862 (REG:SI x) and (REG:HI x) are NOT equivalent. */
3863
3864 if (GET_MODE (x) != GET_MODE (y))
3865 return 0;
3866
  /* Handle special cases first.  */
3868 switch (code)
3869 {
3870 case REG:
3871 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
3872 return 1;
3873
      /* If either is a user variable or a hard register, they can't be
	 equivalent; otherwise check for a possible pseudo-register
	 equivalence.  */
3876 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
3877 || REGNO (x) < FIRST_PSEUDO_REGISTER
3878 || REGNO (y) < FIRST_PSEUDO_REGISTER)
3879 return 0;
3880
3881 if (same_regs[REGNO (x)] == -1)
3882 {
3883 same_regs[REGNO (x)] = REGNO (y);
3884 num_same_regs++;
3885
3886 /* If this is the first time we are seeing a register on the `Y'
3887 side, see if it is the last use. If not, we can't thread the
3888 jump, so mark it as not equivalent. */
3889 if (regno_last_uid[REGNO (y)] != INSN_UID (yinsn))
3890 return 0;
3891
3892 return 1;
3893 }
3894 else
3895 return (same_regs[REGNO (x)] == REGNO (y));
3898
3899 case MEM:
3900 /* If memory modified or either volatile, not equivalent.
3901 Else, check address. */
3902 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
3903 return 0;
3904
3905 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
3906
3907 case ASM_INPUT:
3908 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
3909 return 0;
3910
3911 break;
3912
3913 case SET:
3914 /* Cancel a pending `same_regs' if setting equivalenced registers.
3915 Then process source. */
3916 if (GET_CODE (SET_DEST (x)) == REG
3917 && GET_CODE (SET_DEST (y)) == REG)
3918 {
3919 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
3920 {
3921 same_regs[REGNO (SET_DEST (x))] = -1;
3922 num_same_regs--;
3923 }
3924 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
3925 return 0;
3926 }
      else if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
	return 0;
3930
3931 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
3932
3933 case LABEL_REF:
3934 return XEXP (x, 0) == XEXP (y, 0);
3935
3936 case SYMBOL_REF:
3937 return XSTR (x, 0) == XSTR (y, 0);
3938 }
3939
3940 if (x == y)
3941 return 1;
3942
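  /* Compare the remaining operands generically, driven by the rtx
     format string; each format letter is handled by a case below
     ('e' = subexpression, 'E'/'V' = vector, 'i'/'n' = integer,
     'w' = wide integer, 's'/'S' = string, 'u' = insn backpointer,
     '0' = unused slot).  */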
3943 fmt = GET_RTX_FORMAT (code);
3944 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3945 {
3946 switch (fmt[i])
3947 {
3948 case 'w':
3949 if (XWINT (x, i) != XWINT (y, i))
3950 return 0;
3951 break;
3952
3953 case 'n':
3954 case 'i':
3955 if (XINT (x, i) != XINT (y, i))
3956 return 0;
3957 break;
3958
3959 case 'V':
3960 case 'E':
3961 /* Two vectors must have the same length. */
3962 if (XVECLEN (x, i) != XVECLEN (y, i))
3963 return 0;
3964
3965 /* And the corresponding elements must match. */
3966 for (j = 0; j < XVECLEN (x, i); j++)
3967 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
3968 XVECEXP (y, i, j), yinsn) == 0)
3969 return 0;
3970 break;
3971
3972 case 'e':
3973 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
3974 return 0;
3975 break;
3976
3977 case 'S':
3978 case 's':
3979 if (strcmp (XSTR (x, i), XSTR (y, i)))
3980 return 0;
3981 break;
3982
3983 case 'u':
3984 /* These are just backpointers, so they don't matter. */
3985 break;
3986
3987 case '0':
3988 break;
3989
3990 /* It is believed that rtx's at this level will never
3991 contain anything but integers and other rtx's,
3992 except for within LABEL_REFs and SYMBOL_REFs. */
3993 default:
3994 abort ();
3995 }
3996 }
3997 return 1;
3998 }