]> gcc.gnu.org Git - gcc.git/blame - gcc/jump.c
(identify_blocks, reorder_blocks): Don't use SET_NOTE_BLOCK_NUMBER.
[gcc.git] / gcc / jump.c
CommitLineData
15a63be1 1/* Optimize jump instructions, for GNU compiler.
5f4f0e22 2 Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.
15a63be1
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21/* This is the jump-optimization pass of the compiler.
22 It is run two or three times: once before cse, sometimes once after cse,
23 and once after reload (before final).
24
25 jump_optimize deletes unreachable code and labels that are not used.
26 It also deletes jumps that jump to the following insn,
27 and simplifies jumps around unconditional jumps and jumps
28 to unconditional jumps.
29
30 Each CODE_LABEL has a count of the times it is used
31 stored in the LABEL_NUSES internal field, and each JUMP_INSN
32 has one label that it refers to stored in the
33 JUMP_LABEL internal field. With this we can detect labels that
34 become unused because of the deletion of all the jumps that
35 formerly used them. The JUMP_LABEL info is sometimes looked
36 at by later passes.
37
38 Optionally, cross-jumping can be done. Currently it is done
39 only the last time (when after reload and before final).
40 In fact, the code for cross-jumping now assumes that register
41 allocation has been done, since it uses `rtx_renumbered_equal_p'.
42
43 Jump optimization is done after cse when cse's constant-propagation
44 causes jumps to become unconditional or to be deleted.
45
46 Unreachable loops are not detected here, because the labels
47 have references and the insns appear reachable from the labels.
48 find_basic_blocks in flow.c finds and deletes such loops.
49
50 The subroutines delete_insn, redirect_jump, and invert_jump are used
51 from other passes as well. */
52
53#include "config.h"
54#include "rtl.h"
55#include "flags.h"
56#include "hard-reg-set.h"
57#include "regs.h"
58#include "expr.h"
59#include "insn-config.h"
60#include "insn-flags.h"
61#include "real.h"
62
63/* ??? Eventually must record somehow the labels used by jumps
64 from nested functions. */
65/* Pre-record the next or previous real insn for each label?
66 No, this pass is very fast anyway. */
67/* Condense consecutive labels?
68 This would make life analysis faster, maybe. */
69/* Optimize jump y; x: ... y: jumpif... x?
70 Don't know if it is worth bothering with. */
71/* Optimize two cases of conditional jump to conditional jump?
72 This can never delete any instruction or make anything dead,
73 or even change what is live at any point.
74 So perhaps let combiner do it. */
75
76/* Vector indexed by uid.
77   For each CODE_LABEL, index by its uid to get first unconditional jump
78   that jumps to the label.
79   For each JUMP_INSN, index by its uid to get the next unconditional jump
80   that jumps to the same label.
81   Element 0 is the start of a chain of all return insns.
82   (It is safe to use element 0 because insn uid 0 is not used.)  */
83
84static rtx *jump_chain;
85
86/* List of labels referred to from initializers (static data);
87   these can never be deleted.  */
88rtx forced_labels;
89
90/* Maximum index in jump_chain.  */
91
92static int max_jump_chain;
93
94/* Set nonzero by jump_optimize if control can fall through
95   to the end of the function.  */
96int can_reach_end;
97
98/* Indicates whether death notes are significant in cross jump analysis.
99   Normally they are not significant, because if A and B jump to C,
100   and R dies in A, it must die in B.  But this might not be true after
101   stack register conversion, and we must compare death notes in that
102   case.  */
103
104static int cross_jump_death_matters = 0;
105
/* Forward declarations.  These are old-style (unprototyped) K&R
   declarations; parameter types appear at the definitions below.  */
106static int duplicate_loop_exit_test ();
107rtx delete_insn ();
108int redirect_jump ();
109static int redirect_exp ();
110void redirect_tablejump ();
111static int delete_labelref_insn ();
112int invert_jump ();
113static int invert_exp ();
114int condjump_p ();
115int simplejump_p ();
116
117extern rtx gen_jump ();
118
15a63be1
RK
119static void mark_jump_label ();
120void delete_jump ();
121static void delete_from_jump_chain ();
122static int tension_vector_labels ();
123static void find_cross_jump ();
124static void do_cross_jump ();
125static int jump_back_p ();
126\f
127/* Delete no-op jumps and optimize jumps to jumps
128 and jumps around jumps.
129 Delete unused labels and unreachable code.
130
131 If CROSS_JUMP is 1, detect matching code
132 before a jump and its destination and unify them.
133 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
134
135 If NOOP_MOVES is nonzero, delete no-op move insns.
136
137 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
138 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
139
140 If `optimize' is zero, don't change any code,
141 just determine whether control drops off the end of the function.
142 This case occurs when we have -W and not -O.
143 It works because `delete_insn' checks the value of `optimize'
144 and refrains from actually deleting when that is 0. */
145
146void
147jump_optimize (f, cross_jump, noop_moves, after_regscan)
148 rtx f;
149 int cross_jump;
150 int noop_moves;
151 int after_regscan;
152{
153 register rtx insn;
154 int changed;
155 int first = 1;
156 int max_uid = 0;
157 rtx last_insn;
158
159 cross_jump_death_matters = (cross_jump == 2);
160
161 /* Initialize LABEL_NUSES and JUMP_LABEL fields. */
162
163 for (insn = f; insn; insn = NEXT_INSN (insn))
164 {
165 if (GET_CODE (insn) == CODE_LABEL)
166 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
167 else if (GET_CODE (insn) == JUMP_INSN)
168 JUMP_LABEL (insn) = 0;
169 if (INSN_UID (insn) > max_uid)
170 max_uid = INSN_UID (insn);
171 }
172
173 max_uid++;
174
175 /* Delete insns following barriers, up to next label. */
176
177 for (insn = f; insn;)
178 {
179 if (GET_CODE (insn) == BARRIER)
180 {
181 insn = NEXT_INSN (insn);
182 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
183 {
184 if (GET_CODE (insn) == NOTE
185 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
186 insn = NEXT_INSN (insn);
187 else
188 insn = delete_insn (insn);
189 }
190 /* INSN is now the code_label. */
191 }
192 else
193 insn = NEXT_INSN (insn);
194 }
195
196 /* Leave some extra room for labels and duplicate exit test insns
197 we make. */
198 max_jump_chain = max_uid * 14 / 10;
199 jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
200 bzero (jump_chain, max_jump_chain * sizeof (rtx));
201
202 /* Mark the label each jump jumps to.
203 Combine consecutive labels, and count uses of labels.
204
205 For each label, make a chain (using `jump_chain')
206 of all the *unconditional* jumps that jump to it;
207 also make a chain of all returns. */
208
209 for (insn = f; insn; insn = NEXT_INSN (insn))
210 if ((GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN
211 || GET_CODE (insn) == CALL_INSN)
212 && ! INSN_DELETED_P (insn))
213 {
214 mark_jump_label (PATTERN (insn), insn, cross_jump);
215 if (GET_CODE (insn) == JUMP_INSN)
216 {
217 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
218 {
219 jump_chain[INSN_UID (insn)]
220 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
221 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
222 }
223 if (GET_CODE (PATTERN (insn)) == RETURN)
224 {
225 jump_chain[INSN_UID (insn)] = jump_chain[0];
226 jump_chain[0] = insn;
227 }
228 }
229 }
230
231 /* Keep track of labels used from static data;
232 they cannot ever be deleted. */
233
234 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
235 LABEL_NUSES (XEXP (insn, 0))++;
236
237 /* Delete all labels already not referenced.
238 Also find the last insn. */
239
240 last_insn = 0;
241 for (insn = f; insn; )
242 {
243 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
244 insn = delete_insn (insn);
245 else
246 {
247 last_insn = insn;
248 insn = NEXT_INSN (insn);
249 }
250 }
251
252 if (!optimize)
253 {
254 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
255 If so record that this function can drop off the end. */
256
257 insn = last_insn;
258 {
259 int n_labels = 1;
260 while (insn
261 /* One label can follow the end-note: the return label. */
262 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
263 /* Ordinary insns can follow it if returning a structure. */
264 || GET_CODE (insn) == INSN
265 /* If machine uses explicit RETURN insns, no epilogue,
266 then one of them follows the note. */
267 || (GET_CODE (insn) == JUMP_INSN
268 && GET_CODE (PATTERN (insn)) == RETURN)
269 /* Other kinds of notes can follow also. */
270 || (GET_CODE (insn) == NOTE
271 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
272 insn = PREV_INSN (insn);
273 }
274
275 /* Report if control can fall through at the end of the function. */
276 if (insn && GET_CODE (insn) == NOTE
277 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
278 && ! INSN_DELETED_P (insn))
279 can_reach_end = 1;
280
281 /* Zero the "deleted" flag of all the "deleted" insns. */
282 for (insn = f; insn; insn = NEXT_INSN (insn))
283 INSN_DELETED_P (insn) = 0;
284 return;
285 }
286
287#ifdef HAVE_return
288 if (HAVE_return)
289 {
290 /* If we fall through to the epilogue, see if we can insert a RETURN insn
291 in front of it. If the machine allows it at this point (we might be
292 after reload for a leaf routine), it will improve optimization for it
293 to be there. */
294 insn = get_last_insn ();
295 while (insn && GET_CODE (insn) == NOTE)
296 insn = PREV_INSN (insn);
297
298 if (insn && GET_CODE (insn) != BARRIER)
299 {
300 emit_jump_insn (gen_return ());
301 emit_barrier ();
302 }
303 }
304#endif
305
306 if (noop_moves)
307 for (insn = f; insn; )
308 {
309 register rtx next = NEXT_INSN (insn);
310
311 if (GET_CODE (insn) == INSN)
312 {
313 register rtx body = PATTERN (insn);
314
315/* Combine stack_adjusts with following push_insns. */
316#ifdef PUSH_ROUNDING
317 if (GET_CODE (body) == SET
318 && SET_DEST (body) == stack_pointer_rtx
319 && GET_CODE (SET_SRC (body)) == PLUS
320 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
321 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
322 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
323 {
324 rtx p;
325 rtx stack_adjust_insn = insn;
326 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
327 int total_pushed = 0;
328 int pushes = 0;
329
330 /* Find all successive push insns. */
331 p = insn;
332 /* Don't convert more than three pushes;
333 that starts adding too many displaced addresses
334 and the whole thing starts becoming a losing
335 proposition. */
336 while (pushes < 3)
337 {
338 rtx pbody, dest;
339 p = next_nonnote_insn (p);
340 if (p == 0 || GET_CODE (p) != INSN)
341 break;
342 pbody = PATTERN (p);
343 if (GET_CODE (pbody) != SET)
344 break;
345 dest = SET_DEST (pbody);
346 /* Allow a no-op move between the adjust and the push. */
347 if (GET_CODE (dest) == REG
348 && GET_CODE (SET_SRC (pbody)) == REG
349 && REGNO (dest) == REGNO (SET_SRC (pbody)))
350 continue;
351 if (! (GET_CODE (dest) == MEM
352 && GET_CODE (XEXP (dest, 0)) == POST_INC
353 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
354 break;
355 pushes++;
356 if (total_pushed + GET_MODE_SIZE (SET_DEST (pbody))
357 > stack_adjust_amount)
358 break;
359 total_pushed += GET_MODE_SIZE (SET_DEST (pbody));
360 }
361
362 /* Discard the amount pushed from the stack adjust;
363 maybe eliminate it entirely. */
364 if (total_pushed >= stack_adjust_amount)
365 {
366 delete_insn (stack_adjust_insn);
367 total_pushed = stack_adjust_amount;
368 }
369 else
370 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
5f4f0e22 371 = GEN_INT (stack_adjust_amount - total_pushed);
15a63be1
RK
372
373 /* Change the appropriate push insns to ordinary stores. */
374 p = insn;
375 while (total_pushed > 0)
376 {
377 rtx pbody, dest;
378 p = next_nonnote_insn (p);
379 if (GET_CODE (p) != INSN)
380 break;
381 pbody = PATTERN (p);
382 if (GET_CODE (pbody) == SET)
383 break;
384 dest = SET_DEST (pbody);
385 if (! (GET_CODE (dest) == MEM
386 && GET_CODE (XEXP (dest, 0)) == POST_INC
387 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
388 break;
389 total_pushed -= GET_MODE_SIZE (SET_DEST (pbody));
390 /* If this push doesn't fully fit in the space
391 of the stack adjust that we deleted,
392 make another stack adjust here for what we
393 didn't use up. There should be peepholes
394 to recognize the resulting sequence of insns. */
395 if (total_pushed < 0)
396 {
397 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
5f4f0e22 398 GEN_INT (- total_pushed)),
15a63be1
RK
399 p);
400 break;
401 }
402 XEXP (dest, 0)
403 = plus_constant (stack_pointer_rtx, total_pushed);
404 }
405 }
406#endif
407
408 /* Detect and delete no-op move instructions
409 resulting from not allocating a parameter in a register. */
410
411 if (GET_CODE (body) == SET
412 && (SET_DEST (body) == SET_SRC (body)
413 || (GET_CODE (SET_DEST (body)) == MEM
414 && GET_CODE (SET_SRC (body)) == MEM
415 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
416 && ! (GET_CODE (SET_DEST (body)) == MEM
417 && MEM_VOLATILE_P (SET_DEST (body)))
418 && ! (GET_CODE (SET_SRC (body)) == MEM
419 && MEM_VOLATILE_P (SET_SRC (body))))
420 delete_insn (insn);
421
422 /* Detect and ignore no-op move instructions
423 resulting from smart or fortuitous register allocation. */
424
425 else if (GET_CODE (body) == SET)
426 {
427 int sreg = true_regnum (SET_SRC (body));
428 int dreg = true_regnum (SET_DEST (body));
429
430 if (sreg == dreg && sreg >= 0)
431 delete_insn (insn);
432 else if (sreg >= 0 && dreg >= 0)
433 {
434 rtx trial;
5f4f0e22
CH
435 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
436 sreg, NULL_PTR, dreg,
15a63be1
RK
437 GET_MODE (SET_SRC (body)));
438
439#ifdef PRESERVE_DEATH_INFO_REGNO_P
440 /* Deleting insn could lose a death-note for SREG or DREG
441 so don't do it if final needs accurate death-notes. */
442 if (! PRESERVE_DEATH_INFO_REGNO_P (sreg)
443 && ! PRESERVE_DEATH_INFO_REGNO_P (dreg))
444#endif
445 {
446 /* DREG may have been the target of a REG_DEAD note in
447 the insn which makes INSN redundant. If so, reorg
448 would still think it is dead. So search for such a
449 note and delete it if we find it. */
450 for (trial = prev_nonnote_insn (insn);
451 trial && GET_CODE (trial) != CODE_LABEL;
452 trial = prev_nonnote_insn (trial))
453 if (find_regno_note (trial, REG_DEAD, dreg))
454 {
455 remove_death (dreg, trial);
456 break;
457 }
458
459 if (tem != 0
460 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
461 delete_insn (insn);
462 }
463 }
464 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
5f4f0e22
CH
465 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
466 NULL_PTR, 0,
467 GET_MODE (SET_DEST (body))))
15a63be1
RK
468 {
469 /* This handles the case where we have two consecutive
470 assignments of the same constant to pseudos that didn't
471 get a hard reg. Each SET from the constant will be
472 converted into a SET of the spill register and an
473 output reload will be made following it. This produces
474 two loads of the same constant into the same spill
475 register. */
476
477 rtx in_insn = insn;
478
479 /* Look back for a death note for the first reg.
480 If there is one, it is no longer accurate. */
481 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
482 {
483 if ((GET_CODE (in_insn) == INSN
484 || GET_CODE (in_insn) == JUMP_INSN)
485 && find_regno_note (in_insn, REG_DEAD, dreg))
486 {
487 remove_death (dreg, in_insn);
488 break;
489 }
490 in_insn = PREV_INSN (in_insn);
491 }
492
493 /* Delete the second load of the value. */
494 delete_insn (insn);
495 }
496 }
497 else if (GET_CODE (body) == PARALLEL)
498 {
499 /* If each part is a set between two identical registers or
500 a USE or CLOBBER, delete the insn. */
501 int i, sreg, dreg;
502 rtx tem;
503
504 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
505 {
506 tem = XVECEXP (body, 0, i);
507 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
508 continue;
509
510 if (GET_CODE (tem) != SET
511 || (sreg = true_regnum (SET_SRC (tem))) < 0
512 || (dreg = true_regnum (SET_DEST (tem))) < 0
513 || dreg != sreg)
514 break;
515 }
516
517 if (i < 0)
518 delete_insn (insn);
519 }
520#if !BYTES_BIG_ENDIAN /* Not worth the hair to detect this
521 in the big-endian case. */
522 /* Also delete insns to store bit fields if they are no-ops. */
523 else if (GET_CODE (body) == SET
524 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
525 && XEXP (SET_DEST (body), 2) == const0_rtx
526 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
527 && ! (GET_CODE (SET_SRC (body)) == MEM
528 && MEM_VOLATILE_P (SET_SRC (body))))
529 delete_insn (insn);
530#endif /* not BYTES_BIG_ENDIAN */
531 }
532 insn = next;
533 }
534
535 /* Now iterate optimizing jumps until nothing changes over one pass. */
536 changed = 1;
537 while (changed)
538 {
539 register rtx next;
540 changed = 0;
541
542 for (insn = f; insn; insn = next)
543 {
544 rtx reallabelprev;
545 rtx temp, temp1, temp2, temp3, temp4, temp5;
546 rtx nlabel;
3915de94 547 int this_is_simplejump, this_is_condjump, reversep;
15a63be1
RK
548#if 0
549 /* If NOT the first iteration, if this is the last jump pass
550 (just before final), do the special peephole optimizations.
551 Avoiding the first iteration gives ordinary jump opts
552 a chance to work before peephole opts. */
553
554 if (reload_completed && !first && !flag_no_peephole)
555 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
556 peephole (insn);
557#endif
558
559 /* That could have deleted some insns after INSN, so check now
560 what the following insn is. */
561
562 next = NEXT_INSN (insn);
563
564 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
565 jump. Try to optimize by duplicating the loop exit test if so.
566 This is only safe immediately after regscan, because it uses
567 the values of regno_first_uid and regno_last_uid. */
568 if (after_regscan && GET_CODE (insn) == NOTE
569 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
570 && (temp1 = next_nonnote_insn (insn)) != 0
571 && simplejump_p (temp1))
572 {
573 temp = PREV_INSN (insn);
574 if (duplicate_loop_exit_test (insn))
575 {
576 changed = 1;
577 next = NEXT_INSN (temp);
578 continue;
579 }
580 }
581
582 if (GET_CODE (insn) != JUMP_INSN)
583 continue;
584
585 this_is_simplejump = simplejump_p (insn);
586 this_is_condjump = condjump_p (insn);
587
588 /* Tension the labels in dispatch tables. */
589
590 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
591 changed |= tension_vector_labels (PATTERN (insn), 0);
592 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
593 changed |= tension_vector_labels (PATTERN (insn), 1);
594
595 /* If a dispatch table always goes to the same place,
596 get rid of it and replace the insn that uses it. */
597
598 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
599 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
600 {
601 int i;
602 rtx pat = PATTERN (insn);
603 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
604 int len = XVECLEN (pat, diff_vec_p);
605 rtx dispatch = prev_real_insn (insn);
606
607 for (i = 0; i < len; i++)
608 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
609 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
610 break;
611 if (i == len
612 && GET_CODE (dispatch) == JUMP_INSN
613 && JUMP_LABEL (dispatch) != 0
614 /* Don't mess with a casesi insn. */
615 && !(GET_CODE (PATTERN (dispatch)) == SET
616 && (GET_CODE (SET_SRC (PATTERN (dispatch)))
617 == IF_THEN_ELSE))
618 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
619 {
620 redirect_tablejump (dispatch,
621 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
622 changed = 1;
623 }
624 }
625
626 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
627
628 /* If a jump references the end of the function, try to turn
629 it into a RETURN insn, possibly a conditional one. */
630 if (JUMP_LABEL (insn)
631 && next_active_insn (JUMP_LABEL (insn)) == 0)
5f4f0e22 632 changed |= redirect_jump (insn, NULL_RTX);
15a63be1
RK
633
634 /* Detect jump to following insn. */
635 if (reallabelprev == insn && condjump_p (insn))
636 {
637 delete_jump (insn);
638 changed = 1;
639 continue;
640 }
641
d45cf215 642 /* If we have an unconditional jump preceded by a USE, try to put
15a63be1
RK
643 the USE before the target and jump there. This simplifies many
644 of the optimizations below since we don't have to worry about
645 dealing with these USE insns. We only do this if the label
646 being branch to already has the identical USE or if code
647 never falls through to that label. */
648
649 if (this_is_simplejump
650 && (temp = prev_nonnote_insn (insn)) != 0
651 && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
652 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
653 && (GET_CODE (temp1) == BARRIER
654 || (GET_CODE (temp1) == INSN
655 && rtx_equal_p (PATTERN (temp), PATTERN (temp1)))))
656 {
657 if (GET_CODE (temp1) == BARRIER)
658 {
659 reorder_insns (temp, temp, temp1);
660 temp1 = NEXT_INSN (temp1);
661 }
662 else
663 delete_insn (temp);
664
665 redirect_jump (insn, get_label_before (temp1));
666 reallabelprev = prev_real_insn (temp1);
667 changed = 1;
668 }
669
670 /* Simplify if (...) x = a; else x = b; by converting it
671 to x = b; if (...) x = a;
672 if B is sufficiently simple, the test doesn't involve X,
673 and nothing in the test modifies B or X.
674
675 If we have small register classes, we also can't do this if X
676 is a hard register.
677
678 If the "x = b;" insn has any REG_NOTES, we don't do this because
679 of the possibility that we are running after CSE and there is a
680 REG_EQUAL note that is only valid if the branch has already been
681 taken. If we move the insn with the REG_EQUAL note, we may
682 fold the comparison to always be false in a later CSE pass.
683 (We could also delete the REG_NOTES when moving the insn, but it
684 seems simpler to not move it.) An exception is that we can move
685 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
686 value is the same as "b".
687
688 INSN is the branch over the `else' part.
689
690 We set:
691
d45cf215 692 TEMP to the jump insn preceding "x = a;"
15a63be1
RK
693 TEMP1 to X
694 TEMP2 to the insn that sets "x = b;"
695 TEMP3 to the insn that sets "x = a;" */
696
697 if (this_is_simplejump
698 && (temp3 = prev_active_insn (insn)) != 0
699 && GET_CODE (temp3) == INSN
700 && GET_CODE (PATTERN (temp3)) == SET
701 && GET_CODE (temp1 = SET_DEST (PATTERN (temp3))) == REG
702#ifdef SMALL_REGISTER_CLASSES
703 && REGNO (temp1) >= FIRST_PSEUDO_REGISTER
704#endif
705 && (temp2 = next_active_insn (insn)) != 0
706 && GET_CODE (temp2) == INSN
707 && GET_CODE (PATTERN (temp2)) == SET
708 && rtx_equal_p (SET_DEST (PATTERN (temp2)), temp1)
709 && (GET_CODE (SET_SRC (PATTERN (temp2))) == REG
710 || CONSTANT_P (SET_SRC (PATTERN (temp2))))
711 && (REG_NOTES (temp2) == 0
712 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
713 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
714 && XEXP (REG_NOTES (temp2), 1) == 0
715 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
716 SET_SRC (PATTERN (temp2)))))
717 && (temp = prev_active_insn (temp3)) != 0
718 && condjump_p (temp) && ! simplejump_p (temp)
719 /* TEMP must skip over the "x = a;" insn */
720 && prev_real_insn (JUMP_LABEL (temp)) == insn
721 && no_labels_between_p (insn, JUMP_LABEL (temp))
722 /* There must be no other entries to the "x = b;" insn. */
723 && no_labels_between_p (JUMP_LABEL (temp), temp2)
724 /* INSN must either branch to the insn after TEMP2 or the insn
725 after TEMP2 must branch to the same place as INSN. */
726 && (reallabelprev == temp2
727 || ((temp4 = next_active_insn (temp2)) != 0
728 && simplejump_p (temp4)
729 && JUMP_LABEL (temp4) == JUMP_LABEL (insn))))
730 {
731 /* The test expression, X, may be a complicated test with
732 multiple branches. See if we can find all the uses of
733 the label that TEMP branches to without hitting a CALL_INSN
734 or a jump to somewhere else. */
735 rtx target = JUMP_LABEL (temp);
736 int nuses = LABEL_NUSES (target);
737 rtx p, q;
738
739 /* Set P to the first jump insn that goes around "x = a;". */
740 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
741 {
742 if (GET_CODE (p) == JUMP_INSN)
743 {
744 if (condjump_p (p) && ! simplejump_p (p)
745 && JUMP_LABEL (p) == target)
746 {
747 nuses--;
748 if (nuses == 0)
749 break;
750 }
751 else
752 break;
753 }
754 else if (GET_CODE (p) == CALL_INSN)
755 break;
756 }
757
758#ifdef HAVE_cc0
759 /* We cannot insert anything between a set of cc and its use
760 so if P uses cc0, we must back up to the previous insn. */
761 q = prev_nonnote_insn (p);
762 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
763 && sets_cc0_p (PATTERN (q)))
764 p = q;
765#endif
766
767 if (p)
768 p = PREV_INSN (p);
769
770 /* If we found all the uses and there was no data conflict, we
771 can move the assignment unless we can branch into the middle
772 from somewhere. */
773 if (nuses == 0 && p
774 && no_labels_between_p (p, insn)
775 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
776 && ! reg_set_between_p (temp1, p, temp3)
777 && (GET_CODE (SET_SRC (PATTERN (temp2))) == CONST_INT
778 || ! reg_set_between_p (SET_SRC (PATTERN (temp2)),
779 p, temp2)))
780 {
781 reorder_insns_with_line_notes (temp2, temp2, p);
782
783 /* Set NEXT to an insn that we know won't go away. */
784 next = next_active_insn (insn);
785
786 /* Delete the jump around the set. Note that we must do
787 this before we redirect the test jumps so that it won't
788 delete the code immediately following the assignment
789 we moved (which might be a jump). */
790
791 delete_insn (insn);
792
793 /* We either have two consecutive labels or a jump to
794 a jump, so adjust all the JUMP_INSNs to branch to where
795 INSN branches to. */
796 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
797 if (GET_CODE (p) == JUMP_INSN)
798 redirect_jump (p, target);
799
800 changed = 1;
801 continue;
802 }
803 }
804
3915de94
TW
805 /* We deal with four cases:
806
807 1) x = a; if (...) x = b; and either A or B is zero,
808 2) if (...) x = 0; and jumps are expensive,
809 3) x = a; if (...) x = b; and A and B are constants where all the
810 set bits in A are also set in B and jumps are expensive, and
811 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
812 more expensive.
813
814 In each of these try to use a store-flag insn to avoid the jump.
815 (If the jump would be faster, the machine should not have
816 defined the scc insns!). These cases are often made by the
817 previous optimization.
15a63be1
RK
818
819 INSN here is the jump around the store. We set:
820
821 TEMP to the "x = b;" insn.
822 TEMP1 to X.
823 TEMP2 to B (const0_rtx in the second case).
824 TEMP3 to A (X in the second case).
825 TEMP4 to the condition being tested.
826 TEMP5 to the earliest insn used to find the condition. */
827
828 if (/* We can't do this after reload has completed. */
829 ! reload_completed
830 && this_is_condjump && ! this_is_simplejump
831 /* Set TEMP to the "x = b;" insn. */
832 && (temp = next_nonnote_insn (insn)) != 0
833 && GET_CODE (temp) == INSN
834 && GET_CODE (PATTERN (temp)) == SET
835 && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
836#ifdef SMALL_REGISTER_CLASSES
837 && REGNO (temp1) >= FIRST_PSEUDO_REGISTER
838#endif
839 && GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
840 && (GET_CODE (temp2 = SET_SRC (PATTERN (temp))) == REG
841 || GET_CODE (temp2) == CONST_INT)
842 /* Allow either form, but prefer the former if both apply. */
843 && (((temp3 = reg_set_last (temp1, insn)) != 0
844 && ((GET_CODE (temp3) == REG
845#ifdef SMALL_REGISTER_CLASSES
846 && REGNO (temp3) >= FIRST_PSEUDO_REGISTER
847#endif
848 )
849 || GET_CODE (temp3) == CONST_INT))
850 /* Make the latter case look like x = x; if (...) x = 0; */
3915de94 851 || ((temp3 = temp1, BRANCH_COST >= 2)
15a63be1
RK
852 && temp2 == const0_rtx))
853 /* INSN must either branch to the insn after TEMP or the insn
854 after TEMP must branch to the same place as INSN. */
855 && (reallabelprev == temp
856 || ((temp4 = next_active_insn (temp)) != 0
857 && simplejump_p (temp4)
858 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
859 && (temp4 = get_condition (insn, &temp5)) != 0
860
3915de94
TW
861 /* If B is zero, OK; if A is zero, can only do (1) if we
862 can reverse the condition. See if (3) applies possibly
863 by reversing the condition. Prefer reversing to (4) when
864 branches are very expensive. */
865 && ((reversep = 0, temp2 == const0_rtx)
15a63be1 866 || (temp3 == const0_rtx
3915de94
TW
867 && (reversep = can_reverse_comparison_p (temp4, insn)))
868 || (BRANCH_COST >= 2
869 && GET_CODE (temp2) == CONST_INT
870 && GET_CODE (temp3) == CONST_INT
871 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
872 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
873 && (reversep = can_reverse_comparison_p (temp4,
874 insn)))))
875 || BRANCH_COST >= 3))
15a63be1
RK
876 {
877 enum rtx_code code = GET_CODE (temp4);
3915de94 878 rtx uval, cval, var = temp1;
15a63be1
RK
879 int normalizep;
880 rtx target;
881
882 /* If necessary, reverse the condition. */
3915de94
TW
883 if (reversep)
884 code = reverse_condition (code), uval = temp2, cval = temp3;
885 else
886 uval = temp3, cval = temp2;
15a63be1
RK
887
888 /* See if we can do this with a store-flag insn. */
889 start_sequence ();
890
3915de94
TW
891 /* If CVAL is non-zero, normalize to -1. Otherwise,
892 if UVAL is the constant 1, it is best to just compute
893 the result directly. If UVAL is constant and STORE_FLAG_VALUE
15a63be1 894 includes all of its bits, it is best to compute the flag
3915de94
TW
895 value unnormalized and `and' it with UVAL. Otherwise,
896 normalize to -1 and `and' with UVAL. */
897 normalizep = (cval != const0_rtx ? -1
898 : (uval == const1_rtx ? 1
899 : (GET_CODE (uval) == CONST_INT
900 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
901 ? 0 : -1));
15a63be1
RK
902
903 /* We will be putting the store-flag insn immediately in
904 front of the comparison that was originally being done,
905 so we know all the variables in TEMP4 will be valid.
906 However, this might be in front of the assignment of
907 A to VAR. If it is, it would clobber the store-flag
908 we will be emitting.
909
910 Therefore, emit into a temporary which will be copied to
911 VAR immediately after TEMP. */
912
913 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
914 XEXP (temp4, 0), XEXP (temp4, 1),
915 VOIDmode,
916 (code == LTU || code == LEU
917 || code == GEU || code == GTU),
918 normalizep);
919 if (target)
920 {
921 rtx seq;
922
3915de94
TW
923 /* Both CVAL and UVAL are non-zero. */
924 if (cval != const0_rtx && uval != const0_rtx)
925 {
926 rtx tem1, tem2;
927
928 tem1 = expand_and (uval, target, NULL_RTX);
929 if (GET_CODE (cval) == CONST_INT
930 && GET_CODE (uval) == CONST_INT
931 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
932 tem2 = cval;
933 else
934 {
935 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
936 target, NULL_RTX, 0);
937 tem2 = expand_and (cval, tem2, tem2);
938 }
939
940 /* If we usually make new pseudos, do so here. This
941 turns out to help machines that have conditional
942 move insns. */
943
944 if (flag_expensive_optimizations)
945 target = 0;
946
947 target = expand_binop (GET_MODE (var), ior_optab,
948 tem1, tem2, target,
949 1, OPTAB_WIDEN);
950 }
951 else if (normalizep != 1)
952 target = expand_and (uval, target,
15a63be1 953 (GET_CODE (target) == REG
5f4f0e22 954 ? target : NULL_RTX));
3915de94 955
15a63be1
RK
956 seq = gen_sequence ();
957 end_sequence ();
958 emit_insn_before (seq, temp5);
959 emit_insn_after (gen_move_insn (var, target), insn);
960 delete_insn (temp);
961 next = NEXT_INSN (insn);
962#ifdef HAVE_cc0
963 delete_insn (prev_nonnote_insn (insn));
964#endif
965 delete_insn (insn);
966 changed = 1;
967 continue;
968 }
969 else
970 end_sequence ();
971 }
972
973 /* If branches are expensive, convert
974 if (foo) bar++; to bar += (foo != 0);
975 and similarly for "bar--;"
976
977 INSN is the conditional branch around the arithmetic. We set:
978
979 TEMP is the arithmetic insn.
6dc42e49 980 TEMP1 is the SET doing the arithmetic.
15a63be1
RK
981 TEMP2 is the operand being incremented or decremented.
982 TEMP3 to the condition being tested.
983 TEMP4 to the earliest insn used to find the condition. */
984
985 if (BRANCH_COST >= 2
986 && ! reload_completed
987 && this_is_condjump && ! this_is_simplejump
988 && (temp = next_nonnote_insn (insn)) != 0
989 && (temp1 = single_set (temp)) != 0
990 && (temp2 = SET_DEST (temp1),
991 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
992 && GET_CODE (SET_SRC (temp1)) == PLUS
993 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
994 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
995 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
996 /* INSN must either branch to the insn after TEMP or the insn
997 after TEMP must branch to the same place as INSN. */
998 && (reallabelprev == temp
999 || ((temp3 = next_active_insn (temp)) != 0
1000 && simplejump_p (temp3)
1001 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
1002 && (temp3 = get_condition (insn, &temp4)) != 0
1003 && can_reverse_comparison_p (temp3, insn))
1004 {
626d0d1d 1005 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
15a63be1
RK
1006 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1007
1008 start_sequence ();
1009
626d0d1d
RK
1010 /* It must be the case that TEMP2 is not modified in the range
1011 [TEMP4, INSN). The one exception we make is if the insn
1012 before INSN sets TEMP2 to something which is also unchanged
1013 in that range. In that case, we can move the initialization
1014 into our sequence. */
1015
1016 if ((temp5 = prev_active_insn (insn)) != 0
1017 && GET_CODE (temp5) == INSN
1018 && (temp6 = single_set (temp5)) != 0
1019 && rtx_equal_p (temp2, SET_DEST (temp6))
1020 && (CONSTANT_P (SET_SRC (temp6))
1021 || GET_CODE (SET_SRC (temp6)) == REG
1022 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1023 {
1024 emit_insn (PATTERN (temp5));
1025 init_insn = temp5;
1026 init = SET_SRC (temp6);
1027 }
1028
1029 if (CONSTANT_P (init)
1030 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1031 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1032 XEXP (temp3, 0), XEXP (temp3, 1),
1033 VOIDmode,
1034 (code == LTU || code == LEU
1035 || code == GTU || code == GEU), 1);
15a63be1
RK
1036
1037 /* If we can do the store-flag, do the addition or
1038 subtraction. */
1039
1040 if (target)
1041 target = expand_binop (GET_MODE (temp2),
1042 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1043 ? add_optab : sub_optab),
1044 temp2, target, temp2, OPTAB_WIDEN);
1045
1046 if (target != 0)
1047 {
	      /* Put the result back in temp2 in case it isn't already.
		 Then replace the jump, possibly a CC0-setting insn in
		 front of the jump, and TEMP, with the sequence we have
		 made.  */
1052
1053 if (target != temp2)
1054 emit_move_insn (temp2, target);
1055
1056 seq = get_insns ();
1057 end_sequence ();
1058
1059 emit_insns_before (seq, temp4);
1060 delete_insn (temp);
626d0d1d
RK
1061
1062 if (init_insn)
1063 delete_insn (init_insn);
1064
15a63be1
RK
1065 next = NEXT_INSN (insn);
1066#ifdef HAVE_cc0
1067 delete_insn (prev_nonnote_insn (insn));
1068#endif
1069 delete_insn (insn);
1070 changed = 1;
1071 continue;
1072 }
1073 else
1074 end_sequence ();
1075 }
1076
1077 /* Simplify if (...) x = 1; else {...} if (x) ...
1078 We recognize this case scanning backwards as well.
1079
1080 TEMP is the assignment to x;
1081 TEMP1 is the label at the head of the second if. */
1082 /* ?? This should call get_condition to find the values being
1083 compared, instead of looking for a COMPARE insn when HAVE_cc0
1084 is not defined. This would allow it to work on the m88k. */
1085 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1086 is not defined and the condition is tested by a separate compare
1087 insn. This is because the code below assumes that the result
1088 of the compare dies in the following branch.
1089
1090 Not only that, but there might be other insns between the
1091 compare and branch whose results are live. Those insns need
1092 to be executed.
1093
1094 A way to fix this is to move the insns at JUMP_LABEL (insn)
1095 to before INSN. If we are running before flow, they will
1096 be deleted if they aren't needed. But this doesn't work
1097 well after flow.
1098
1099 This is really a special-case of jump threading, anyway. The
1100 right thing to do is to replace this and jump threading with
1101 much simpler code in cse.
1102
1103 This code has been turned off in the non-cc0 case in the
1104 meantime. */
1105
1106#ifdef HAVE_cc0
1107 else if (this_is_simplejump
1108 /* Safe to skip USE and CLOBBER insns here
1109 since they will not be deleted. */
1110 && (temp = prev_active_insn (insn))
1111 && no_labels_between_p (temp, insn)
1112 && GET_CODE (temp) == INSN
1113 && GET_CODE (PATTERN (temp)) == SET
1114 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1115 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1116 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1117 /* If we find that the next value tested is `x'
1118 (TEMP1 is the insn where this happens), win. */
1119 && GET_CODE (temp1) == INSN
1120 && GET_CODE (PATTERN (temp1)) == SET
1121#ifdef HAVE_cc0
1122 /* Does temp1 `tst' the value of x? */
1123 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1124 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1125 && (temp1 = next_nonnote_insn (temp1))
1126#else
1127 /* Does temp1 compare the value of x against zero? */
1128 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1129 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1130 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1131 == SET_DEST (PATTERN (temp)))
1132 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1133 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1134#endif
1135 && condjump_p (temp1))
1136 {
1137 /* Get the if_then_else from the condjump. */
1138 rtx choice = SET_SRC (PATTERN (temp1));
1139 if (GET_CODE (choice) == IF_THEN_ELSE)
1140 {
1141 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1142 rtx val = SET_SRC (PATTERN (temp));
1143 rtx cond
1144 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1145 val, const0_rtx);
1146 rtx ultimate;
1147
1148 if (cond == const_true_rtx)
1149 ultimate = XEXP (choice, 1);
1150 else if (cond == const0_rtx)
1151 ultimate = XEXP (choice, 2);
1152 else
1153 ultimate = 0;
1154
1155 if (ultimate == pc_rtx)
1156 ultimate = get_label_after (temp1);
1157 else if (ultimate && GET_CODE (ultimate) != RETURN)
1158 ultimate = XEXP (ultimate, 0);
1159
1160 if (ultimate)
1161 changed |= redirect_jump (insn, ultimate);
1162 }
1163 }
1164#endif
1165
1166#if 0
1167 /* @@ This needs a bit of work before it will be right.
1168
1169 Any type of comparison can be accepted for the first and
	 second compare.  When rewriting the first jump, we must
	 compute what conditions can reach label3, and use the
1172 appropriate code. We can not simply reverse/swap the code
1173 of the first jump. In some cases, the second jump must be
1174 rewritten also.
1175
1176 For example,
1177 < == converts to > ==
1178 < != converts to == >
1179 etc.
1180
1181 If the code is written to only accept an '==' test for the second
1182 compare, then all that needs to be done is to swap the condition
1183 of the first branch.
1184
1185 It is questionable whether we want this optimization anyways,
1186 since if the user wrote code like this because he/she knew that
6dc42e49 1187 the jump to label1 is taken most of the time, then rewriting
15a63be1
RK
1188 this gives slower code. */
1189 /* @@ This should call get_condition to find the values being
1190 compared, instead of looking for a COMPARE insn when HAVE_cc0
1191 is not defined. This would allow it to work on the m88k. */
1192 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1193 is not defined and the condition is tested by a separate compare
1194 insn. This is because the code below assumes that the result
1195 of the compare dies in the following branch. */
1196
1197 /* Simplify test a ~= b
1198 condjump label1;
1199 test a == b
1200 condjump label2;
1201 jump label3;
1202 label1:
1203
1204 rewriting as
1205 test a ~~= b
1206 condjump label3
1207 test a == b
1208 condjump label2
1209 label1:
1210
1211 where ~= is an inequality, e.g. >, and ~~= is the swapped
1212 inequality, e.g. <.
1213
1214 We recognize this case scanning backwards.
1215
1216 TEMP is the conditional jump to `label2';
1217 TEMP1 is the test for `a == b';
1218 TEMP2 is the conditional jump to `label1';
1219 TEMP3 is the test for `a ~= b'. */
1220 else if (this_is_simplejump
1221 && (temp = prev_active_insn (insn))
1222 && no_labels_between_p (temp, insn)
1223 && condjump_p (temp)
1224 && (temp1 = prev_active_insn (temp))
1225 && no_labels_between_p (temp1, temp)
1226 && GET_CODE (temp1) == INSN
1227 && GET_CODE (PATTERN (temp1)) == SET
1228#ifdef HAVE_cc0
1229 && sets_cc0_p (PATTERN (temp1)) == 1
1230#else
1231 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1232 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1233 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1234#endif
1235 && (temp2 = prev_active_insn (temp1))
1236 && no_labels_between_p (temp2, temp1)
1237 && condjump_p (temp2)
1238 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1239 && (temp3 = prev_active_insn (temp2))
1240 && no_labels_between_p (temp3, temp2)
1241 && GET_CODE (PATTERN (temp3)) == SET
1242 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1243 SET_DEST (PATTERN (temp1)))
1244 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1245 SET_SRC (PATTERN (temp3)))
1246 && ! inequality_comparisons_p (PATTERN (temp))
1247 && inequality_comparisons_p (PATTERN (temp2)))
1248 {
1249 rtx fallthrough_label = JUMP_LABEL (temp2);
1250
1251 ++LABEL_NUSES (fallthrough_label);
1252 if (swap_jump (temp2, JUMP_LABEL (insn)))
1253 {
1254 delete_insn (insn);
1255 changed = 1;
1256 }
1257
1258 if (--LABEL_NUSES (fallthrough_label) == 0)
1259 delete_insn (fallthrough_label);
1260 }
1261#endif
1262 /* Simplify if (...) {... x = 1;} if (x) ...
1263
1264 We recognize this case backwards.
1265
1266 TEMP is the test of `x';
1267 TEMP1 is the assignment to `x' at the end of the
1268 previous statement. */
1269 /* @@ This should call get_condition to find the values being
1270 compared, instead of looking for a COMPARE insn when HAVE_cc0
1271 is not defined. This would allow it to work on the m88k. */
1272 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1273 is not defined and the condition is tested by a separate compare
1274 insn. This is because the code below assumes that the result
1275 of the compare dies in the following branch. */
2d20b9df
RS
1276
1277 /* ??? This has to be turned off. The problem is that the
1278 unconditional jump might indirectly end up branching to the
1279 label between TEMP1 and TEMP. We can't detect this, in general,
1280 since it may become a jump to there after further optimizations.
1281 If that jump is done, it will be deleted, so we will retry
1282 this optimization in the next pass, thus an infinite loop.
1283
1284 The present code prevents this by putting the jump after the
1285 label, but this is not logically correct. */
1286#if 0
15a63be1
RK
1287 else if (this_is_condjump
1288 /* Safe to skip USE and CLOBBER insns here
1289 since they will not be deleted. */
1290 && (temp = prev_active_insn (insn))
1291 && no_labels_between_p (temp, insn)
1292 && GET_CODE (temp) == INSN
1293 && GET_CODE (PATTERN (temp)) == SET
1294#ifdef HAVE_cc0
1295 && sets_cc0_p (PATTERN (temp)) == 1
1296 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1297#else
1298 /* Temp must be a compare insn, we can not accept a register
1299 to register move here, since it may not be simply a
1300 tst insn. */
1301 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1302 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1303 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1304 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1305 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1306#endif
1307 /* May skip USE or CLOBBER insns here
1308 for checking for opportunity, since we
1309 take care of them later. */
1310 && (temp1 = prev_active_insn (temp))
1311 && GET_CODE (temp1) == INSN
1312 && GET_CODE (PATTERN (temp1)) == SET
1313#ifdef HAVE_cc0
1314 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1315#else
1316 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1317 == SET_DEST (PATTERN (temp1)))
1318#endif
1319 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1320 /* If this isn't true, cse will do the job. */
1321 && ! no_labels_between_p (temp1, temp))
1322 {
1323 /* Get the if_then_else from the condjump. */
1324 rtx choice = SET_SRC (PATTERN (insn));
1325 if (GET_CODE (choice) == IF_THEN_ELSE
1326 && (GET_CODE (XEXP (choice, 0)) == EQ
1327 || GET_CODE (XEXP (choice, 0)) == NE))
1328 {
1329 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1330 rtx last_insn;
1331 rtx ultimate;
1332 rtx p;
1333
1334 /* Get the place that condjump will jump to
1335 if it is reached from here. */
1336 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1337 == want_nonzero)
1338 ultimate = XEXP (choice, 1);
1339 else
1340 ultimate = XEXP (choice, 2);
1341 /* Get it as a CODE_LABEL. */
1342 if (ultimate == pc_rtx)
1343 ultimate = get_label_after (insn);
1344 else
1345 /* Get the label out of the LABEL_REF. */
1346 ultimate = XEXP (ultimate, 0);
1347
2d20b9df
RS
1348 /* Insert the jump immediately before TEMP, specifically
1349 after the label that is between TEMP1 and TEMP. */
1350 last_insn = PREV_INSN (temp);
15a63be1
RK
1351
		  /* If we would be branching to the next insn, the jump
		     would immediately be deleted and then re-inserted in
		     a subsequent pass over the code.  So don't do anything
		     in that case.  */
1356 if (next_active_insn (last_insn)
1357 != next_active_insn (ultimate))
1358 {
1359 emit_barrier_after (last_insn);
1360 p = emit_jump_insn_after (gen_jump (ultimate),
1361 last_insn);
1362 JUMP_LABEL (p) = ultimate;
1363 ++LABEL_NUSES (ultimate);
1364 if (INSN_UID (ultimate) < max_jump_chain
1365 && INSN_CODE (p) < max_jump_chain)
1366 {
1367 jump_chain[INSN_UID (p)]
1368 = jump_chain[INSN_UID (ultimate)];
1369 jump_chain[INSN_UID (ultimate)] = p;
1370 }
1371 changed = 1;
1372 continue;
1373 }
1374 }
1375 }
2d20b9df 1376#endif
15a63be1
RK
1377 /* Detect a conditional jump going to the same place
1378 as an immediately following unconditional jump. */
1379 else if (this_is_condjump
1380 && (temp = next_active_insn (insn)) != 0
1381 && simplejump_p (temp)
1382 && (next_active_insn (JUMP_LABEL (insn))
1383 == next_active_insn (JUMP_LABEL (temp))))
1384 {
1385 delete_jump (insn);
1386 changed = 1;
1387 continue;
1388 }
1389 /* Detect a conditional jump jumping over an unconditional jump. */
1390
1391 else if (this_is_condjump && ! this_is_simplejump
1392 && reallabelprev != 0
1393 && GET_CODE (reallabelprev) == JUMP_INSN
1394 && prev_active_insn (reallabelprev) == insn
1395 && no_labels_between_p (insn, reallabelprev)
1396 && simplejump_p (reallabelprev))
1397 {
1398 /* When we invert the unconditional jump, we will be
1399 decrementing the usage count of its old label.
1400 Make sure that we don't delete it now because that
1401 might cause the following code to be deleted. */
1402 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1403 rtx prev_label = JUMP_LABEL (insn);
1404
1405 ++LABEL_NUSES (prev_label);
1406
1407 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1408 {
1409 /* It is very likely that if there are USE insns before
1410 this jump, they hold REG_DEAD notes. These REG_DEAD
1411 notes are no longer valid due to this optimization,
1412 and will cause the life-analysis that following passes
1413 (notably delayed-branch scheduling) to think that
1414 these registers are dead when they are not.
1415
1416 To prevent this trouble, we just remove the USE insns
1417 from the insn chain. */
1418
1419 while (prev_uses && GET_CODE (prev_uses) == INSN
1420 && GET_CODE (PATTERN (prev_uses)) == USE)
1421 {
1422 rtx useless = prev_uses;
1423 prev_uses = prev_nonnote_insn (prev_uses);
1424 delete_insn (useless);
1425 }
1426
1427 delete_insn (reallabelprev);
1428 next = insn;
1429 changed = 1;
1430 }
1431
1432 /* We can now safely delete the label if it is unreferenced
1433 since the delete_insn above has deleted the BARRIER. */
1434 if (--LABEL_NUSES (prev_label) == 0)
1435 delete_insn (prev_label);
1436 continue;
1437 }
1438 else
1439 {
1440 /* Detect a jump to a jump. */
1441
1442 nlabel = follow_jumps (JUMP_LABEL (insn));
1443 if (nlabel != JUMP_LABEL (insn)
1444 && redirect_jump (insn, nlabel))
1445 {
1446 changed = 1;
1447 next = insn;
1448 }
1449
1450 /* Look for if (foo) bar; else break; */
1451 /* The insns look like this:
1452 insn = condjump label1;
1453 ...range1 (some insns)...
1454 jump label2;
1455 label1:
1456 ...range2 (some insns)...
1457 jump somewhere unconditionally
1458 label2: */
1459 {
1460 rtx label1 = next_label (insn);
1461 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1462 /* Don't do this optimization on the first round, so that
1463 jump-around-a-jump gets simplified before we ask here
1464 whether a jump is unconditional.
1465
1466 Also don't do it when we are called after reload since
1467 it will confuse reorg. */
1468 if (! first
1469 && (reload_completed ? ! flag_delayed_branch : 1)
1470 /* Make sure INSN is something we can invert. */
1471 && condjump_p (insn)
1472 && label1 != 0
1473 && JUMP_LABEL (insn) == label1
1474 && LABEL_NUSES (label1) == 1
1475 && GET_CODE (range1end) == JUMP_INSN
1476 && simplejump_p (range1end))
1477 {
1478 rtx label2 = next_label (label1);
1479 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1480 if (range1end != range2end
1481 && JUMP_LABEL (range1end) == label2
1482 && GET_CODE (range2end) == JUMP_INSN
1483 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1484 /* Invert the jump condition, so we
1485 still execute the same insns in each case. */
1486 && invert_jump (insn, label1))
1487 {
1488 rtx range1beg = next_active_insn (insn);
1489 rtx range2beg = next_active_insn (label1);
1490 rtx range1after, range2after;
1491 rtx range1before, range2before;
1492
0e690bdb
RS
1493 /* Include in each range any line number before it. */
1494 while (PREV_INSN (range1beg)
d45cf215
RS
1495 && GET_CODE (PREV_INSN (range1beg)) == NOTE
1496 && NOTE_LINE_NUMBER (PREV_INSN (range1beg)) > 0)
0e690bdb
RS
1497 range1beg = PREV_INSN (range1beg);
1498
1499 while (PREV_INSN (range2beg)
d45cf215
RS
1500 && GET_CODE (PREV_INSN (range2beg)) == NOTE
1501 && NOTE_LINE_NUMBER (PREV_INSN (range2beg)) > 0)
0e690bdb
RS
1502 range2beg = PREV_INSN (range2beg);
1503
15a63be1
RK
1504 /* Don't move NOTEs for blocks or loops; shift them
1505 outside the ranges, where they'll stay put. */
915f619f
JW
1506 range1beg = squeeze_notes (range1beg, range1end);
1507 range2beg = squeeze_notes (range2beg, range2end);
15a63be1
RK
1508
1509 /* Get current surrounds of the 2 ranges. */
1510 range1before = PREV_INSN (range1beg);
1511 range2before = PREV_INSN (range2beg);
1512 range1after = NEXT_INSN (range1end);
1513 range2after = NEXT_INSN (range2end);
1514
1515 /* Splice range2 where range1 was. */
1516 NEXT_INSN (range1before) = range2beg;
1517 PREV_INSN (range2beg) = range1before;
1518 NEXT_INSN (range2end) = range1after;
1519 PREV_INSN (range1after) = range2end;
1520 /* Splice range1 where range2 was. */
1521 NEXT_INSN (range2before) = range1beg;
1522 PREV_INSN (range1beg) = range2before;
1523 NEXT_INSN (range1end) = range2after;
1524 PREV_INSN (range2after) = range1end;
1525 changed = 1;
1526 continue;
1527 }
1528 }
1529 }
1530
1531 /* Now that the jump has been tensioned,
1532 try cross jumping: check for identical code
1533 before the jump and before its target label. */
1534
1535 /* First, cross jumping of conditional jumps: */
1536
1537 if (cross_jump && condjump_p (insn))
1538 {
1539 rtx newjpos, newlpos;
1540 rtx x = prev_real_insn (JUMP_LABEL (insn));
1541
1542 /* A conditional jump may be crossjumped
1543 only if the place it jumps to follows
1544 an opposing jump that comes back here. */
1545
1546 if (x != 0 && ! jump_back_p (x, insn))
1547 /* We have no opposing jump;
1548 cannot cross jump this insn. */
1549 x = 0;
1550
1551 newjpos = 0;
1552 /* TARGET is nonzero if it is ok to cross jump
1553 to code before TARGET. If so, see if matches. */
1554 if (x != 0)
1555 find_cross_jump (insn, x, 2,
1556 &newjpos, &newlpos);
1557
1558 if (newjpos != 0)
1559 {
1560 do_cross_jump (insn, newjpos, newlpos);
1561 /* Make the old conditional jump
1562 into an unconditional one. */
1563 SET_SRC (PATTERN (insn))
1564 = gen_rtx (LABEL_REF, VOIDmode, JUMP_LABEL (insn));
1565 INSN_CODE (insn) = -1;
1566 emit_barrier_after (insn);
1567 /* Add to jump_chain unless this is a new label
1568 whose UID is too large. */
1569 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
1570 {
1571 jump_chain[INSN_UID (insn)]
1572 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1573 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
1574 }
1575 changed = 1;
1576 next = insn;
1577 }
1578 }
1579
1580 /* Cross jumping of unconditional jumps:
1581 a few differences. */
1582
1583 if (cross_jump && simplejump_p (insn))
1584 {
1585 rtx newjpos, newlpos;
1586 rtx target;
1587
1588 newjpos = 0;
1589
1590 /* TARGET is nonzero if it is ok to cross jump
1591 to code before TARGET. If so, see if matches. */
1592 find_cross_jump (insn, JUMP_LABEL (insn), 1,
1593 &newjpos, &newlpos);
1594
1595 /* If cannot cross jump to code before the label,
1596 see if we can cross jump to another jump to
1597 the same label. */
1598 /* Try each other jump to this label. */
1599 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
1600 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1601 target != 0 && newjpos == 0;
1602 target = jump_chain[INSN_UID (target)])
1603 if (target != insn
1604 && JUMP_LABEL (target) == JUMP_LABEL (insn)
1605 /* Ignore TARGET if it's deleted. */
1606 && ! INSN_DELETED_P (target))
1607 find_cross_jump (insn, target, 2,
1608 &newjpos, &newlpos);
1609
1610 if (newjpos != 0)
1611 {
1612 do_cross_jump (insn, newjpos, newlpos);
1613 changed = 1;
1614 next = insn;
1615 }
1616 }
1617
1618 /* This code was dead in the previous jump.c! */
1619 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
1620 {
1621 /* Return insns all "jump to the same place"
1622 so we can cross-jump between any two of them. */
1623
1624 rtx newjpos, newlpos, target;
1625
1626 newjpos = 0;
1627
1628 /* If cannot cross jump to code before the label,
1629 see if we can cross jump to another jump to
1630 the same label. */
1631 /* Try each other jump to this label. */
1632 for (target = jump_chain[0];
1633 target != 0 && newjpos == 0;
1634 target = jump_chain[INSN_UID (target)])
1635 if (target != insn
1636 && ! INSN_DELETED_P (target)
1637 && GET_CODE (PATTERN (target)) == RETURN)
1638 find_cross_jump (insn, target, 2,
1639 &newjpos, &newlpos);
1640
1641 if (newjpos != 0)
1642 {
1643 do_cross_jump (insn, newjpos, newlpos);
1644 changed = 1;
1645 next = insn;
1646 }
1647 }
1648 }
1649 }
1650
1651 first = 0;
1652 }
1653
1654 /* Delete extraneous line number notes.
1655 Note that two consecutive notes for different lines are not really
1656 extraneous. There should be some indication where that line belonged,
1657 even if it became empty. */
1658
1659 {
1660 rtx last_note = 0;
1661
1662 for (insn = f; insn; insn = NEXT_INSN (insn))
1663 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
1664 {
1665 /* Delete this note if it is identical to previous note. */
1666 if (last_note
1667 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
1668 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
1669 {
1670 delete_insn (insn);
1671 continue;
1672 }
1673
1674 last_note = insn;
1675 }
1676 }
1677
1678 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
1679 If so, delete it, and record that this function can drop off the end. */
1680
1681 insn = last_insn;
1682 {
1683 int n_labels = 1;
1684 while (insn
1685 /* One label can follow the end-note: the return label. */
1686 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
1687 /* Ordinary insns can follow it if returning a structure. */
1688 || GET_CODE (insn) == INSN
1689 /* If machine uses explicit RETURN insns, no epilogue,
1690 then one of them follows the note. */
1691 || (GET_CODE (insn) == JUMP_INSN
1692 && GET_CODE (PATTERN (insn)) == RETURN)
1693 /* Other kinds of notes can follow also. */
1694 || (GET_CODE (insn) == NOTE
1695 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
1696 insn = PREV_INSN (insn);
1697 }
1698
1699 /* Report if control can fall through at the end of the function. */
1700 if (insn && GET_CODE (insn) == NOTE
1701 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
1702 {
1703 can_reach_end = 1;
1704 delete_insn (insn);
1705 }
1706
1707 /* Show JUMP_CHAIN no longer valid. */
1708 jump_chain = 0;
1709}
1710\f
/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
   jump.  Assume that this unconditional jump is to the exit test code.  If
   the code is sufficiently simple, make a copy of it before INSN,
   followed by a jump to the exit of the loop.  Then delete the unconditional
   jump after INSN.

   Note that it is possible we can get confused here if the jump immediately
   after the loop start branches outside the loop but within an outer loop.
   If we are near the exit of that loop, we will copy its exit test.  This
   will not generate incorrect code, but could suppress some optimizations.
   However, such cases are degenerate loops anyway.

   Return 1 if we made the change, else 0.

   This is only safe immediately after a regscan pass because it uses the
   values of regno_first_uid and regno_last_uid.  */

static int
duplicate_loop_exit_test (loop_start)
     rtx loop_start;
{
  rtx insn, set, p;
  rtx copy, link;
  int num_insns = 0;
  /* First insn after the label targeted by the jump that follows
     LOOP_START; presumed to be the start of the loop's exit-test code.  */
  rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
  rtx lastexit;
  int max_reg = max_reg_num ();
  /* Maps pseudo-regno -> replacement reg; allocated lazily below.  */
  rtx *reg_map = 0;

  /* Scan the exit code.  We do not perform this optimization if any insn:

         is a CALL_INSN
	 is a CODE_LABEL
	 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
	 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
	 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
	      are not valid

     Also, don't do this if the exit code is more than 20 insns.  */

  for (insn = exitcode;
       insn
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	case CALL_INSN:
	  return 0;
	case NOTE:
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    return 0;
	  break;
	case JUMP_INSN:
	case INSN:
	  if (++num_insns > 20
	      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
	      || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    return 0;
	  break;
	}
    }

  /* Unless INSN is zero, we can do the optimization.  */
  if (insn == 0)
    return 0;

  lastexit = insn;

  /* See if any insn sets a register only used in the loop exit code and
     not a user variable.  If so, replace it with a new register.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
	&& (set = single_set (insn)) != 0
	&& GET_CODE (SET_DEST (set)) == REG
	&& REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
	/* First use of this pseudo is here, inside the exit code.  */
	&& regno_first_uid[REGNO (SET_DEST (set))] == INSN_UID (insn))
      {
	/* Check that the last use is also inside the exit code.  */
	for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
	  if (regno_last_uid[REGNO (SET_DEST (set))] == INSN_UID (p))
	    break;

	if (p != lastexit)
	  {
	    /* We can do the replacement.  Allocate reg_map if this is the
	       first replacement we found.  */
	    if (reg_map == 0)
	      {
		reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
		bzero (reg_map, max_reg * sizeof (rtx));
	      }

	    REG_LOOP_TEST_P (SET_DEST (set)) = 1;

	    reg_map[REGNO (SET_DEST (set))]
	      = gen_reg_rtx (GET_MODE (SET_DEST (set)));
	  }
      }

  /* Now copy each insn of the exit test in front of LOOP_START.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    switch (GET_CODE (insn))
      {
      case BARRIER:
	copy = emit_barrier_before (loop_start);
	break;
      case NOTE:
	/* Only copy line-number notes.  */
	if (NOTE_LINE_NUMBER (insn) >= 0)
	  {
	    copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
	    NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	  }
	break;

      case INSN:
	copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
	if (reg_map)
	  replace_regs (PATTERN (copy), reg_map, max_reg, 1);

	mark_jump_label (PATTERN (copy), copy, 0);

	/* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
	   make them.  */
	for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	  if (REG_NOTE_KIND (link) != REG_LABEL)
	    REG_NOTES (copy)
	      = copy_rtx (gen_rtx (EXPR_LIST, REG_NOTE_KIND (link),
				   XEXP (link, 0), REG_NOTES (copy)));
	if (reg_map && REG_NOTES (copy))
	  replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	break;

      case JUMP_INSN:
	copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
	if (reg_map)
	  replace_regs (PATTERN (copy), reg_map, max_reg, 1);
	mark_jump_label (PATTERN (copy), copy, 0);
	if (REG_NOTES (insn))
	  {
	    REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
	    if (reg_map)
	      replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	  }

	/* If this is a simple jump, add it to the jump chain.  */

	if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
	    && simplejump_p (copy))
	  {
	    jump_chain[INSN_UID (copy)]
	      = jump_chain[INSN_UID (JUMP_LABEL (copy))];
	    jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
	  }
	break;

      default:
	abort ();
      }

  /* Now clean up by emitting a jump to the end label and deleting the jump
     at the start of the loop.  */
  /* NOTE(review): COPY is read here without an initializer; if the scanned
     range contained only non-line-number NOTEs, no case above would set it.
     The scan loop above appears to guarantee at least one real insn, but
     this is worth confirming.  */
  if (GET_CODE (copy) != BARRIER)
    {
      copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
				    loop_start);
      mark_jump_label (PATTERN (copy), copy, 0);
      if (INSN_UID (copy) < max_jump_chain
	  && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
	{
	  jump_chain[INSN_UID (copy)]
	    = jump_chain[INSN_UID (JUMP_LABEL (copy))];
	  jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
	}
      emit_barrier_before (loop_start);
    }

  /* Delete the unconditional jump that followed the NOTE_INSN_LOOP_BEG;
     the duplicated exit test now falls through instead.  */
  delete_insn (next_nonnote_insn (loop_start));

  /* Mark the exit code as the virtual top of the converted loop.  */
  emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);

  return 1;
}
1899\f
/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
   loop-end notes between START and END out before START.  Assume that
   END is not such a note.  START may be such a note.  Returns the value
   of the new starting insn, which may be different if the original start
   was such a note.  */

rtx
squeeze_notes (start, end)
     rtx start, end;
{
  rtx insn;
  rtx next;

  for (insn = start; insn != end; insn = next)
    {
      /* Record the successor first; INSN's links are rewritten below
	 if it gets moved.  */
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
	{
	  if (insn == start)
	    /* A note at the very beginning need not move; just advance
	       the returned starting point past it.  */
	    start = next;
	  else
	    {
	      /* Unlink INSN from its current position and splice it
		 back in immediately before START, fixing up the
		 PREV/NEXT links on both sides of both positions.  */
	      rtx prev = PREV_INSN (insn);
	      PREV_INSN (insn) = PREV_INSN (start);
	      NEXT_INSN (insn) = start;
	      NEXT_INSN (PREV_INSN (insn)) = insn;
	      PREV_INSN (NEXT_INSN (insn)) = insn;
	      NEXT_INSN (prev) = next;
	      PREV_INSN (next) = prev;
	    }
	}
    }

  return start;
}
1941\f
/* Compare the instructions before insn E1 with those before E2
   to find an opportunity for cross jumping.
   (This means detecting identical sequences of insns followed by
   jumps to the same place, or followed by a label and a jump
   to that label, and replacing one with a jump to the other.)

   Assume E1 is a jump that jumps to label E2
   (that is not always true but it might as well be).
   Find the longest possible equivalent sequences
   and store the first insns of those sequences into *F1 and *F2.
   Store zero there if no equivalent preceding instructions are found.

   We give up if we find a label in stream 1.
   Actually we could transfer that label into stream 2.  */

static void
find_cross_jump (e1, e2, minimum, f1, f2)
     rtx e1, e2;
     int minimum;
     rtx *f1, *f2;
{
  register rtx i1 = e1, i2 = e2;	/* cursors walking backward in each stream */
  register rtx p1, p2;			/* PATTERNs of the current pair */
  int lose = 0;

  rtx last1 = 0, last2 = 0;		/* start of the matched sequences so far */
  rtx afterlast1 = 0, afterlast2 = 0;	/* one match earlier, for cc0 backout */
  rtx prev1;

  *f1 = 0;
  *f2 = 0;

  while (1)
    {
      i1 = prev_nonnote_insn (i1);

      /* In stream 2 we also skip labels, since a label before I2 does
	 not disturb the match the way one in stream 1 would.  */
      i2 = PREV_INSN (i2);
      while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
	i2 = PREV_INSN (i2);

      if (i1 == 0)
	break;

      /* Don't allow the range of insns preceding E1 or E2
	 to include the other (E2 or E1).  */
      if (i2 == e1 || i1 == e2)
	break;

      /* If we will get to this code by jumping, those jumps will be
	 tensioned to go directly to the new label (before I2),
	 so this cross-jumping won't cost extra.  So reduce the minimum.  */
      if (GET_CODE (i1) == CODE_LABEL)
	{
	  --minimum;
	  break;
	}

      if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
	break;

      p1 = PATTERN (i1);
      p2 = PATTERN (i2);

#ifdef STACK_REGS
      /* If cross_jump_death_matters is not 0, the insn's mode
	 indicates whether or not the insn contains any stack-like
	 regs.  */

      if (cross_jump_death_matters && GET_MODE (i1) == QImode)
	{
	  /* If register stack conversion has already been done, then
	     death notes must also be compared before it is certain that
	     the two instruction streams match.  */

	  rtx note;
	  HARD_REG_SET i1_regset, i2_regset;

	  CLEAR_HARD_REG_SET (i1_regset);
	  CLEAR_HARD_REG_SET (i2_regset);

	  for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

	  for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

	  GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);

	  /* The stack-reg death sets differ, so this pair cannot match.  */
	  lose = 1;

	done:
	  ;
	}
#endif

      if (lose || GET_CODE (p1) != GET_CODE (p2)
	  || ! rtx_renumbered_equal_p (p1, p2))
	{
	  /* The following code helps take care of G++ cleanups.  */
	  rtx equiv1;
	  rtx equiv2;

	  /* Even though the patterns differ, the insns may still be
	     equivalent if both have REG_EQUAL/REG_EQUIV notes giving the
	     same constant value for the same destination.  */
	  if (!lose && GET_CODE (p1) == GET_CODE (p2)
	      && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
		  || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
	      && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
		  || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
	      /* If the equivalences are not to a constant, they may
		 reference pseudos that no longer exist, so we can't
		 use them.  */
	      && CONSTANT_P (XEXP (equiv1, 0))
	      && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
	    {
	      rtx s1 = single_set (i1);
	      rtx s2 = single_set (i2);
	      if (s1 != 0 && s2 != 0
		  && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
		{
		  validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
		  validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
		  if (! rtx_renumbered_equal_p (p1, p2))
		    cancel_changes (0);
		  else if (apply_change_group ())
		    goto win;
		}
	    }

	  /* Insns fail to match; cross jumping is limited to the following
	     insns.  */

#ifdef HAVE_cc0
	  /* Don't allow the insn after a compare to be shared by
	     cross-jumping unless the compare is also shared.
	     Here, if either of these non-matching insns is a compare,
	     exclude the following insn from possible cross-jumping.  */
	  if (sets_cc0_p (p1) || sets_cc0_p (p2))
	    last1 = afterlast1, last2 = afterlast2, ++minimum;
#endif

	  /* If cross-jumping here will feed a jump-around-jump
	     optimization, this jump won't cost extra, so reduce
	     the minimum.  */
	  if (GET_CODE (i1) == JUMP_INSN
	      && JUMP_LABEL (i1)
	      && prev_real_insn (JUMP_LABEL (i1)) == e1)
	    --minimum;
	  break;
	}

    win:
      if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
	{
	  /* Ok, this insn is potentially includable in a cross-jump here.  */
	  afterlast1 = last1, afterlast2 = last2;
	  last1 = i1, last2 = i2, --minimum;
	}
    }

  /* We have to be careful that we do not cross-jump into the middle of
     USE-CALL_INSN-CLOBBER sequence.  This sequence is used instead of
     putting the USE and CLOBBERs inside the CALL_INSN.  The delay slot
     scheduler needs to know what registers are used and modified by the
     CALL_INSN and needs the adjacent USE and CLOBBERs to do so.

     ??? At some point we should probably change this so that these are
     part of the CALL_INSN.  The way we are doing it now is a kludge that
     is now causing trouble.  */

  if (last1 != 0 && GET_CODE (last1) == CALL_INSN
      && (prev1 = prev_nonnote_insn (last1))
      && GET_CODE (prev1) == INSN
      && GET_CODE (PATTERN (prev1)) == USE)
    {
      /* Remove this CALL_INSN from the range we can cross-jump.  */
      last1 = next_real_insn (last1);
      last2 = next_real_insn (last2);

      minimum++;
    }

  /* Skip past CLOBBERS since they may be right after a CALL_INSN.  It
     isn't worth checking for the CALL_INSN.  */
  while (last1 != 0 && GET_CODE (PATTERN (last1)) == CLOBBER)
    last1 = next_real_insn (last1), last2 = next_real_insn (last2);

  if (minimum <= 0 && last1 != 0 && last1 != e1)
    *f1 = last1, *f2 = last2;
}
2134
/* Perform the cross jump found by find_cross_jump: make jump insn INSN
   jump to a label placed before NEWLPOS, then delete the now-redundant
   insns from NEWJPOS up to (but not including) INSN.  */

static void
do_cross_jump (insn, newjpos, newlpos)
     rtx insn, newjpos, newlpos;
{
  /* Find an existing label at this point
     or make a new one if there is none.  */
  register rtx label = get_label_before (newlpos);

  /* Make the same jump insn jump to the new point.  */
  if (GET_CODE (PATTERN (insn)) == RETURN)
    {
      /* A RETURN cannot simply be redirected; rewrite it as a plain
	 jump to LABEL and fix the bookkeeping by hand.  */
      /* Remove from jump chain of returns.  */
      delete_from_jump_chain (insn);
      /* Change the insn.  */
      PATTERN (insn) = gen_jump (label);
      INSN_CODE (insn) = -1;
      JUMP_LABEL (insn) = label;
      LABEL_NUSES (label)++;
      /* Add to new the jump chain.  */
      if (INSN_UID (label) < max_jump_chain
	  && INSN_UID (insn) < max_jump_chain)
	{
	  jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
	  jump_chain[INSN_UID (label)] = insn;
	}
    }
  else
    redirect_jump (insn, label);

  /* Delete the matching insns before the jump.  Also, remove any REG_EQUAL
     or REG_EQUIV note in the NEWLPOS stream that isn't also present in
     the NEWJPOS stream.  */

  while (newjpos != insn)
    {
      rtx lnote;

      for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
	if ((REG_NOTE_KIND (lnote) == REG_EQUAL
	     || REG_NOTE_KIND (lnote) == REG_EQUIV)
	    && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
	    && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
	  remove_note (newlpos, lnote);

      delete_insn (newjpos);
      newjpos = next_real_insn (newjpos);
      newlpos = next_real_insn (newlpos);
    }
}
2184\f
/* Return the label before INSN, or put a new label there.  */

rtx
get_label_before (insn)
     rtx insn;
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      rtx prev = PREV_INSN (insn);

      /* Don't put a label between a CALL_INSN and USE insns that precede
	 it.  */

      if (GET_CODE (insn) == CALL_INSN
	  /* A CALL_INSN may be hidden as the first element of a SEQUENCE
	     (delay-slot filling); treat that the same way.  */
	  || (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE
	      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN))
	while (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == USE)
	  prev = PREV_INSN (prev);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
2216
/* Return the label after INSN, or put a new label there.  */

rtx
get_label_after (insn)
     rtx insn;
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = next_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      /* Don't put a label between a CALL_INSN and CLOBBER insns
	 following it.  */

      if (GET_CODE (insn) == CALL_INSN
	  /* A CALL_INSN may be hidden as the first element of a SEQUENCE
	     (delay-slot filling); treat that the same way.  */
	  || (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE
	      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN))
	while (GET_CODE (NEXT_INSN (insn)) == INSN
	       && GET_CODE (PATTERN (NEXT_INSN (insn))) == CLOBBER)
	  insn = NEXT_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, insn);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
2247\f
/* Return 1 if INSN is a jump that jumps to right after TARGET
   only on the condition that TARGET itself would drop through.
   Assumes that TARGET is a conditional jump.  */

static int
jump_back_p (insn, target)
     rtx insn, target;
{
  rtx cinsn, ctarget;
  enum rtx_code codei, codet;

  if (simplejump_p (insn) || ! condjump_p (insn)
      || simplejump_p (target)
      || target != prev_real_insn (JUMP_LABEL (insn)))
    return 0;

  /* Extract the comparison expression of each conditional jump.  */
  cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
  ctarget = XEXP (SET_SRC (PATTERN (target)), 0);

  codei = GET_CODE (cinsn);
  codet = GET_CODE (ctarget);

  /* If INSN branches when its condition is false (the label is in the
     else arm), normalize by reversing its condition code.  */
  if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
    {
      if (! can_reverse_comparison_p (cinsn, insn))
	return 0;
      codei = reverse_condition (codei);
    }

  /* Likewise normalize TARGET's condition.  */
  if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
    {
      if (! can_reverse_comparison_p (ctarget, target))
	return 0;
      codet = reverse_condition (codet);
    }

  /* INSN jumps back exactly when the normalized condition codes match
     and both comparisons test the same two operands.  */
  return (codei == codet
	  && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
	  && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
}
2288\f
/* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
   return non-zero if it is safe to reverse this comparison.  It is if our
   floating-point is not IEEE, if this is an NE or EQ comparison, or if
   this is known to be an integer comparison.  */

int
can_reverse_comparison_p (comparison, insn)
     rtx comparison;
     rtx insn;
{
  rtx arg0;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
    return 0;

  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      /* If this is an NE comparison, it is safe to reverse it to an EQ
	 comparison and vice versa, even for floating point.  If no operands
	 are NaNs, the reversal is valid.  If some operand is a NaN, EQ is
	 always false and NE is always true, so the reversal is also valid.  */
      || GET_CODE (comparison) == NE
      || GET_CODE (comparison) == EQ)
    return 1;

  arg0 = XEXP (comparison, 0);

  /* Make sure ARG0 is one of the actual objects being compared.  If we
     can't do this, we can't be sure the comparison can be reversed.

     Handle cc0 and a MODE_CC register.  */
  if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
#ifdef HAVE_cc0
      || arg0 == cc0_rtx
#endif
      )
    {
      /* The flags register was set by the previous insn; look there for
	 the actual comparison operand.  */
      rtx prev = prev_nonnote_insn (insn);
      rtx set = single_set (prev);

      if (set == 0 || SET_DEST (set) != arg0)
	return 0;

      arg0 = SET_SRC (set);

      if (GET_CODE (arg0) == COMPARE)
	arg0 = XEXP (arg0, 0);
    }

  /* We can reverse this if ARG0 is a CONST_INT or if its mode is
     not VOIDmode and neither a MODE_CC nor MODE_FLOAT type.  */
  return (GET_CODE (arg0) == CONST_INT
	  || (GET_MODE (arg0) != VOIDmode
	      && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
	      && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
}
2345
2346/* Given an rtx-code for a comparison, return the code
2347 for the negated comparison.
2348 WATCH OUT! reverse_condition is not safe to use on a jump
2349 that might be acting on the results of an IEEE floating point comparison,
2350 because of the special treatment of non-signaling nans in comparisons.
2351 Use can_reverse_comparison_p to be sure. */
2352
2353enum rtx_code
2354reverse_condition (code)
2355 enum rtx_code code;
2356{
2357 switch (code)
2358 {
2359 case EQ:
2360 return NE;
2361
2362 case NE:
2363 return EQ;
2364
2365 case GT:
2366 return LE;
2367
2368 case GE:
2369 return LT;
2370
2371 case LT:
2372 return GE;
2373
2374 case LE:
2375 return GT;
2376
2377 case GTU:
2378 return LEU;
2379
2380 case GEU:
2381 return LTU;
2382
2383 case LTU:
2384 return GEU;
2385
2386 case LEU:
2387 return GTU;
2388
2389 default:
2390 abort ();
2391 return UNKNOWN;
2392 }
2393}
2394
2395/* Similar, but return the code when two operands of a comparison are swapped.
2396 This IS safe for IEEE floating-point. */
2397
2398enum rtx_code
2399swap_condition (code)
2400 enum rtx_code code;
2401{
2402 switch (code)
2403 {
2404 case EQ:
2405 case NE:
2406 return code;
2407
2408 case GT:
2409 return LT;
2410
2411 case GE:
2412 return LE;
2413
2414 case LT:
2415 return GT;
2416
2417 case LE:
2418 return GE;
2419
2420 case GTU:
2421 return LTU;
2422
2423 case GEU:
2424 return LEU;
2425
2426 case LTU:
2427 return GTU;
2428
2429 case LEU:
2430 return GEU;
2431
2432 default:
2433 abort ();
2434 return UNKNOWN;
2435 }
2436}
2437
2438/* Given a comparison CODE, return the corresponding unsigned comparison.
2439 If CODE is an equality comparison or already an unsigned comparison,
2440 CODE is returned. */
2441
2442enum rtx_code
2443unsigned_condition (code)
2444 enum rtx_code code;
2445{
2446 switch (code)
2447 {
2448 case EQ:
2449 case NE:
2450 case GTU:
2451 case GEU:
2452 case LTU:
2453 case LEU:
2454 return code;
2455
2456 case GT:
2457 return GTU;
2458
2459 case GE:
2460 return GEU;
2461
2462 case LT:
2463 return LTU;
2464
2465 case LE:
2466 return LEU;
2467
2468 default:
2469 abort ();
2470 }
2471}
2472
2473/* Similarly, return the signed version of a comparison. */
2474
2475enum rtx_code
2476signed_condition (code)
2477 enum rtx_code code;
2478{
2479 switch (code)
2480 {
2481 case EQ:
2482 case NE:
2483 case GT:
2484 case GE:
2485 case LT:
2486 case LE:
2487 return code;
2488
2489 case GTU:
2490 return GT;
2491
2492 case GEU:
2493 return GE;
2494
2495 case LTU:
2496 return LT;
2497
2498 case LEU:
2499 return LE;
2500
2501 default:
2502 abort ();
2503 }
2504}
2505\f
2506/* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2507 truth of CODE1 implies the truth of CODE2. */
2508
2509int
2510comparison_dominates_p (code1, code2)
2511 enum rtx_code code1, code2;
2512{
2513 if (code1 == code2)
2514 return 1;
2515
2516 switch (code1)
2517 {
2518 case EQ:
2519 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
2520 return 1;
2521 break;
2522
2523 case LT:
2524 if (code2 == LE)
2525 return 1;
2526 break;
2527
2528 case GT:
2529 if (code2 == GE)
2530 return 1;
2531 break;
2532
2533 case LTU:
2534 if (code2 == LEU)
2535 return 1;
2536 break;
2537
2538 case GTU:
2539 if (code2 == GEU)
2540 return 1;
2541 break;
2542 }
2543
2544 return 0;
2545}
2546\f
2547/* Return 1 if INSN is an unconditional jump and nothing else. */
2548
2549int
2550simplejump_p (insn)
2551 rtx insn;
2552{
2553 return (GET_CODE (insn) == JUMP_INSN
2554 && GET_CODE (PATTERN (insn)) == SET
2555 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2556 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2557}
2558
2559/* Return nonzero if INSN is a (possibly) conditional jump
2560 and nothing more. */
2561
2562int
2563condjump_p (insn)
2564 rtx insn;
2565{
2566 register rtx x = PATTERN (insn);
2567 if (GET_CODE (x) != SET)
2568 return 0;
2569 if (GET_CODE (SET_DEST (x)) != PC)
2570 return 0;
2571 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2572 return 1;
2573 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2574 return 0;
2575 if (XEXP (SET_SRC (x), 2) == pc_rtx
2576 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2577 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2578 return 1;
2579 if (XEXP (SET_SRC (x), 1) == pc_rtx
2580 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2581 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2582 return 1;
2583 return 0;
2584}
2585
2586/* Return 1 if X is an RTX that does nothing but set the condition codes
2587 and CLOBBER or USE registers.
2588 Return -1 if X does explicitly set the condition codes,
2589 but also does other things. */
2590
2591int
2592sets_cc0_p (x)
2593 rtx x;
2594{
2595#ifdef HAVE_cc0
2596 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
2597 return 1;
2598 if (GET_CODE (x) == PARALLEL)
2599 {
2600 int i;
2601 int sets_cc0 = 0;
2602 int other_things = 0;
2603 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2604 {
2605 if (GET_CODE (XVECEXP (x, 0, i)) == SET
2606 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
2607 sets_cc0 = 1;
2608 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
2609 other_things = 1;
2610 }
2611 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
2612 }
2613 return 0;
2614#else
2615 abort ();
2616#endif
2617}
2618\f
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */

rtx
follow_jumps (label)
     rtx label;
{
  register rtx insn;
  register rtx next;
  register rtx value = label;
  register int depth;

  /* Follow at most 10 links: the next active insn after VALUE must be an
     unconditional jump (JUMP_INSN followed by a BARRIER) with a known
     target or a RETURN pattern.  */
  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& GET_CODE (insn) == JUMP_INSN
	&& (JUMP_LABEL (insn) != 0 || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (GET_CODE (tem) == NOTE
	      && NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG)
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      value = JUMP_LABEL (insn);
    }

  /* Hitting the depth limit may mean an undetected cycle; play safe.  */
  if (depth == 10)
    return label;

  return value;
}
2666
2667/* Assuming that field IDX of X is a vector of label_refs,
2668 replace each of them by the ultimate label reached by it.
2669 Return nonzero if a change is made.
2670 If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG. */
2671
2672static int
2673tension_vector_labels (x, idx)
2674 register rtx x;
2675 register int idx;
2676{
2677 int changed = 0;
2678 register int i;
2679 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
2680 {
2681 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
2682 register rtx nlabel = follow_jumps (olabel);
2683 if (nlabel && nlabel != olabel)
2684 {
2685 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
2686 ++LABEL_NUSES (nlabel);
2687 if (--LABEL_NUSES (olabel) == 0)
2688 delete_insn (olabel);
2689 changed = 1;
2690 }
2691 }
2692 return changed;
2693}
2694\f
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.

   Once reload has completed (CROSS_JUMP non-zero), we need not consider
   two labels distinct if they are separated by only USE or CLOBBER insns.  */

static void
mark_jump_label (x, insn, cross_jump)
     register rtx x;
     rtx insn;
     int cross_jump;
{
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  switch (code)
    {
    /* These rtx kinds cannot contain a label reference.  */
    case PC:
    case CC0:
    case REG:
    case SUBREG:
    case CONST_INT:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* If this is a constant-pool reference, see if it is a label.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
      break;

    case LABEL_REF:
      {
	register rtx label = XEXP (x, 0);
	register rtx next;
	if (GET_CODE (label) != CODE_LABEL)
	  abort ();
	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;
	/* If there are other labels following this one,
	   replace it with the last of the consecutive labels.  */
	for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
	  {
	    if (GET_CODE (next) == CODE_LABEL)
	      label = next;
	    /* After reload, USE and CLOBBER insns do not separate
	       labels either.  */
	    else if (cross_jump && GET_CODE (next) == INSN
		     && (GET_CODE (PATTERN (next)) == USE
			 || GET_CODE (PATTERN (next)) == CLOBBER))
	      continue;
	    else if (GET_CODE (next) != NOTE)
	      break;
	    /* Before loop optimization, a loop-beg or function-end note
	       keeps the labels on either side distinct.  */
	    else if (! cross_jump
		     && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
			 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END))
	      break;
	  }
	XEXP (x, 0) = label;
	++LABEL_NUSES (label);
	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;
	    else if (! find_reg_note (insn, REG_LABEL, label))
	      {
		rtx next = next_real_insn (label);
		/* Don't record labels that refer to dispatch tables.
		   This is not necessary, since the tablejump
		   references the same label.
		   And if we did record them, flow.c would make worse code.  */
		if (next == 0
		    || ! (GET_CODE (next) == JUMP_INSN
			  && (GET_CODE (PATTERN (next)) == ADDR_VEC
			      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)))
		  REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_LABEL, label,
					      REG_NOTES (insn));
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      {
	int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	for (i = 0; i < XVECLEN (x, eltnum); i++)
	  mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
	return;
      }
    }

  /* Generic case: recurse into every rtx sub-expression of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, cross_jump);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
	}
    }
}
2817
/* If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.  */

void
delete_jump (insn)
     rtx insn;
{
  register rtx x = PATTERN (insn);
  register rtx prev;

  if (GET_CODE (x) == SET
      && GET_CODE (SET_DEST (x)) == PC)
    {
      prev = prev_nonnote_insn (insn);
#ifdef HAVE_cc0
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && !FIND_REG_INC_NOTE (prev, NULL_RTX))
	    delete_insn (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_UNUSED,
					cc0_rtx, REG_NOTES (prev));
	}
#else
      {
	rtx note;

	/* If we are running before flow.c, we need do nothing since flow.c
	   will delete the set of the condition code if it is dead.  We also
	   can't know if the register being used as the condition code is
	   dead or not at this point.

	   Otherwise, look at all our REG_DEAD notes.  If a previous insn
	   does nothing other than set a register that dies in this jump,
	   we can delete the insn.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  {
	    rtx our_prev;

	    if (REG_NOTE_KIND (note) != REG_DEAD
		/* Verify that the REG_NOTE has a legal value.  */
		|| GET_CODE (XEXP (note, 0)) != REG)
	      continue;

	    /* Scan backward through ordinary insns for a SET of the
	       dying register.  */
	    for (our_prev = prev_nonnote_insn (insn);
		 our_prev && GET_CODE (our_prev) == INSN;
		 our_prev = prev_nonnote_insn (our_prev))
	      {
		/* If we reach a SEQUENCE, it is too complex to try to
		   do anything with it, so give up.  */
		if (GET_CODE (PATTERN (our_prev)) == SEQUENCE)
		  break;

		if (GET_CODE (PATTERN (our_prev)) == USE
		    && GET_CODE (XEXP (PATTERN (our_prev), 0)) == INSN)
		  /* reorg creates USEs that look like this.  We leave them
		     alone because reorg needs them for its own purposes.  */
		  break;

		if (reg_set_p (XEXP (note, 0), PATTERN (our_prev)))
		  {
		    /* Auto-increment side effects make deletion unsafe.  */
		    if (FIND_REG_INC_NOTE (our_prev, NULL_RTX))
		      break;

		    if (GET_CODE (PATTERN (our_prev)) == PARALLEL)
		      {
			/* If we find a SET of something else, we can't
			   delete the insn.  */

			int i;

			for (i = 0; i < XVECLEN (PATTERN (our_prev), 0); i++)
			  {
			    rtx part = XVECEXP (PATTERN (our_prev), 0, i);

			    if (GET_CODE (part) == SET
				&& SET_DEST (part) != XEXP (note, 0))
			      break;
			  }

			if (i == XVECLEN (PATTERN (our_prev), 0))
			  delete_insn (our_prev);
		      }
		    else if (GET_CODE (PATTERN (our_prev)) == SET
			     && SET_DEST (PATTERN (our_prev)) == XEXP (note, 0))
		      delete_insn (our_prev);

		    break;
		  }

		/* If OUR_PREV references the register that dies here,
		   it is an additional use.  Hence any prior SET isn't
		   dead.  */
		if (reg_overlap_mentioned_p (XEXP (note, 0),
					     PATTERN (our_prev)))
		  break;
	      }
	  }
      }
#endif
      /* Now delete the jump insn itself.  */
      delete_insn (insn);
    }
}
2933\f
/* Delete insn INSN from the chain of insns and update label ref counts.
   May delete some following insns as a consequence; may even delete
   a label elsewhere and insns that follow it.

   Returns the first insn after INSN that was not deleted.  */

rtx
delete_insn (insn)
     register rtx insn;
{
  register rtx next = NEXT_INSN (insn);
  register rtx prev = PREV_INSN (insn);
  register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  register int dont_really_delete = 0;

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  /* Don't delete user-declared labels.  Convert them to special NOTEs
     instead.  */
  if (was_code_label && LABEL_NAME (insn) != 0)
    {
      PUT_CODE (insn, NOTE);
      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
      NOTE_SOURCE_FILE (insn) = 0;
      dont_really_delete = 1;
    }
  else
    /* Mark this insn as deleted.  */
    INSN_DELETED_P (insn) = 1;

  /* If this is an unconditional jump, delete it from the jump chain.  */
  if (simplejump_p (insn))
    delete_from_jump_chain (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    {
      INSN_DELETED_P (next) = 1;
      next = NEXT_INSN (next);
    }

  /* Patch out INSN (and the barrier if any) */

  if (optimize && ! dont_really_delete)
    {
      if (prev)
	{
	  NEXT_INSN (prev) = next;
	  /* If PREV is a delay-slot SEQUENCE, also patch the link of its
	     last contained insn.  */
	  if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
	    NEXT_INSN (XVECEXP (PATTERN (prev), 0,
				XVECLEN (PATTERN (prev), 0) - 1)) = next;
	}

      if (next)
	{
	  PREV_INSN (next) = prev;
	  /* Likewise for the first insn inside a following SEQUENCE.  */
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
	    PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
	}

      if (prev && NEXT_INSN (prev) == 0)
	set_last_insn (prev);
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    if (--LABEL_NUSES (JUMP_LABEL (insn)) == 0)
      {
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_insn (JUMP_LABEL (insn));
	/* I feel a little doubtful about this loop,
	   but I see no clean and sure alternative way
	   to find the first insn after INSN that is not now deleted.
	   I hope this works.  */
	while (next && INSN_DELETED_P (next))
	  next = NEXT_INSN (next);
	return next;
      }

  /* Back up PREV to the nearest live, non-note insn.  */
  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_insn (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      register RTX_CODE code;
      while (next != 0
	     && ((code = GET_CODE (next)) == INSN
		 || code == JUMP_INSN || code == CALL_INSN
		 || code == NOTE
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_insn (next);
	}
    }

  return next;
}
3066
3067/* Advance from INSN till reaching something not deleted
3068 then return that. May return INSN itself. */
3069
3070rtx
3071next_nondeleted_insn (insn)
3072 rtx insn;
3073{
3074 while (INSN_DELETED_P (insn))
3075 insn = NEXT_INSN (insn);
3076 return insn;
3077}
3078\f
3079/* Delete a range of insns from FROM to TO, inclusive.
3080 This is for the sake of peephole optimization, so assume
3081 that whatever these insns do will still be done by a new
3082 peephole insn that will replace them. */
3083
3084void
3085delete_for_peephole (from, to)
3086 register rtx from, to;
3087{
3088 register rtx insn = from;
3089
3090 while (1)
3091 {
3092 register rtx next = NEXT_INSN (insn);
3093 register rtx prev = PREV_INSN (insn);
3094
3095 if (GET_CODE (insn) != NOTE)
3096 {
3097 INSN_DELETED_P (insn) = 1;
3098
3099 /* Patch this insn out of the chain. */
3100 /* We don't do this all at once, because we
3101 must preserve all NOTEs. */
3102 if (prev)
3103 NEXT_INSN (prev) = next;
3104
3105 if (next)
3106 PREV_INSN (next) = prev;
3107 }
3108
3109 if (insn == to)
3110 break;
3111 insn = next;
3112 }
3113
3114 /* Note that if TO is an unconditional jump
3115 we *do not* delete the BARRIER that follows,
3116 since the peephole that replaces this sequence
3117 is also an unconditional jump in that case. */
3118}
3119\f
3120/* Invert the condition of the jump JUMP, and make it jump
3121 to label NLABEL instead of where it jumps now. */
3122
3123int
3124invert_jump (jump, nlabel)
3125 rtx jump, nlabel;
3126{
3127 register rtx olabel = JUMP_LABEL (jump);
3128
3129 /* We have to either invert the condition and change the label or
3130 do neither. Either operation could fail. We first try to invert
3131 the jump. If that succeeds, we try changing the label. If that fails,
3132 we invert the jump back to what it was. */
3133
3134 if (! invert_exp (PATTERN (jump), jump))
3135 return 0;
3136
3137 if (redirect_jump (jump, nlabel))
3138 return 1;
3139
3140 if (! invert_exp (PATTERN (jump), jump))
3141 /* This should just be putting it back the way it was. */
3142 abort ();
3143
3144 return 0;
3145}
3146
/* Invert the jump condition of rtx X contained in jump insn, INSN.

   Return 1 if we can do so, 0 if we cannot find a way to do so that
   matches a pattern.  */

static int
invert_exp (x, insn)
     rtx x;
     rtx insn;
{
  register RTX_CODE code;
  register int i;
  register char *fmt;

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      register rtx comp = XEXP (x, 0);
      register rtx tem;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      /* First attempt: replace the comparison with its reversed form,
	 validated immediately (last arg 0 = not part of a group).  */
      if (can_reverse_comparison_p (comp, insn)
	  && validate_change (insn, &XEXP (x, 0),
			      gen_rtx (reverse_condition (GET_CODE (comp)),
				       GET_MODE (comp), XEXP (comp, 0),
				       XEXP (comp, 1)), 0))
	return 1;

      /* Second attempt: swap the two arms.  Both changes are queued
	 (last arg 1) and committed atomically by apply_change_group.  */
      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return apply_change_group ();
    }

  /* Recursively process every sub-rtx; fail if any sub-inversion fails.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	if (! invert_exp (XEXP (x, i), insn))
	  return 0;
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (!invert_exp (XVECEXP (x, i, j), insn))
	      return 0;
	}
    }

  return 1;
}
3203\f
/* Make jump JUMP jump to label NLABEL instead of where it jumps now.
   If the old jump target label is unused as a result,
   it and the code following it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't (this
   can only occur for NLABEL == 0).  */

int
redirect_jump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  /* Already targets NLABEL; nothing to do.  */
  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
    return 0;

  /* If this is an unconditional branch, delete it from the jump_chain of
     OLABEL and add it to the jump_chain of NLABEL (assuming both labels
     have UID's in range and JUMP_CHAIN is valid).  */
  if (jump_chain && (simplejump_p (jump)
		     || GET_CODE (PATTERN (jump)) == RETURN))
    {
      /* Chain slot 0 is reserved for RETURN jumps (NLABEL == 0).  */
      int label_index = nlabel ? INSN_UID (nlabel) : 0;

      delete_from_jump_chain (jump);

      if (label_index < max_jump_chain
	  && INSN_UID (jump) < max_jump_chain)
	{
	  /* Push JUMP onto the front of NLABEL's chain.  */
	  jump_chain[INSN_UID (jump)] = jump_chain[label_index];
	  jump_chain[label_index] = jump;
	}
    }

  /* Update the use counts; dropping OLABEL's last use lets us delete it
     (and possibly code made unreachable after it).  */
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0)
    delete_insn (olabel);

  return 1;
}
3252
/* Delete the instruction JUMP from any jump chain it might be on.
   The chain is a singly linked list threaded through jump_chain[],
   indexed by INSN_UID; index 0 holds the chain of RETURN insns.  */

static void
delete_from_jump_chain (jump)
     rtx jump;
{
  int index;
  rtx olabel = JUMP_LABEL (jump);

  /* Handle unconditional jumps.  */
  if (jump_chain && olabel != 0
      && INSN_UID (olabel) < max_jump_chain
      && simplejump_p (jump))
    index = INSN_UID (olabel);
  /* Handle return insns.  */
  else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
    index = 0;
  else return;

  /* Unlink JUMP from the chain: either it is the head, or we scan
     for its predecessor and splice around it.  */
  if (jump_chain[index] == jump)
    jump_chain[index] = jump_chain[INSN_UID (jump)];
  else
    {
      rtx insn;

      for (insn = jump_chain[index];
	   insn != 0;
	   insn = jump_chain[INSN_UID (insn)])
	if (jump_chain[INSN_UID (insn)] == jump)
	  {
	    jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
	    break;
	  }
    }
}
3288
/* If NLABEL is nonzero, throughout the rtx at LOC,
   alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL).  If OLABEL is
   zero, alter (RETURN) to (LABEL_REF NLABEL).

   If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
   validity with validate_change.  Convert (set (pc) (label_ref olabel))
   to (return).

   Return 0 if we found a change we would like to make but it is invalid.
   Otherwise, return 1.  */

static int
redirect_exp (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  register rtx x = *loc;
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  if (nlabel)
	    /* Simple retargeting needs no validation.  */
	    XEXP (x, 0) = nlabel;
	  else
	    /* Turning the ref into (RETURN) may not match a pattern,
	       so it must be validated.  */
	    return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
	  return 1;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      /* Replace a RETURN with a jump to NLABEL; wrap in a SET when it
	 is the whole pattern so the result is a complete jump insn.  */
      x = gen_rtx (LABEL_REF, VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx (SET, VOIDmode, pc_rtx, x);
      return validate_change (insn, loc, x, 0);
    }

  /* (set (pc) (label_ref OLABEL)) becomes a plain (return).  */
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);

  /* Recurse over every sub-expression of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
	  return 0;
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
	      return 0;
	}
    }

  return 1;
}
3352\f
/* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.

   If the old jump target label (before the dispatch table) becomes unused,
   it and the dispatch table may be deleted.  In that case, find the insn
   before the jump references that label and delete it and logical successors
   too.  */

void
redirect_tablejump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  /* Add this jump to the jump_chain of NLABEL.  */
  if (jump_chain && INSN_UID (nlabel) < max_jump_chain
      && INSN_UID (jump) < max_jump_chain)
    {
      jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
      jump_chain[INSN_UID (nlabel)] = jump;
    }

  /* Rewrite the insn as a simple jump to NLABEL; resetting INSN_CODE
     forces re-recognition of the new pattern.  */
  PATTERN (jump) = gen_jump (nlabel);
  JUMP_LABEL (jump) = nlabel;
  ++LABEL_NUSES (nlabel);
  INSN_CODE (jump) = -1;

  /* NLABEL's use count was bumped first, so this is safe even if
     OLABEL == NLABEL.  If OLABEL is now unused, delete it along with
     the insn feeding the old tablejump and its logical successors.  */
  if (--LABEL_NUSES (olabel) == 0)
    {
      delete_labelref_insn (jump, olabel, 0);
      delete_insn (olabel);
    }
}
3385
/* Find the insn referencing LABEL that is a logical predecessor of INSN.
   If we found one, delete it and then delete this insn if DELETE_THIS is
   non-zero.  Return non-zero if INSN or a predecessor references LABEL.
   Predecessors are searched recursively through LOG_LINKS.  */

static int
delete_labelref_insn (insn, label, delete_this)
     rtx insn, label;
     int delete_this;
{
  int deleted = 0;
  rtx link;

  /* Does INSN itself mention LABEL?  (NOTEs have no pattern to search.)  */
  if (GET_CODE (insn) != NOTE
      && reg_mentioned_p (label, PATTERN (insn)))
    {
      if (delete_this)
	{
	  delete_insn (insn);
	  deleted = 1;
	}
      else
	return 1;
    }

  /* Otherwise look through INSN's logical predecessors; any that
     references LABEL is deleted (DELETE_THIS == 1 in the recursion),
     and then INSN itself is deleted if requested.  */
  for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
    if (delete_labelref_insn (XEXP (link, 0), label, 1))
      {
	if (delete_this)
	  {
	    delete_insn (insn);
	    deleted = 1;
	  }
	else
	  return 1;
      }

  return deleted;
}
3424\f
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value.  */

int
rtx_renumbered_equal_p (x, y)
     rtx x, y;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (x == y)
    return 1;

  /* Registers (possibly wrapped in SUBREGs) compare by their
     post-renumbering hard register numbers.  */
  if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
      && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
				  && GET_CODE (SUBREG_REG (y)) == REG)))
    {
      register int j;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      /* Compute the effective register number of X, applying the
	 renumbering and any SUBREG word offset.  */
      if (code == SUBREG)
	{
	  i = REGNO (SUBREG_REG (x));
	  if (reg_renumber[i] >= 0)
	    i = reg_renumber[i];
	  i += SUBREG_WORD (x);
	}
      else
	{
	  i = REGNO (x);
	  if (reg_renumber[i] >= 0)
	    i = reg_renumber[i];
	}

      /* Likewise for Y.  */
      if (GET_CODE (y) == SUBREG)
	{
	  j = REGNO (SUBREG_REG (y));
	  if (reg_renumber[j] >= 0)
	    j = reg_renumber[j];
	  j += SUBREG_WORD (y);
	}
      else
	{
	  j = REGNO (y);
	  if (reg_renumber[j] >= 0)
	    j = reg_renumber[j];
	}

      return i == j;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* These are never considered equal unless identical (handled by
	 the x == y test above).  */
      return 0;

    case CONST_INT:
      return XINT (x, 0) == XINT (y, 0);

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      /* Symbol strings are shared, so pointer comparison suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      register int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  abort ();
	}
    }
  return 1;
}
3563\f
3564/* If X is a hard register or equivalent to one or a subregister of one,
3565 return the hard register number. If X is a pseudo register that was not
3566 assigned a hard register, return the pseudo register number. Otherwise,
3567 return -1. Any rtx is valid for X. */
3568
3569int
3570true_regnum (x)
3571 rtx x;
3572{
3573 if (GET_CODE (x) == REG)
3574 {
3575 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3576 return reg_renumber[REGNO (x)];
3577 return REGNO (x);
3578 }
3579 if (GET_CODE (x) == SUBREG)
3580 {
3581 int base = true_regnum (SUBREG_REG (x));
3582 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3583 return SUBREG_WORD (x) + base;
3584 }
3585 return -1;
3586}
3587\f
/* Optimize code of the form:

	for (x = a[i]; x; ...)
	  ...
	for (x = a[i]; x; ...)
	  ...
      foo:

   Loop optimize will change the above code into

	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
      foo:

   In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try which is doomed to fail.
   We run this after loop optimization and before flow analysis.  */

/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

/* Per-pseudo equivalence map; -1 means "no equivalence recorded".
   (Re)initialized for every candidate branch pair in thread_jumps.  */
static short *same_regs;

/* Count of entries in same_regs that are currently not -1.  */
static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;
3630
/* Called via note_stores on each insn between the target of the first
   branch and the second branch.  It marks any changed registers.

   DEST is the location being stored; X is unused here but required by
   the note_stores callback interface (presumably the SET/CLOBBER rtx —
   confirm against note_stores' contract).  */

static void
mark_modified_reg (dest, x)
     rtx dest;
     rtx x;
{
  int regno, i;

  /* Look through a SUBREG to the register (or MEM) underneath.  */
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == MEM)
    modified_mem = 1;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);
  if (regno >= FIRST_PSEUDO_REGISTER)
    modified_regs[regno] = 1;
  else
    /* A hard register may span several registers; mark them all.  */
    for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
      modified_regs[regno + i] = 1;
}
3657
3658/* F is the first insn in the chain of insns. */
3659
3660void
3661thread_jumps (f, max_reg, verbose)
3662 rtx f;
3663 int max_reg;
3664 int verbose;
3665{
3666 /* Basic algorithm is to find a conditional branch,
3667 the label it may branch to, and the branch after
3668 that label. If the two branches test the same condition,
3669 walk back from both branch paths until the insn patterns
3670 differ, or code labels are hit. If we make it back to
3671 the target of the first branch, then we know that the first branch
3672 will either always succeed or always fail depending on the relative
3673 senses of the two branches. So adjust the first branch accordingly
3674 in this case. */
3675
3676 rtx label, b1, b2, t1, t2;
3677 enum rtx_code code1, code2;
3678 rtx b1op0, b1op1, b2op0, b2op1;
3679 int changed = 1;
3680 int i;
3681 short *all_reset;
3682
3683 /* Allocate register tables and quick-reset table. */
3684 modified_regs = (char *) alloca (max_reg * sizeof (char));
3685 same_regs = (short *) alloca (max_reg * sizeof (short));
3686 all_reset = (short *) alloca (max_reg * sizeof (short));
3687 for (i = 0; i < max_reg; i++)
3688 all_reset[i] = -1;
3689
3690 while (changed)
3691 {
3692 changed = 0;
3693
3694 for (b1 = f; b1; b1 = NEXT_INSN (b1))
3695 {
3696 /* Get to a candidate branch insn. */
3697 if (GET_CODE (b1) != JUMP_INSN
3698 || ! condjump_p (b1) || simplejump_p (b1)
3699 || JUMP_LABEL (b1) == 0)
3700 continue;
3701
3702 bzero (modified_regs, max_reg * sizeof (char));
3703 modified_mem = 0;
3704
3705 bcopy (all_reset, same_regs, max_reg * sizeof (short));
3706 num_same_regs = 0;
3707
3708 label = JUMP_LABEL (b1);
3709
3710 /* Look for a branch after the target. Record any registers and
3711 memory modified between the target and the branch. Stop when we
3712 get to a label since we can't know what was changed there. */
3713 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
3714 {
3715 if (GET_CODE (b2) == CODE_LABEL)
3716 break;
3717
3718 else if (GET_CODE (b2) == JUMP_INSN)
3719 {
3720 /* If this is an unconditional jump and is the only use of
3721 its target label, we can follow it. */
3722 if (simplejump_p (b2)
3723 && JUMP_LABEL (b2) != 0
3724 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
3725 {
3726 b2 = JUMP_LABEL (b2);
3727 continue;
3728 }
3729 else
3730 break;
3731 }
3732
3733 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
3734 continue;
3735
3736 if (GET_CODE (b2) == CALL_INSN)
3737 {
3738 modified_mem = 1;
3739 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3740 if (call_used_regs[i] && ! fixed_regs[i]
3741 && i != STACK_POINTER_REGNUM
3742 && i != FRAME_POINTER_REGNUM
3743 && i != ARG_POINTER_REGNUM)
3744 modified_regs[i] = 1;
3745 }
3746
3747 note_stores (PATTERN (b2), mark_modified_reg);
3748 }
3749
3750 /* Check the next candidate branch insn from the label
3751 of the first. */
3752 if (b2 == 0
3753 || GET_CODE (b2) != JUMP_INSN
3754 || b2 == b1
3755 || ! condjump_p (b2)
3756 || simplejump_p (b2))
3757 continue;
3758
3759 /* Get the comparison codes and operands, reversing the
3760 codes if appropriate. If we don't have comparison codes,
3761 we can't do anything. */
3762 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
3763 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
3764 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
3765 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
3766 code1 = reverse_condition (code1);
3767
3768 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
3769 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
3770 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
3771 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
3772 code2 = reverse_condition (code2);
3773
3774 /* If they test the same things and knowing that B1 branches
3775 tells us whether or not B2 branches, check if we
3776 can thread the branch. */
3777 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
3778 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
3779 && (comparison_dominates_p (code1, code2)
3780 || comparison_dominates_p (code1, reverse_condition (code2))))
3781 {
3782 t1 = prev_nonnote_insn (b1);
3783 t2 = prev_nonnote_insn (b2);
3784
3785 while (t1 != 0 && t2 != 0)
3786 {
3787 if (t1 == 0 || t2 == 0)
3788 break;
3789
3790 if (t2 == label)
3791 {
3792 /* We have reached the target of the first branch.
3793 If there are no pending register equivalents,
3794 we know that this branch will either always
3795 succeed (if the senses of the two branches are
3796 the same) or always fail (if not). */
3797 rtx new_label;
3798
3799 if (num_same_regs != 0)
3800 break;
3801
3802 if (comparison_dominates_p (code1, code2))
3803 new_label = JUMP_LABEL (b2);
3804 else
3805 new_label = get_label_after (b2);
3806
3807 if (JUMP_LABEL (b1) != new_label
3808 && redirect_jump (b1, new_label))
3809 changed = 1;
3810 break;
3811 }
3812
3813 /* If either of these is not a normal insn (it might be
3814 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
3815 have already been skipped above.) Similarly, fail
3816 if the insns are different. */
3817 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
3818 || recog_memoized (t1) != recog_memoized (t2)
3819 || ! rtx_equal_for_thread_p (PATTERN (t1),
3820 PATTERN (t2), t2))
3821 break;
3822
3823 t1 = prev_nonnote_insn (t1);
3824 t2 = prev_nonnote_insn (t2);
3825 }
3826 }
3827 }
3828 }
3829}
3830\f
/* This is like RTX_EQUAL_P except that it knows about our handling of
   possibly equivalent registers and knows to consider volatile and
   modified objects as not equal.

   YINSN is the insn containing Y.  */

int
rtx_equal_for_thread_p (x, y, yinsn)
     rtx x, y;
     rtx yinsn;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register char *fmt;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Handle special-cases first.  */
  switch (code)
    {
    case REG:
      /* Identical register that hasn't been clobbered between the
	 branches: definitely equal.  */
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
        return 1;

      /* If neither is user variable or hard register, check for possible
	 equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  || REGNO (y) < FIRST_PSEUDO_REGISTER)
	return 0;

      if (same_regs[REGNO (x)] == -1)
	{
	  /* Record a new tentative equivalence X <-> Y.  */
	  same_regs[REGNO (x)] = REGNO (y);
	  num_same_regs++;

	  /* If this is the first time we are seeing a register on the `Y'
	     side, see if it is the last use.  If not, we can't thread the 
	     jump, so mark it as not equivalent.  */
	  if (regno_last_uid[REGNO (y)] != INSN_UID (yinsn))
	    return 0;

	  return 1;
	}
      else
	/* A prior equivalence exists; it must match this pairing.  */
	return (same_regs[REGNO (x)] == REGNO (y));

      break;

    case MEM:
      /* If memory modified or either volatile, not equivalent.
	 Else, check address.  */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

    case ASM_INPUT:
      /* Volatile asm inputs never compare equal.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' if setting equivalenced registers.
	 Then process source.  */
      if (GET_CODE (SET_DEST (x)) == REG
          && GET_CODE (SET_DEST (y)) == REG)
	{
          if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
	    {
	      same_regs[REGNO (SET_DEST (x))] = -1;
	      num_same_regs--;
	    }
	  else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
	    return 0;
	}
      else
	if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
	  return 0;

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      /* Symbol strings are shared, so pointer comparison suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);
    }

  if (x == y)
    return 1;

  /* Structural comparison of the remaining rtx codes, element by
     element according to the rtx format string.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
					XVECEXP (y, i, j), yinsn) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}
This page took 0.437906 seconds and 5 git commands to generate.