/* gcc/jump.c -- retrieved from the gcc.gnu.org git web viewer;
   page-navigation header removed.  */
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
25
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
30
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
38
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
43
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
46
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
50
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
53
54 #include "config.h"
55 #include "system.h"
56 #include "rtl.h"
57 #include "flags.h"
58 #include "hard-reg-set.h"
59 #include "regs.h"
60 #include "insn-config.h"
61 #include "insn-flags.h"
62 #include "recog.h"
63 #include "expr.h"
64 #include "real.h"
65 #include "except.h"
66 #include "toplev.h"
67
68 /* ??? Eventually must record somehow the labels used by jumps
69 from nested functions. */
70 /* Pre-record the next or previous real insn for each label?
71 No, this pass is very fast anyway. */
72 /* Condense consecutive labels?
73 This would make life analysis faster, maybe. */
74 /* Optimize jump y; x: ... y: jumpif... x?
75 Don't know if it is worth bothering with. */
76 /* Optimize two cases of conditional jump to conditional jump?
77 This can never delete any instruction or make anything dead,
78 or even change what is live at any point.
79 So perhaps let combiner do it. */
80
/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */

static rtx *jump_chain;

/* List of labels referred to from initializers.
   These can never be deleted.  */

rtx forced_labels;

/* Maximum index in jump_chain.  */

static int max_jump_chain;

/* Set nonzero by jump_optimize if control can fall through
   to the end of the function.  */

int can_reach_end;

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C,
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;

/* Forward declarations for the static helper functions defined
   later in this file.  */

static int duplicate_loop_exit_test PROTO((rtx));
static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump PROTO((rtx, rtx, rtx));
static int jump_back_p PROTO((rtx, rtx));
static int tension_vector_labels PROTO((rtx, int));
static void mark_jump_label PROTO((rtx, rtx, int));
static void delete_computation PROTO((rtx));
static void delete_from_jump_chain PROTO((rtx));
static int delete_labelref_insn PROTO((rtx, rtx, int));
static void mark_modified_reg PROTO((rtx, rtx));
static void redirect_tablejump PROTO((rtx, rtx));
#ifndef HAVE_cc0
static rtx find_insert_position PROTO((rtx, rtx));
#endif
static int rtx_unsafe_p PROTO((rtx));
126 \f
127 /* Delete no-op jumps and optimize jumps to jumps
128 and jumps around jumps.
129 Delete unused labels and unreachable code.
130
131 If CROSS_JUMP is 1, detect matching code
132 before a jump and its destination and unify them.
133 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
134
135 If NOOP_MOVES is nonzero, delete no-op move insns.
136
137 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
138 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
139
140 If `optimize' is zero, don't change any code,
141 just determine whether control drops off the end of the function.
142 This case occurs when we have -W and not -O.
143 It works because `delete_insn' checks the value of `optimize'
144 and refrains from actually deleting when that is 0. */
145
146 void
147 jump_optimize (f, cross_jump, noop_moves, after_regscan)
148 rtx f;
149 int cross_jump;
150 int noop_moves;
151 int after_regscan;
152 {
153 register rtx insn, next, note;
154 int changed;
155 int first = 1;
156 int max_uid = 0;
157 rtx last_insn;
158
159 cross_jump_death_matters = (cross_jump == 2);
160
161 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
162 notes whose labels don't occur in the insn any more. */
163
164 for (insn = f; insn; insn = NEXT_INSN (insn))
165 {
166 if (GET_CODE (insn) == CODE_LABEL)
167 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
168 else if (GET_CODE (insn) == JUMP_INSN)
169 JUMP_LABEL (insn) = 0;
170 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
171 for (note = REG_NOTES (insn); note; note = next)
172 {
173 next = XEXP (note, 1);
174 if (REG_NOTE_KIND (note) == REG_LABEL
175 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
176 remove_note (insn, note);
177 }
178
179 if (INSN_UID (insn) > max_uid)
180 max_uid = INSN_UID (insn);
181 }
182
183 max_uid++;
184
185 /* Delete insns following barriers, up to next label. */
186
187 for (insn = f; insn;)
188 {
189 if (GET_CODE (insn) == BARRIER)
190 {
191 insn = NEXT_INSN (insn);
192 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
193 {
194 if (GET_CODE (insn) == NOTE
195 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
196 insn = NEXT_INSN (insn);
197 else
198 insn = delete_insn (insn);
199 }
200 /* INSN is now the code_label. */
201 }
202 else
203 insn = NEXT_INSN (insn);
204 }
205
206 /* Leave some extra room for labels and duplicate exit test insns
207 we make. */
208 max_jump_chain = max_uid * 14 / 10;
209 jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
210 bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));
211
212 /* Mark the label each jump jumps to.
213 Combine consecutive labels, and count uses of labels.
214
215 For each label, make a chain (using `jump_chain')
216 of all the *unconditional* jumps that jump to it;
217 also make a chain of all returns. */
218
219 for (insn = f; insn; insn = NEXT_INSN (insn))
220 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
221 {
222 mark_jump_label (PATTERN (insn), insn, cross_jump);
223 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
224 {
225 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
226 {
227 jump_chain[INSN_UID (insn)]
228 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
229 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
230 }
231 if (GET_CODE (PATTERN (insn)) == RETURN)
232 {
233 jump_chain[INSN_UID (insn)] = jump_chain[0];
234 jump_chain[0] = insn;
235 }
236 }
237 }
238
239 /* Keep track of labels used from static data;
240 they cannot ever be deleted. */
241
242 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
243 LABEL_NUSES (XEXP (insn, 0))++;
244
245 check_exception_handler_labels ();
246
247 /* Keep track of labels used for marking handlers for exception
248 regions; they cannot usually be deleted. */
249
250 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
251 LABEL_NUSES (XEXP (insn, 0))++;
252
253 exception_optimize ();
254
255 /* Delete all labels already not referenced.
256 Also find the last insn. */
257
258 last_insn = 0;
259 for (insn = f; insn; )
260 {
261 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
262 insn = delete_insn (insn);
263 else
264 {
265 last_insn = insn;
266 insn = NEXT_INSN (insn);
267 }
268 }
269
270 if (!optimize)
271 {
272 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
273 If so record that this function can drop off the end. */
274
275 insn = last_insn;
276 {
277 int n_labels = 1;
278 while (insn
279 /* One label can follow the end-note: the return label. */
280 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
281 /* Ordinary insns can follow it if returning a structure. */
282 || GET_CODE (insn) == INSN
283 /* If machine uses explicit RETURN insns, no epilogue,
284 then one of them follows the note. */
285 || (GET_CODE (insn) == JUMP_INSN
286 && GET_CODE (PATTERN (insn)) == RETURN)
287 /* A barrier can follow the return insn. */
288 || GET_CODE (insn) == BARRIER
289 /* Other kinds of notes can follow also. */
290 || (GET_CODE (insn) == NOTE
291 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
292 insn = PREV_INSN (insn);
293 }
294
295 /* Report if control can fall through at the end of the function. */
296 if (insn && GET_CODE (insn) == NOTE
297 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
298 && ! INSN_DELETED_P (insn))
299 can_reach_end = 1;
300
301 /* Zero the "deleted" flag of all the "deleted" insns. */
302 for (insn = f; insn; insn = NEXT_INSN (insn))
303 INSN_DELETED_P (insn) = 0;
304 return;
305 }
306
307 #ifdef HAVE_return
308 if (HAVE_return)
309 {
310 /* If we fall through to the epilogue, see if we can insert a RETURN insn
311 in front of it. If the machine allows it at this point (we might be
312 after reload for a leaf routine), it will improve optimization for it
313 to be there. */
314 insn = get_last_insn ();
315 while (insn && GET_CODE (insn) == NOTE)
316 insn = PREV_INSN (insn);
317
318 if (insn && GET_CODE (insn) != BARRIER)
319 {
320 emit_jump_insn (gen_return ());
321 emit_barrier ();
322 }
323 }
324 #endif
325
326 if (noop_moves)
327 for (insn = f; insn; )
328 {
329 next = NEXT_INSN (insn);
330
331 if (GET_CODE (insn) == INSN)
332 {
333 register rtx body = PATTERN (insn);
334
335 /* Combine stack_adjusts with following push_insns. */
336 #ifdef PUSH_ROUNDING
337 if (GET_CODE (body) == SET
338 && SET_DEST (body) == stack_pointer_rtx
339 && GET_CODE (SET_SRC (body)) == PLUS
340 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
341 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
342 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
343 {
344 rtx p;
345 rtx stack_adjust_insn = insn;
346 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
347 int total_pushed = 0;
348 int pushes = 0;
349
350 /* Find all successive push insns. */
351 p = insn;
352 /* Don't convert more than three pushes;
353 that starts adding too many displaced addresses
354 and the whole thing starts becoming a losing
355 proposition. */
356 while (pushes < 3)
357 {
358 rtx pbody, dest;
359 p = next_nonnote_insn (p);
360 if (p == 0 || GET_CODE (p) != INSN)
361 break;
362 pbody = PATTERN (p);
363 if (GET_CODE (pbody) != SET)
364 break;
365 dest = SET_DEST (pbody);
366 /* Allow a no-op move between the adjust and the push. */
367 if (GET_CODE (dest) == REG
368 && GET_CODE (SET_SRC (pbody)) == REG
369 && REGNO (dest) == REGNO (SET_SRC (pbody)))
370 continue;
371 if (! (GET_CODE (dest) == MEM
372 && GET_CODE (XEXP (dest, 0)) == POST_INC
373 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
374 break;
375 pushes++;
376 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
377 > stack_adjust_amount)
378 break;
379 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
380 }
381
382 /* Discard the amount pushed from the stack adjust;
383 maybe eliminate it entirely. */
384 if (total_pushed >= stack_adjust_amount)
385 {
386 delete_computation (stack_adjust_insn);
387 total_pushed = stack_adjust_amount;
388 }
389 else
390 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
391 = GEN_INT (stack_adjust_amount - total_pushed);
392
393 /* Change the appropriate push insns to ordinary stores. */
394 p = insn;
395 while (total_pushed > 0)
396 {
397 rtx pbody, dest;
398 p = next_nonnote_insn (p);
399 if (GET_CODE (p) != INSN)
400 break;
401 pbody = PATTERN (p);
402 if (GET_CODE (pbody) == SET)
403 break;
404 dest = SET_DEST (pbody);
405 if (! (GET_CODE (dest) == MEM
406 && GET_CODE (XEXP (dest, 0)) == POST_INC
407 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
408 break;
409 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
410 /* If this push doesn't fully fit in the space
411 of the stack adjust that we deleted,
412 make another stack adjust here for what we
413 didn't use up. There should be peepholes
414 to recognize the resulting sequence of insns. */
415 if (total_pushed < 0)
416 {
417 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
418 GEN_INT (- total_pushed)),
419 p);
420 break;
421 }
422 XEXP (dest, 0)
423 = plus_constant (stack_pointer_rtx, total_pushed);
424 }
425 }
426 #endif
427
428 /* Detect and delete no-op move instructions
429 resulting from not allocating a parameter in a register. */
430
431 if (GET_CODE (body) == SET
432 && (SET_DEST (body) == SET_SRC (body)
433 || (GET_CODE (SET_DEST (body)) == MEM
434 && GET_CODE (SET_SRC (body)) == MEM
435 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
436 && ! (GET_CODE (SET_DEST (body)) == MEM
437 && MEM_VOLATILE_P (SET_DEST (body)))
438 && ! (GET_CODE (SET_SRC (body)) == MEM
439 && MEM_VOLATILE_P (SET_SRC (body))))
440 delete_computation (insn);
441
442 /* Detect and ignore no-op move instructions
443 resulting from smart or fortuitous register allocation. */
444
445 else if (GET_CODE (body) == SET)
446 {
447 int sreg = true_regnum (SET_SRC (body));
448 int dreg = true_regnum (SET_DEST (body));
449
450 if (sreg == dreg && sreg >= 0)
451 delete_insn (insn);
452 else if (sreg >= 0 && dreg >= 0)
453 {
454 rtx trial;
455 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
456 sreg, NULL_PTR, dreg,
457 GET_MODE (SET_SRC (body)));
458
459 if (tem != 0
460 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
461 {
462 /* DREG may have been the target of a REG_DEAD note in
463 the insn which makes INSN redundant. If so, reorg
464 would still think it is dead. So search for such a
465 note and delete it if we find it. */
466 if (! find_regno_note (insn, REG_UNUSED, dreg))
467 for (trial = prev_nonnote_insn (insn);
468 trial && GET_CODE (trial) != CODE_LABEL;
469 trial = prev_nonnote_insn (trial))
470 if (find_regno_note (trial, REG_DEAD, dreg))
471 {
472 remove_death (dreg, trial);
473 break;
474 }
475 #ifdef PRESERVE_DEATH_INFO_REGNO_P
476 /* Deleting insn could lose a death-note for SREG
477 so don't do it if final needs accurate
478 death-notes. */
479 if (PRESERVE_DEATH_INFO_REGNO_P (sreg)
480 && (trial = find_regno_note (insn, REG_DEAD, sreg)))
481 {
482 /* Change this into a USE so that we won't emit
483 code for it, but still can keep the note. */
484 PATTERN (insn)
485 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
486 INSN_CODE (insn) = -1;
487 /* Remove all reg notes but the REG_DEAD one. */
488 REG_NOTES (insn) = trial;
489 XEXP (trial, 1) = NULL_RTX;
490 }
491 else
492 #endif
493 delete_insn (insn);
494 }
495 }
496 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
497 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
498 NULL_PTR, 0,
499 GET_MODE (SET_DEST (body))))
500 {
501 /* This handles the case where we have two consecutive
502 assignments of the same constant to pseudos that didn't
503 get a hard reg. Each SET from the constant will be
504 converted into a SET of the spill register and an
505 output reload will be made following it. This produces
506 two loads of the same constant into the same spill
507 register. */
508
509 rtx in_insn = insn;
510
511 /* Look back for a death note for the first reg.
512 If there is one, it is no longer accurate. */
513 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
514 {
515 if ((GET_CODE (in_insn) == INSN
516 || GET_CODE (in_insn) == JUMP_INSN)
517 && find_regno_note (in_insn, REG_DEAD, dreg))
518 {
519 remove_death (dreg, in_insn);
520 break;
521 }
522 in_insn = PREV_INSN (in_insn);
523 }
524
525 /* Delete the second load of the value. */
526 delete_insn (insn);
527 }
528 }
529 else if (GET_CODE (body) == PARALLEL)
530 {
531 /* If each part is a set between two identical registers or
532 a USE or CLOBBER, delete the insn. */
533 int i, sreg, dreg;
534 rtx tem;
535
536 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
537 {
538 tem = XVECEXP (body, 0, i);
539 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
540 continue;
541
542 if (GET_CODE (tem) != SET
543 || (sreg = true_regnum (SET_SRC (tem))) < 0
544 || (dreg = true_regnum (SET_DEST (tem))) < 0
545 || dreg != sreg)
546 break;
547 }
548
549 if (i < 0)
550 delete_insn (insn);
551 }
552 /* Also delete insns to store bit fields if they are no-ops. */
553 /* Not worth the hair to detect this in the big-endian case. */
554 else if (! BYTES_BIG_ENDIAN
555 && GET_CODE (body) == SET
556 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
557 && XEXP (SET_DEST (body), 2) == const0_rtx
558 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
559 && ! (GET_CODE (SET_SRC (body)) == MEM
560 && MEM_VOLATILE_P (SET_SRC (body))))
561 delete_insn (insn);
562 }
563 insn = next;
564 }
565
566 /* If we haven't yet gotten to reload and we have just run regscan,
567 delete any insn that sets a register that isn't used elsewhere.
568 This helps some of the optimizations below by having less insns
569 being jumped around. */
570
571 if (! reload_completed && after_regscan)
572 for (insn = f; insn; insn = next)
573 {
574 rtx set = single_set (insn);
575
576 next = NEXT_INSN (insn);
577
578 if (set && GET_CODE (SET_DEST (set)) == REG
579 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
580 && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
581 /* We use regno_last_note_uid so as not to delete the setting
582 of a reg that's used in notes. A subsequent optimization
583 might arrange to use that reg for real. */
584 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
585 && ! side_effects_p (SET_SRC (set))
586 && ! find_reg_note (insn, REG_RETVAL, 0))
587 delete_insn (insn);
588 }
589
590 /* Now iterate optimizing jumps until nothing changes over one pass. */
591 changed = 1;
592 while (changed)
593 {
594 changed = 0;
595
596 for (insn = f; insn; insn = next)
597 {
598 rtx reallabelprev;
599 rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
600 rtx nlabel;
601 int this_is_simplejump, this_is_condjump, reversep;
602 int this_is_condjump_in_parallel;
603 #if 0
604 /* If NOT the first iteration, if this is the last jump pass
605 (just before final), do the special peephole optimizations.
606 Avoiding the first iteration gives ordinary jump opts
607 a chance to work before peephole opts. */
608
609 if (reload_completed && !first && !flag_no_peephole)
610 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
611 peephole (insn);
612 #endif
613
614 /* That could have deleted some insns after INSN, so check now
615 what the following insn is. */
616
617 next = NEXT_INSN (insn);
618
619 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
620 jump. Try to optimize by duplicating the loop exit test if so.
621 This is only safe immediately after regscan, because it uses
622 the values of regno_first_uid and regno_last_uid. */
623 if (after_regscan && GET_CODE (insn) == NOTE
624 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
625 && (temp1 = next_nonnote_insn (insn)) != 0
626 && simplejump_p (temp1))
627 {
628 temp = PREV_INSN (insn);
629 if (duplicate_loop_exit_test (insn))
630 {
631 changed = 1;
632 next = NEXT_INSN (temp);
633 continue;
634 }
635 }
636
637 if (GET_CODE (insn) != JUMP_INSN)
638 continue;
639
640 this_is_simplejump = simplejump_p (insn);
641 this_is_condjump = condjump_p (insn);
642 this_is_condjump_in_parallel = condjump_in_parallel_p (insn);
643
644 /* Tension the labels in dispatch tables. */
645
646 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
647 changed |= tension_vector_labels (PATTERN (insn), 0);
648 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
649 changed |= tension_vector_labels (PATTERN (insn), 1);
650
651 /* If a dispatch table always goes to the same place,
652 get rid of it and replace the insn that uses it. */
653
654 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
655 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
656 {
657 int i;
658 rtx pat = PATTERN (insn);
659 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
660 int len = XVECLEN (pat, diff_vec_p);
661 rtx dispatch = prev_real_insn (insn);
662
663 for (i = 0; i < len; i++)
664 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
665 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
666 break;
667 if (i == len
668 && dispatch != 0
669 && GET_CODE (dispatch) == JUMP_INSN
670 && JUMP_LABEL (dispatch) != 0
671 /* Don't mess with a casesi insn. */
672 && !(GET_CODE (PATTERN (dispatch)) == SET
673 && (GET_CODE (SET_SRC (PATTERN (dispatch)))
674 == IF_THEN_ELSE))
675 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
676 {
677 redirect_tablejump (dispatch,
678 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
679 changed = 1;
680 }
681 }
682
683 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
684
685 /* If a jump references the end of the function, try to turn
686 it into a RETURN insn, possibly a conditional one. */
687 if (JUMP_LABEL (insn)
688 && (next_active_insn (JUMP_LABEL (insn)) == 0
689 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
690 == RETURN))
691 changed |= redirect_jump (insn, NULL_RTX);
692
693 /* Detect jump to following insn. */
694 if (reallabelprev == insn && condjump_p (insn))
695 {
696 next = next_real_insn (JUMP_LABEL (insn));
697 delete_jump (insn);
698 changed = 1;
699 continue;
700 }
701
702 /* If we have an unconditional jump preceded by a USE, try to put
703 the USE before the target and jump there. This simplifies many
704 of the optimizations below since we don't have to worry about
705 dealing with these USE insns. We only do this if the label
706 being branch to already has the identical USE or if code
707 never falls through to that label. */
708
709 if (this_is_simplejump
710 && (temp = prev_nonnote_insn (insn)) != 0
711 && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
712 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
713 && (GET_CODE (temp1) == BARRIER
714 || (GET_CODE (temp1) == INSN
715 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
716 /* Don't do this optimization if we have a loop containing only
717 the USE instruction, and the loop start label has a usage
718 count of 1. This is because we will redo this optimization
719 everytime through the outer loop, and jump opt will never
720 exit. */
721 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
722 && temp2 == JUMP_LABEL (insn)
723 && LABEL_NUSES (temp2) == 1))
724 {
725 if (GET_CODE (temp1) == BARRIER)
726 {
727 emit_insn_after (PATTERN (temp), temp1);
728 temp1 = NEXT_INSN (temp1);
729 }
730
731 delete_insn (temp);
732 redirect_jump (insn, get_label_before (temp1));
733 reallabelprev = prev_real_insn (temp1);
734 changed = 1;
735 }
736
737 /* Simplify if (...) x = a; else x = b; by converting it
738 to x = b; if (...) x = a;
739 if B is sufficiently simple, the test doesn't involve X,
740 and nothing in the test modifies B or X.
741
742 If we have small register classes, we also can't do this if X
743 is a hard register.
744
745 If the "x = b;" insn has any REG_NOTES, we don't do this because
746 of the possibility that we are running after CSE and there is a
747 REG_EQUAL note that is only valid if the branch has already been
748 taken. If we move the insn with the REG_EQUAL note, we may
749 fold the comparison to always be false in a later CSE pass.
750 (We could also delete the REG_NOTES when moving the insn, but it
751 seems simpler to not move it.) An exception is that we can move
752 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
753 value is the same as "b".
754
755 INSN is the branch over the `else' part.
756
757 We set:
758
759 TEMP to the jump insn preceding "x = a;"
760 TEMP1 to X
761 TEMP2 to the insn that sets "x = b;"
762 TEMP3 to the insn that sets "x = a;"
763 TEMP4 to the set of "x = b"; */
764
765 if (this_is_simplejump
766 && (temp3 = prev_active_insn (insn)) != 0
767 && GET_CODE (temp3) == INSN
768 && (temp4 = single_set (temp3)) != 0
769 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
770 && (! SMALL_REGISTER_CLASSES
771 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
772 && (temp2 = next_active_insn (insn)) != 0
773 && GET_CODE (temp2) == INSN
774 && (temp4 = single_set (temp2)) != 0
775 && rtx_equal_p (SET_DEST (temp4), temp1)
776 && ! rtx_unsafe_p (SET_SRC (temp4))
777 && (REG_NOTES (temp2) == 0
778 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
779 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
780 && XEXP (REG_NOTES (temp2), 1) == 0
781 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
782 SET_SRC (temp4))))
783 && (temp = prev_active_insn (temp3)) != 0
784 && condjump_p (temp) && ! simplejump_p (temp)
785 /* TEMP must skip over the "x = a;" insn */
786 && prev_real_insn (JUMP_LABEL (temp)) == insn
787 && no_labels_between_p (insn, JUMP_LABEL (temp))
788 /* There must be no other entries to the "x = b;" insn. */
789 && no_labels_between_p (JUMP_LABEL (temp), temp2)
790 /* INSN must either branch to the insn after TEMP2 or the insn
791 after TEMP2 must branch to the same place as INSN. */
792 && (reallabelprev == temp2
793 || ((temp5 = next_active_insn (temp2)) != 0
794 && simplejump_p (temp5)
795 && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
796 {
797 /* The test expression, X, may be a complicated test with
798 multiple branches. See if we can find all the uses of
799 the label that TEMP branches to without hitting a CALL_INSN
800 or a jump to somewhere else. */
801 rtx target = JUMP_LABEL (temp);
802 int nuses = LABEL_NUSES (target);
803 rtx p;
804 #ifdef HAVE_cc0
805 rtx q;
806 #endif
807
808 /* Set P to the first jump insn that goes around "x = a;". */
809 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
810 {
811 if (GET_CODE (p) == JUMP_INSN)
812 {
813 if (condjump_p (p) && ! simplejump_p (p)
814 && JUMP_LABEL (p) == target)
815 {
816 nuses--;
817 if (nuses == 0)
818 break;
819 }
820 else
821 break;
822 }
823 else if (GET_CODE (p) == CALL_INSN)
824 break;
825 }
826
827 #ifdef HAVE_cc0
828 /* We cannot insert anything between a set of cc and its use
829 so if P uses cc0, we must back up to the previous insn. */
830 q = prev_nonnote_insn (p);
831 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
832 && sets_cc0_p (PATTERN (q)))
833 p = q;
834 #endif
835
836 if (p)
837 p = PREV_INSN (p);
838
839 /* If we found all the uses and there was no data conflict, we
840 can move the assignment unless we can branch into the middle
841 from somewhere. */
842 if (nuses == 0 && p
843 && no_labels_between_p (p, insn)
844 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
845 && ! reg_set_between_p (temp1, p, temp3)
846 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
847 || ! modified_between_p (SET_SRC (temp4), p, temp2)))
848 {
849 emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
850 delete_insn (temp2);
851
852 /* Set NEXT to an insn that we know won't go away. */
853 next = next_active_insn (insn);
854
855 /* Delete the jump around the set. Note that we must do
856 this before we redirect the test jumps so that it won't
857 delete the code immediately following the assignment
858 we moved (which might be a jump). */
859
860 delete_insn (insn);
861
862 /* We either have two consecutive labels or a jump to
863 a jump, so adjust all the JUMP_INSNs to branch to where
864 INSN branches to. */
865 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
866 if (GET_CODE (p) == JUMP_INSN)
867 redirect_jump (p, target);
868
869 changed = 1;
870 continue;
871 }
872 }
873
874 /* Simplify if (...) { x = a; goto l; } x = b; by converting it
875 to x = a; if (...) goto l; x = b;
876 if A is sufficiently simple, the test doesn't involve X,
877 and nothing in the test modifies A or X.
878
879 If we have small register classes, we also can't do this if X
880 is a hard register.
881
882 If the "x = a;" insn has any REG_NOTES, we don't do this because
883 of the possibility that we are running after CSE and there is a
884 REG_EQUAL note that is only valid if the branch has already been
885 taken. If we move the insn with the REG_EQUAL note, we may
886 fold the comparison to always be false in a later CSE pass.
887 (We could also delete the REG_NOTES when moving the insn, but it
888 seems simpler to not move it.) An exception is that we can move
889 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
890 value is the same as "a".
891
892 INSN is the goto.
893
894 We set:
895
896 TEMP to the jump insn preceding "x = a;"
897 TEMP1 to X
898 TEMP2 to the insn that sets "x = b;"
899 TEMP3 to the insn that sets "x = a;"
900 TEMP4 to the set of "x = a"; */
901
902 if (this_is_simplejump
903 && (temp2 = next_active_insn (insn)) != 0
904 && GET_CODE (temp2) == INSN
905 && (temp4 = single_set (temp2)) != 0
906 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
907 && (! SMALL_REGISTER_CLASSES
908 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
909 && (temp3 = prev_active_insn (insn)) != 0
910 && GET_CODE (temp3) == INSN
911 && (temp4 = single_set (temp3)) != 0
912 && rtx_equal_p (SET_DEST (temp4), temp1)
913 && (GET_CODE (SET_SRC (temp4)) == REG
914 || GET_CODE (SET_SRC (temp4)) == SUBREG
915 || (GET_CODE (SET_SRC (temp4)) == MEM
916 && RTX_UNCHANGING_P (SET_SRC (temp4)))
917 || CONSTANT_P (SET_SRC (temp4)))
918 && (REG_NOTES (temp3) == 0
919 || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
920 || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
921 && XEXP (REG_NOTES (temp3), 1) == 0
922 && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
923 SET_SRC (temp4))))
924 && (temp = prev_active_insn (temp3)) != 0
925 && condjump_p (temp) && ! simplejump_p (temp)
926 /* TEMP must skip over the "x = a;" insn */
927 && prev_real_insn (JUMP_LABEL (temp)) == insn
928 && no_labels_between_p (temp, insn))
929 {
930 rtx prev_label = JUMP_LABEL (temp);
931 rtx insert_after = prev_nonnote_insn (temp);
932
933 #ifdef HAVE_cc0
934 /* We cannot insert anything between a set of cc and its use. */
935 if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
936 && sets_cc0_p (PATTERN (insert_after)))
937 insert_after = prev_nonnote_insn (insert_after);
938 #endif
939 ++LABEL_NUSES (prev_label);
940
941 if (insert_after
942 && no_labels_between_p (insert_after, temp)
943 && ! reg_referenced_between_p (temp1, insert_after, temp3)
944 && ! reg_referenced_between_p (temp1, temp3,
945 NEXT_INSN (temp2))
946 && ! reg_set_between_p (temp1, insert_after, temp)
947 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
948 || ! reg_set_between_p (SET_SRC (temp4),
949 insert_after, temp))
950 && invert_jump (temp, JUMP_LABEL (insn)))
951 {
952 emit_insn_after_with_line_notes (PATTERN (temp3),
953 insert_after, temp3);
954 delete_insn (temp3);
955 delete_insn (insn);
956 /* Set NEXT to an insn that we know won't go away. */
957 next = temp2;
958 changed = 1;
959 }
960 if (prev_label && --LABEL_NUSES (prev_label) == 0)
961 delete_insn (prev_label);
962 if (changed)
963 continue;
964 }
965
966 #ifndef HAVE_cc0
967 /* If we have if (...) x = exp; and branches are expensive,
968 EXP is a single insn, does not have any side effects, cannot
969 trap, and is not too costly, convert this to
970 t = exp; if (...) x = t;
971
972 Don't do this when we have CC0 because it is unlikely to help
973 and we'd need to worry about where to place the new insn and
974 the potential for conflicts. We also can't do this when we have
975 notes on the insn for the same reason as above.
976
977 We set:
978
979 TEMP to the "x = exp;" insn.
980 TEMP1 to the single set in the "x = exp; insn.
981 TEMP2 to "x". */
982
983 if (! reload_completed
984 && this_is_condjump && ! this_is_simplejump
985 && BRANCH_COST >= 3
986 && (temp = next_nonnote_insn (insn)) != 0
987 && GET_CODE (temp) == INSN
988 && REG_NOTES (temp) == 0
989 && (reallabelprev == temp
990 || ((temp2 = next_active_insn (temp)) != 0
991 && simplejump_p (temp2)
992 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
993 && (temp1 = single_set (temp)) != 0
994 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
995 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
996 && (! SMALL_REGISTER_CLASSES
997 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
998 && GET_CODE (SET_SRC (temp1)) != REG
999 && GET_CODE (SET_SRC (temp1)) != SUBREG
1000 && GET_CODE (SET_SRC (temp1)) != CONST_INT
1001 && ! side_effects_p (SET_SRC (temp1))
1002 && ! may_trap_p (SET_SRC (temp1))
1003 && rtx_cost (SET_SRC (temp1), SET) < 10)
1004 {
1005 rtx new = gen_reg_rtx (GET_MODE (temp2));
1006
1007 if ((temp3 = find_insert_position (insn, temp))
1008 && validate_change (temp, &SET_DEST (temp1), new, 0))
1009 {
1010 next = emit_insn_after (gen_move_insn (temp2, new), insn);
1011 emit_insn_after_with_line_notes (PATTERN (temp),
1012 PREV_INSN (temp3), temp);
1013 delete_insn (temp);
1014 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
1015 }
1016 }
1017
1018 /* Similarly, if it takes two insns to compute EXP but they
1019 have the same destination. Here TEMP3 will be the second
1020 insn and TEMP4 the SET from that insn. */
1021
1022 if (! reload_completed
1023 && this_is_condjump && ! this_is_simplejump
1024 && BRANCH_COST >= 4
1025 && (temp = next_nonnote_insn (insn)) != 0
1026 && GET_CODE (temp) == INSN
1027 && REG_NOTES (temp) == 0
1028 && (temp3 = next_nonnote_insn (temp)) != 0
1029 && GET_CODE (temp3) == INSN
1030 && REG_NOTES (temp3) == 0
1031 && (reallabelprev == temp3
1032 || ((temp2 = next_active_insn (temp3)) != 0
1033 && simplejump_p (temp2)
1034 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
1035 && (temp1 = single_set (temp)) != 0
1036 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
1037 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
1038 && (! SMALL_REGISTER_CLASSES
1039 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
1040 && ! side_effects_p (SET_SRC (temp1))
1041 && ! may_trap_p (SET_SRC (temp1))
1042 && rtx_cost (SET_SRC (temp1), SET) < 10
1043 && (temp4 = single_set (temp3)) != 0
1044 && rtx_equal_p (SET_DEST (temp4), temp2)
1045 && ! side_effects_p (SET_SRC (temp4))
1046 && ! may_trap_p (SET_SRC (temp4))
1047 && rtx_cost (SET_SRC (temp4), SET) < 10)
1048 {
1049 rtx new = gen_reg_rtx (GET_MODE (temp2));
1050
1051 if ((temp5 = find_insert_position (insn, temp))
1052 && (temp6 = find_insert_position (insn, temp3))
1053 && validate_change (temp, &SET_DEST (temp1), new, 0))
1054 {
1055 /* Use the earliest of temp5 and temp6. */
1056 if (temp5 != insn)
1057 temp6 = temp5;
1058 next = emit_insn_after (gen_move_insn (temp2, new), insn);
1059 emit_insn_after_with_line_notes (PATTERN (temp),
1060 PREV_INSN (temp6), temp);
1061 emit_insn_after_with_line_notes
1062 (replace_rtx (PATTERN (temp3), temp2, new),
1063 PREV_INSN (temp6), temp3);
1064 delete_insn (temp);
1065 delete_insn (temp3);
1066 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
1067 }
1068 }
1069
1070 /* Finally, handle the case where two insns are used to
1071 compute EXP but a temporary register is used. Here we must
1072 ensure that the temporary register is not used anywhere else. */
1073
1074 if (! reload_completed
1075 && after_regscan
1076 && this_is_condjump && ! this_is_simplejump
1077 && BRANCH_COST >= 4
1078 && (temp = next_nonnote_insn (insn)) != 0
1079 && GET_CODE (temp) == INSN
1080 && REG_NOTES (temp) == 0
1081 && (temp3 = next_nonnote_insn (temp)) != 0
1082 && GET_CODE (temp3) == INSN
1083 && REG_NOTES (temp3) == 0
1084 && (reallabelprev == temp3
1085 || ((temp2 = next_active_insn (temp3)) != 0
1086 && simplejump_p (temp2)
1087 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
1088 && (temp1 = single_set (temp)) != 0
1089 && (temp5 = SET_DEST (temp1),
1090 (GET_CODE (temp5) == REG
1091 || (GET_CODE (temp5) == SUBREG
1092 && (temp5 = SUBREG_REG (temp5),
1093 GET_CODE (temp5) == REG))))
1094 && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
1095 && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
1096 && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
1097 && ! side_effects_p (SET_SRC (temp1))
1098 && ! may_trap_p (SET_SRC (temp1))
1099 && rtx_cost (SET_SRC (temp1), SET) < 10
1100 && (temp4 = single_set (temp3)) != 0
1101 && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
1102 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
1103 && (! SMALL_REGISTER_CLASSES
1104 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
1105 && rtx_equal_p (SET_DEST (temp4), temp2)
1106 && ! side_effects_p (SET_SRC (temp4))
1107 && ! may_trap_p (SET_SRC (temp4))
1108 && rtx_cost (SET_SRC (temp4), SET) < 10)
1109 {
1110 rtx new = gen_reg_rtx (GET_MODE (temp2));
1111
1112 if ((temp5 = find_insert_position (insn, temp))
1113 && (temp6 = find_insert_position (insn, temp3))
1114 && validate_change (temp3, &SET_DEST (temp4), new, 0))
1115 {
1116 /* Use the earliest of temp5 and temp6. */
1117 if (temp5 != insn)
1118 temp6 = temp5;
1119 next = emit_insn_after (gen_move_insn (temp2, new), insn);
1120 emit_insn_after_with_line_notes (PATTERN (temp),
1121 PREV_INSN (temp6), temp);
1122 emit_insn_after_with_line_notes (PATTERN (temp3),
1123 PREV_INSN (temp6), temp3);
1124 delete_insn (temp);
1125 delete_insn (temp3);
1126 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
1127 }
1128 }
1129 #endif /* HAVE_cc0 */
1130
1131 /* Try to use a conditional move (if the target has them), or a
1132 store-flag insn. The general case is:
1133
1134 1) x = a; if (...) x = b; and
1135 2) if (...) x = b;
1136
1137 If the jump would be faster, the machine should not have defined
1138 the movcc or scc insns!. These cases are often made by the
1139 previous optimization.
1140
1141 The second case is treated as x = x; if (...) x = b;.
1142
1143 INSN here is the jump around the store. We set:
1144
1145 TEMP to the "x = b;" insn.
1146 TEMP1 to X.
1147 TEMP2 to B.
1148 TEMP3 to A (X in the second case).
1149 TEMP4 to the condition being tested.
1150 TEMP5 to the earliest insn used to find the condition. */
1151
1152 if (/* We can't do this after reload has completed. */
1153 ! reload_completed
1154 && this_is_condjump && ! this_is_simplejump
1155 /* Set TEMP to the "x = b;" insn. */
1156 && (temp = next_nonnote_insn (insn)) != 0
1157 && GET_CODE (temp) == INSN
1158 && GET_CODE (PATTERN (temp)) == SET
1159 && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
1160 && (! SMALL_REGISTER_CLASSES
1161 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
1162 && ! rtx_unsafe_p (temp2 = SET_SRC (PATTERN (temp)))
1163 /* Allow either form, but prefer the former if both apply.
1164 There is no point in using the old value of TEMP1 if
1165 it is a register, since cse will alias them. It can
1166 lose if the old value were a hard register since CSE
1167 won't replace hard registers. Avoid using TEMP3 if
1168 small register classes and it is a hard register. */
1169 && (((temp3 = reg_set_last (temp1, insn)) != 0
1170 && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
1171 && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
1172 /* Make the latter case look like x = x; if (...) x = b; */
1173 || (temp3 = temp1, 1))
1174 /* INSN must either branch to the insn after TEMP or the insn
1175 after TEMP must branch to the same place as INSN. */
1176 && (reallabelprev == temp
1177 || ((temp4 = next_active_insn (temp)) != 0
1178 && simplejump_p (temp4)
1179 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
1180 && (temp4 = get_condition (insn, &temp5)) != 0
1181 /* We must be comparing objects whose modes imply the size.
1182 We could handle BLKmode if (1) emit_store_flag could
1183 and (2) we could find the size reliably. */
1184 && GET_MODE (XEXP (temp4, 0)) != BLKmode
1185 /* Even if branches are cheap, the store_flag optimization
1186 can win when the operation to be performed can be
1187 expressed directly. */
1188 #ifdef HAVE_cc0
1189 /* If the previous insn sets CC0 and something else, we can't
1190 do this since we are going to delete that insn. */
1191
1192 && ! ((temp6 = prev_nonnote_insn (insn)) != 0
1193 && GET_CODE (temp6) == INSN
1194 && (sets_cc0_p (PATTERN (temp6)) == -1
1195 || (sets_cc0_p (PATTERN (temp6)) == 1
1196 && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
1197 #endif
1198 )
1199 {
1200 #ifdef HAVE_conditional_move
1201 /* First try a conditional move. */
1202 {
1203 enum rtx_code code = GET_CODE (temp4);
1204 rtx var = temp1;
1205 rtx cond0, cond1, aval, bval;
1206 rtx target;
1207
1208 /* Copy the compared variables into cond0 and cond1, so that
1209 any side effects performed in or after the old comparison,
1210 will not affect our compare which will come later. */
1211 /* ??? Is it possible to just use the comparison in the jump
1212 insn? After all, we're going to delete it. We'd have
1213 to modify emit_conditional_move to take a comparison rtx
1214 instead or write a new function. */
1215 cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
1216 /* We want the target to be able to simplify comparisons with
1217 zero (and maybe other constants as well), so don't create
1218 pseudos for them. There's no need to either. */
1219 if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
1220 || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
1221 cond1 = XEXP (temp4, 1);
1222 else
1223 cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));
1224
1225 aval = temp3;
1226 bval = temp2;
1227
1228 start_sequence ();
1229 target = emit_conditional_move (var, code,
1230 cond0, cond1, VOIDmode,
1231 aval, bval, GET_MODE (var),
1232 (code == LTU || code == GEU
1233 || code == LEU || code == GTU));
1234
1235 if (target)
1236 {
1237 rtx seq1,seq2;
1238
1239 /* Save the conditional move sequence but don't emit it
1240 yet. On some machines, like the alpha, it is possible
1241 that temp5 == insn, so next generate the sequence that
1242 saves the compared values and then emit both
1243 sequences ensuring seq1 occurs before seq2. */
1244 seq2 = get_insns ();
1245 end_sequence ();
1246
1247 /* Now that we can't fail, generate the copy insns that
1248 preserve the compared values. */
1249 start_sequence ();
1250 emit_move_insn (cond0, XEXP (temp4, 0));
1251 if (cond1 != XEXP (temp4, 1))
1252 emit_move_insn (cond1, XEXP (temp4, 1));
1253 seq1 = get_insns ();
1254 end_sequence ();
1255
1256 emit_insns_before (seq1, temp5);
1257 /* Insert conditional move after insn, to be sure that
1258 the jump and a possible compare won't be separated */
1259 emit_insns_after (seq2, insn);
1260
1261 /* ??? We can also delete the insn that sets X to A.
1262 Flow will do it too though. */
1263 delete_insn (temp);
1264 next = NEXT_INSN (insn);
1265 delete_jump (insn);
1266 changed = 1;
1267 continue;
1268 }
1269 else
1270 end_sequence ();
1271 }
1272 #endif
1273
1274 /* That didn't work, try a store-flag insn.
1275
1276 We further divide the cases into:
1277
1278 1) x = a; if (...) x = b; and either A or B is zero,
1279 2) if (...) x = 0; and jumps are expensive,
1280 3) x = a; if (...) x = b; and A and B are constants where all
1281 the set bits in A are also set in B and jumps are expensive,
1282 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
1283 more expensive, and
1284 5) if (...) x = b; if jumps are even more expensive. */
1285
1286 if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
1287 && ((GET_CODE (temp3) == CONST_INT)
1288 /* Make the latter case look like
1289 x = x; if (...) x = 0; */
1290 || (temp3 = temp1,
1291 ((BRANCH_COST >= 2
1292 && temp2 == const0_rtx)
1293 || BRANCH_COST >= 3)))
1294 /* If B is zero, OK; if A is zero, can only do (1) if we
1295 can reverse the condition. See if (3) applies possibly
1296 by reversing the condition. Prefer reversing to (4) when
1297 branches are very expensive. */
1298 && (((BRANCH_COST >= 2
1299 || STORE_FLAG_VALUE == -1
1300 || (STORE_FLAG_VALUE == 1
1301 /* Check that the mask is a power of two,
1302 so that it can probably be generated
1303 with a shift. */
1304 && exact_log2 (INTVAL (temp3)) >= 0))
1305 && (reversep = 0, temp2 == const0_rtx))
1306 || ((BRANCH_COST >= 2
1307 || STORE_FLAG_VALUE == -1
1308 || (STORE_FLAG_VALUE == 1
1309 && exact_log2 (INTVAL (temp2)) >= 0))
1310 && temp3 == const0_rtx
1311 && (reversep = can_reverse_comparison_p (temp4, insn)))
1312 || (BRANCH_COST >= 2
1313 && GET_CODE (temp2) == CONST_INT
1314 && GET_CODE (temp3) == CONST_INT
1315 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
1316 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
1317 && (reversep = can_reverse_comparison_p (temp4,
1318 insn)))))
1319 || BRANCH_COST >= 3)
1320 )
1321 {
1322 enum rtx_code code = GET_CODE (temp4);
1323 rtx uval, cval, var = temp1;
1324 int normalizep;
1325 rtx target;
1326
1327 /* If necessary, reverse the condition. */
1328 if (reversep)
1329 code = reverse_condition (code), uval = temp2, cval = temp3;
1330 else
1331 uval = temp3, cval = temp2;
1332
1333 /* If CVAL is non-zero, normalize to -1. Otherwise, if UVAL
1334 is the constant 1, it is best to just compute the result
1335 directly. If UVAL is constant and STORE_FLAG_VALUE
1336 includes all of its bits, it is best to compute the flag
1337 value unnormalized and `and' it with UVAL. Otherwise,
1338 normalize to -1 and `and' with UVAL. */
1339 normalizep = (cval != const0_rtx ? -1
1340 : (uval == const1_rtx ? 1
1341 : (GET_CODE (uval) == CONST_INT
1342 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
1343 ? 0 : -1));
1344
1345 /* We will be putting the store-flag insn immediately in
1346 front of the comparison that was originally being done,
1347 so we know all the variables in TEMP4 will be valid.
1348 However, this might be in front of the assignment of
1349 A to VAR. If it is, it would clobber the store-flag
1350 we will be emitting.
1351
1352 Therefore, emit into a temporary which will be copied to
1353 VAR immediately after TEMP. */
1354
1355 start_sequence ();
1356 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
1357 XEXP (temp4, 0), XEXP (temp4, 1),
1358 VOIDmode,
1359 (code == LTU || code == LEU
1360 || code == GEU || code == GTU),
1361 normalizep);
1362 if (target)
1363 {
1364 rtx seq;
1365 rtx before = insn;
1366
1367 seq = get_insns ();
1368 end_sequence ();
1369
1370 /* Put the store-flag insns in front of the first insn
1371 used to compute the condition to ensure that we
1372 use the same values of them as the current
1373 comparison. However, the remainder of the insns we
1374 generate will be placed directly in front of the
1375 jump insn, in case any of the pseudos we use
1376 are modified earlier. */
1377
1378 emit_insns_before (seq, temp5);
1379
1380 start_sequence ();
1381
1382 /* Both CVAL and UVAL are non-zero. */
1383 if (cval != const0_rtx && uval != const0_rtx)
1384 {
1385 rtx tem1, tem2;
1386
1387 tem1 = expand_and (uval, target, NULL_RTX);
1388 if (GET_CODE (cval) == CONST_INT
1389 && GET_CODE (uval) == CONST_INT
1390 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
1391 tem2 = cval;
1392 else
1393 {
1394 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
1395 target, NULL_RTX, 0);
1396 tem2 = expand_and (cval, tem2,
1397 (GET_CODE (tem2) == REG
1398 ? tem2 : 0));
1399 }
1400
1401 /* If we usually make new pseudos, do so here. This
1402 turns out to help machines that have conditional
1403 move insns. */
1404 /* ??? Conditional moves have already been handled.
1405 This may be obsolete. */
1406
1407 if (flag_expensive_optimizations)
1408 target = 0;
1409
1410 target = expand_binop (GET_MODE (var), ior_optab,
1411 tem1, tem2, target,
1412 1, OPTAB_WIDEN);
1413 }
1414 else if (normalizep != 1)
1415 {
1416 /* We know that either CVAL or UVAL is zero. If
1417 UVAL is zero, negate TARGET and `and' with CVAL.
1418 Otherwise, `and' with UVAL. */
1419 if (uval == const0_rtx)
1420 {
1421 target = expand_unop (GET_MODE (var), one_cmpl_optab,
1422 target, NULL_RTX, 0);
1423 uval = cval;
1424 }
1425
1426 target = expand_and (uval, target,
1427 (GET_CODE (target) == REG
1428 && ! preserve_subexpressions_p ()
1429 ? target : NULL_RTX));
1430 }
1431
1432 emit_move_insn (var, target);
1433 seq = get_insns ();
1434 end_sequence ();
1435 #ifdef HAVE_cc0
1436 /* If INSN uses CC0, we must not separate it from the
1437 insn that sets cc0. */
1438 if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
1439 before = prev_nonnote_insn (before);
1440 #endif
1441 emit_insns_before (seq, before);
1442
1443 delete_insn (temp);
1444 next = NEXT_INSN (insn);
1445 delete_jump (insn);
1446 changed = 1;
1447 continue;
1448 }
1449 else
1450 end_sequence ();
1451 }
1452 }
1453
1454 /* If branches are expensive, convert
1455 if (foo) bar++; to bar += (foo != 0);
1456 and similarly for "bar--;"
1457
1458 INSN is the conditional branch around the arithmetic. We set:
1459
1460 TEMP is the arithmetic insn.
1461 TEMP1 is the SET doing the arithmetic.
1462 TEMP2 is the operand being incremented or decremented.
1463 TEMP3 to the condition being tested.
1464 TEMP4 to the earliest insn used to find the condition. */
1465
1466 if ((BRANCH_COST >= 2
1467 #ifdef HAVE_incscc
1468 || HAVE_incscc
1469 #endif
1470 #ifdef HAVE_decscc
1471 || HAVE_decscc
1472 #endif
1473 )
1474 && ! reload_completed
1475 && this_is_condjump && ! this_is_simplejump
1476 && (temp = next_nonnote_insn (insn)) != 0
1477 && (temp1 = single_set (temp)) != 0
1478 && (temp2 = SET_DEST (temp1),
1479 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
1480 && GET_CODE (SET_SRC (temp1)) == PLUS
1481 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
1482 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
1483 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
1484 && ! side_effects_p (temp2)
1485 && ! may_trap_p (temp2)
1486 /* INSN must either branch to the insn after TEMP or the insn
1487 after TEMP must branch to the same place as INSN. */
1488 && (reallabelprev == temp
1489 || ((temp3 = next_active_insn (temp)) != 0
1490 && simplejump_p (temp3)
1491 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
1492 && (temp3 = get_condition (insn, &temp4)) != 0
1493 /* We must be comparing objects whose modes imply the size.
1494 We could handle BLKmode if (1) emit_store_flag could
1495 and (2) we could find the size reliably. */
1496 && GET_MODE (XEXP (temp3, 0)) != BLKmode
1497 && can_reverse_comparison_p (temp3, insn))
1498 {
1499 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
1500 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1501
1502 start_sequence ();
1503
1504 /* It must be the case that TEMP2 is not modified in the range
1505 [TEMP4, INSN). The one exception we make is if the insn
1506 before INSN sets TEMP2 to something which is also unchanged
1507 in that range. In that case, we can move the initialization
1508 into our sequence. */
1509
1510 if ((temp5 = prev_active_insn (insn)) != 0
1511 && no_labels_between_p (temp5, insn)
1512 && GET_CODE (temp5) == INSN
1513 && (temp6 = single_set (temp5)) != 0
1514 && rtx_equal_p (temp2, SET_DEST (temp6))
1515 && (CONSTANT_P (SET_SRC (temp6))
1516 || GET_CODE (SET_SRC (temp6)) == REG
1517 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1518 {
1519 emit_insn (PATTERN (temp5));
1520 init_insn = temp5;
1521 init = SET_SRC (temp6);
1522 }
1523
1524 if (CONSTANT_P (init)
1525 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1526 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1527 XEXP (temp3, 0), XEXP (temp3, 1),
1528 VOIDmode,
1529 (code == LTU || code == LEU
1530 || code == GTU || code == GEU), 1);
1531
1532 /* If we can do the store-flag, do the addition or
1533 subtraction. */
1534
1535 if (target)
1536 target = expand_binop (GET_MODE (temp2),
1537 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1538 ? add_optab : sub_optab),
1539 temp2, target, temp2, 0, OPTAB_WIDEN);
1540
1541 if (target != 0)
1542 {
1543 /* Put the result back in temp2 in case it isn't already.
1544 Then replace the jump, possible a CC0-setting insn in
1545 front of the jump, and TEMP, with the sequence we have
1546 made. */
1547
1548 if (target != temp2)
1549 emit_move_insn (temp2, target);
1550
1551 seq = get_insns ();
1552 end_sequence ();
1553
1554 emit_insns_before (seq, temp4);
1555 delete_insn (temp);
1556
1557 if (init_insn)
1558 delete_insn (init_insn);
1559
1560 next = NEXT_INSN (insn);
1561 #ifdef HAVE_cc0
1562 delete_insn (prev_nonnote_insn (insn));
1563 #endif
1564 delete_insn (insn);
1565 changed = 1;
1566 continue;
1567 }
1568 else
1569 end_sequence ();
1570 }
1571
1572 /* Simplify if (...) x = 1; else {...} if (x) ...
1573 We recognize this case scanning backwards as well.
1574
1575 TEMP is the assignment to x;
1576 TEMP1 is the label at the head of the second if. */
1577 /* ?? This should call get_condition to find the values being
1578 compared, instead of looking for a COMPARE insn when HAVE_cc0
1579 is not defined. This would allow it to work on the m88k. */
1580 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1581 is not defined and the condition is tested by a separate compare
1582 insn. This is because the code below assumes that the result
1583 of the compare dies in the following branch.
1584
1585 Not only that, but there might be other insns between the
1586 compare and branch whose results are live. Those insns need
1587 to be executed.
1588
1589 A way to fix this is to move the insns at JUMP_LABEL (insn)
1590 to before INSN. If we are running before flow, they will
1591 be deleted if they aren't needed. But this doesn't work
1592 well after flow.
1593
1594 This is really a special-case of jump threading, anyway. The
1595 right thing to do is to replace this and jump threading with
1596 much simpler code in cse.
1597
1598 This code has been turned off in the non-cc0 case in the
1599 meantime. */
1600
1601 #ifdef HAVE_cc0
1602 else if (this_is_simplejump
1603 /* Safe to skip USE and CLOBBER insns here
1604 since they will not be deleted. */
1605 && (temp = prev_active_insn (insn))
1606 && no_labels_between_p (temp, insn)
1607 && GET_CODE (temp) == INSN
1608 && GET_CODE (PATTERN (temp)) == SET
1609 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1610 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1611 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1612 /* If we find that the next value tested is `x'
1613 (TEMP1 is the insn where this happens), win. */
1614 && GET_CODE (temp1) == INSN
1615 && GET_CODE (PATTERN (temp1)) == SET
1616 #ifdef HAVE_cc0
1617 /* Does temp1 `tst' the value of x? */
1618 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1619 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1620 && (temp1 = next_nonnote_insn (temp1))
1621 #else
1622 /* Does temp1 compare the value of x against zero? */
1623 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1624 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1625 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1626 == SET_DEST (PATTERN (temp)))
1627 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1628 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1629 #endif
1630 && condjump_p (temp1))
1631 {
1632 /* Get the if_then_else from the condjump. */
1633 rtx choice = SET_SRC (PATTERN (temp1));
1634 if (GET_CODE (choice) == IF_THEN_ELSE)
1635 {
1636 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1637 rtx val = SET_SRC (PATTERN (temp));
1638 rtx cond
1639 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1640 val, const0_rtx);
1641 rtx ultimate;
1642
1643 if (cond == const_true_rtx)
1644 ultimate = XEXP (choice, 1);
1645 else if (cond == const0_rtx)
1646 ultimate = XEXP (choice, 2);
1647 else
1648 ultimate = 0;
1649
1650 if (ultimate == pc_rtx)
1651 ultimate = get_label_after (temp1);
1652 else if (ultimate && GET_CODE (ultimate) != RETURN)
1653 ultimate = XEXP (ultimate, 0);
1654
1655 if (ultimate && JUMP_LABEL(insn) != ultimate)
1656 changed |= redirect_jump (insn, ultimate);
1657 }
1658 }
1659 #endif
1660
1661 #if 0
1662 /* @@ This needs a bit of work before it will be right.
1663
1664 Any type of comparison can be accepted for the first and
1665 second compare. When rewriting the first jump, we must
1666 compute the what conditions can reach label3, and use the
1667 appropriate code. We can not simply reverse/swap the code
1668 of the first jump. In some cases, the second jump must be
1669 rewritten also.
1670
1671 For example,
1672 < == converts to > ==
1673 < != converts to == >
1674 etc.
1675
1676 If the code is written to only accept an '==' test for the second
1677 compare, then all that needs to be done is to swap the condition
1678 of the first branch.
1679
1680 It is questionable whether we want this optimization anyways,
1681 since if the user wrote code like this because he/she knew that
1682 the jump to label1 is taken most of the time, then rewriting
1683 this gives slower code. */
1684 /* @@ This should call get_condition to find the values being
1685 compared, instead of looking for a COMPARE insn when HAVE_cc0
1686 is not defined. This would allow it to work on the m88k. */
1687 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1688 is not defined and the condition is tested by a separate compare
1689 insn. This is because the code below assumes that the result
1690 of the compare dies in the following branch. */
1691
1692 /* Simplify test a ~= b
1693 condjump label1;
1694 test a == b
1695 condjump label2;
1696 jump label3;
1697 label1:
1698
1699 rewriting as
1700 test a ~~= b
1701 condjump label3
1702 test a == b
1703 condjump label2
1704 label1:
1705
1706 where ~= is an inequality, e.g. >, and ~~= is the swapped
1707 inequality, e.g. <.
1708
1709 We recognize this case scanning backwards.
1710
1711 TEMP is the conditional jump to `label2';
1712 TEMP1 is the test for `a == b';
1713 TEMP2 is the conditional jump to `label1';
1714 TEMP3 is the test for `a ~= b'. */
1715 else if (this_is_simplejump
1716 && (temp = prev_active_insn (insn))
1717 && no_labels_between_p (temp, insn)
1718 && condjump_p (temp)
1719 && (temp1 = prev_active_insn (temp))
1720 && no_labels_between_p (temp1, temp)
1721 && GET_CODE (temp1) == INSN
1722 && GET_CODE (PATTERN (temp1)) == SET
1723 #ifdef HAVE_cc0
1724 && sets_cc0_p (PATTERN (temp1)) == 1
1725 #else
1726 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1727 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1728 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1729 #endif
1730 && (temp2 = prev_active_insn (temp1))
1731 && no_labels_between_p (temp2, temp1)
1732 && condjump_p (temp2)
1733 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1734 && (temp3 = prev_active_insn (temp2))
1735 && no_labels_between_p (temp3, temp2)
1736 && GET_CODE (PATTERN (temp3)) == SET
1737 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1738 SET_DEST (PATTERN (temp1)))
1739 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1740 SET_SRC (PATTERN (temp3)))
1741 && ! inequality_comparisons_p (PATTERN (temp))
1742 && inequality_comparisons_p (PATTERN (temp2)))
1743 {
1744 rtx fallthrough_label = JUMP_LABEL (temp2);
1745
1746 ++LABEL_NUSES (fallthrough_label);
1747 if (swap_jump (temp2, JUMP_LABEL (insn)))
1748 {
1749 delete_insn (insn);
1750 changed = 1;
1751 }
1752
1753 if (--LABEL_NUSES (fallthrough_label) == 0)
1754 delete_insn (fallthrough_label);
1755 }
1756 #endif
1757 /* Simplify if (...) {... x = 1;} if (x) ...
1758
1759 We recognize this case backwards.
1760
1761 TEMP is the test of `x';
1762 TEMP1 is the assignment to `x' at the end of the
1763 previous statement. */
1764 /* @@ This should call get_condition to find the values being
1765 compared, instead of looking for a COMPARE insn when HAVE_cc0
1766 is not defined. This would allow it to work on the m88k. */
1767 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1768 is not defined and the condition is tested by a separate compare
1769 insn. This is because the code below assumes that the result
1770 of the compare dies in the following branch. */
1771
1772 /* ??? This has to be turned off. The problem is that the
1773 unconditional jump might indirectly end up branching to the
1774 label between TEMP1 and TEMP. We can't detect this, in general,
1775 since it may become a jump to there after further optimizations.
1776 If that jump is done, it will be deleted, so we will retry
1777 this optimization in the next pass, thus an infinite loop.
1778
1779 The present code prevents this by putting the jump after the
1780 label, but this is not logically correct. */
1781 #if 0
1782 else if (this_is_condjump
1783 /* Safe to skip USE and CLOBBER insns here
1784 since they will not be deleted. */
1785 && (temp = prev_active_insn (insn))
1786 && no_labels_between_p (temp, insn)
1787 && GET_CODE (temp) == INSN
1788 && GET_CODE (PATTERN (temp)) == SET
1789 #ifdef HAVE_cc0
1790 && sets_cc0_p (PATTERN (temp)) == 1
1791 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1792 #else
1793 /* Temp must be a compare insn, we can not accept a register
1794 to register move here, since it may not be simply a
1795 tst insn. */
1796 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1797 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1798 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1799 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1800 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1801 #endif
1802 /* May skip USE or CLOBBER insns here
1803 for checking for opportunity, since we
1804 take care of them later. */
1805 && (temp1 = prev_active_insn (temp))
1806 && GET_CODE (temp1) == INSN
1807 && GET_CODE (PATTERN (temp1)) == SET
1808 #ifdef HAVE_cc0
1809 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1810 #else
1811 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1812 == SET_DEST (PATTERN (temp1)))
1813 #endif
1814 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1815 /* If this isn't true, cse will do the job. */
1816 && ! no_labels_between_p (temp1, temp))
1817 {
1818 /* Get the if_then_else from the condjump. */
1819 rtx choice = SET_SRC (PATTERN (insn));
1820 if (GET_CODE (choice) == IF_THEN_ELSE
1821 && (GET_CODE (XEXP (choice, 0)) == EQ
1822 || GET_CODE (XEXP (choice, 0)) == NE))
1823 {
1824 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1825 rtx last_insn;
1826 rtx ultimate;
1827 rtx p;
1828
1829 /* Get the place that condjump will jump to
1830 if it is reached from here. */
1831 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1832 == want_nonzero)
1833 ultimate = XEXP (choice, 1);
1834 else
1835 ultimate = XEXP (choice, 2);
1836 /* Get it as a CODE_LABEL. */
1837 if (ultimate == pc_rtx)
1838 ultimate = get_label_after (insn);
1839 else
1840 /* Get the label out of the LABEL_REF. */
1841 ultimate = XEXP (ultimate, 0);
1842
1843 /* Insert the jump immediately before TEMP, specifically
1844 after the label that is between TEMP1 and TEMP. */
1845 last_insn = PREV_INSN (temp);
1846
1847 /* If we would be branching to the next insn, the jump
1848 would immediately be deleted and the re-inserted in
1849 a subsequent pass over the code. So don't do anything
1850 in that case. */
1851 if (next_active_insn (last_insn)
1852 != next_active_insn (ultimate))
1853 {
1854 emit_barrier_after (last_insn);
1855 p = emit_jump_insn_after (gen_jump (ultimate),
1856 last_insn);
1857 JUMP_LABEL (p) = ultimate;
1858 ++LABEL_NUSES (ultimate);
1859 if (INSN_UID (ultimate) < max_jump_chain
1860 && INSN_CODE (p) < max_jump_chain)
1861 {
1862 jump_chain[INSN_UID (p)]
1863 = jump_chain[INSN_UID (ultimate)];
1864 jump_chain[INSN_UID (ultimate)] = p;
1865 }
1866 changed = 1;
1867 continue;
1868 }
1869 }
1870 }
1871 #endif
1872 /* Detect a conditional jump going to the same place
1873 as an immediately following unconditional jump. */
1874 else if (this_is_condjump
1875 && (temp = next_active_insn (insn)) != 0
1876 && simplejump_p (temp)
1877 && (next_active_insn (JUMP_LABEL (insn))
1878 == next_active_insn (JUMP_LABEL (temp))))
1879 {
1880 rtx tem = temp;
1881
1882 /* ??? Optional. Disables some optimizations, but makes
1883 gcov output more accurate with -O. */
1884 if (flag_test_coverage && !reload_completed)
1885 for (tem = insn; tem != temp; tem = NEXT_INSN (tem))
1886 if (GET_CODE (tem) == NOTE && NOTE_LINE_NUMBER (tem) > 0)
1887 break;
1888
1889 if (tem == temp)
1890 {
1891 delete_jump (insn);
1892 changed = 1;
1893 continue;
1894 }
1895 }
1896 #ifdef HAVE_trap
1897 /* Detect a conditional jump jumping over an unconditional trap. */
1898 else if (HAVE_trap
1899 && this_is_condjump && ! this_is_simplejump
1900 && reallabelprev != 0
1901 && GET_CODE (reallabelprev) == INSN
1902 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1903 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1904 && prev_active_insn (reallabelprev) == insn
1905 && no_labels_between_p (insn, reallabelprev)
1906 && (temp2 = get_condition (insn, &temp4))
1907 && can_reverse_comparison_p (temp2, insn))
1908 {
1909 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1910 XEXP (temp2, 0), XEXP (temp2, 1),
1911 TRAP_CODE (PATTERN (reallabelprev)));
1912
1913 if (new)
1914 {
1915 emit_insn_before (new, temp4);
1916 delete_insn (reallabelprev);
1917 delete_jump (insn);
1918 changed = 1;
1919 continue;
1920 }
1921 }
1922 /* Detect a jump jumping to an unconditional trap. */
1923 else if (HAVE_trap && this_is_condjump
1924 && (temp = next_active_insn (JUMP_LABEL (insn)))
1925 && GET_CODE (temp) == INSN
1926 && GET_CODE (PATTERN (temp)) == TRAP_IF
1927 && (this_is_simplejump
1928 || (temp2 = get_condition (insn, &temp4))))
1929 {
1930 rtx tc = TRAP_CONDITION (PATTERN (temp));
1931
1932 if (tc == const_true_rtx
1933 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1934 {
1935 rtx new;
1936 /* Replace an unconditional jump to a trap with a trap. */
1937 if (this_is_simplejump)
1938 {
1939 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1940 delete_jump (insn);
1941 changed = 1;
1942 continue;
1943 }
1944 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1945 XEXP (temp2, 1),
1946 TRAP_CODE (PATTERN (temp)));
1947 if (new)
1948 {
1949 emit_insn_before (new, temp4);
1950 delete_jump (insn);
1951 changed = 1;
1952 continue;
1953 }
1954 }
1955 /* If the trap condition and jump condition are mutually
1956 exclusive, redirect the jump to the following insn. */
1957 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1958 && ! this_is_simplejump
1959 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
1960 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
1961 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
1962 && redirect_jump (insn, get_label_after (temp)))
1963 {
1964 changed = 1;
1965 continue;
1966 }
1967 }
1968 #endif
1969
1970 /* Detect a conditional jump jumping over an unconditional jump. */
1971
1972 else if ((this_is_condjump || this_is_condjump_in_parallel)
1973 && ! this_is_simplejump
1974 && reallabelprev != 0
1975 && GET_CODE (reallabelprev) == JUMP_INSN
1976 && prev_active_insn (reallabelprev) == insn
1977 && no_labels_between_p (insn, reallabelprev)
1978 && simplejump_p (reallabelprev))
1979 {
1980 /* When we invert the unconditional jump, we will be
1981 decrementing the usage count of its old label.
1982 Make sure that we don't delete it now because that
1983 might cause the following code to be deleted. */
1984 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1985 rtx prev_label = JUMP_LABEL (insn);
1986
1987 if (prev_label)
1988 ++LABEL_NUSES (prev_label);
1989
1990 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1991 {
1992 /* It is very likely that if there are USE insns before
1993 this jump, they hold REG_DEAD notes. These REG_DEAD
1994 notes are no longer valid due to this optimization,
1995 and will cause the life-analysis that following passes
1996 (notably delayed-branch scheduling) to think that
1997 these registers are dead when they are not.
1998
1999 To prevent this trouble, we just remove the USE insns
2000 from the insn chain. */
2001
2002 while (prev_uses && GET_CODE (prev_uses) == INSN
2003 && GET_CODE (PATTERN (prev_uses)) == USE)
2004 {
2005 rtx useless = prev_uses;
2006 prev_uses = prev_nonnote_insn (prev_uses);
2007 delete_insn (useless);
2008 }
2009
2010 delete_insn (reallabelprev);
2011 next = insn;
2012 changed = 1;
2013 }
2014
2015 /* We can now safely delete the label if it is unreferenced
2016 since the delete_insn above has deleted the BARRIER. */
2017 if (prev_label && --LABEL_NUSES (prev_label) == 0)
2018 delete_insn (prev_label);
2019 continue;
2020 }
2021 else
2022 {
2023 /* Detect a jump to a jump. */
2024
2025 nlabel = follow_jumps (JUMP_LABEL (insn));
2026 if (nlabel != JUMP_LABEL (insn)
2027 && redirect_jump (insn, nlabel))
2028 {
2029 changed = 1;
2030 next = insn;
2031 }
2032
2033 /* Look for if (foo) bar; else break; */
2034 /* The insns look like this:
2035 insn = condjump label1;
2036 ...range1 (some insns)...
2037 jump label2;
2038 label1:
2039 ...range2 (some insns)...
2040 jump somewhere unconditionally
2041 label2: */
2042 {
2043 rtx label1 = next_label (insn);
2044 rtx range1end = label1 ? prev_active_insn (label1) : 0;
2045 /* Don't do this optimization on the first round, so that
2046 jump-around-a-jump gets simplified before we ask here
2047 whether a jump is unconditional.
2048
2049 Also don't do it when we are called after reload since
2050 it will confuse reorg. */
2051 if (! first
2052 && (reload_completed ? ! flag_delayed_branch : 1)
2053 /* Make sure INSN is something we can invert. */
2054 && condjump_p (insn)
2055 && label1 != 0
2056 && JUMP_LABEL (insn) == label1
2057 && LABEL_NUSES (label1) == 1
2058 && GET_CODE (range1end) == JUMP_INSN
2059 && simplejump_p (range1end))
2060 {
2061 rtx label2 = next_label (label1);
2062 rtx range2end = label2 ? prev_active_insn (label2) : 0;
2063 if (range1end != range2end
2064 && JUMP_LABEL (range1end) == label2
2065 && GET_CODE (range2end) == JUMP_INSN
2066 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
2067 /* Invert the jump condition, so we
2068 still execute the same insns in each case. */
2069 && invert_jump (insn, label1))
2070 {
2071 rtx range1beg = next_active_insn (insn);
2072 rtx range2beg = next_active_insn (label1);
2073 rtx range1after, range2after;
2074 rtx range1before, range2before;
2075 rtx rangenext;
2076
2077 /* Include in each range any notes before it, to be
2078 sure that we get the line number note if any, even
2079 if there are other notes here. */
2080 while (PREV_INSN (range1beg)
2081 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
2082 range1beg = PREV_INSN (range1beg);
2083
2084 while (PREV_INSN (range2beg)
2085 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
2086 range2beg = PREV_INSN (range2beg);
2087
2088 /* Don't move NOTEs for blocks or loops; shift them
2089 outside the ranges, where they'll stay put. */
2090 range1beg = squeeze_notes (range1beg, range1end);
2091 range2beg = squeeze_notes (range2beg, range2end);
2092
2093 /* Get current surrounds of the 2 ranges. */
2094 range1before = PREV_INSN (range1beg);
2095 range2before = PREV_INSN (range2beg);
2096 range1after = NEXT_INSN (range1end);
2097 range2after = NEXT_INSN (range2end);
2098
2099 /* Splice range2 where range1 was. */
2100 NEXT_INSN (range1before) = range2beg;
2101 PREV_INSN (range2beg) = range1before;
2102 NEXT_INSN (range2end) = range1after;
2103 PREV_INSN (range1after) = range2end;
2104 /* Splice range1 where range2 was. */
2105 NEXT_INSN (range2before) = range1beg;
2106 PREV_INSN (range1beg) = range2before;
2107 NEXT_INSN (range1end) = range2after;
2108 PREV_INSN (range2after) = range1end;
2109
2110 /* Check for a loop end note between the end of
2111 range2, and the next code label. If there is one,
2112 then what we have really seen is
2113 if (foo) break; end_of_loop;
2114 and moved the break sequence outside the loop.
2115 We must move the LOOP_END note to where the
2116 loop really ends now, or we will confuse loop
2117 optimization. Stop if we find a LOOP_BEG note
2118 first, since we don't want to move the LOOP_END
2119 note in that case. */
2120 for (;range2after != label2; range2after = rangenext)
2121 {
2122 rangenext = NEXT_INSN (range2after);
2123 if (GET_CODE (range2after) == NOTE)
2124 {
2125 if (NOTE_LINE_NUMBER (range2after)
2126 == NOTE_INSN_LOOP_END)
2127 {
2128 NEXT_INSN (PREV_INSN (range2after))
2129 = rangenext;
2130 PREV_INSN (rangenext)
2131 = PREV_INSN (range2after);
2132 PREV_INSN (range2after)
2133 = PREV_INSN (range1beg);
2134 NEXT_INSN (range2after) = range1beg;
2135 NEXT_INSN (PREV_INSN (range1beg))
2136 = range2after;
2137 PREV_INSN (range1beg) = range2after;
2138 }
2139 else if (NOTE_LINE_NUMBER (range2after)
2140 == NOTE_INSN_LOOP_BEG)
2141 break;
2142 }
2143 }
2144 changed = 1;
2145 continue;
2146 }
2147 }
2148 }
2149
2150 /* Now that the jump has been tensioned,
2151 try cross jumping: check for identical code
2152 before the jump and before its target label. */
2153
2154 /* First, cross jumping of conditional jumps: */
2155
2156 if (cross_jump && condjump_p (insn))
2157 {
2158 rtx newjpos, newlpos;
2159 rtx x = prev_real_insn (JUMP_LABEL (insn));
2160
2161 /* A conditional jump may be crossjumped
2162 only if the place it jumps to follows
2163 an opposing jump that comes back here. */
2164
2165 if (x != 0 && ! jump_back_p (x, insn))
2166 /* We have no opposing jump;
2167 cannot cross jump this insn. */
2168 x = 0;
2169
2170 newjpos = 0;
2171 /* TARGET is nonzero if it is ok to cross jump
2172 to code before TARGET. If so, see if matches. */
2173 if (x != 0)
2174 find_cross_jump (insn, x, 2,
2175 &newjpos, &newlpos);
2176
2177 if (newjpos != 0)
2178 {
2179 do_cross_jump (insn, newjpos, newlpos);
2180 /* Make the old conditional jump
2181 into an unconditional one. */
2182 SET_SRC (PATTERN (insn))
2183 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
2184 INSN_CODE (insn) = -1;
2185 emit_barrier_after (insn);
2186 /* Add to jump_chain unless this is a new label
2187 whose UID is too large. */
2188 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
2189 {
2190 jump_chain[INSN_UID (insn)]
2191 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2192 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2193 }
2194 changed = 1;
2195 next = insn;
2196 }
2197 }
2198
2199 /* Cross jumping of unconditional jumps:
2200 a few differences. */
2201
2202 if (cross_jump && simplejump_p (insn))
2203 {
2204 rtx newjpos, newlpos;
2205 rtx target;
2206
2207 newjpos = 0;
2208
2209 /* TARGET is nonzero if it is ok to cross jump
2210 to code before TARGET. If so, see if matches. */
2211 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2212 &newjpos, &newlpos);
2213
2214 /* If cannot cross jump to code before the label,
2215 see if we can cross jump to another jump to
2216 the same label. */
2217 /* Try each other jump to this label. */
2218 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2219 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2220 target != 0 && newjpos == 0;
2221 target = jump_chain[INSN_UID (target)])
2222 if (target != insn
2223 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2224 /* Ignore TARGET if it's deleted. */
2225 && ! INSN_DELETED_P (target))
2226 find_cross_jump (insn, target, 2,
2227 &newjpos, &newlpos);
2228
2229 if (newjpos != 0)
2230 {
2231 do_cross_jump (insn, newjpos, newlpos);
2232 changed = 1;
2233 next = insn;
2234 }
2235 }
2236
2237 /* This code was dead in the previous jump.c! */
2238 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2239 {
2240 /* Return insns all "jump to the same place"
2241 so we can cross-jump between any two of them. */
2242
2243 rtx newjpos, newlpos, target;
2244
2245 newjpos = 0;
2246
2247 /* If cannot cross jump to code before the label,
2248 see if we can cross jump to another jump to
2249 the same label. */
2250 /* Try each other jump to this label. */
2251 for (target = jump_chain[0];
2252 target != 0 && newjpos == 0;
2253 target = jump_chain[INSN_UID (target)])
2254 if (target != insn
2255 && ! INSN_DELETED_P (target)
2256 && GET_CODE (PATTERN (target)) == RETURN)
2257 find_cross_jump (insn, target, 2,
2258 &newjpos, &newlpos);
2259
2260 if (newjpos != 0)
2261 {
2262 do_cross_jump (insn, newjpos, newlpos);
2263 changed = 1;
2264 next = insn;
2265 }
2266 }
2267 }
2268 }
2269
2270 first = 0;
2271 }
2272
2273 /* Delete extraneous line number notes.
2274 Note that two consecutive notes for different lines are not really
2275 extraneous. There should be some indication where that line belonged,
2276 even if it became empty. */
2277
2278 {
2279 rtx last_note = 0;
2280
2281 for (insn = f; insn; insn = NEXT_INSN (insn))
2282 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2283 {
2284 /* Delete this note if it is identical to previous note. */
2285 if (last_note
2286 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2287 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2288 {
2289 delete_insn (insn);
2290 continue;
2291 }
2292
2293 last_note = insn;
2294 }
2295 }
2296
2297 #ifdef HAVE_return
2298 if (HAVE_return)
2299 {
2300 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2301 in front of it. If the machine allows it at this point (we might be
2302 after reload for a leaf routine), it will improve optimization for it
2303 to be there. We do this both here and at the start of this pass since
2304 the RETURN might have been deleted by some of our optimizations. */
2305 insn = get_last_insn ();
2306 while (insn && GET_CODE (insn) == NOTE)
2307 insn = PREV_INSN (insn);
2308
2309 if (insn && GET_CODE (insn) != BARRIER)
2310 {
2311 emit_jump_insn (gen_return ());
2312 emit_barrier ();
2313 }
2314 }
2315 #endif
2316
2317 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2318 If so, delete it, and record that this function can drop off the end. */
2319
2320 insn = last_insn;
2321 {
2322 int n_labels = 1;
2323 while (insn
2324 /* One label can follow the end-note: the return label. */
2325 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2326 /* Ordinary insns can follow it if returning a structure. */
2327 || GET_CODE (insn) == INSN
2328 /* If machine uses explicit RETURN insns, no epilogue,
2329 then one of them follows the note. */
2330 || (GET_CODE (insn) == JUMP_INSN
2331 && GET_CODE (PATTERN (insn)) == RETURN)
2332 /* A barrier can follow the return insn. */
2333 || GET_CODE (insn) == BARRIER
2334 /* Other kinds of notes can follow also. */
2335 || (GET_CODE (insn) == NOTE
2336 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
2337 insn = PREV_INSN (insn);
2338 }
2339
2340 /* Report if control can fall through at the end of the function. */
2341 if (insn && GET_CODE (insn) == NOTE
2342 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
2343 {
2344 can_reach_end = 1;
2345 delete_insn (insn);
2346 }
2347
2348 /* Show JUMP_CHAIN no longer valid. */
2349 jump_chain = 0;
2350 }
2351 \f
2352 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2353 jump. Assume that this unconditional jump is to the exit test code. If
   the code is sufficiently simple, make a copy of it before LOOP_START,
   followed by a jump to the exit of the loop.  Then delete the unconditional
   jump after LOOP_START.
2357
2358 Return 1 if we made the change, else 0.
2359
2360 This is only safe immediately after a regscan pass because it uses the
2361 values of regno_first_uid and regno_last_uid. */
2362
static int
duplicate_loop_exit_test (loop_start)
     rtx loop_start;
{
  rtx insn, set, reg, p, link;
  rtx copy = 0;			/* Last insn emitted by the copy loop.  */
  int num_insns = 0;		/* Count of real insns in the exit code.  */
  /* First insn of the exit test: the insn after the label that the
     unconditional jump following LOOP_START branches to.  */
  rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
  rtx lastexit;			/* The NOTE_INSN_LOOP_END ending the exit code.  */
  int max_reg = max_reg_num ();
  /* Map from old pseudo-regno to replacement reg; allocated lazily.  */
  rtx *reg_map = 0;

  /* Scan the exit code.  We do not perform this optimization if any insn:

         is a CALL_INSN
	 is a CODE_LABEL
	 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
	 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
	 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
	      are not valid

     Also, don't do this if the exit code is more than 20 insns.  */

  for (insn = exitcode;
       insn
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	case CALL_INSN:
	  return 0;
	case NOTE:
	  /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
	     a jump immediately after the loop start that branches outside
	     the loop but within an outer loop, near the exit test.
	     If we copied this exit test and created a phony
	     NOTE_INSN_LOOP_VTOP, this could make instructions immediately
	     before the exit test look like these could be safely moved
	     out of the loop even if they actually may be never executed.
	     This can be avoided by checking here for NOTE_INSN_LOOP_CONT.  */

	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
	    return 0;
	  break;
	case JUMP_INSN:
	case INSN:
	  if (++num_insns > 20
	      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
	      || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    return 0;
	  break;
	default:
	  break;
	}
    }

  /* Unless INSN is zero, we can do the optimization.  A zero INSN means
     we ran off the end of the insn chain without finding the loop end.  */
  if (insn == 0)
    return 0;

  lastexit = insn;

  /* See if any insn sets a register only used in the loop exit code and
     not a user variable.  If so, replace it with a new register.
     A register qualifies when its first use (by UID) is this SET and its
     last use is still inside the exit code (before LASTEXIT); this relies
     on regno_first_uid/regno_last_uid from a just-completed regscan.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
	&& (set = single_set (insn)) != 0
	&& ((reg = SET_DEST (set), GET_CODE (reg) == REG)
	    || (GET_CODE (reg) == SUBREG
		&& (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
	&& REGNO (reg) >= FIRST_PSEUDO_REGISTER
	&& REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
      {
	for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
	  if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
	    break;

	if (p != lastexit)
	  {
	    /* We can do the replacement.  Allocate reg_map if this is the
	       first replacement we found.  */
	    if (reg_map == 0)
	      {
		reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
		bzero ((char *) reg_map, max_reg * sizeof (rtx));
	      }

	    REG_LOOP_TEST_P (reg) = 1;

	    reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
	  }
      }

  /* Now copy each insn of the exit test in front of LOOP_START,
     substituting replacement registers as we go.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    switch (GET_CODE (insn))
      {
      case BARRIER:
	copy = emit_barrier_before (loop_start);
	break;
      case NOTE:
	/* Only copy line-number notes.  */
	if (NOTE_LINE_NUMBER (insn) >= 0)
	  {
	    copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
	    NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	  }
	break;

      case INSN:
	copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
	if (reg_map)
	  replace_regs (PATTERN (copy), reg_map, max_reg, 1);

	mark_jump_label (PATTERN (copy), copy, 0);

	/* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
	   make them.  */
	for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	  if (REG_NOTE_KIND (link) != REG_LABEL)
	    REG_NOTES (copy)
	      = copy_rtx (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
					     XEXP (link, 0),
					     REG_NOTES (copy)));
	if (reg_map && REG_NOTES (copy))
	  replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	break;

      case JUMP_INSN:
	copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
	if (reg_map)
	  replace_regs (PATTERN (copy), reg_map, max_reg, 1);
	mark_jump_label (PATTERN (copy), copy, 0);
	if (REG_NOTES (insn))
	  {
	    REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
	    if (reg_map)
	      replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	  }

	/* If this is a simple jump, add it to the jump chain.  */

	if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
	    && simplejump_p (copy))
	  {
	    jump_chain[INSN_UID (copy)]
	      = jump_chain[INSN_UID (JUMP_LABEL (copy))];
	    jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
	  }
	break;

      default:
	abort ();
      }

  /* Now clean up by emitting a jump to the end label and deleting the jump
     at the start of the loop.  (Skipped if the copy already ended in a
     BARRIER, i.e. an unconditional transfer of control.)  */
  if (! copy || GET_CODE (copy) != BARRIER)
    {
      /* INSN here is LASTEXIT, the NOTE_INSN_LOOP_END; jump to the label
	 just after it, i.e. the loop exit.  */
      copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
				    loop_start);
      mark_jump_label (PATTERN (copy), copy, 0);
      if (INSN_UID (copy) < max_jump_chain
	  && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
	{
	  jump_chain[INSN_UID (copy)]
	    = jump_chain[INSN_UID (JUMP_LABEL (copy))];
	  jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
	}
      emit_barrier_before (loop_start);
    }

  /* Mark the exit code as the virtual top of the converted loop.  */
  emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);

  /* Delete the unconditional jump that followed LOOP_START.  */
  delete_insn (next_nonnote_insn (loop_start));

  return 1;
}
2548 \f
2549 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2550 loop-end notes between START and END out before START. Assume that
2551 END is not such a note. START may be such a note. Returns the value
2552 of the new starting insn, which may be different if the original start
2553 was such a note. */
2554
2555 rtx
2556 squeeze_notes (start, end)
2557 rtx start, end;
2558 {
2559 rtx insn;
2560 rtx next;
2561
2562 for (insn = start; insn != end; insn = next)
2563 {
2564 next = NEXT_INSN (insn);
2565 if (GET_CODE (insn) == NOTE
2566 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2567 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2568 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2569 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2570 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2571 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2572 {
2573 if (insn == start)
2574 start = next;
2575 else
2576 {
2577 rtx prev = PREV_INSN (insn);
2578 PREV_INSN (insn) = PREV_INSN (start);
2579 NEXT_INSN (insn) = start;
2580 NEXT_INSN (PREV_INSN (insn)) = insn;
2581 PREV_INSN (NEXT_INSN (insn)) = insn;
2582 NEXT_INSN (prev) = next;
2583 PREV_INSN (next) = prev;
2584 }
2585 }
2586 }
2587
2588 return start;
2589 }
2590 \f
2591 /* Compare the instructions before insn E1 with those before E2
2592 to find an opportunity for cross jumping.
2593 (This means detecting identical sequences of insns followed by
2594 jumps to the same place, or followed by a label and a jump
2595 to that label, and replacing one with a jump to the other.)
2596
2597 Assume E1 is a jump that jumps to label E2
2598 (that is not always true but it might as well be).
2599 Find the longest possible equivalent sequences
2600 and store the first insns of those sequences into *F1 and *F2.
2601 Store zero there if no equivalent preceding instructions are found.
2602
2603 We give up if we find a label in stream 1.
2604 Actually we could transfer that label into stream 2. */
2605
static void
find_cross_jump (e1, e2, minimum, f1, f2)
     rtx e1, e2;
     int minimum;
     rtx *f1, *f2;
{
  /* I1/I2 walk backward in parallel from E1 (the jump) and E2 (its
     target label).  */
  register rtx i1 = e1, i2 = e2;
  register rtx p1, p2;
  int lose = 0;			/* Set when the current pair must not match.  */

  /* LAST1/LAST2 are the earliest matched pair found so far;
     AFTERLAST1/AFTERLAST2 are the pair matched just before them,
     kept so we can back off by one insn (see the cc0 case below).  */
  rtx last1 = 0, last2 = 0;
  rtx afterlast1 = 0, afterlast2 = 0;

  *f1 = 0;
  *f2 = 0;

  /* Walk both streams backward until the insns stop matching.
     MINIMUM counts how many more matches are still required.  */
  while (1)
    {
      i1 = prev_nonnote_insn (i1);

      /* In stream 2 we may also skip labels; a label in stream 1
	 ends the search (handled below).  */
      i2 = PREV_INSN (i2);
      while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
	i2 = PREV_INSN (i2);

      if (i1 == 0)
	break;

      /* Don't allow the range of insns preceding E1 or E2
	 to include the other (E2 or E1).  */
      if (i2 == e1 || i1 == e2)
	break;

      /* If we will get to this code by jumping, those jumps will be
	 tensioned to go directly to the new label (before I2),
	 so this cross-jumping won't cost extra.  So reduce the minimum.  */
      if (GET_CODE (i1) == CODE_LABEL)
	{
	  --minimum;
	  break;
	}

      if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
	break;

      p1 = PATTERN (i1);
      p2 = PATTERN (i2);

      /* If this is a CALL_INSN, compare register usage information.
	 If we don't check this on stack register machines, the two
	 CALL_INSNs might be merged leaving reg-stack.c with mismatching
	 numbers of stack registers in the same basic block.
	 If we don't check this on machines with delay slots, a delay slot may
	 be filled that clobbers a parameter expected by the subroutine.

	 ??? We take the simple route for now and assume that if they're
	 equal, they were constructed identically.  */

      if (GET_CODE (i1) == CALL_INSN
	  && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
			    CALL_INSN_FUNCTION_USAGE (i2)))
	lose = 1;

#ifdef STACK_REGS
      /* If cross_jump_death_matters is not 0, the insn's mode
	 indicates whether or not the insn contains any stack-like
	 regs.  */

      if (!lose && cross_jump_death_matters && GET_MODE (i1) == QImode)
	{
	  /* If register stack conversion has already been done, then
	     death notes must also be compared before it is certain that
	     the two instruction streams match.  */

	  rtx note;
	  HARD_REG_SET i1_regset, i2_regset;

	  CLEAR_HARD_REG_SET (i1_regset);
	  CLEAR_HARD_REG_SET (i2_regset);

	  /* Collect the stack registers that die in each insn.  */
	  for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

	  for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

	  GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);

	  /* Only reached when the death sets differ.  */
	  lose = 1;

	done:
	  ;
	}
#endif

      /* Don't allow old-style asm or volatile extended asms to be accepted
	 for cross jumping purposes.  It is conceptually correct to allow
	 them, since cross-jumping preserves the dynamic instruction order
	 even though it is changing the static instruction order.  However,
	 if an asm is being used to emit an assembler pseudo-op, such as
	 the MIPS `.set reorder' pseudo-op, then the static instruction order
	 matters and it must be preserved.  */
      if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
	  || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
	  || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
	lose = 1;

      if (lose || GET_CODE (p1) != GET_CODE (p2)
	  || ! rtx_renumbered_equal_p (p1, p2))
	{
	  /* The following code helps take care of G++ cleanups.  */
	  rtx equiv1;
	  rtx equiv2;

	  /* If both insns have a REG_EQUAL/REG_EQUIV note for the same
	     constant, try rewriting each SET_SRC to that constant; the
	     insns may then become equal even though their sources
	     differed textually.  */
	  if (!lose && GET_CODE (p1) == GET_CODE (p2)
	      && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
		  || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
	      && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
		  || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
	      /* If the equivalences are not to a constant, they may
		 reference pseudos that no longer exist, so we can't
		 use them.  */
	      && CONSTANT_P (XEXP (equiv1, 0))
	      && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
	    {
	      rtx s1 = single_set (i1);
	      rtx s2 = single_set (i2);
	      if (s1 != 0 && s2 != 0
		  && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
		{
		  validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
		  validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
		  if (! rtx_renumbered_equal_p (p1, p2))
		    cancel_changes (0);
		  else if (apply_change_group ())
		    goto win;
		}
	    }

	  /* Insns fail to match; cross jumping is limited to the following
	     insns.  */

#ifdef HAVE_cc0
	  /* Don't allow the insn after a compare to be shared by
	     cross-jumping unless the compare is also shared.
	     Here, if either of these non-matching insns is a compare,
	     exclude the following insn from possible cross-jumping.  */
	  if (sets_cc0_p (p1) || sets_cc0_p (p2))
	    last1 = afterlast1, last2 = afterlast2, ++minimum;
#endif

	  /* If cross-jumping here will feed a jump-around-jump
	     optimization, this jump won't cost extra, so reduce
	     the minimum.  */
	  if (GET_CODE (i1) == JUMP_INSN
	      && JUMP_LABEL (i1)
	      && prev_real_insn (JUMP_LABEL (i1)) == e1)
	    --minimum;
	  break;
	}

    win:
      if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
	{
	  /* Ok, this insn is potentially includable in a cross-jump here.  */
	  afterlast1 = last1, afterlast2 = last2;
	  last1 = i1, last2 = i2, --minimum;
	}
    }

  /* Report a match only if we matched at least MINIMUM insns
     and at least one real insn before E1.  */
  if (minimum <= 0 && last1 != 0 && last1 != e1)
    *f1 = last1, *f2 = last2;
}
2782
/* Perform a cross-jump found by find_cross_jump: redirect the jump INSN
   to a label placed before NEWLPOS, then delete the now-redundant insns
   from NEWJPOS up to (but not including) INSN, which duplicate the insns
   from NEWLPOS onward.  */

static void
do_cross_jump (insn, newjpos, newlpos)
     rtx insn, newjpos, newlpos;
{
  /* Find an existing label at this point
     or make a new one if there is none.  */
  register rtx label = get_label_before (newlpos);

  /* Make the same jump insn jump to the new point.  */
  if (GET_CODE (PATTERN (insn)) == RETURN)
    {
      /* A RETURN has no JUMP_LABEL, so it cannot go through
	 redirect_jump; rewrite it into a plain jump by hand.  */
      /* Remove from jump chain of returns.  */
      delete_from_jump_chain (insn);
      /* Change the insn.  */
      PATTERN (insn) = gen_jump (label);
      INSN_CODE (insn) = -1;
      JUMP_LABEL (insn) = label;
      LABEL_NUSES (label)++;
      /* Add to new the jump chain.  */
      if (INSN_UID (label) < max_jump_chain
	  && INSN_UID (insn) < max_jump_chain)
	{
	  jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
	  jump_chain[INSN_UID (label)] = insn;
	}
    }
  else
    redirect_jump (insn, label);

  /* Delete the matching insns before the jump.  Also, remove any REG_EQUAL
     or REG_EQUIV note in the NEWLPOS stream that isn't also present in
     the NEWJPOS stream.  */

  while (newjpos != insn)
    {
      rtx lnote;

      for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
	if ((REG_NOTE_KIND (lnote) == REG_EQUAL
	     || REG_NOTE_KIND (lnote) == REG_EQUIV)
	    && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
	    && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
	  remove_note (newlpos, lnote);

      /* Advance past NEWJPOS before deleting would be wrong; note that
	 next_real_insn still works on a deleted insn's links here.  */
      delete_insn (newjpos);
      newjpos = next_real_insn (newjpos);
      newlpos = next_real_insn (newlpos);
    }
}
2832 \f
2833 /* Return the label before INSN, or put a new label there. */
2834
2835 rtx
2836 get_label_before (insn)
2837 rtx insn;
2838 {
2839 rtx label;
2840
2841 /* Find an existing label at this point
2842 or make a new one if there is none. */
2843 label = prev_nonnote_insn (insn);
2844
2845 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2846 {
2847 rtx prev = PREV_INSN (insn);
2848
2849 label = gen_label_rtx ();
2850 emit_label_after (label, prev);
2851 LABEL_NUSES (label) = 0;
2852 }
2853 return label;
2854 }
2855
2856 /* Return the label after INSN, or put a new label there. */
2857
2858 rtx
2859 get_label_after (insn)
2860 rtx insn;
2861 {
2862 rtx label;
2863
2864 /* Find an existing label at this point
2865 or make a new one if there is none. */
2866 label = next_nonnote_insn (insn);
2867
2868 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2869 {
2870 label = gen_label_rtx ();
2871 emit_label_after (label, insn);
2872 LABEL_NUSES (label) = 0;
2873 }
2874 return label;
2875 }
2876 \f
2877 /* Return 1 if INSN is a jump that jumps to right after TARGET
2878 only on the condition that TARGET itself would drop through.
2879 Assumes that TARGET is a conditional jump. */
2880
static int
jump_back_p (insn, target)
     rtx insn, target;
{
  rtx cinsn, ctarget;
  enum rtx_code codei, codet;

  /* INSN must be conditional (not a simplejump) and its label must
     land immediately after TARGET; TARGET must not be unconditional.  */
  if (simplejump_p (insn) || ! condjump_p (insn)
      || simplejump_p (target)
      || target != prev_real_insn (JUMP_LABEL (insn)))
    return 0;

  /* Extract the comparison from each insn's IF_THEN_ELSE condition.  */
  cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
  ctarget = XEXP (SET_SRC (PATTERN (target)), 0);

  codei = GET_CODE (cinsn);
  codet = GET_CODE (ctarget);

  /* Normalize CODEI to the condition under which INSN jumps: if the
     THEN arm is pc (fall-through), INSN jumps when the test is false.  */
  if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
    {
      if (! can_reverse_comparison_p (cinsn, insn))
	return 0;
      codei = reverse_condition (codei);
    }

  /* Normalize CODET to the condition under which TARGET drops through:
     if the ELSE arm is pc, TARGET falls through when the test is false.  */
  if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
    {
      if (! can_reverse_comparison_p (ctarget, target))
	return 0;
      codet = reverse_condition (codet);
    }

  /* Equivalent iff same code and same operands, allowing for register
     renumbering after allocation.  */
  return (codei == codet
	  && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
	  && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
}
2917 \f
2918 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
2919 return non-zero if it is safe to reverse this comparison. It is if our
2920 floating-point is not IEEE, if this is an NE or EQ comparison, or if
2921 this is known to be an integer comparison. */
2922
int
can_reverse_comparison_p (comparison, insn)
     rtx comparison;
     rtx insn;
{
  rtx arg0;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
    return 0;

  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      /* If this is an NE comparison, it is safe to reverse it to an EQ
	 comparison and vice versa, even for floating point.  If no operands
	 are NaNs, the reversal is valid.  If some operand is a NaN, EQ is
	 always false and NE is always true, so the reversal is also valid.  */
      || flag_fast_math
      || GET_CODE (comparison) == NE
      || GET_CODE (comparison) == EQ)
    return 1;

  arg0 = XEXP (comparison, 0);

  /* Make sure ARG0 is one of the actual objects being compared.  If we
     can't do this, we can't be sure the comparison can be reversed.

     Handle cc0 and a MODE_CC register.  */
  if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
#ifdef HAVE_cc0
      || arg0 == cc0_rtx
#endif
      )
    {
      /* Look at the insn that set the flags register (assumed to be the
	 immediately preceding non-note insn) to see what was compared.  */
      rtx prev = prev_nonnote_insn (insn);
      rtx set = single_set (prev);

      if (set == 0 || SET_DEST (set) != arg0)
	return 0;

      arg0 = SET_SRC (set);

      if (GET_CODE (arg0) == COMPARE)
	arg0 = XEXP (arg0, 0);
    }

  /* We can reverse this if ARG0 is a CONST_INT or if its mode is
     not VOIDmode and neither a MODE_CC nor MODE_FLOAT type.  */
  return (GET_CODE (arg0) == CONST_INT
	  || (GET_MODE (arg0) != VOIDmode
	      && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
	      && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
}
2975
2976 /* Given an rtx-code for a comparison, return the code
2977 for the negated comparison.
2978 WATCH OUT! reverse_condition is not safe to use on a jump
2979 that might be acting on the results of an IEEE floating point comparison,
2980 because of the special treatment of non-signaling nans in comparisons.
2981 Use can_reverse_comparison_p to be sure. */
2982
2983 enum rtx_code
2984 reverse_condition (code)
2985 enum rtx_code code;
2986 {
2987 switch (code)
2988 {
2989 case EQ:
2990 return NE;
2991
2992 case NE:
2993 return EQ;
2994
2995 case GT:
2996 return LE;
2997
2998 case GE:
2999 return LT;
3000
3001 case LT:
3002 return GE;
3003
3004 case LE:
3005 return GT;
3006
3007 case GTU:
3008 return LEU;
3009
3010 case GEU:
3011 return LTU;
3012
3013 case LTU:
3014 return GEU;
3015
3016 case LEU:
3017 return GTU;
3018
3019 default:
3020 abort ();
3021 return UNKNOWN;
3022 }
3023 }
3024
3025 /* Similar, but return the code when two operands of a comparison are swapped.
3026 This IS safe for IEEE floating-point. */
3027
3028 enum rtx_code
3029 swap_condition (code)
3030 enum rtx_code code;
3031 {
3032 switch (code)
3033 {
3034 case EQ:
3035 case NE:
3036 return code;
3037
3038 case GT:
3039 return LT;
3040
3041 case GE:
3042 return LE;
3043
3044 case LT:
3045 return GT;
3046
3047 case LE:
3048 return GE;
3049
3050 case GTU:
3051 return LTU;
3052
3053 case GEU:
3054 return LEU;
3055
3056 case LTU:
3057 return GTU;
3058
3059 case LEU:
3060 return GEU;
3061
3062 default:
3063 abort ();
3064 return UNKNOWN;
3065 }
3066 }
3067
3068 /* Given a comparison CODE, return the corresponding unsigned comparison.
3069 If CODE is an equality comparison or already an unsigned comparison,
3070 CODE is returned. */
3071
3072 enum rtx_code
3073 unsigned_condition (code)
3074 enum rtx_code code;
3075 {
3076 switch (code)
3077 {
3078 case EQ:
3079 case NE:
3080 case GTU:
3081 case GEU:
3082 case LTU:
3083 case LEU:
3084 return code;
3085
3086 case GT:
3087 return GTU;
3088
3089 case GE:
3090 return GEU;
3091
3092 case LT:
3093 return LTU;
3094
3095 case LE:
3096 return LEU;
3097
3098 default:
3099 abort ();
3100 }
3101 }
3102
3103 /* Similarly, return the signed version of a comparison. */
3104
3105 enum rtx_code
3106 signed_condition (code)
3107 enum rtx_code code;
3108 {
3109 switch (code)
3110 {
3111 case EQ:
3112 case NE:
3113 case GT:
3114 case GE:
3115 case LT:
3116 case LE:
3117 return code;
3118
3119 case GTU:
3120 return GT;
3121
3122 case GEU:
3123 return GE;
3124
3125 case LTU:
3126 return LT;
3127
3128 case LEU:
3129 return LE;
3130
3131 default:
3132 abort ();
3133 }
3134 }
3135 \f
3136 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3137 truth of CODE1 implies the truth of CODE2. */
3138
3139 int
3140 comparison_dominates_p (code1, code2)
3141 enum rtx_code code1, code2;
3142 {
3143 if (code1 == code2)
3144 return 1;
3145
3146 switch (code1)
3147 {
3148 case EQ:
3149 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
3150 return 1;
3151 break;
3152
3153 case LT:
3154 if (code2 == LE || code2 == NE)
3155 return 1;
3156 break;
3157
3158 case GT:
3159 if (code2 == GE || code2 == NE)
3160 return 1;
3161 break;
3162
3163 case LTU:
3164 if (code2 == LEU || code2 == NE)
3165 return 1;
3166 break;
3167
3168 case GTU:
3169 if (code2 == GEU || code2 == NE)
3170 return 1;
3171 break;
3172
3173 default:
3174 break;
3175 }
3176
3177 return 0;
3178 }
3179 \f
3180 /* Return 1 if INSN is an unconditional jump and nothing else. */
3181
3182 int
3183 simplejump_p (insn)
3184 rtx insn;
3185 {
3186 return (GET_CODE (insn) == JUMP_INSN
3187 && GET_CODE (PATTERN (insn)) == SET
3188 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3189 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3190 }
3191
3192 /* Return nonzero if INSN is a (possibly) conditional jump
3193 and nothing more. */
3194
3195 int
3196 condjump_p (insn)
3197 rtx insn;
3198 {
3199 register rtx x = PATTERN (insn);
3200 if (GET_CODE (x) != SET)
3201 return 0;
3202 if (GET_CODE (SET_DEST (x)) != PC)
3203 return 0;
3204 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3205 return 1;
3206 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3207 return 0;
3208 if (XEXP (SET_SRC (x), 2) == pc_rtx
3209 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3210 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3211 return 1;
3212 if (XEXP (SET_SRC (x), 1) == pc_rtx
3213 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3214 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3215 return 1;
3216 return 0;
3217 }
3218
3219 /* Return nonzero if INSN is a (possibly) conditional jump
3220 and nothing more. */
3221
3222 int
3223 condjump_in_parallel_p (insn)
3224 rtx insn;
3225 {
3226 register rtx x = PATTERN (insn);
3227
3228 if (GET_CODE (x) != PARALLEL)
3229 return 0;
3230 else
3231 x = XVECEXP (x, 0, 0);
3232
3233 if (GET_CODE (x) != SET)
3234 return 0;
3235 if (GET_CODE (SET_DEST (x)) != PC)
3236 return 0;
3237 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3238 return 1;
3239 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3240 return 0;
3241 if (XEXP (SET_SRC (x), 2) == pc_rtx
3242 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3243 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3244 return 1;
3245 if (XEXP (SET_SRC (x), 1) == pc_rtx
3246 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3247 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3248 return 1;
3249 return 0;
3250 }
3251
3252 /* Return 1 if X is an RTX that does nothing but set the condition codes
3253 and CLOBBER or USE registers.
3254 Return -1 if X does explicitly set the condition codes,
3255 but also does other things. */
3256
int
sets_cc0_p (x)
     rtx x;
{
#ifdef HAVE_cc0
  /* A lone SET of cc0 is the simple case.  */
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      /* Scan the PARALLEL for a SET of cc0 and for SETs of anything
	 else.  Only SETs count as "other things"; USEs and CLOBBERs
	 are ignored per the function's contract.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      /* 0: no cc0 set; 1: only cc0 set; -1: cc0 set plus other SETs.  */
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
#else
  /* This function should never be called on a machine without cc0.  */
  abort ();
#endif
}
3284 \f
3285 /* Follow any unconditional jump at LABEL;
3286 return the ultimate label reached by any such chain of jumps.
3287 If LABEL is not followed by a jump, return LABEL.
3288 If the chain loops or we can't find end, return LABEL,
3289 since that tells caller to avoid changing the insn.
3290
3291 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3292 a USE or CLOBBER. */
3293
rtx
follow_jumps (label)
     rtx label;
{
  register rtx insn;
  register rtx next;
  register rtx value = label;
  register int depth;

  /* Follow at most 10 jumps.  Each step requires the first active insn
     after VALUE to be an unconditional jump (or RETURN) followed by a
     BARRIER, so that nothing can fall through between hops.  */
  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& GET_CODE (insn) == JUMP_INSN
	&& ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (GET_CODE (tem) == NOTE
	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
		  /* ??? Optional.  Disables some optimizations, but makes
		     gcov output more accurate with -O.  */
		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      /* Stop if the next target label is immediately followed by a
	 dispatch table.  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  /* Hitting the depth limit may indicate an undetected cycle; tell the
     caller not to change anything.  */
  if (depth == 10)
    return label;
  return value;
}
3342
3343 /* Assuming that field IDX of X is a vector of label_refs,
3344 replace each of them by the ultimate label reached by it.
3345 Return nonzero if a change is made.
   (follow_jumps itself refuses to chain across a NOTE_INSN_LOOP_BEG
   before reload is complete.)  */
3347
3348 static int
3349 tension_vector_labels (x, idx)
3350 register rtx x;
3351 register int idx;
3352 {
3353 int changed = 0;
3354 register int i;
3355 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3356 {
3357 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3358 register rtx nlabel = follow_jumps (olabel);
3359 if (nlabel && nlabel != olabel)
3360 {
3361 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3362 ++LABEL_NUSES (nlabel);
3363 if (--LABEL_NUSES (olabel) == 0)
3364 delete_insn (olabel);
3365 changed = 1;
3366 }
3367 }
3368 return changed;
3369 }
3370 \f
3371 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3372 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3373 in INSN, then store one of them in JUMP_LABEL (INSN).
3374 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3375 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3376 Also, when there are consecutive labels, canonicalize on the last of them.
3377
3378 Note that two labels separated by a loop-beginning note
3379 must be kept distinct if we have not yet done loop-optimization,
3380 because the gap between them is where loop-optimize
3381 will want to move invariant code to. CROSS_JUMP tells us
3382 that loop-optimization is done with.
3383
3384 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3385 two labels distinct if they are separated by only USE or CLOBBER insns. */
3386
static void
mark_jump_label (x, insn, cross_jump)
     register rtx x;
     rtx insn;
     int cross_jump;
{
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  switch (code)
    {
    /* None of these codes can contain a label reference.  */
    case PC:
    case CC0:
    case REG:
    case SUBREG:
    case CONST_INT:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* If this is a constant-pool reference, see if it is a label.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);
	rtx olabel = label;
	rtx note;
	rtx next;

	if (GET_CODE (label) != CODE_LABEL)
	  abort ();

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	/* If there are other labels following this one,
	   replace it with the last of the consecutive labels.
	   The walk may skip USE/CLOBBER insns (after reload) and most
	   notes, but stops at notes that must keep labels distinct.  */
	for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
	  {
	    if (GET_CODE (next) == CODE_LABEL)
	      label = next;
	    else if (cross_jump && GET_CODE (next) == INSN
		     && (GET_CODE (PATTERN (next)) == USE
			 || GET_CODE (PATTERN (next)) == CLOBBER))
	      continue;
	    else if (GET_CODE (next) != NOTE)
	      break;
	    else if (! cross_jump
		     && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
			 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
			 /* ??? Optional.  Disables some optimizations, but
			    makes gcov output more accurate with -O.  */
			 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
	      break;
	  }

	/* Record the (possibly canonicalized) label and count the use,
	   unless the containing insn was deleted.  */
	XEXP (x, 0) = label;
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;

	    /* If we've changed OLABEL and we had a REG_LABEL note
	       for it, update it as well.  */
	    else if (label != olabel
		     && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
	      XEXP (note, 0) = label;

	    /* Otherwise, add a REG_LABEL note for LABEL unless there already
	       is one.  */
	    else if (! find_reg_note (insn, REG_LABEL, label))
	      {
		/* This code used to ignore labels which referred to dispatch
		   tables to avoid flow.c generating worse code.

		   However, in the presence of global optimizations like
		   gcse which call find_basic_blocks without calling
		   life_analysis, not recording such labels will lead
		   to compiler aborts because of inconsistencies in the
		   flow graph.  So we go ahead and record the label.

		   It may also be the case that the optimization argument
		   is no longer valid because of the more accurate cfg
		   we build in find_basic_blocks -- it no longer pessimizes
		   code when it finds a REG_LABEL note.  */
		REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
						      REG_NOTES (insn));
	      }
	  }
	return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
	}
      return;

    default:
      break;
    }

  /* Recurse into the operands of any other rtx code.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, cross_jump);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
	}
    }
}
3521
3522 /* If all INSN does is set the pc, delete it,
3523 and delete the insn that set the condition codes for it
3524 if that's what the previous thing was. */
3525
3526 void
3527 delete_jump (insn)
3528 rtx insn;
3529 {
3530 register rtx set = single_set (insn);
3531
3532 if (set && GET_CODE (SET_DEST (set)) == PC)
3533 delete_computation (insn);
3534 }
3535
3536 /* Delete INSN and recursively delete insns that compute values used only
3537 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3538 If we are running before flow.c, we need do nothing since flow.c will
3539 delete dead code. We also can't know if the registers being used are
3540 dead or not at this point.
3541
3542 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3543 nothing other than set a register that dies in this insn, we can delete
3544 that insn as well.
3545
3546 On machines with CC0, if CC0 is used in this insn, we may be able to
3547 delete the insn that set it. */
3548
static void
delete_computation (insn)
     rtx insn;
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && !FIND_REG_INC_NOTE (prev, NULL_RTX))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
						  cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  /* For each register that dies in INSN, see whether the insn that set
     it has become dead and can be deleted recursively.  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      rtx our_prev;

      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG)
	continue;

      /* Scan backward through ordinary insns looking for the setter of
	 this register.  */
      for (our_prev = prev_nonnote_insn (insn);
	   our_prev && GET_CODE (our_prev) == INSN;
	   our_prev = prev_nonnote_insn (our_prev))
	{
	  /* If we reach a SEQUENCE, it is too complex to try to
	     do anything with it, so give up.  */
	  if (GET_CODE (PATTERN (our_prev)) == SEQUENCE)
	    break;

	  if (GET_CODE (PATTERN (our_prev)) == USE
	      && GET_CODE (XEXP (PATTERN (our_prev), 0)) == INSN)
	    /* reorg creates USEs that look like this.  We leave them
	       alone because reorg needs them for its own purposes.  */
	    break;

	  if (reg_set_p (XEXP (note, 0), PATTERN (our_prev)))
	    {
	      /* An auto-increment side effect means the insn does more
		 than set the dead register; keep it.  */
	      if (FIND_REG_INC_NOTE (our_prev, NULL_RTX))
		break;

	      if (GET_CODE (PATTERN (our_prev)) == PARALLEL)
		{
		  /* If we find a SET of something else, we can't
		     delete the insn.  */

		  int i;

		  for (i = 0; i < XVECLEN (PATTERN (our_prev), 0); i++)
		    {
		      rtx part = XVECEXP (PATTERN (our_prev), 0, i);

		      if (GET_CODE (part) == SET
			  && SET_DEST (part) != XEXP (note, 0))
			break;
		    }

		  if (i == XVECLEN (PATTERN (our_prev), 0))
		    delete_computation (our_prev);
		}
	      else if (GET_CODE (PATTERN (our_prev)) == SET
		       && SET_DEST (PATTERN (our_prev)) == XEXP (note, 0))
		delete_computation (our_prev);

	      break;
	    }

	  /* If OUR_PREV references the register that dies here, it is an
	     additional use.  Hence any prior SET isn't dead.  However, this
	     insn becomes the new place for the REG_DEAD note.  */
	  if (reg_overlap_mentioned_p (XEXP (note, 0),
				       PATTERN (our_prev)))
	    {
	      XEXP (note, 1) = REG_NOTES (our_prev);
	      REG_NOTES (our_prev) = note;
	      break;
	    }
	}
    }

  delete_insn (insn);
}
3651 \f
3652 /* Delete insn INSN from the chain of insns and update label ref counts.
3653 May delete some following insns as a consequence; may even delete
3654 a label elsewhere and insns that follow it.
3655
3656 Returns the first insn after INSN that was not deleted. */
3657
rtx
delete_insn (insn)
     register rtx insn;
{
  register rtx next = NEXT_INSN (insn);
  register rtx prev = PREV_INSN (insn);
  register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  register int dont_really_delete = 0;

  /* Provisional return value: first following insn not yet deleted.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  /* Don't delete user-declared labels.  Convert them to special NOTEs
     instead.  (The NOTE stays linked in the chain; only insns that
     set dont_really_delete are spared the unlinking below.)  */
  if (was_code_label && LABEL_NAME (insn) != 0
      && optimize && ! dont_really_delete)
    {
      PUT_CODE (insn, NOTE);
      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
      NOTE_SOURCE_FILE (insn) = 0;
      dont_really_delete = 1;
    }
  else
    /* Mark this insn as deleted.  */
    INSN_DELETED_P (insn) = 1;

  /* If this is an unconditional jump, delete it from the jump chain.  */
  if (simplejump_p (insn))
    delete_from_jump_chain (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    {
      INSN_DELETED_P (next) = 1;
      next = NEXT_INSN (next);
    }

  /* Patch out INSN (and the barrier if any) */

  if (optimize && ! dont_really_delete)
    {
      if (prev)
	{
	  NEXT_INSN (prev) = next;
	  /* A SEQUENCE also exposes its last member's NEXT link;
	     keep it consistent with the enclosing insn's link.  */
	  if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
	    NEXT_INSN (XVECEXP (PATTERN (prev), 0,
				XVECLEN (PATTERN (prev), 0) - 1)) = next;
	}

      if (next)
	{
	  PREV_INSN (next) = prev;
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
	    PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
	}

      if (prev && NEXT_INSN (prev) == 0)
	set_last_insn (prev);
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    if (--LABEL_NUSES (JUMP_LABEL (insn)) == 0)
      {
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_insn (JUMP_LABEL (insn));
	/* I feel a little doubtful about this loop,
	   but I see no clean and sure alternative way
	   to find the first insn after INSN that is not now deleted.
	   I hope this works.  */
	while (next && INSN_DELETED_P (next))
	  next = NEXT_INSN (next);
	return next;
      }

  /* Likewise if we're deleting a dispatch table.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Drop one use of each label in the table, deleting labels that
	 become unused.  */
      for (i = 0; i < len; i++)
	if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Back up over deleted insns and notes to find the real predecessor.  */
  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_insn (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      register RTX_CODE code;
      while (next != 0
	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
		 || code == NOTE || code == BARRIER
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_insn (next);
	}
    }

  return next;
}
3802
3803 /* Advance from INSN till reaching something not deleted
3804 then return that. May return INSN itself. */
3805
3806 rtx
3807 next_nondeleted_insn (insn)
3808 rtx insn;
3809 {
3810 while (INSN_DELETED_P (insn))
3811 insn = NEXT_INSN (insn);
3812 return insn;
3813 }
3814 \f
3815 /* Delete a range of insns from FROM to TO, inclusive.
3816 This is for the sake of peephole optimization, so assume
3817 that whatever these insns do will still be done by a new
3818 peephole insn that will replace them. */
3819
3820 void
3821 delete_for_peephole (from, to)
3822 register rtx from, to;
3823 {
3824 register rtx insn = from;
3825
3826 while (1)
3827 {
3828 register rtx next = NEXT_INSN (insn);
3829 register rtx prev = PREV_INSN (insn);
3830
3831 if (GET_CODE (insn) != NOTE)
3832 {
3833 INSN_DELETED_P (insn) = 1;
3834
3835 /* Patch this insn out of the chain. */
3836 /* We don't do this all at once, because we
3837 must preserve all NOTEs. */
3838 if (prev)
3839 NEXT_INSN (prev) = next;
3840
3841 if (next)
3842 PREV_INSN (next) = prev;
3843 }
3844
3845 if (insn == to)
3846 break;
3847 insn = next;
3848 }
3849
3850 /* Note that if TO is an unconditional jump
3851 we *do not* delete the BARRIER that follows,
3852 since the peephole that replaces this sequence
3853 is also an unconditional jump in that case. */
3854 }
3855 \f
3856 /* Invert the condition of the jump JUMP, and make it jump
3857 to label NLABEL instead of where it jumps now. */
3858
int
invert_jump (jump, nlabel)
     rtx jump, nlabel;
{
  /* We have to either invert the condition and change the label or
     do neither.  Either operation could fail.  We first try to invert
     the jump. If that succeeds, we try changing the label.  If that fails,
     we invert the jump back to what it was.  */

  if (! invert_exp (PATTERN (jump), jump))
    return 0;

  if (redirect_jump (jump, nlabel))
    {
      if (flag_branch_probabilities)
	{
	  rtx note = find_reg_note (jump, REG_BR_PROB, 0);

	  /* An inverted jump means that a probability taken becomes a
	     probability not taken.  Subtract the branch probability from the
	     probability base to convert it back to a taken probability.
	     (We don't flip the probability on a branch that's never taken.)  */
	  if (note && XINT (XEXP (note, 0), 0) >= 0)
	    XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
	}

      return 1;
    }

  /* Redirecting failed: undo the inversion.  */
  if (! invert_exp (PATTERN (jump), jump))
    /* This should just be putting it back the way it was.  */
    abort ();

  return 0;
}
3894
3895 /* Invert the jump condition of rtx X contained in jump insn, INSN.
3896
3897 Return 1 if we can do so, 0 if we cannot find a way to do so that
3898 matches a pattern. */
3899
int
invert_exp (x, insn)
     rtx x;
     rtx insn;
{
  register RTX_CODE code;
  register int i;
  register char *fmt;

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      register rtx comp = XEXP (x, 0);
      register rtx tem;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      if (can_reverse_comparison_p (comp, insn)
	  && validate_change (insn, &XEXP (x, 0),
			      gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
					      GET_MODE (comp), XEXP (comp, 0),
					      XEXP (comp, 1)), 0))
	return 1;

      /* Queue both arm swaps in one change group so they are validated
	 (and applied or discarded) together.  */
      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return apply_change_group ();
    }

  /* Recurse into subexpressions looking for the IF_THEN_ELSE.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	if (! invert_exp (XEXP (x, i), insn))
	  return 0;
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (!invert_exp (XVECEXP (x, i, j), insn))
	      return 0;
	}
    }

  return 1;
}
3951 \f
3952 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
3953 If the old jump target label is unused as a result,
3954 it and the code following it may be deleted.
3955
3956 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3957 RETURN insn.
3958
3959 The return value will be 1 if the change was made, 0 if it wasn't (this
3960 can only occur for NLABEL == 0). */
3961
int
redirect_jump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  /* Nothing to do if the jump already goes where requested.  */
  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
    return 0;

  /* If this is an unconditional branch, delete it from the jump_chain of
     OLABEL and add it to the jump_chain of NLABEL (assuming both labels
     have UID's in range and JUMP_CHAIN is valid).  */
  if (jump_chain && (simplejump_p (jump)
		     || GET_CODE (PATTERN (jump)) == RETURN))
    {
      /* Chain index 0 is reserved for RETURN insns.  */
      int label_index = nlabel ? INSN_UID (nlabel) : 0;

      delete_from_jump_chain (jump);
      if (label_index < max_jump_chain
	  && INSN_UID (jump) < max_jump_chain)
	{
	  jump_chain[INSN_UID (jump)] = jump_chain[label_index];
	  jump_chain[label_index] = jump;
	}
    }

  /* Update use counts; dropping the last use of OLABEL deletes it
     (and possibly the unreachable code after it).  */
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0)
    delete_insn (olabel);

  return 1;
}
4000
4001 /* Delete the instruction JUMP from any jump chain it might be on. */
4002
4003 static void
4004 delete_from_jump_chain (jump)
4005 rtx jump;
4006 {
4007 int index;
4008 rtx olabel = JUMP_LABEL (jump);
4009
4010 /* Handle unconditional jumps. */
4011 if (jump_chain && olabel != 0
4012 && INSN_UID (olabel) < max_jump_chain
4013 && simplejump_p (jump))
4014 index = INSN_UID (olabel);
4015 /* Handle return insns. */
4016 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4017 index = 0;
4018 else return;
4019
4020 if (jump_chain[index] == jump)
4021 jump_chain[index] = jump_chain[INSN_UID (jump)];
4022 else
4023 {
4024 rtx insn;
4025
4026 for (insn = jump_chain[index];
4027 insn != 0;
4028 insn = jump_chain[INSN_UID (insn)])
4029 if (jump_chain[INSN_UID (insn)] == jump)
4030 {
4031 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4032 break;
4033 }
4034 }
4035 }
4036
4037 /* If NLABEL is nonzero, throughout the rtx at LOC,
4038 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
4039 zero, alter (RETURN) to (LABEL_REF NLABEL).
4040
4041 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
4042 validity with validate_change. Convert (set (pc) (label_ref olabel))
4043 to (return).
4044
4045 Return 0 if we found a change we would like to make but it is invalid.
4046 Otherwise, return 1. */
4047
int
redirect_exp (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  register rtx x = *loc;
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  if (nlabel)
	    /* Retarget the label reference in place; one label is as
	       recognizable as another, so no validation is needed.  */
	    XEXP (x, 0) = nlabel;
	  else
	    /* Turning a label reference into (return) may yield an
	       unrecognizable insn, so let validate_change decide.  */
	    return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
	  return 1;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      /* Replace (return) by a reference to NLABEL; if this is the whole
	 pattern of INSN, wrap it so we get (set (pc) (label_ref)).  */
      x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      return validate_change (insn, loc, x, 0);
    }

  /* (set (pc) (label_ref OLABEL)) with NLABEL == 0 becomes (return).  */
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);

  /* Otherwise recurse through all subexpressions; fail if any
     replacement below fails validation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
	  return 0;
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
	      return 0;
	}
    }

  return 1;
}
4100 \f
4101 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4102
4103 If the old jump target label (before the dispatch table) becomes unused,
4104 it and the dispatch table may be deleted. In that case, find the insn
4105 before the jump references that label and delete it and logical successors
4106 too. */
4107
4108 static void
4109 redirect_tablejump (jump, nlabel)
4110 rtx jump, nlabel;
4111 {
4112 register rtx olabel = JUMP_LABEL (jump);
4113
4114 /* Add this jump to the jump_chain of NLABEL. */
4115 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4116 && INSN_UID (jump) < max_jump_chain)
4117 {
4118 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4119 jump_chain[INSN_UID (nlabel)] = jump;
4120 }
4121
4122 PATTERN (jump) = gen_jump (nlabel);
4123 JUMP_LABEL (jump) = nlabel;
4124 ++LABEL_NUSES (nlabel);
4125 INSN_CODE (jump) = -1;
4126
4127 if (--LABEL_NUSES (olabel) == 0)
4128 {
4129 delete_labelref_insn (jump, olabel, 0);
4130 delete_insn (olabel);
4131 }
4132 }
4133
4134 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4135 If we found one, delete it and then delete this insn if DELETE_THIS is
4136 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4137
4138 static int
4139 delete_labelref_insn (insn, label, delete_this)
4140 rtx insn, label;
4141 int delete_this;
4142 {
4143 int deleted = 0;
4144 rtx link;
4145
4146 if (GET_CODE (insn) != NOTE
4147 && reg_mentioned_p (label, PATTERN (insn)))
4148 {
4149 if (delete_this)
4150 {
4151 delete_insn (insn);
4152 deleted = 1;
4153 }
4154 else
4155 return 1;
4156 }
4157
4158 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4159 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4160 {
4161 if (delete_this)
4162 {
4163 delete_insn (insn);
4164 deleted = 1;
4165 }
4166 else
4167 return 1;
4168 }
4169
4170 return deleted;
4171 }
4172 \f
4173 /* Like rtx_equal_p except that it considers two REGs as equal
4174 if they renumber to the same value and considers two commutative
4175 operations to be the same if the order of the operands has been
4176 reversed. */
4177
4178 int
4179 rtx_renumbered_equal_p (x, y)
4180 rtx x, y;
4181 {
4182 register int i;
4183 register RTX_CODE code = GET_CODE (x);
4184 register char *fmt;
4185
4186 if (x == y)
4187 return 1;
4188
4189 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4190 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4191 && GET_CODE (SUBREG_REG (y)) == REG)))
4192 {
4193 int reg_x = -1, reg_y = -1;
4194 int word_x = 0, word_y = 0;
4195
4196 if (GET_MODE (x) != GET_MODE (y))
4197 return 0;
4198
4199 /* If we haven't done any renumbering, don't
4200 make any assumptions. */
4201 if (reg_renumber == 0)
4202 return rtx_equal_p (x, y);
4203
4204 if (code == SUBREG)
4205 {
4206 reg_x = REGNO (SUBREG_REG (x));
4207 word_x = SUBREG_WORD (x);
4208
4209 if (reg_renumber[reg_x] >= 0)
4210 {
4211 reg_x = reg_renumber[reg_x] + word_x;
4212 word_x = 0;
4213 }
4214 }
4215
4216 else
4217 {
4218 reg_x = REGNO (x);
4219 if (reg_renumber[reg_x] >= 0)
4220 reg_x = reg_renumber[reg_x];
4221 }
4222
4223 if (GET_CODE (y) == SUBREG)
4224 {
4225 reg_y = REGNO (SUBREG_REG (y));
4226 word_y = SUBREG_WORD (y);
4227
4228 if (reg_renumber[reg_y] >= 0)
4229 {
4230 reg_y = reg_renumber[reg_y];
4231 word_y = 0;
4232 }
4233 }
4234
4235 else
4236 {
4237 reg_y = REGNO (y);
4238 if (reg_renumber[reg_y] >= 0)
4239 reg_y = reg_renumber[reg_y];
4240 }
4241
4242 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4243 }
4244
4245 /* Now we have disposed of all the cases
4246 in which different rtx codes can match. */
4247 if (code != GET_CODE (y))
4248 return 0;
4249
4250 switch (code)
4251 {
4252 case PC:
4253 case CC0:
4254 case ADDR_VEC:
4255 case ADDR_DIFF_VEC:
4256 return 0;
4257
4258 case CONST_INT:
4259 return INTVAL (x) == INTVAL (y);
4260
4261 case LABEL_REF:
4262 /* We can't assume nonlocal labels have their following insns yet. */
4263 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4264 return XEXP (x, 0) == XEXP (y, 0);
4265
4266 /* Two label-refs are equivalent if they point at labels
4267 in the same position in the instruction stream. */
4268 return (next_real_insn (XEXP (x, 0))
4269 == next_real_insn (XEXP (y, 0)));
4270
4271 case SYMBOL_REF:
4272 return XSTR (x, 0) == XSTR (y, 0);
4273
4274 default:
4275 break;
4276 }
4277
4278 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4279
4280 if (GET_MODE (x) != GET_MODE (y))
4281 return 0;
4282
4283 /* For commutative operations, the RTX match if the operand match in any
4284 order. Also handle the simple binary and unary cases without a loop. */
4285 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4286 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4287 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4288 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4289 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4290 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4291 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4292 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4293 else if (GET_RTX_CLASS (code) == '1')
4294 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4295
4296 /* Compare the elements. If any pair of corresponding elements
4297 fail to match, return 0 for the whole things. */
4298
4299 fmt = GET_RTX_FORMAT (code);
4300 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4301 {
4302 register int j;
4303 switch (fmt[i])
4304 {
4305 case 'w':
4306 if (XWINT (x, i) != XWINT (y, i))
4307 return 0;
4308 break;
4309
4310 case 'i':
4311 if (XINT (x, i) != XINT (y, i))
4312 return 0;
4313 break;
4314
4315 case 's':
4316 if (strcmp (XSTR (x, i), XSTR (y, i)))
4317 return 0;
4318 break;
4319
4320 case 'e':
4321 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4322 return 0;
4323 break;
4324
4325 case 'u':
4326 if (XEXP (x, i) != XEXP (y, i))
4327 return 0;
4328 /* fall through. */
4329 case '0':
4330 break;
4331
4332 case 'E':
4333 if (XVECLEN (x, i) != XVECLEN (y, i))
4334 return 0;
4335 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4336 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4337 return 0;
4338 break;
4339
4340 default:
4341 abort ();
4342 }
4343 }
4344 return 1;
4345 }
4346 \f
4347 /* If X is a hard register or equivalent to one or a subregister of one,
4348 return the hard register number. If X is a pseudo register that was not
4349 assigned a hard register, return the pseudo register number. Otherwise,
4350 return -1. Any rtx is valid for X. */
4351
4352 int
4353 true_regnum (x)
4354 rtx x;
4355 {
4356 if (GET_CODE (x) == REG)
4357 {
4358 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4359 return reg_renumber[REGNO (x)];
4360 return REGNO (x);
4361 }
4362 if (GET_CODE (x) == SUBREG)
4363 {
4364 int base = true_regnum (SUBREG_REG (x));
4365 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
4366 return SUBREG_WORD (x) + base;
4367 }
4368 return -1;
4369 }
4370 \f
4371 /* Optimize code of the form:
4372
4373 for (x = a[i]; x; ...)
4374 ...
4375 for (x = a[i]; x; ...)
4376 ...
4377 foo:
4378
4379 Loop optimize will change the above code into
4380
4381 if (x = a[i])
4382 for (;;)
4383 { ...; if (! (x = ...)) break; }
4384 if (x = a[i])
4385 for (;;)
4386 { ...; if (! (x = ...)) break; }
4387 foo:
4388
4389 In general, if the first test fails, the program can branch
4390 directly to `foo' and skip the second try which is doomed to fail.
4391 We run this after loop optimization and before flow analysis. */
4392
4393 /* When comparing the insn patterns, we track the fact that different
4394 pseudo-register numbers may have been used in each computation.
4395 The following array stores an equivalence -- same_regs[I] == J means
4396 that pseudo register I was used in the first set of tests in a context
4397 where J was used in the second set. We also count the number of such
4398 pending equivalences. If nonzero, the expressions really aren't the
4399 same. */
4400
static int *same_regs;

/* Number of entries in same_regs that currently hold a pending
   (not yet discharged) equivalence.  */

static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;
4413
4414 /* Called via note_stores on each insn between the target of the first
4415 branch and the second branch. It marks any changed registers. */
4416
4417 static void
4418 mark_modified_reg (dest, x)
4419 rtx dest;
4420 rtx x ATTRIBUTE_UNUSED;
4421 {
4422 int regno, i;
4423
4424 if (GET_CODE (dest) == SUBREG)
4425 dest = SUBREG_REG (dest);
4426
4427 if (GET_CODE (dest) == MEM)
4428 modified_mem = 1;
4429
4430 if (GET_CODE (dest) != REG)
4431 return;
4432
4433 regno = REGNO (dest);
4434 if (regno >= FIRST_PSEUDO_REGISTER)
4435 modified_regs[regno] = 1;
4436 else
4437 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
4438 modified_regs[regno + i] = 1;
4439 }
4440
/* Thread conditional jumps over the insn stream beginning at F.
   MAX_REG is used to size the per-register tables (it must exceed the
   largest register number in use).  FLAG_BEFORE_LOOP is nonzero when we
   run before loop optimization, in which case we must avoid retargeting
   a jump at a loop's own label (see below).  */

void
thread_jumps (f, max_reg, flag_before_loop)
     rtx f;
     int max_reg;
     int flag_before_loop;
{
  /* Basic algorithm is to find a conditional branch,
     the label it may branch to, and the branch after
     that label.  If the two branches test the same condition,
     walk back from both branch paths until the insn patterns
     differ, or code labels are hit.  If we make it back to
     the target of the first branch, then we know that the first branch
     will either always succeed or always fail depending on the relative
     senses of the two branches.  So adjust the first branch accordingly
     in this case.  */

  rtx label, b1, b2, t1, t2;
  enum rtx_code code1, code2;
  rtx b1op0, b1op1, b2op0, b2op1;
  int changed = 1;
  int i;
  int *all_reset;

  /* Allocate register tables and quick-reset table.  */
  modified_regs = (char *) alloca (max_reg * sizeof (char));
  same_regs = (int *) alloca (max_reg * sizeof (int));
  all_reset = (int *) alloca (max_reg * sizeof (int));
  for (i = 0; i < max_reg; i++)
    all_reset[i] = -1;

  /* Iterate to a fixed point: a successful redirect_jump below may
     expose further threading opportunities.  */
  while (changed)
    {
      changed = 0;

      for (b1 = f; b1; b1 = NEXT_INSN (b1))
	{
	  /* Get to a candidate branch insn.  */
	  if (GET_CODE (b1) != JUMP_INSN
	      || ! condjump_p (b1) || simplejump_p (b1)
	      || JUMP_LABEL (b1) == 0)
	    continue;

	  /* Reset the modification tracking and the pending register
	     equivalences for this candidate.  */
	  bzero (modified_regs, max_reg * sizeof (char));
	  modified_mem = 0;

	  bcopy ((char *) all_reset, (char *) same_regs,
		 max_reg * sizeof (int));
	  num_same_regs = 0;

	  label = JUMP_LABEL (b1);

	  /* Look for a branch after the target.  Record any registers and
	     memory modified between the target and the branch.  Stop when we
	     get to a label since we can't know what was changed there.  */
	  for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
	    {
	      if (GET_CODE (b2) == CODE_LABEL)
		break;

	      else if (GET_CODE (b2) == JUMP_INSN)
		{
		  /* If this is an unconditional jump and is the only use of
		     its target label, we can follow it.  */
		  if (simplejump_p (b2)
		      && JUMP_LABEL (b2) != 0
		      && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
		    {
		      b2 = JUMP_LABEL (b2);
		      continue;
		    }
		  else
		    break;
		}

	      if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
		continue;

	      /* A call clobbers memory and every call-used, non-fixed
		 hard register except the various pointer registers.  */
	      if (GET_CODE (b2) == CALL_INSN)
		{
		  modified_mem = 1;
		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    if (call_used_regs[i] && ! fixed_regs[i]
			&& i != STACK_POINTER_REGNUM
			&& i != FRAME_POINTER_REGNUM
			&& i != HARD_FRAME_POINTER_REGNUM
			&& i != ARG_POINTER_REGNUM)
		      modified_regs[i] = 1;
		}

	      note_stores (PATTERN (b2), mark_modified_reg);
	    }

	  /* Check the next candidate branch insn from the label
	     of the first.  */
	  if (b2 == 0
	      || GET_CODE (b2) != JUMP_INSN
	      || b2 == b1
	      || ! condjump_p (b2)
	      || simplejump_p (b2))
	    continue;

	  /* Get the comparison codes and operands, reversing the
	     codes if appropriate.  If we don't have comparison codes,
	     we can't do anything.  */
	  b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
	  b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
	  code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
	  if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
	    code1 = reverse_condition (code1);

	  b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
	  b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
	  code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
	  if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
	    code2 = reverse_condition (code2);

	  /* If they test the same things and knowing that B1 branches
	     tells us whether or not B2 branches, check if we
	     can thread the branch.  */
	  if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
	      && rtx_equal_for_thread_p (b1op1, b2op1, b2)
	      && (comparison_dominates_p (code1, code2)
		  || (comparison_dominates_p (code1, reverse_condition (code2))
		      && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
							 0),
						   b1))))
	    {
	      /* Walk both paths backwards in lock step, comparing the
		 insns that feed each branch.  */
	      t1 = prev_nonnote_insn (b1);
	      t2 = prev_nonnote_insn (b2);

	      while (t1 != 0 && t2 != 0)
		{
		  if (t2 == label)
		    {
		      /* We have reached the target of the first branch.
		         If there are no pending register equivalents,
			 we know that this branch will either always
			 succeed (if the senses of the two branches are
			 the same) or always fail (if not).  */
		      rtx new_label;

		      if (num_same_regs != 0)
			break;

		      if (comparison_dominates_p (code1, code2))
			new_label = JUMP_LABEL (b2);
		      else
			new_label = get_label_after (b2);

		      if (JUMP_LABEL (b1) != new_label)
			{
			  rtx prev = PREV_INSN (new_label);

			  if (flag_before_loop
			      && GET_CODE (prev) == NOTE
			      && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
			    {
			      /* Don't thread to the loop label.  If a loop
				 label is reused, loop optimization will
				 be disabled for that loop.  */
			      new_label = gen_label_rtx ();
			      emit_label_after (new_label, PREV_INSN (prev));
			    }
			  /* redirect_jump returns nonzero on success;
			     fold that into CHANGED to trigger another
			     pass.  */
			  changed |= redirect_jump (b1, new_label);
			}
		      break;
		    }

		  /* If either of these is not a normal insn (it might be
		     a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail.  (NOTEs
		     have already been skipped above.)  Similarly, fail
		     if the insns are different.  */
		  if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
		      || recog_memoized (t1) != recog_memoized (t2)
		      || ! rtx_equal_for_thread_p (PATTERN (t1),
						   PATTERN (t2), t2))
		    break;

		  t1 = prev_nonnote_insn (t1);
		  t2 = prev_nonnote_insn (t2);
		}
	    }
	}
    }
}
4628 \f
4629 /* This is like RTX_EQUAL_P except that it knows about our handling of
4630 possibly equivalent registers and knows to consider volatile and
4631 modified objects as not equal.
4632
4633 YINSN is the insn containing Y. */
4634
int
rtx_equal_for_thread_p (x, y, yinsn)
     rtx x, y;
     rtx yinsn;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register char *fmt;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For floating-point, consider everything unequal.  This is a bit
     pessimistic, but this pass would only rarely do anything for FP
     anyway.  */
  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	     && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
	    || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
		&& rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	    && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

  /* Handle special-cases first.  */
  switch (code)
    {
    case REG:
      /* The same, unmodified register is trivially equivalent.  */
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
	return 1;

      /* If neither is user variable or hard register, check for possible
	 equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  || REGNO (y) < FIRST_PSEUDO_REGISTER)
	return 0;

      /* Record a new pending equivalence in same_regs (note the side
	 effect: callers rely on num_same_regs reflecting how many
	 equivalences are still outstanding).  */
      if (same_regs[REGNO (x)] == -1)
	{
	  same_regs[REGNO (x)] = REGNO (y);
	  num_same_regs++;

	  /* If this is the first time we are seeing a register on the `Y'
	     side, see if it is the last use.  If not, we can't thread the
	     jump, so mark it as not equivalent.  */
	  if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
	    return 0;

	  return 1;
	}
      else
	return (same_regs[REGNO (x)] == REGNO (y));

      break;

    case MEM:
      /* If memory modified or either volatile, not equivalent.
	 Else, check address.  */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

    case ASM_INPUT:
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' if setting equivalenced registers.
	 Then process source.  */
      if (GET_CODE (SET_DEST (x)) == REG
	  && GET_CODE (SET_DEST (y)) == REG)
	{
	  if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
	    {
	      same_regs[REGNO (SET_DEST (x))] = -1;
	      num_same_regs--;
	    }
	  else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
	    return 0;
	}
      else
	if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
	  return 0;

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  if (x == y)
    return 1;

  /* Generic structural walk over the operands, driven by the rtx
     format string.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
					XVECEXP (y, i, j), yinsn) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}
4811 \f
4812
4813 #ifndef HAVE_cc0
4814 /* Return the insn that NEW can be safely inserted in front of starting at
4815 the jump insn INSN. Return 0 if it is not safe to do this jump
4816 optimization. Note that NEW must contain a single set. */
4817
static rtx
find_insert_position (insn, new)
     rtx insn;
     rtx new;
{
  int i;
  rtx prev;

  /* If NEW does not clobber, it is safe to insert NEW before INSN.  */
  if (GET_CODE (PATTERN (new)) != PARALLEL)
    return insn;

  /* Look for a CLOBBER in NEW whose clobbered object is mentioned by
     INSN; if there is none, inserting right before INSN is safe.  */
  for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
    if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
	&& reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
				    insn))
      break;

  if (i < 0)
    return insn;

  /* There is a good chance that the previous insn PREV sets the thing
     being clobbered (often the CC in a hard reg).  If PREV does not
     use what NEW sets, we can insert NEW before PREV. */

  prev = prev_active_insn (insn);
  /* If some clobbered object that INSN mentions is NOT set by PREV,
     its value must flow from somewhere before PREV, so inserting NEW
     in front of PREV would still clobber it -- give up.  */
  for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
    if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
	&& reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
				    insn)
	&& ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
			    prev))
      return 0;

  /* NOTE(review): assumes prev_active_insn cannot return 0 here --
     callers appear to guarantee an active insn precedes INSN; confirm.  */
  return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
}
4854 #endif /* !HAVE_cc0 */
4855
4856 /* Return 1 if the value of X is unsafe to arbitrarily evaluate, i.e.
4857 might fault on some arguments. This is used in connection with
4858 conditional move optimization. */
4859
4860 static int
4861 rtx_unsafe_p (x)
4862 rtx x;
4863 {
4864 register RTX_CODE code = GET_CODE (x);
4865 register int i;
4866 register char *fmt;
4867
4868 switch (code)
4869 {
4870 case MEM:
4871 return ! RTX_UNCHANGING_P (x);
4872
4873 case QUEUED:
4874 return 1;
4875
4876 case CONST_INT:
4877 case CONST_DOUBLE:
4878 case CONST_STRING:
4879 case CONST:
4880 case PC:
4881 case LABEL_REF:
4882 case SYMBOL_REF:
4883 case ADDRESSOF:
4884 case REG:
4885 return 0;
4886
4887 case DIV:
4888 case MOD:
4889 case UDIV:
4890 case UMOD:
4891 case SQRT:
4892 return 1;
4893
4894 default:
4895 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
4896 && !flag_fast_math
4897 && FLOAT_MODE_P (GET_MODE (x)))
4898 return 1;
4899
4900 switch (GET_RTX_CLASS (code))
4901 {
4902 case '<':
4903 case '1':
4904 case '2':
4905 case '3':
4906 case 'c':
4907 case 'b':
4908 break;
4909
4910 default:
4911 return 1;
4912 }
4913 break;
4914 }
4915
4916 fmt = GET_RTX_FORMAT (code);
4917 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4918 if (fmt[i] == 'e')
4919 if (rtx_unsafe_p (XEXP (x, i)))
4920 return 1;
4921
4922 return 0;
4923 }
This page took 0.314666 seconds and 6 git commands to generate.