1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 88, 89, 91, 92, 93, 1994 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 /* This is the jump-optimization pass of the compiler.
22 It is run two or three times: once before cse, sometimes once after cse,
23 and once after reload (before final).
24
25 jump_optimize deletes unreachable code and labels that are not used.
26 It also deletes jumps that jump to the following insn,
27 and simplifies jumps around unconditional jumps and jumps
28 to unconditional jumps.
29
30 Each CODE_LABEL has a count of the times it is used
31 stored in the LABEL_NUSES internal field, and each JUMP_INSN
32 has one label that it refers to stored in the
33 JUMP_LABEL internal field. With this we can detect labels that
34 become unused because of the deletion of all the jumps that
35 formerly used them. The JUMP_LABEL info is sometimes looked
36 at by later passes.
37
38 Optionally, cross-jumping can be done. Currently it is done
39 only the last time (after reload and before final).
40 In fact, the code for cross-jumping now assumes that register
41 allocation has been done, since it uses `rtx_renumbered_equal_p'.
42
43 Jump optimization is done after cse when cse's constant-propagation
44 causes jumps to become unconditional or to be deleted.
45
46 Unreachable loops are not detected here, because the labels
47 have references and the insns appear reachable from the labels.
48 find_basic_blocks in flow.c finds and deletes such loops.
49
50 The subroutines delete_insn, redirect_jump, and invert_jump are used
51 from other passes as well. */
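/* As an illustration of the simplifications named above (a
   hypothetical source-level sketch, not code from this pass): a jump
   to an unconditional jump,

       goto L1;  ...  L1: goto L2;

   is redirected straight to L2, and a conditional jump around an
   unconditional one,

       if (c) goto L1;  goto L2;  L1: ...

   is inverted to "if (!c) goto L2;", after which the unreachable
   jump and any unused labels are deleted.  */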
52
53 #include "config.h"
54 #include "rtl.h"
55 #include "flags.h"
56 #include "hard-reg-set.h"
57 #include "regs.h"
58 #include "expr.h"
59 #include "insn-config.h"
60 #include "insn-flags.h"
61 #include "real.h"
62
63 /* ??? Eventually must record somehow the labels used by jumps
64 from nested functions. */
65 /* Pre-record the next or previous real insn for each label?
66 No, this pass is very fast anyway. */
67 /* Condense consecutive labels?
68 This would make life analysis faster, maybe. */
69 /* Optimize jump y; x: ... y: jumpif... x?
70 Don't know if it is worth bothering with. */
71 /* Optimize two cases of conditional jump to conditional jump?
72 This can never delete any instruction or make anything dead,
73 or even change what is live at any point.
74 So perhaps let combiner do it. */
75
76 /* Vector indexed by uid.
77 For each CODE_LABEL, index by its uid to get first unconditional jump
78 that jumps to the label.
79 For each JUMP_INSN, index by its uid to get the next unconditional jump
80 that jumps to the same label.
81 Element 0 is the start of a chain of all return insns.
82 (It is safe to use element 0 because insn uid 0 is not used.) */
83
84 static rtx *jump_chain;
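/* A sketch of how the chain is threaded (uids hypothetical): if
   label L has uid 10 and simplejumps J1 (uid 20) and J2 (uid 30)
   both target L, with J1 scanned first, then after the scan

       jump_chain[10] = J2,  jump_chain[30] = J1,  jump_chain[20] = 0.

   Each new jump is pushed on the front of its label's chain, so
   walking jump_chain from a label's uid visits every unconditional
   jump to it; element 0 heads the analogous chain of RETURN insns.  */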
85
86 /* List of labels referred to from initializers.
87 These can never be deleted. */
88 rtx forced_labels;
89
90 /* Maximum index in jump_chain. */
91
92 static int max_jump_chain;
93
94 /* Set nonzero by jump_optimize if control can fall through
95 to the end of the function. */
96 int can_reach_end;
97
98 /* Indicates whether death notes are significant in cross jump analysis.
99 Normally they are not significant, because if A and B jump to C,
100 and R dies in A, it must die in B. But this might not be true after
101 stack register conversion, and we must compare death notes in that
102 case. */
103
104 static int cross_jump_death_matters = 0;
105
106 static int duplicate_loop_exit_test PROTO((rtx));
107 static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
108 static void do_cross_jump PROTO((rtx, rtx, rtx));
109 static int jump_back_p PROTO((rtx, rtx));
110 static int tension_vector_labels PROTO((rtx, int));
111 static void mark_jump_label PROTO((rtx, rtx, int));
112 static void delete_computation PROTO((rtx));
113 static void delete_from_jump_chain PROTO((rtx));
114 static int delete_labelref_insn PROTO((rtx, rtx, int));
115 static void redirect_tablejump PROTO((rtx, rtx));
116 \f
117 /* Delete no-op jumps and optimize jumps to jumps
118 and jumps around jumps.
119 Delete unused labels and unreachable code.
120
121 If CROSS_JUMP is 1, detect matching code
122 before a jump and its destination and unify them.
123 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
124
125 If NOOP_MOVES is nonzero, delete no-op move insns.
126
127 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
128 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
129
130 If `optimize' is zero, don't change any code,
131 just determine whether control drops off the end of the function.
132 This case occurs when we have -W and not -O.
133 It works because `delete_insn' checks the value of `optimize'
134 and refrains from actually deleting when that is 0. */
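/* A hypothetical invocation as the first jump pass, right after
   regscan (the real call sites are elsewhere, e.g. toplev.c, and may
   pass different arguments):

       jump_optimize (get_insns (), 0, 0, 1);

   i.e. no cross-jumping, no no-op-move deletion, and regno_first_uid
   and regno_last_uid still valid.  */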
135
136 void
137 jump_optimize (f, cross_jump, noop_moves, after_regscan)
138 rtx f;
139 int cross_jump;
140 int noop_moves;
141 int after_regscan;
142 {
143 register rtx insn, next, note;
144 int changed;
145 int first = 1;
146 int max_uid = 0;
147 rtx last_insn;
148
149 cross_jump_death_matters = (cross_jump == 2);
150
151 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
152 notes whose labels don't occur in the insn any more. */
153
154 for (insn = f; insn; insn = NEXT_INSN (insn))
155 {
156 if (GET_CODE (insn) == CODE_LABEL)
157 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
158 else if (GET_CODE (insn) == JUMP_INSN)
159 JUMP_LABEL (insn) = 0;
160 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
161 for (note = REG_NOTES (insn); note; note = next)
162 {
163 next = XEXP (note, 1);
164 if (REG_NOTE_KIND (note) == REG_LABEL
165 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
166 remove_note (insn, note);
167 }
168
169 if (INSN_UID (insn) > max_uid)
170 max_uid = INSN_UID (insn);
171 }
172
173 max_uid++;
174
175 /* Delete insns following barriers, up to next label. */
176
177 for (insn = f; insn;)
178 {
179 if (GET_CODE (insn) == BARRIER)
180 {
181 insn = NEXT_INSN (insn);
182 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
183 {
184 if (GET_CODE (insn) == NOTE
185 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
186 insn = NEXT_INSN (insn);
187 else
188 insn = delete_insn (insn);
189 }
190 /* INSN is now the code_label. */
191 }
192 else
193 insn = NEXT_INSN (insn);
194 }
195
196 /* Leave some extra room for labels and duplicate exit test insns
197 we make. */
198 max_jump_chain = max_uid * 14 / 10;
199 jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
200 bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));
201
202 /* Mark the label each jump jumps to.
203 Combine consecutive labels, and count uses of labels.
204
205 For each label, make a chain (using `jump_chain')
206 of all the *unconditional* jumps that jump to it;
207 also make a chain of all returns. */
208
209 for (insn = f; insn; insn = NEXT_INSN (insn))
210 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
211 && ! INSN_DELETED_P (insn))
212 {
213 mark_jump_label (PATTERN (insn), insn, cross_jump);
214 if (GET_CODE (insn) == JUMP_INSN)
215 {
216 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
217 {
218 jump_chain[INSN_UID (insn)]
219 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
220 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
221 }
222 if (GET_CODE (PATTERN (insn)) == RETURN)
223 {
224 jump_chain[INSN_UID (insn)] = jump_chain[0];
225 jump_chain[0] = insn;
226 }
227 }
228 }
229
230 /* Keep track of labels used from static data;
231 they cannot ever be deleted. */
232
233 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
234 LABEL_NUSES (XEXP (insn, 0))++;
235
236 /* Delete all labels that are not referenced.
237 Also find the last insn. */
238
239 last_insn = 0;
240 for (insn = f; insn; )
241 {
242 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
243 insn = delete_insn (insn);
244 else
245 {
246 last_insn = insn;
247 insn = NEXT_INSN (insn);
248 }
249 }
250
251 if (!optimize)
252 {
253 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
254 If so, record that this function can drop off the end. */
255
256 insn = last_insn;
257 {
258 int n_labels = 1;
259 while (insn
260 /* One label can follow the end-note: the return label. */
261 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
262 /* Ordinary insns can follow it if returning a structure. */
263 || GET_CODE (insn) == INSN
264 /* If the machine uses explicit RETURN insns and no epilogue,
265 then one of them follows the note. */
266 || (GET_CODE (insn) == JUMP_INSN
267 && GET_CODE (PATTERN (insn)) == RETURN)
268 /* Other kinds of notes can follow also. */
269 || (GET_CODE (insn) == NOTE
270 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
271 insn = PREV_INSN (insn);
272 }
273
274 /* Report if control can fall through at the end of the function. */
275 if (insn && GET_CODE (insn) == NOTE
276 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
277 && ! INSN_DELETED_P (insn))
278 can_reach_end = 1;
279
280 /* Zero the "deleted" flag of all the "deleted" insns. */
281 for (insn = f; insn; insn = NEXT_INSN (insn))
282 INSN_DELETED_P (insn) = 0;
283 return;
284 }
285
286 #ifdef HAVE_return
287 if (HAVE_return)
288 {
289 /* If we fall through to the epilogue, see if we can insert a RETURN insn
290 in front of it. If the machine allows it at this point (we might be
291 after reload for a leaf routine), it will improve optimization for it
292 to be there. */
293 insn = get_last_insn ();
294 while (insn && GET_CODE (insn) == NOTE)
295 insn = PREV_INSN (insn);
296
297 if (insn && GET_CODE (insn) != BARRIER)
298 {
299 emit_jump_insn (gen_return ());
300 emit_barrier ();
301 }
302 }
303 #endif
304
305 if (noop_moves)
306 for (insn = f; insn; )
307 {
308 next = NEXT_INSN (insn);
309
310 if (GET_CODE (insn) == INSN)
311 {
312 register rtx body = PATTERN (insn);
313
314 /* Combine stack_adjusts with following push_insns. */
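/* A sketch of this combination (hypothetical 4-byte pushes):

       sp = sp + 8;            the stack-adjust insn
       *(sp POST_INC) = a;     push
       *(sp POST_INC) = b;     push

   becomes, once the pushes use up the whole adjustment,

       *(sp + 4) = a;
       *(sp + 0) = b;

   with the adjust deleted, or merely reduced if the pushes consume
   only part of it.  */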
315 #ifdef PUSH_ROUNDING
316 if (GET_CODE (body) == SET
317 && SET_DEST (body) == stack_pointer_rtx
318 && GET_CODE (SET_SRC (body)) == PLUS
319 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
320 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
321 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
322 {
323 rtx p;
324 rtx stack_adjust_insn = insn;
325 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
326 int total_pushed = 0;
327 int pushes = 0;
328
329 /* Find all successive push insns. */
330 p = insn;
331 /* Don't convert more than three pushes;
332 that starts adding too many displaced addresses
333 and the whole thing starts becoming a losing
334 proposition. */
335 while (pushes < 3)
336 {
337 rtx pbody, dest;
338 p = next_nonnote_insn (p);
339 if (p == 0 || GET_CODE (p) != INSN)
340 break;
341 pbody = PATTERN (p);
342 if (GET_CODE (pbody) != SET)
343 break;
344 dest = SET_DEST (pbody);
345 /* Allow a no-op move between the adjust and the push. */
346 if (GET_CODE (dest) == REG
347 && GET_CODE (SET_SRC (pbody)) == REG
348 && REGNO (dest) == REGNO (SET_SRC (pbody)))
349 continue;
350 if (! (GET_CODE (dest) == MEM
351 && GET_CODE (XEXP (dest, 0)) == POST_INC
352 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
353 break;
354 pushes++;
355 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
356 > stack_adjust_amount)
357 break;
358 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
359 }
360
361 /* Discard the amount pushed from the stack adjust;
362 maybe eliminate it entirely. */
363 if (total_pushed >= stack_adjust_amount)
364 {
365 delete_computation (stack_adjust_insn);
366 total_pushed = stack_adjust_amount;
367 }
368 else
369 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
370 = GEN_INT (stack_adjust_amount - total_pushed);
371
372 /* Change the appropriate push insns to ordinary stores. */
373 p = insn;
374 while (total_pushed > 0)
375 {
376 rtx pbody, dest;
377 p = next_nonnote_insn (p);
378 if (GET_CODE (p) != INSN)
379 break;
380 pbody = PATTERN (p);
381 if (GET_CODE (pbody) != SET)
382 break;
383 dest = SET_DEST (pbody);
384 if (! (GET_CODE (dest) == MEM
385 && GET_CODE (XEXP (dest, 0)) == POST_INC
386 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
387 break;
388 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
389 /* If this push doesn't fully fit in the space
390 of the stack adjust that we deleted,
391 make another stack adjust here for what we
392 didn't use up. There should be peepholes
393 to recognize the resulting sequence of insns. */
394 if (total_pushed < 0)
395 {
396 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
397 GEN_INT (- total_pushed)),
398 p);
399 break;
400 }
401 XEXP (dest, 0)
402 = plus_constant (stack_pointer_rtx, total_pushed);
403 }
404 }
405 #endif
406
407 /* Detect and delete no-op move instructions
408 resulting from not allocating a parameter in a register. */
409
410 if (GET_CODE (body) == SET
411 && (SET_DEST (body) == SET_SRC (body)
412 || (GET_CODE (SET_DEST (body)) == MEM
413 && GET_CODE (SET_SRC (body)) == MEM
414 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
415 && ! (GET_CODE (SET_DEST (body)) == MEM
416 && MEM_VOLATILE_P (SET_DEST (body)))
417 && ! (GET_CODE (SET_SRC (body)) == MEM
418 && MEM_VOLATILE_P (SET_SRC (body))))
419 delete_computation (insn);
420
421 /* Detect and ignore no-op move instructions
422 resulting from smart or fortuitous register allocation. */
423
424 else if (GET_CODE (body) == SET)
425 {
426 int sreg = true_regnum (SET_SRC (body));
427 int dreg = true_regnum (SET_DEST (body));
428
429 if (sreg == dreg && sreg >= 0)
430 delete_insn (insn);
431 else if (sreg >= 0 && dreg >= 0)
432 {
433 rtx trial;
434 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
435 sreg, NULL_PTR, dreg,
436 GET_MODE (SET_SRC (body)));
437
438 #ifdef PRESERVE_DEATH_INFO_REGNO_P
439 /* Deleting insn could lose a death-note for SREG or DREG
440 so don't do it if final needs accurate death-notes. */
441 if (! PRESERVE_DEATH_INFO_REGNO_P (sreg)
442 && ! PRESERVE_DEATH_INFO_REGNO_P (dreg))
443 #endif
444 {
445 /* DREG may have been the target of a REG_DEAD note in
446 the insn which makes INSN redundant. If so, reorg
447 would still think it is dead. So search for such a
448 note and delete it if we find it. */
449 for (trial = prev_nonnote_insn (insn);
450 trial && GET_CODE (trial) != CODE_LABEL;
451 trial = prev_nonnote_insn (trial))
452 if (find_regno_note (trial, REG_DEAD, dreg))
453 {
454 remove_death (dreg, trial);
455 break;
456 }
457
458 if (tem != 0
459 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
460 delete_insn (insn);
461 }
462 }
463 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
464 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
465 NULL_PTR, 0,
466 GET_MODE (SET_DEST (body))))
467 {
468 /* This handles the case where we have two consecutive
469 assignments of the same constant to pseudos that didn't
470 get a hard reg. Each SET from the constant will be
471 converted into a SET of the spill register and an
472 output reload will be made following it. This produces
473 two loads of the same constant into the same spill
474 register. */
475
476 rtx in_insn = insn;
477
478 /* Look back for a death note for the first reg.
479 If there is one, it is no longer accurate. */
480 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
481 {
482 if ((GET_CODE (in_insn) == INSN
483 || GET_CODE (in_insn) == JUMP_INSN)
484 && find_regno_note (in_insn, REG_DEAD, dreg))
485 {
486 remove_death (dreg, in_insn);
487 break;
488 }
489 in_insn = PREV_INSN (in_insn);
490 }
491
492 /* Delete the second load of the value. */
493 delete_insn (insn);
494 }
495 }
496 else if (GET_CODE (body) == PARALLEL)
497 {
498 /* If each part is a set between two identical registers or
499 a USE or CLOBBER, delete the insn. */
500 int i, sreg, dreg;
501 rtx tem;
502
503 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
504 {
505 tem = XVECEXP (body, 0, i);
506 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
507 continue;
508
509 if (GET_CODE (tem) != SET
510 || (sreg = true_regnum (SET_SRC (tem))) < 0
511 || (dreg = true_regnum (SET_DEST (tem))) < 0
512 || dreg != sreg)
513 break;
514 }
515
516 if (i < 0)
517 delete_insn (insn);
518 }
519 #if !BYTES_BIG_ENDIAN /* Not worth the hair to detect this
520 in the big-endian case. */
521 /* Also delete insns to store bit fields if they are no-ops. */
522 else if (GET_CODE (body) == SET
523 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
524 && XEXP (SET_DEST (body), 2) == const0_rtx
525 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
526 && ! (GET_CODE (SET_SRC (body)) == MEM
527 && MEM_VOLATILE_P (SET_SRC (body))))
528 delete_insn (insn);
529 #endif /* not BYTES_BIG_ENDIAN */
530 }
531 insn = next;
532 }
533
534 /* If we haven't yet gotten to reload and we have just run regscan,
535 delete any insn that sets a register that isn't used elsewhere.
536 This helps some of the optimizations below by having fewer insns
537 being jumped around. */
538
539 if (! reload_completed && after_regscan)
540 for (insn = f; insn; insn = next)
541 {
542 rtx set = single_set (insn);
543
544 next = NEXT_INSN (insn);
545
546 if (set && GET_CODE (SET_DEST (set)) == REG
547 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
548 && regno_first_uid[REGNO (SET_DEST (set))] == INSN_UID (insn)
549 /* We use regno_last_note_uid so as not to delete the setting
550 of a reg that's used in notes. A subsequent optimization
551 might arrange to use that reg for real. */
552 && regno_last_note_uid[REGNO (SET_DEST (set))] == INSN_UID (insn)
553 && ! side_effects_p (SET_SRC (set))
554 && ! find_reg_note (insn, REG_RETVAL, 0))
555 delete_insn (insn);
556 }
557
558 /* Now iterate optimizing jumps until nothing changes over one pass. */
559 changed = 1;
560 while (changed)
561 {
562 changed = 0;
563
564 for (insn = f; insn; insn = next)
565 {
566 rtx reallabelprev;
567 rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
568 rtx nlabel;
569 int this_is_simplejump, this_is_condjump, reversep;
570 int this_is_condjump_in_parallel;
571 #if 0
572 /* If NOT the first iteration, if this is the last jump pass
573 (just before final), do the special peephole optimizations.
574 Avoiding the first iteration gives ordinary jump opts
575 a chance to work before peephole opts. */
576
577 if (reload_completed && !first && !flag_no_peephole)
578 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
579 peephole (insn);
580 #endif
581
582 /* That could have deleted some insns after INSN, so check now
583 what the following insn is. */
584
585 next = NEXT_INSN (insn);
586
587 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
588 jump. Try to optimize by duplicating the loop exit test if so.
589 This is only safe immediately after regscan, because it uses
590 the values of regno_first_uid and regno_last_uid. */
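/* A sketch of the layout being matched and the result (hypothetical
   source "while (c) body;"):

       NOTE_INSN_LOOP_BEG; goto TEST; TOP: body; TEST: if (c) goto TOP;

   duplicating the exit test before the entry jump gives roughly

       if (!c) goto END; TOP: body; if (c) goto TOP; END:

   so the loop is entered without the initial unconditional jump.  */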
591 if (after_regscan && GET_CODE (insn) == NOTE
592 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
593 && (temp1 = next_nonnote_insn (insn)) != 0
594 && simplejump_p (temp1))
595 {
596 temp = PREV_INSN (insn);
597 if (duplicate_loop_exit_test (insn))
598 {
599 changed = 1;
600 next = NEXT_INSN (temp);
601 continue;
602 }
603 }
604
605 if (GET_CODE (insn) != JUMP_INSN)
606 continue;
607
608 this_is_simplejump = simplejump_p (insn);
609 this_is_condjump = condjump_p (insn);
610 this_is_condjump_in_parallel = condjump_in_parallel_p (insn);
611
612 /* Tension the labels in dispatch tables. */
613
614 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
615 changed |= tension_vector_labels (PATTERN (insn), 0);
616 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
617 changed |= tension_vector_labels (PATTERN (insn), 1);
618
619 /* If a dispatch table always goes to the same place,
620 get rid of it and replace the insn that uses it. */
621
622 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
623 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
624 {
625 int i;
626 rtx pat = PATTERN (insn);
627 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
628 int len = XVECLEN (pat, diff_vec_p);
629 rtx dispatch = prev_real_insn (insn);
630
631 for (i = 0; i < len; i++)
632 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
633 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
634 break;
635 if (i == len
636 && dispatch != 0
637 && GET_CODE (dispatch) == JUMP_INSN
638 && JUMP_LABEL (dispatch) != 0
639 /* Don't mess with a casesi insn. */
640 && !(GET_CODE (PATTERN (dispatch)) == SET
641 && (GET_CODE (SET_SRC (PATTERN (dispatch)))
642 == IF_THEN_ELSE))
643 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
644 {
645 redirect_tablejump (dispatch,
646 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
647 changed = 1;
648 }
649 }
650
651 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
652
653 /* If a jump references the end of the function, try to turn
654 it into a RETURN insn, possibly a conditional one. */
655 if (JUMP_LABEL (insn)
656 && (next_active_insn (JUMP_LABEL (insn)) == 0
657 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
658 == RETURN))
659 changed |= redirect_jump (insn, NULL_RTX);
660
661 /* Detect jump to following insn. */
662 if (reallabelprev == insn && condjump_p (insn))
663 {
664 delete_jump (insn);
665 changed = 1;
666 continue;
667 }
668
669 /* If we have an unconditional jump preceded by a USE, try to put
670 the USE before the target and jump there. This simplifies many
671 of the optimizations below since we don't have to worry about
672 dealing with these USE insns. We only do this if the label
673 being branch to already has the identical USE or if code
674 never falls through to that label. */
675
676 if (this_is_simplejump
677 && (temp = prev_nonnote_insn (insn)) != 0
678 && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
679 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
680 && (GET_CODE (temp1) == BARRIER
681 || (GET_CODE (temp1) == INSN
682 && rtx_equal_p (PATTERN (temp), PATTERN (temp1)))))
683 {
684 if (GET_CODE (temp1) == BARRIER)
685 {
686 emit_insn_after (PATTERN (temp), temp1);
687 temp1 = NEXT_INSN (temp1);
688 }
689
690 delete_insn (temp);
691 redirect_jump (insn, get_label_before (temp1));
692 reallabelprev = prev_real_insn (temp1);
693 changed = 1;
694 }
695
696 /* Simplify if (...) x = a; else x = b; by converting it
697 to x = b; if (...) x = a;
698 if B is sufficiently simple, the test doesn't involve X,
699 and nothing in the test modifies B or X.
700
701 If we have small register classes, we also can't do this if X
702 is a hard register.
703
704 If the "x = b;" insn has any REG_NOTES, we don't do this because
705 of the possibility that we are running after CSE and there is a
706 REG_EQUAL note that is only valid if the branch has already been
707 taken. If we move the insn with the REG_EQUAL note, we may
708 fold the comparison to always be false in a later CSE pass.
709 (We could also delete the REG_NOTES when moving the insn, but it
710 seems simpler to not move it.) An exception is that we can move
711 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
712 value is the same as "b".
713
714 INSN is the branch over the `else' part.
715
716 We set:
717
718 TEMP to the jump insn preceding "x = a;"
719 TEMP1 to X
720 TEMP2 to the insn that sets "x = b;"
721 TEMP3 to the insn that sets "x = a;"
722 TEMP4 to the set of "x = b"; */
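/* Insn-stream sketch of the pattern matched below (labels and
   condition hypothetical):

       TEMP:   if (cond) goto L1;     skips "x = a;" and INSN
       TEMP3:  x = a;
       INSN:   goto L2;               the branch over the `else' part
       L1:
       TEMP2:  x = b;
       L2:     ...

   which, as described above, becomes in effect
       x = b;  if (...) x = a;
   with INSN and the original "x = b;" insn deleted.  */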
723
724 if (this_is_simplejump
725 && (temp3 = prev_active_insn (insn)) != 0
726 && GET_CODE (temp3) == INSN
727 && (temp4 = single_set (temp3)) != 0
728 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
729 #ifdef SMALL_REGISTER_CLASSES
730 && REGNO (temp1) >= FIRST_PSEUDO_REGISTER
731 #endif
732 && (temp2 = next_active_insn (insn)) != 0
733 && GET_CODE (temp2) == INSN
734 && (temp4 = single_set (temp2)) != 0
735 && rtx_equal_p (SET_DEST (temp4), temp1)
736 && (GET_CODE (SET_SRC (temp4)) == REG
737 || GET_CODE (SET_SRC (temp4)) == SUBREG
738 || CONSTANT_P (SET_SRC (temp4)))
739 && (REG_NOTES (temp2) == 0
740 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
741 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
742 && XEXP (REG_NOTES (temp2), 1) == 0
743 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
744 SET_SRC (temp4))))
745 && (temp = prev_active_insn (temp3)) != 0
746 && condjump_p (temp) && ! simplejump_p (temp)
747 /* TEMP must skip over the "x = a;" insn. */
748 && prev_real_insn (JUMP_LABEL (temp)) == insn
749 && no_labels_between_p (insn, JUMP_LABEL (temp))
750 /* There must be no other entries to the "x = b;" insn. */
751 && no_labels_between_p (JUMP_LABEL (temp), temp2)
752 /* INSN must either branch to the insn after TEMP2 or the insn
753 after TEMP2 must branch to the same place as INSN. */
754 && (reallabelprev == temp2
755 || ((temp5 = next_active_insn (temp2)) != 0
756 && simplejump_p (temp5)
757 && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
758 {
759 /* The test expression, X, may be a complicated test with
760 multiple branches. See if we can find all the uses of
761 the label that TEMP branches to without hitting a CALL_INSN
762 or a jump to somewhere else. */
763 rtx target = JUMP_LABEL (temp);
764 int nuses = LABEL_NUSES (target);
765 rtx p, q;
766
767 /* Set P to the first jump insn that goes around "x = a;". */
768 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
769 {
770 if (GET_CODE (p) == JUMP_INSN)
771 {
772 if (condjump_p (p) && ! simplejump_p (p)
773 && JUMP_LABEL (p) == target)
774 {
775 nuses--;
776 if (nuses == 0)
777 break;
778 }
779 else
780 break;
781 }
782 else if (GET_CODE (p) == CALL_INSN)
783 break;
784 }
785
786 #ifdef HAVE_cc0
787 /* We cannot insert anything between a set of cc and its use
788 so if P uses cc0, we must back up to the previous insn. */
789 q = prev_nonnote_insn (p);
790 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
791 && sets_cc0_p (PATTERN (q)))
792 p = q;
793 #endif
794
795 if (p)
796 p = PREV_INSN (p);
797
798 /* If we found all the uses and there was no data conflict, we
799 can move the assignment unless we can branch into the middle
800 from somewhere. */
801 if (nuses == 0 && p
802 && no_labels_between_p (p, insn)
803 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
804 && ! reg_set_between_p (temp1, p, temp3)
805 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
806 || ! reg_set_between_p (SET_SRC (temp4), p, temp2)))
807 {
808 emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
809 delete_insn (temp2);
810
811 /* Set NEXT to an insn that we know won't go away. */
812 next = next_active_insn (insn);
813
814 /* Delete the jump around the set. Note that we must do
815 this before we redirect the test jumps so that it won't
816 delete the code immediately following the assignment
817 we moved (which might be a jump). */
818
819 delete_insn (insn);
820
821 /* We either have two consecutive labels or a jump to
822 a jump, so adjust all the JUMP_INSNs to branch to where
823 INSN branches to. */
824 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
825 if (GET_CODE (p) == JUMP_INSN)
826 redirect_jump (p, target);
827
828 changed = 1;
829 continue;
830 }
831 }
832
833 #ifndef HAVE_cc0
834 /* If we have if (...) x = exp; and branches are expensive,
835 EXP is a single insn, does not have any side effects, cannot
836 trap, and is not too costly, convert this to
837 t = exp; if (...) x = t;
838
839 Don't do this when we have CC0 because it is unlikely to help
840 and we'd need to worry about where to place the new insn and
841 the potential for conflicts. We also can't do this when we have
842 notes on the insn for the same reason as above.
843
844 We set:
845
846 TEMP to the "x = exp;" insn.
847 TEMP1 to the single set in the "x = exp;" insn.
848 TEMP2 to "x". */
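/* A source-level sketch (names hypothetical): with BRANCH_COST >= 3,

       if (cond) x = y + z;

   becomes

       t = y + z;  if (cond) x = t;

   where T is a fresh pseudo, so the branch guards only a register
   copy, which is the form the store-flag cases below know how to
   handle.  */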
849
850 if (! reload_completed
851 && this_is_condjump && ! this_is_simplejump
852 && BRANCH_COST >= 3
853 && (temp = next_nonnote_insn (insn)) != 0
854 && GET_CODE (temp) == INSN
855 && REG_NOTES (temp) == 0
856 && (reallabelprev == temp
857 || ((temp2 = next_active_insn (temp)) != 0
858 && simplejump_p (temp2)
859 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
860 && (temp1 = single_set (temp)) != 0
861 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
862 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
863 #ifdef SMALL_REGISTER_CLASSES
864 && REGNO (temp2) >= FIRST_PSEUDO_REGISTER
865 #endif
866 && GET_CODE (SET_SRC (temp1)) != REG
867 && GET_CODE (SET_SRC (temp1)) != SUBREG
868 && GET_CODE (SET_SRC (temp1)) != CONST_INT
869 && ! side_effects_p (SET_SRC (temp1))
870 && ! may_trap_p (SET_SRC (temp1))
871 && rtx_cost (SET_SRC (temp1)) < 10)
872 {
873 rtx new = gen_reg_rtx (GET_MODE (temp2));
874
875 if (validate_change (temp, &SET_DEST (temp1), new, 0))
876 {
877 next = emit_insn_after (gen_move_insn (temp2, new), insn);
878 emit_insn_after_with_line_notes (PATTERN (temp),
879 PREV_INSN (insn), temp);
880 delete_insn (temp);
881 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
882 }
883 }
884
885 /* Similarly, if it takes two insns to compute EXP but they
886 have the same destination. Here TEMP3 will be the second
887 insn and TEMP4 the SET from that insn. */
888
889 if (! reload_completed
890 && this_is_condjump && ! this_is_simplejump
891 && BRANCH_COST >= 4
892 && (temp = next_nonnote_insn (insn)) != 0
893 && GET_CODE (temp) == INSN
894 && REG_NOTES (temp) == 0
895 && (temp3 = next_nonnote_insn (temp)) != 0
896 && GET_CODE (temp3) == INSN
897 && REG_NOTES (temp3) == 0
898 && (reallabelprev == temp3
899 || ((temp2 = next_active_insn (temp3)) != 0
900 && simplejump_p (temp2)
901 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
902 && (temp1 = single_set (temp)) != 0
903 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
904 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
905 #ifdef SMALL_REGISTER_CLASSES
906 && REGNO (temp2) >= FIRST_PSEUDO_REGISTER
907 #endif
908 && ! side_effects_p (SET_SRC (temp1))
909 && ! may_trap_p (SET_SRC (temp1))
910 && rtx_cost (SET_SRC (temp1)) < 10
911 && (temp4 = single_set (temp3)) != 0
912 && rtx_equal_p (SET_DEST (temp4), temp2)
913 && ! side_effects_p (SET_SRC (temp4))
914 && ! may_trap_p (SET_SRC (temp4))
915 && rtx_cost (SET_SRC (temp4)) < 10)
916 {
917 rtx new = gen_reg_rtx (GET_MODE (temp2));
918
919 if (validate_change (temp, &SET_DEST (temp1), new, 0))
920 {
921 next = emit_insn_after (gen_move_insn (temp2, new), insn);
922 emit_insn_after_with_line_notes (PATTERN (temp),
923 PREV_INSN (insn), temp);
924 emit_insn_after_with_line_notes
925 (replace_rtx (PATTERN (temp3), temp2, new),
926 PREV_INSN (insn), temp3);
927 delete_insn (temp);
928 delete_insn (temp3);
929 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
930 }
931 }
932
933 /* Finally, handle the case where two insns are used to
934 compute EXP but a temporary register is used. Here we must
935 ensure that the temporary register is not used anywhere else. */
936
937 if (! reload_completed
938 && after_regscan
939 && this_is_condjump && ! this_is_simplejump
940 && BRANCH_COST >= 4
941 && (temp = next_nonnote_insn (insn)) != 0
942 && GET_CODE (temp) == INSN
943 && REG_NOTES (temp) == 0
944 && (temp3 = next_nonnote_insn (temp)) != 0
945 && GET_CODE (temp3) == INSN
946 && REG_NOTES (temp3) == 0
947 && (reallabelprev == temp3
948 || ((temp2 = next_active_insn (temp3)) != 0
949 && simplejump_p (temp2)
950 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
951 && (temp1 = single_set (temp)) != 0
952 && (temp5 = SET_DEST (temp1),
953 (GET_CODE (temp5) == REG
954 || (GET_CODE (temp5) == SUBREG
955 && (temp5 = SUBREG_REG (temp5),
956 GET_CODE (temp5) == REG))))
957 && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
958 && regno_first_uid[REGNO (temp5)] == INSN_UID (temp)
959 && regno_last_uid[REGNO (temp5)] == INSN_UID (temp3)
960 && ! side_effects_p (SET_SRC (temp1))
961 && ! may_trap_p (SET_SRC (temp1))
962 && rtx_cost (SET_SRC (temp1)) < 10
963 && (temp4 = single_set (temp3)) != 0
964 && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
965 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
966 #ifdef SMALL_REGISTER_CLASSES
967 && REGNO (temp2) >= FIRST_PSEUDO_REGISTER
968 #endif
969 && rtx_equal_p (SET_DEST (temp4), temp2)
970 && ! side_effects_p (SET_SRC (temp4))
971 && ! may_trap_p (SET_SRC (temp4))
972 && rtx_cost (SET_SRC (temp4)) < 10)
973 {
974 rtx new = gen_reg_rtx (GET_MODE (temp2));
975
976 if (validate_change (temp3, &SET_DEST (temp4), new, 0))
977 {
978 next = emit_insn_after (gen_move_insn (temp2, new), insn);
979 emit_insn_after_with_line_notes (PATTERN (temp),
980 PREV_INSN (insn), temp);
981 emit_insn_after_with_line_notes (PATTERN (temp3),
982 PREV_INSN (insn), temp3);
983 delete_insn (temp);
984 delete_insn (temp3);
985 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
986 }
987 }
988 #endif /* HAVE_cc0 */
989
990 /* We deal with five cases:
991
992 1) x = a; if (...) x = b; and either A or B is zero,
993 2) if (...) x = 0; and jumps are expensive,
994 3) x = a; if (...) x = b; and A and B are constants where all the
995 set bits in A are also set in B and jumps are expensive, and
996 4) x = a; if (...) x = b; and A and B are non-zero, and jumps are
997 more expensive, and
998 5) if (...) x = b; and jumps are even more expensive.
999
1000 In each of these try to use a store-flag insn to avoid the jump.
1001 (If the jump would be faster, the machine should not have
1002 defined the scc insns!). These cases are often made by the
1003 previous optimization.
1004
1005 INSN here is the jump around the store. We set:
1006
1007 TEMP to the "x = b;" insn.
1008 TEMP1 to X.
1009 TEMP2 to B (const0_rtx in the second case).
1010 TEMP3 to A (X in the second case).
1011 TEMP4 to the condition being tested.
1012 TEMP5 to the earliest insn used to find the condition. */
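/* A worked sketch of case 2 (source-level, hypothetical): in

       if (cond) goto L;  x = 0;  L: ...

   the store happens only when COND is false, so with a store-flag
   insn normalized to -1 it becomes the branch-free

       x = x & -(cond != 0);

   COND true leaves X unchanged; COND false yields x = 0.  */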
1013
1014 if (/* We can't do this after reload has completed. */
1015 ! reload_completed
1016 && this_is_condjump && ! this_is_simplejump
1017 /* Set TEMP to the "x = b;" insn. */
1018 && (temp = next_nonnote_insn (insn)) != 0
1019 && GET_CODE (temp) == INSN
1020 && GET_CODE (PATTERN (temp)) == SET
1021 && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
1022 #ifdef SMALL_REGISTER_CLASSES
1023 && REGNO (temp1) >= FIRST_PSEUDO_REGISTER
1024 #endif
1025 && GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
1026 && (GET_CODE (temp2 = SET_SRC (PATTERN (temp))) == REG
1027 || GET_CODE (temp2) == SUBREG
1028 || GET_CODE (temp2) == CONST_INT)
1029 /* Allow either form, but prefer the former if both apply.
1030 There is no point in using the old value of TEMP1 if
1031 it is a register, since cse will alias them. It can
1032 lose if the old value were a hard register since CSE
1033 won't replace hard registers. */
1034 && (((temp3 = reg_set_last (temp1, insn)) != 0
1035 && GET_CODE (temp3) == CONST_INT)
1036 /* Make the latter case look like x = x; if (...) x = 0; */
1037 || (temp3 = temp1,
1038 ((BRANCH_COST >= 2
1039 && temp2 == const0_rtx)
1040 #ifdef HAVE_conditional_move
1041 || HAVE_conditional_move
1042 #endif
1043 || BRANCH_COST >= 3)))
1044 /* INSN must either branch to the insn after TEMP or the insn
1045 after TEMP must branch to the same place as INSN. */
1046 && (reallabelprev == temp
1047 || ((temp4 = next_active_insn (temp)) != 0
1048 && simplejump_p (temp4)
1049 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
1050 && (temp4 = get_condition (insn, &temp5)) != 0
1051 /* We must be comparing objects whose modes imply the size.
1052 We could handle BLKmode if (1) emit_store_flag could
1053 and (2) we could find the size reliably. */
1054 && GET_MODE (XEXP (temp4, 0)) != BLKmode
1055
1056 /* If B is zero, OK; if A is zero, can only do (1) if we
1057 can reverse the condition. See if (3) applies possibly
1058 by reversing the condition. Prefer reversing to (4) when
1059 branches are very expensive. */
1060 && ((reversep = 0, temp2 == const0_rtx)
1061 || (temp3 == const0_rtx
1062 && (reversep = can_reverse_comparison_p (temp4, insn)))
1063 || (BRANCH_COST >= 2
1064 && GET_CODE (temp2) == CONST_INT
1065 && GET_CODE (temp3) == CONST_INT
1066 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
1067 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
1068 && (reversep = can_reverse_comparison_p (temp4,
1069 insn)))))
1070 #ifdef HAVE_conditional_move
1071 || HAVE_conditional_move
1072 #endif
1073 || BRANCH_COST >= 3)
1074 #ifdef HAVE_cc0
1075 /* If the previous insn sets CC0 and something else, we can't
1076 do this since we are going to delete that insn. */
1077
1078 && ! ((temp6 = prev_nonnote_insn (insn)) != 0
1079 && GET_CODE (temp6) == INSN
1080 && (sets_cc0_p (PATTERN (temp6)) == -1
1081 || (sets_cc0_p (PATTERN (temp6)) == 1
1082 && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
1083 #endif
1084 )
1085 {
1086 enum rtx_code code = GET_CODE (temp4);
1087 rtx uval, cval, var = temp1;
1088 int normalizep;
1089 rtx target;
1090
1091 /* If necessary, reverse the condition. */
1092 if (reversep)
1093 code = reverse_condition (code), uval = temp2, cval = temp3;
1094 else
1095 uval = temp3, cval = temp2;
1096
1097 /* See if we can do this with a store-flag insn. */
1098 start_sequence ();
1099
1100 /* If CVAL is non-zero, normalize to -1. Otherwise,
1101 if UVAL is the constant 1, it is best to just compute
1102 the result directly. If UVAL is constant and STORE_FLAG_VALUE
1103 includes all of its bits, it is best to compute the flag
1104 value unnormalized and `and' it with UVAL. Otherwise,
1105 normalize to -1 and `and' with UVAL. */
1106 normalizep = (cval != const0_rtx ? -1
1107 : (uval == const1_rtx ? 1
1108 : (GET_CODE (uval) == CONST_INT
1109 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
1110 ? 0 : -1));
1111
1112 /* We will be putting the store-flag insn immediately in
1113 front of the comparison that was originally being done,
1114 so we know all the variables in TEMP4 will be valid.
1115 However, this might be in front of the assignment of
1116 A to VAR. If it is, it would clobber the store-flag
1117 we will be emitting.
1118
1119 Therefore, emit into a temporary which will be copied to
1120 VAR immediately after TEMP. */
1121
1122 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
1123 XEXP (temp4, 0), XEXP (temp4, 1),
1124 VOIDmode,
1125 (code == LTU || code == LEU
1126 || code == GEU || code == GTU),
1127 normalizep);
1128 if (target)
1129 {
1130 rtx before = insn;
1131 rtx seq;
1132
1133 /* Put the store-flag insns in front of the first insn
1134 used to compute the condition to ensure that we
1135 use the same values of them as the current
1136 comparison. However, the remainder of the insns we
1137 generate will be placed directly in front of the
1138 jump insn, in case any of the pseudos we use
1139 are modified earlier. */
1140
1141 seq = get_insns ();
1142 end_sequence ();
1143
1144 emit_insns_before (seq, temp5);
1145
1146 start_sequence ();
1147
1148 /* Both CVAL and UVAL are non-zero. */
1149 if (cval != const0_rtx && uval != const0_rtx)
1150 {
1151 rtx tem1, tem2;
1152
1153 tem1 = expand_and (uval, target, NULL_RTX);
1154 if (GET_CODE (cval) == CONST_INT
1155 && GET_CODE (uval) == CONST_INT
1156 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
1157 tem2 = cval;
1158 else
1159 {
1160 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
1161 target, NULL_RTX, 0);
1162 tem2 = expand_and (cval, tem2,
1163 (GET_CODE (tem2) == REG
1164 ? tem2 : 0));
1165 }
1166
1167 /* If we usually make new pseudos, do so here. This
1168 turns out to help machines that have conditional
1169 move insns. */
1170
1171 if (flag_expensive_optimizations)
1172 target = 0;
1173
1174 target = expand_binop (GET_MODE (var), ior_optab,
1175 tem1, tem2, target,
1176 1, OPTAB_WIDEN);
1177 }
1178 else if (normalizep != 1)
1179 {
1180 /* We know that either CVAL or UVAL is zero. If
1181 UVAL is zero, negate TARGET and `and' with CVAL.
1182 Otherwise, `and' with UVAL. */
1183 if (uval == const0_rtx)
1184 {
1185 target = expand_unop (GET_MODE (var), one_cmpl_optab,
1186 target, NULL_RTX, 0);
1187 uval = cval;
1188 }
1189
1190 target = expand_and (uval, target,
1191 (GET_CODE (target) == REG
1192 && ! preserve_subexpressions_p ()
1193 ? target : NULL_RTX));
1194 }
1195
1196 emit_move_insn (var, target);
1197 seq = get_insns ();
1198 end_sequence ();
1199
1200 #ifdef HAVE_cc0
1201 /* If INSN uses CC0, we must not separate it from the
1202 insn that sets cc0. */
1203
1204 if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
1205 before = prev_nonnote_insn (before);
1206 #endif
1207
1208 emit_insns_before (seq, before);
1209
1210 delete_insn (temp);
1211 next = NEXT_INSN (insn);
1212
1213 delete_jump (insn);
1214 changed = 1;
1215 continue;
1216 }
1217 else
1218 end_sequence ();
1219 }
1220
1221 /* If branches are expensive, convert
1222 if (foo) bar++; to bar += (foo != 0);
1223 and similarly for "bar--;"
1224
1225 INSN is the conditional branch around the arithmetic. We set:
1226
1227 TEMP is the arithmetic insn.
1228 TEMP1 is the SET doing the arithmetic.
1229 TEMP2 is the operand being incremented or decremented.
1230 TEMP3 is the condition being tested.
1231 TEMP4 is the earliest insn used to find the condition. */
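/* A sketch (source-level, hypothetical): "if (foo) bar++;" is laid
   out as

       if (! foo) goto L;  bar = bar + 1;  L: ...

   so the increment runs exactly when FOO is nonzero, and becomes

       bar = bar + (foo != 0);

   i.e. a store-flag of the reversed branch condition (normalized
   to 1) fed into the add; decrements use sub_optab the same way.  */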
1232
1233 if ((BRANCH_COST >= 2
1234 #ifdef HAVE_incscc
1235 || HAVE_incscc
1236 #endif
1237 #ifdef HAVE_decscc
1238 || HAVE_decscc
1239 #endif
1240 )
1241 && ! reload_completed
1242 && this_is_condjump && ! this_is_simplejump
1243 && (temp = next_nonnote_insn (insn)) != 0
1244 && (temp1 = single_set (temp)) != 0
1245 && (temp2 = SET_DEST (temp1),
1246 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
1247 && GET_CODE (SET_SRC (temp1)) == PLUS
1248 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
1249 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
1250 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
1251 /* INSN must either branch to the insn after TEMP or the insn
1252 after TEMP must branch to the same place as INSN. */
1253 && (reallabelprev == temp
1254 || ((temp3 = next_active_insn (temp)) != 0
1255 && simplejump_p (temp3)
1256 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
1257 && (temp3 = get_condition (insn, &temp4)) != 0
1258 /* We must be comparing objects whose modes imply the size.
1259 We could handle BLKmode if (1) emit_store_flag could
1260 and (2) we could find the size reliably. */
1261 && GET_MODE (XEXP (temp3, 0)) != BLKmode
1262 && can_reverse_comparison_p (temp3, insn))
1263 {
1264 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
1265 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1266
1267 start_sequence ();
1268
1269 /* It must be the case that TEMP2 is not modified in the range
1270 [TEMP4, INSN). The one exception we make is if the insn
1271 before INSN sets TEMP2 to something which is also unchanged
1272 in that range. In that case, we can move the initialization
1273 into our sequence. */
1274
1275 if ((temp5 = prev_active_insn (insn)) != 0
1276 && GET_CODE (temp5) == INSN
1277 && (temp6 = single_set (temp5)) != 0
1278 && rtx_equal_p (temp2, SET_DEST (temp6))
1279 && (CONSTANT_P (SET_SRC (temp6))
1280 || GET_CODE (SET_SRC (temp6)) == REG
1281 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1282 {
1283 emit_insn (PATTERN (temp5));
1284 init_insn = temp5;
1285 init = SET_SRC (temp6);
1286 }
1287
1288 if (CONSTANT_P (init)
1289 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1290 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1291 XEXP (temp3, 0), XEXP (temp3, 1),
1292 VOIDmode,
1293 (code == LTU || code == LEU
1294 || code == GTU || code == GEU), 1);
1295
1296 /* If we can do the store-flag, do the addition or
1297 subtraction. */
1298
1299 if (target)
1300 target = expand_binop (GET_MODE (temp2),
1301 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1302 ? add_optab : sub_optab),
1303 temp2, target, temp2, 0, OPTAB_WIDEN);
1304
1305 if (target != 0)
1306 {
1307 /* Put the result back in temp2 in case it isn't already.
1308 Then replace the jump, possibly a CC0-setting insn in
1309 front of the jump, and TEMP, with the sequence we have
1310 made. */
1311
1312 if (target != temp2)
1313 emit_move_insn (temp2, target);
1314
1315 seq = get_insns ();
1316 end_sequence ();
1317
1318 emit_insns_before (seq, temp4);
1319 delete_insn (temp);
1320
1321 if (init_insn)
1322 delete_insn (init_insn);
1323
1324 next = NEXT_INSN (insn);
1325 #ifdef HAVE_cc0
1326 delete_insn (prev_nonnote_insn (insn));
1327 #endif
1328 delete_insn (insn);
1329 changed = 1;
1330 continue;
1331 }
1332 else
1333 end_sequence ();
1334 }
1335
1336 /* Simplify if (...) x = 1; else {...} if (x) ...
1337 We recognize this case scanning backwards as well.
1338
1339 TEMP is the assignment to x;
1340 TEMP1 is the label at the head of the second if. */
1341 /* ?? This should call get_condition to find the values being
1342 compared, instead of looking for a COMPARE insn when HAVE_cc0
1343 is not defined. This would allow it to work on the m88k. */
1344 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1345 is not defined and the condition is tested by a separate compare
1346 insn. This is because the code below assumes that the result
1347 of the compare dies in the following branch.
1348
1349 Not only that, but there might be other insns between the
1350 compare and branch whose results are live. Those insns need
1351 to be executed.
1352
1353 A way to fix this is to move the insns at JUMP_LABEL (insn)
1354 to before INSN. If we are running before flow, they will
1355 be deleted if they aren't needed. But this doesn't work
1356 well after flow.
1357
1358 This is really a special-case of jump threading, anyway. The
1359 right thing to do is to replace this and jump threading with
1360 much simpler code in cse.
1361
1362 This code has been turned off in the non-cc0 case in the
1363 meantime. */
1364
1365 #ifdef HAVE_cc0
1366 else if (this_is_simplejump
1367 /* Safe to skip USE and CLOBBER insns here
1368 since they will not be deleted. */
1369 && (temp = prev_active_insn (insn))
1370 && no_labels_between_p (temp, insn)
1371 && GET_CODE (temp) == INSN
1372 && GET_CODE (PATTERN (temp)) == SET
1373 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1374 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1375 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1376 /* If we find that the next value tested is `x'
1377 (TEMP1 is the insn where this happens), win. */
1378 && GET_CODE (temp1) == INSN
1379 && GET_CODE (PATTERN (temp1)) == SET
1380 #ifdef HAVE_cc0
1381 /* Does temp1 `tst' the value of x? */
1382 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1383 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1384 && (temp1 = next_nonnote_insn (temp1))
1385 #else
1386 /* Does temp1 compare the value of x against zero? */
1387 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1388 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1389 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1390 == SET_DEST (PATTERN (temp)))
1391 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1392 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1393 #endif
1394 && condjump_p (temp1))
1395 {
1396 /* Get the if_then_else from the condjump. */
1397 rtx choice = SET_SRC (PATTERN (temp1));
1398 if (GET_CODE (choice) == IF_THEN_ELSE)
1399 {
1400 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1401 rtx val = SET_SRC (PATTERN (temp));
1402 rtx cond
1403 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1404 val, const0_rtx);
1405 rtx ultimate;
1406
1407 if (cond == const_true_rtx)
1408 ultimate = XEXP (choice, 1);
1409 else if (cond == const0_rtx)
1410 ultimate = XEXP (choice, 2);
1411 else
1412 ultimate = 0;
1413
1414 if (ultimate == pc_rtx)
1415 ultimate = get_label_after (temp1);
1416 else if (ultimate && GET_CODE (ultimate) != RETURN)
1417 ultimate = XEXP (ultimate, 0);
1418
1419 if (ultimate)
1420 changed |= redirect_jump (insn, ultimate);
1421 }
1422 }
1423 #endif
1424
1425 #if 0
1426 /* @@ This needs a bit of work before it will be right.
1427
1428 Any type of comparison can be accepted for the first and
1429 second compare. When rewriting the first jump, we must
1430 compute what conditions can reach label3, and use the
1431 appropriate code. We cannot simply reverse/swap the code
1432 of the first jump. In some cases, the second jump must be
1433 rewritten also.
1434
1435 For example,
1436 < == converts to > ==
1437 < != converts to == >
1438 etc.
1439
1440 If the code is written to only accept an '==' test for the second
1441 compare, then all that needs to be done is to swap the condition
1442 of the first branch.
1443
1444 It is questionable whether we want this optimization anyway,
1445 since if the user wrote code like this because he/she knew that
1446 the jump to label1 is taken most of the time, then rewriting
1447 this gives slower code. */
1448 /* @@ This should call get_condition to find the values being
1449 compared, instead of looking for a COMPARE insn when HAVE_cc0
1450 is not defined. This would allow it to work on the m88k. */
1451 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1452 is not defined and the condition is tested by a separate compare
1453 insn. This is because the code below assumes that the result
1454 of the compare dies in the following branch. */
1455
1456 /* Simplify test a ~= b
1457 condjump label1;
1458 test a == b
1459 condjump label2;
1460 jump label3;
1461 label1:
1462
1463 rewriting as
1464 test a ~~= b
1465 condjump label3
1466 test a == b
1467 condjump label2
1468 label1:
1469
1470 where ~= is an inequality, e.g. >, and ~~= is the swapped
1471 inequality, e.g. <.
1472
1473 We recognize this case scanning backwards.
1474
1475 TEMP is the conditional jump to `label2';
1476 TEMP1 is the test for `a == b';
1477 TEMP2 is the conditional jump to `label1';
1478 TEMP3 is the test for `a ~= b'. */
1479 else if (this_is_simplejump
1480 && (temp = prev_active_insn (insn))
1481 && no_labels_between_p (temp, insn)
1482 && condjump_p (temp)
1483 && (temp1 = prev_active_insn (temp))
1484 && no_labels_between_p (temp1, temp)
1485 && GET_CODE (temp1) == INSN
1486 && GET_CODE (PATTERN (temp1)) == SET
1487 #ifdef HAVE_cc0
1488 && sets_cc0_p (PATTERN (temp1)) == 1
1489 #else
1490 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1491 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1492 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1493 #endif
1494 && (temp2 = prev_active_insn (temp1))
1495 && no_labels_between_p (temp2, temp1)
1496 && condjump_p (temp2)
1497 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1498 && (temp3 = prev_active_insn (temp2))
1499 && no_labels_between_p (temp3, temp2)
1500 && GET_CODE (PATTERN (temp3)) == SET
1501 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1502 SET_DEST (PATTERN (temp1)))
1503 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1504 SET_SRC (PATTERN (temp3)))
1505 && ! inequality_comparisons_p (PATTERN (temp))
1506 && inequality_comparisons_p (PATTERN (temp2)))
1507 {
1508 rtx fallthrough_label = JUMP_LABEL (temp2);
1509
1510 ++LABEL_NUSES (fallthrough_label);
1511 if (swap_jump (temp2, JUMP_LABEL (insn)))
1512 {
1513 delete_insn (insn);
1514 changed = 1;
1515 }
1516
1517 if (--LABEL_NUSES (fallthrough_label) == 0)
1518 delete_insn (fallthrough_label);
1519 }
1520 #endif
1521 /* Simplify if (...) {... x = 1;} if (x) ...
1522
1523 We recognize this case backwards.
1524
1525 TEMP is the test of `x';
1526 TEMP1 is the assignment to `x' at the end of the
1527 previous statement. */
1528 /* @@ This should call get_condition to find the values being
1529 compared, instead of looking for a COMPARE insn when HAVE_cc0
1530 is not defined. This would allow it to work on the m88k. */
1531 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1532 is not defined and the condition is tested by a separate compare
1533 insn. This is because the code below assumes that the result
1534 of the compare dies in the following branch. */
1535
1536 /* ??? This has to be turned off. The problem is that the
1537 unconditional jump might indirectly end up branching to the
1538 label between TEMP1 and TEMP. We can't detect this, in general,
1539 since it may become a jump to there after further optimizations.
1540 If that jump is done, it will be deleted, so we will retry
1541 this optimization in the next pass, thus an infinite loop.
1542
1543 The present code prevents this by putting the jump after the
1544 label, but this is not logically correct. */
1545 #if 0
1546 else if (this_is_condjump
1547 /* Safe to skip USE and CLOBBER insns here
1548 since they will not be deleted. */
1549 && (temp = prev_active_insn (insn))
1550 && no_labels_between_p (temp, insn)
1551 && GET_CODE (temp) == INSN
1552 && GET_CODE (PATTERN (temp)) == SET
1553 #ifdef HAVE_cc0
1554 && sets_cc0_p (PATTERN (temp)) == 1
1555 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1556 #else
1557 /* TEMP must be a compare insn; we cannot accept a register
1558 to register move here, since it may not be simply a
1559 tst insn. */
1560 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1561 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1562 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1563 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1564 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1565 #endif
1566 /* May skip USE or CLOBBER insns here
1567 for checking for opportunity, since we
1568 take care of them later. */
1569 && (temp1 = prev_active_insn (temp))
1570 && GET_CODE (temp1) == INSN
1571 && GET_CODE (PATTERN (temp1)) == SET
1572 #ifdef HAVE_cc0
1573 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1574 #else
1575 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1576 == SET_DEST (PATTERN (temp1)))
1577 #endif
1578 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1579 /* If this isn't true, cse will do the job. */
1580 && ! no_labels_between_p (temp1, temp))
1581 {
1582 /* Get the if_then_else from the condjump. */
1583 rtx choice = SET_SRC (PATTERN (insn));
1584 if (GET_CODE (choice) == IF_THEN_ELSE
1585 && (GET_CODE (XEXP (choice, 0)) == EQ
1586 || GET_CODE (XEXP (choice, 0)) == NE))
1587 {
1588 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1589 rtx last_insn;
1590 rtx ultimate;
1591 rtx p;
1592
1593 /* Get the place that condjump will jump to
1594 if it is reached from here. */
1595 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1596 == want_nonzero)
1597 ultimate = XEXP (choice, 1);
1598 else
1599 ultimate = XEXP (choice, 2);
1600 /* Get it as a CODE_LABEL. */
1601 if (ultimate == pc_rtx)
1602 ultimate = get_label_after (insn);
1603 else
1604 /* Get the label out of the LABEL_REF. */
1605 ultimate = XEXP (ultimate, 0);
1606
1607 /* Insert the jump immediately before TEMP, specifically
1608 after the label that is between TEMP1 and TEMP. */
1609 last_insn = PREV_INSN (temp);
1610
1611 /* If we would be branching to the next insn, the jump
1612 would immediately be deleted and then re-inserted in
1613 a subsequent pass over the code. So don't do anything
1614 in that case. */
1615 if (next_active_insn (last_insn)
1616 != next_active_insn (ultimate))
1617 {
1618 emit_barrier_after (last_insn);
1619 p = emit_jump_insn_after (gen_jump (ultimate),
1620 last_insn);
1621 JUMP_LABEL (p) = ultimate;
1622 ++LABEL_NUSES (ultimate);
1623 if (INSN_UID (ultimate) < max_jump_chain
1624 && INSN_CODE (p) < max_jump_chain)
1625 {
1626 jump_chain[INSN_UID (p)]
1627 = jump_chain[INSN_UID (ultimate)];
1628 jump_chain[INSN_UID (ultimate)] = p;
1629 }
1630 changed = 1;
1631 continue;
1632 }
1633 }
1634 }
1635 #endif
1636 /* Detect a conditional jump going to the same place
1637 as an immediately following unconditional jump. */
1638 else if (this_is_condjump
1639 && (temp = next_active_insn (insn)) != 0
1640 && simplejump_p (temp)
1641 && (next_active_insn (JUMP_LABEL (insn))
1642 == next_active_insn (JUMP_LABEL (temp))))
1643 {
1644 delete_jump (insn);
1645 changed = 1;
1646 continue;
1647 }
1648 /* Detect a conditional jump jumping over an unconditional jump. */
1649
1650 else if ((this_is_condjump || this_is_condjump_in_parallel)
1651 && ! this_is_simplejump
1652 && reallabelprev != 0
1653 && GET_CODE (reallabelprev) == JUMP_INSN
1654 && prev_active_insn (reallabelprev) == insn
1655 && no_labels_between_p (insn, reallabelprev)
1656 && simplejump_p (reallabelprev))
1657 {
1658 /* When we invert the conditional jump, we will be
1659 decrementing the usage count of its old label.
1660 Make sure that we don't delete it now because that
1661 might cause the following code to be deleted. */
1662 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1663 rtx prev_label = JUMP_LABEL (insn);
1664
1665 if (prev_label)
1666 ++LABEL_NUSES (prev_label);
1667
1668 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1669 {
1670 /* It is very likely that if there are USE insns before
1671 this jump, they hold REG_DEAD notes. These REG_DEAD
1672 notes are no longer valid due to this optimization,
1673 and will cause the life analysis of following passes
1674 (notably delayed-branch scheduling) to think that
1675 these registers are dead when they are not.
1676
1677 To prevent this trouble, we just remove the USE insns
1678 from the insn chain. */
1679
1680 while (prev_uses && GET_CODE (prev_uses) == INSN
1681 && GET_CODE (PATTERN (prev_uses)) == USE)
1682 {
1683 rtx useless = prev_uses;
1684 prev_uses = prev_nonnote_insn (prev_uses);
1685 delete_insn (useless);
1686 }
1687
1688 delete_insn (reallabelprev);
1689 next = insn;
1690 changed = 1;
1691 }
1692
1693 /* We can now safely delete the label if it is unreferenced
1694 since the delete_insn above has deleted the BARRIER. */
1695 if (prev_label && --LABEL_NUSES (prev_label) == 0)
1696 delete_insn (prev_label);
1697 continue;
1698 }
1699 else
1700 {
1701 /* Detect a jump to a jump. */
1702
1703 nlabel = follow_jumps (JUMP_LABEL (insn));
1704 if (nlabel != JUMP_LABEL (insn)
1705 && redirect_jump (insn, nlabel))
1706 {
1707 changed = 1;
1708 next = insn;
1709 }
1710
1711 /* Look for if (foo) bar; else break; */
1712 /* The insns look like this:
1713 insn = condjump label1;
1714 ...range1 (some insns)...
1715 jump label2;
1716 label1:
1717 ...range2 (some insns)...
1718 jump somewhere unconditionally
1719 label2: */
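/* A hedged sketch of the rewrite performed below (hypothetical
   labels, not from any testcase): after invert_jump and the range
   swap, the stream reads
     insn = condjump (inverted) label1;
     ...range2 (the break path, ending in its unconditional jump)...
     label1:
     ...range1 (some insns)...
     jump label2;
     label2:
   The final "jump label2" now branches to the following insn, so a
   later pass deletes it, leaving the break path out of line. */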
1720 {
1721 rtx label1 = next_label (insn);
1722 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1723 /* Don't do this optimization on the first round, so that
1724 jump-around-a-jump gets simplified before we ask here
1725 whether a jump is unconditional.
1726
1727 Also don't do it when we are called after reload since
1728 it will confuse reorg. */
1729 if (! first
1730 && (reload_completed ? ! flag_delayed_branch : 1)
1731 /* Make sure INSN is something we can invert. */
1732 && condjump_p (insn)
1733 && label1 != 0
1734 && JUMP_LABEL (insn) == label1
1735 && LABEL_NUSES (label1) == 1
1736 && GET_CODE (range1end) == JUMP_INSN
1737 && simplejump_p (range1end))
1738 {
1739 rtx label2 = next_label (label1);
1740 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1741 if (range1end != range2end
1742 && JUMP_LABEL (range1end) == label2
1743 && GET_CODE (range2end) == JUMP_INSN
1744 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1745 /* Invert the jump condition, so we
1746 still execute the same insns in each case. */
1747 && invert_jump (insn, label1))
1748 {
1749 rtx range1beg = next_active_insn (insn);
1750 rtx range2beg = next_active_insn (label1);
1751 rtx range1after, range2after;
1752 rtx range1before, range2before;
1753 rtx rangenext;
1754
1755 /* Include in each range any notes before it, to be
1756 sure that we get the line number note if any, even
1757 if there are other notes here. */
1758 while (PREV_INSN (range1beg)
1759 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
1760 range1beg = PREV_INSN (range1beg);
1761
1762 while (PREV_INSN (range2beg)
1763 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
1764 range2beg = PREV_INSN (range2beg);
1765
1766 /* Don't move NOTEs for blocks or loops; shift them
1767 outside the ranges, where they'll stay put. */
1768 range1beg = squeeze_notes (range1beg, range1end);
1769 range2beg = squeeze_notes (range2beg, range2end);
1770
1771 /* Get current surrounds of the 2 ranges. */
1772 range1before = PREV_INSN (range1beg);
1773 range2before = PREV_INSN (range2beg);
1774 range1after = NEXT_INSN (range1end);
1775 range2after = NEXT_INSN (range2end);
1776
1777 /* Splice range2 where range1 was. */
1778 NEXT_INSN (range1before) = range2beg;
1779 PREV_INSN (range2beg) = range1before;
1780 NEXT_INSN (range2end) = range1after;
1781 PREV_INSN (range1after) = range2end;
1782 /* Splice range1 where range2 was. */
1783 NEXT_INSN (range2before) = range1beg;
1784 PREV_INSN (range1beg) = range2before;
1785 NEXT_INSN (range1end) = range2after;
1786 PREV_INSN (range2after) = range1end;
1787
1788 /* Check for a loop end note between the end of
1789 range2, and the next code label. If there is one,
1790 then what we have really seen is
1791 if (foo) break; end_of_loop;
1792 and moved the break sequence outside the loop.
1793 We must move the LOOP_END note to where the
1794 loop really ends now, or we will confuse loop
1795 optimization. */
1796 for (;range2after != label2; range2after = rangenext)
1797 {
1798 rangenext = NEXT_INSN (range2after);
1799 if (GET_CODE (range2after) == NOTE
1800 && (NOTE_LINE_NUMBER (range2after)
1801 == NOTE_INSN_LOOP_END))
1802 {
1803 NEXT_INSN (PREV_INSN (range2after))
1804 = rangenext;
1805 PREV_INSN (rangenext)
1806 = PREV_INSN (range2after);
1807 PREV_INSN (range2after)
1808 = PREV_INSN (range1beg);
1809 NEXT_INSN (range2after) = range1beg;
1810 NEXT_INSN (PREV_INSN (range1beg))
1811 = range2after;
1812 PREV_INSN (range1beg) = range2after;
1813 }
1814 }
1815 changed = 1;
1816 continue;
1817 }
1818 }
1819 }
1820
1821 /* Now that the jump has been tensioned,
1822 try cross jumping: check for identical code
1823 before the jump and before its target label. */
1824
1825 /* First, cross jumping of conditional jumps: */
1826
1827 if (cross_jump && condjump_p (insn))
1828 {
1829 rtx newjpos, newlpos;
1830 rtx x = prev_real_insn (JUMP_LABEL (insn));
1831
1832 /* A conditional jump may be crossjumped
1833 only if the place it jumps to follows
1834 an opposing jump that comes back here. */
1835
1836 if (x != 0 && ! jump_back_p (x, insn))
1837 /* We have no opposing jump;
1838 we cannot cross jump this insn. */
1839 x = 0;
1840
1841 newjpos = 0;
1842 /* X is nonzero if it is ok to cross jump
1843 to code before X. If so, see if the insns match. */
1844 if (x != 0)
1845 find_cross_jump (insn, x, 2,
1846 &newjpos, &newlpos);
1847
1848 if (newjpos != 0)
1849 {
1850 do_cross_jump (insn, newjpos, newlpos);
1851 /* Make the old conditional jump
1852 into an unconditional one. */
1853 SET_SRC (PATTERN (insn))
1854 = gen_rtx (LABEL_REF, VOIDmode, JUMP_LABEL (insn));
1855 INSN_CODE (insn) = -1;
1856 emit_barrier_after (insn);
1857 /* Add to jump_chain unless this is a new label
1858 whose UID is too large. */
1859 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
1860 {
1861 jump_chain[INSN_UID (insn)]
1862 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1863 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
1864 }
1865 changed = 1;
1866 next = insn;
1867 }
1868 }
1869
1870 /* Cross jumping of unconditional jumps:
1871 a few differences. */
1872
1873 if (cross_jump && simplejump_p (insn))
1874 {
1875 rtx newjpos, newlpos;
1876 rtx target;
1877
1878 newjpos = 0;
1879
1880 /* TARGET is nonzero if it is ok to cross jump
1881 to code before TARGET. If so, see if the insns match. */
1882 find_cross_jump (insn, JUMP_LABEL (insn), 1,
1883 &newjpos, &newlpos);
1884
1885 /* If we cannot cross jump to code before the label,
1886 see if we can cross jump to another jump to
1887 the same label. */
1888 /* Try each other jump to this label. */
1889 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
1890 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1891 target != 0 && newjpos == 0;
1892 target = jump_chain[INSN_UID (target)])
1893 if (target != insn
1894 && JUMP_LABEL (target) == JUMP_LABEL (insn)
1895 /* Ignore TARGET if it's deleted. */
1896 && ! INSN_DELETED_P (target))
1897 find_cross_jump (insn, target, 2,
1898 &newjpos, &newlpos);
1899
1900 if (newjpos != 0)
1901 {
1902 do_cross_jump (insn, newjpos, newlpos);
1903 changed = 1;
1904 next = insn;
1905 }
1906 }
1907
1908 /* This code was dead in the previous jump.c! */
1909 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
1910 {
1911 /* Return insns all "jump to the same place"
1912 so we can cross-jump between any two of them. */
1913
1914 rtx newjpos, newlpos, target;
1915
1916 newjpos = 0;
1917
1918 /* If we cannot cross jump to code before the label,
1919 see if we can cross jump to another jump to
1920 the same label. */
1921 /* Try each other jump to this label. */
1922 for (target = jump_chain[0];
1923 target != 0 && newjpos == 0;
1924 target = jump_chain[INSN_UID (target)])
1925 if (target != insn
1926 && ! INSN_DELETED_P (target)
1927 && GET_CODE (PATTERN (target)) == RETURN)
1928 find_cross_jump (insn, target, 2,
1929 &newjpos, &newlpos);
1930
1931 if (newjpos != 0)
1932 {
1933 do_cross_jump (insn, newjpos, newlpos);
1934 changed = 1;
1935 next = insn;
1936 }
1937 }
1938 }
1939 }
1940
1941 first = 0;
1942 }
1943
1944 /* Delete extraneous line number notes.
1945 Note that two consecutive notes for different lines are not really
1946 extraneous. There should be some indication where that line belonged,
1947 even if it became empty. */
1948
1949 {
1950 rtx last_note = 0;
1951
1952 for (insn = f; insn; insn = NEXT_INSN (insn))
1953 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
1954 {
1955 /* Delete this note if it is identical to previous note. */
1956 if (last_note
1957 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
1958 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
1959 {
1960 delete_insn (insn);
1961 continue;
1962 }
1963
1964 last_note = insn;
1965 }
1966 }
1967
1968 #ifdef HAVE_return
1969 if (HAVE_return)
1970 {
1971 /* If we fall through to the epilogue, see if we can insert a RETURN insn
1972 in front of it. If the machine allows it at this point (we might be
1973 after reload for a leaf routine), it will improve optimization for it
1974 to be there. We do this both here and at the start of this pass since
1975 the RETURN might have been deleted by some of our optimizations. */
1976 insn = get_last_insn ();
1977 while (insn && GET_CODE (insn) == NOTE)
1978 insn = PREV_INSN (insn);
1979
1980 if (insn && GET_CODE (insn) != BARRIER)
1981 {
1982 emit_jump_insn (gen_return ());
1983 emit_barrier ();
1984 }
1985 }
1986 #endif
1987
1988 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
1989 If so, delete it, and record that this function can drop off the end. */
1990
1991 insn = last_insn;
1992 {
1993 int n_labels = 1;
1994 while (insn
1995 /* One label can follow the end-note: the return label. */
1996 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
1997 /* Ordinary insns can follow it if returning a structure. */
1998 || GET_CODE (insn) == INSN
1999 /* If the machine uses explicit RETURN insns and no epilogue,
2000 then one of them follows the note. */
2001 || (GET_CODE (insn) == JUMP_INSN
2002 && GET_CODE (PATTERN (insn)) == RETURN)
2003 /* Other kinds of notes can follow also. */
2004 || (GET_CODE (insn) == NOTE
2005 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
2006 insn = PREV_INSN (insn);
2007 }
2008
2009 /* Report if control can fall through at the end of the function. */
2010 if (insn && GET_CODE (insn) == NOTE
2011 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
2012 {
2013 can_reach_end = 1;
2014 delete_insn (insn);
2015 }
2016
2017 /* Show JUMP_CHAIN no longer valid. */
2018 jump_chain = 0;
2019 }
2020 \f
2021 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2022 jump. Assume that this unconditional jump is to the exit test code. If
2023 the code is sufficiently simple, make a copy of it before INSN,
2024 followed by a jump to the exit of the loop. Then delete the unconditional
2025 jump after INSN.
2026
2027 Note that it is possible we can get confused here if the jump immediately
2028 after the loop start branches outside the loop but within an outer loop.
2029 If we are near the exit of that loop, we will copy its exit test. This
2030 will not generate incorrect code, but could suppress some optimizations.
2031 However, such cases are degenerate loops anyway.
2032
2033 Return 1 if we made the change, else 0.
2034
2035 This is only safe immediately after a regscan pass because it uses the
2036 values of regno_first_uid and regno_last_uid. */
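/* A sketch of the intended transformation, with invented labels
   (an illustration only). A loop of the form

        NOTE_INSN_LOOP_BEG
        jump L_test
     L_top:
        ...loop body...
     L_test:
        ...exit-test insns...
        if (cond) goto L_top
        NOTE_INSN_LOOP_END
     L_exit:

   becomes, with the exit test duplicated ahead of the loop,

        ...copy of the exit-test insns...
        if (cond) goto L_top
        jump L_exit
        NOTE_INSN_LOOP_BEG
     L_top:
        ...loop body...
     L_test:
        NOTE_INSN_LOOP_VTOP
        ...exit-test insns...
        if (cond) goto L_top
        NOTE_INSN_LOOP_END
     L_exit:  */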
2037
2038 static int
2039 duplicate_loop_exit_test (loop_start)
2040 rtx loop_start;
2041 {
2042 rtx insn, set, p, link;
2043 rtx copy = 0;
2044 int num_insns = 0;
2045 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2046 rtx lastexit;
2047 int max_reg = max_reg_num ();
2048 rtx *reg_map = 0;
2049
2050 /* Scan the exit code. We do not perform this optimization if any insn:
2051
2052 is a CALL_INSN
2053 is a CODE_LABEL
2054 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2055 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2056 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2057 is not valid
2058
2059 Also, don't do this if the exit code is more than 20 insns. */
2060
2061 for (insn = exitcode;
2062 insn
2063 && ! (GET_CODE (insn) == NOTE
2064 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2065 insn = NEXT_INSN (insn))
2066 {
2067 switch (GET_CODE (insn))
2068 {
2069 case CODE_LABEL:
2070 case CALL_INSN:
2071 return 0;
2072 case NOTE:
2073 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2074 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2075 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
2076 return 0;
2077 break;
2078 case JUMP_INSN:
2079 case INSN:
2080 if (++num_insns > 20
2081 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2082 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2083 return 0;
2084 break;
2085 }
2086 }
2087
2088 /* Unless INSN is zero, we can do the optimization. */
2089 if (insn == 0)
2090 return 0;
2091
2092 lastexit = insn;
2093
2094 /* See if any insn sets a register only used in the loop exit code and
2095 not a user variable. If so, replace it with a new register. */
2096 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2097 if (GET_CODE (insn) == INSN
2098 && (set = single_set (insn)) != 0
2099 && GET_CODE (SET_DEST (set)) == REG
2100 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
2101 && regno_first_uid[REGNO (SET_DEST (set))] == INSN_UID (insn))
2102 {
2103 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2104 if (regno_last_uid[REGNO (SET_DEST (set))] == INSN_UID (p))
2105 break;
2106
2107 if (p != lastexit)
2108 {
2109 /* We can do the replacement. Allocate reg_map if this is the
2110 first replacement we found. */
2111 if (reg_map == 0)
2112 {
2113 reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
2114 bzero ((char *) reg_map, max_reg * sizeof (rtx));
2115 }
2116
2117 REG_LOOP_TEST_P (SET_DEST (set)) = 1;
2118
2119 reg_map[REGNO (SET_DEST (set))]
2120 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
2121 }
2122 }
2123
2124 /* Now copy each insn. */
2125 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2126 switch (GET_CODE (insn))
2127 {
2128 case BARRIER:
2129 copy = emit_barrier_before (loop_start);
2130 break;
2131 case NOTE:
2132 /* Only copy line-number notes. */
2133 if (NOTE_LINE_NUMBER (insn) >= 0)
2134 {
2135 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2136 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2137 }
2138 break;
2139
2140 case INSN:
2141 copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2142 if (reg_map)
2143 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2144
2145 mark_jump_label (PATTERN (copy), copy, 0);
2146
2147 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2148 make them. */
2149 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2150 if (REG_NOTE_KIND (link) != REG_LABEL)
2151 REG_NOTES (copy)
2152 = copy_rtx (gen_rtx (EXPR_LIST, REG_NOTE_KIND (link),
2153 XEXP (link, 0), REG_NOTES (copy)));
2154 if (reg_map && REG_NOTES (copy))
2155 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2156 break;
2157
2158 case JUMP_INSN:
2159 copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2160 if (reg_map)
2161 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2162 mark_jump_label (PATTERN (copy), copy, 0);
2163 if (REG_NOTES (insn))
2164 {
2165 REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
2166 if (reg_map)
2167 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2168 }
2169
2170 /* If this is a simple jump, add it to the jump chain. */
2171
2172 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2173 && simplejump_p (copy))
2174 {
2175 jump_chain[INSN_UID (copy)]
2176 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2177 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2178 }
2179 break;
2180
2181 default:
2182 abort ();
2183 }
2184
2185 /* Now clean up by emitting a jump to the end label and deleting the jump
2186 at the start of the loop. */
2187 if (! copy || GET_CODE (copy) != BARRIER)
2188 {
2189 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2190 loop_start);
2191 mark_jump_label (PATTERN (copy), copy, 0);
2192 if (INSN_UID (copy) < max_jump_chain
2193 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2194 {
2195 jump_chain[INSN_UID (copy)]
2196 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2197 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2198 }
2199 emit_barrier_before (loop_start);
2200 }
2201
2202 delete_insn (next_nonnote_insn (loop_start));
2203
2204 /* Mark the exit code as the virtual top of the converted loop. */
2205 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2206
2207 return 1;
2208 }
2209 \f
2210 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2211 loop-end notes between START and END out before START. Assume that
2212 END is not such a note. START may be such a note. Returns the value
2213 of the new starting insn, which may be different if the original start
2214 was such a note. */
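/* For example (a hypothetical stream): squeezing
      START: insn1, NOTE_INSN_LOOP_BEG, insn2, ..., END
   relinks the note just before the old START, leaving
      NOTE_INSN_LOOP_BEG, START: insn1, insn2, ..., END. */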
2215
2216 rtx
2217 squeeze_notes (start, end)
2218 rtx start, end;
2219 {
2220 rtx insn;
2221 rtx next;
2222
2223 for (insn = start; insn != end; insn = next)
2224 {
2225 next = NEXT_INSN (insn);
2226 if (GET_CODE (insn) == NOTE
2227 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2228 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2229 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2230 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2231 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2232 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2233 {
2234 if (insn == start)
2235 start = next;
2236 else
2237 {
2238 rtx prev = PREV_INSN (insn);
2239 PREV_INSN (insn) = PREV_INSN (start);
2240 NEXT_INSN (insn) = start;
2241 NEXT_INSN (PREV_INSN (insn)) = insn;
2242 PREV_INSN (NEXT_INSN (insn)) = insn;
2243 NEXT_INSN (prev) = next;
2244 PREV_INSN (next) = prev;
2245 }
2246 }
2247 }
2248
2249 return start;
2250 }
2251 \f
2252 /* Compare the instructions before insn E1 with those before E2
2253 to find an opportunity for cross jumping.
2254 (This means detecting identical sequences of insns followed by
2255 jumps to the same place, or followed by a label and a jump
2256 to that label, and replacing one with a jump to the other.)
2257
2258 Assume E1 is a jump that jumps to label E2
2259 (that is not always true but it might as well be).
2260 Find the longest possible equivalent sequences
2261 and store the first insns of those sequences into *F1 and *F2.
2262 Store zero there if no equivalent preceding instructions are found.
2263
2264 We give up if we find a label in stream 1.
2265 Actually we could transfer that label into stream 2. */
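/* A hypothetical illustration (labels invented): in the stream

      A; B; C; jump L; ... A; B; C; L: ...

   the two copies of "A; B; C" match, so *F1 and *F2 are set to the
   first insn of each copy; do_cross_jump can then delete the first
   copy and retarget the jump at a new label before the second. */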
2266
2267 static void
2268 find_cross_jump (e1, e2, minimum, f1, f2)
2269 rtx e1, e2;
2270 int minimum;
2271 rtx *f1, *f2;
2272 {
2273 register rtx i1 = e1, i2 = e2;
2274 register rtx p1, p2;
2275 int lose = 0;
2276
2277 rtx last1 = 0, last2 = 0;
2278 rtx afterlast1 = 0, afterlast2 = 0;
2279 rtx prev1;
2280
2281 *f1 = 0;
2282 *f2 = 0;
2283
2284 while (1)
2285 {
2286 i1 = prev_nonnote_insn (i1);
2287
2288 i2 = PREV_INSN (i2);
2289 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2290 i2 = PREV_INSN (i2);
2291
2292 if (i1 == 0)
2293 break;
2294
2295 /* Don't allow the range of insns preceding E1 or E2
2296 to include the other (E2 or E1). */
2297 if (i2 == e1 || i1 == e2)
2298 break;
2299
2300 /* If we will get to this code by jumping, those jumps will be
2301 tensioned to go directly to the new label (before I2),
2302 so this cross-jumping won't cost extra. So reduce the minimum. */
2303 if (GET_CODE (i1) == CODE_LABEL)
2304 {
2305 --minimum;
2306 break;
2307 }
2308
2309 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2310 break;
2311
2312 p1 = PATTERN (i1);
2313 p2 = PATTERN (i2);
2314
2315 /* If this is a CALL_INSN, compare register usage information.
2316 If we don't check this on stack register machines, the two
2317 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2318 numbers of stack registers in the same basic block.
2319 If we don't check this on machines with delay slots, a delay slot may
2320 be filled that clobbers a parameter expected by the subroutine.
2321
2322 ??? We take the simple route for now and assume that if they're
2323 equal, they were constructed identically. */
2324
2325 if (GET_CODE (i1) == CALL_INSN
2326 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2327 CALL_INSN_FUNCTION_USAGE (i2)))
2328 lose = 1;
2329
2330 #ifdef STACK_REGS
2331 /* If cross_jump_death_matters is not 0, the insn's mode
2332 indicates whether or not the insn contains any stack-like
2333 regs. */
2334
2335 if (!lose && cross_jump_death_matters && GET_MODE (i1) == QImode)
2336 {
2337 /* If register stack conversion has already been done, then
2338 death notes must also be compared before it is certain that
2339 the two instruction streams match. */
2340
2341 rtx note;
2342 HARD_REG_SET i1_regset, i2_regset;
2343
2344 CLEAR_HARD_REG_SET (i1_regset);
2345 CLEAR_HARD_REG_SET (i2_regset);
2346
2347 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2348 if (REG_NOTE_KIND (note) == REG_DEAD
2349 && STACK_REG_P (XEXP (note, 0)))
2350 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2351
2352 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2353 if (REG_NOTE_KIND (note) == REG_DEAD
2354 && STACK_REG_P (XEXP (note, 0)))
2355 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2356
2357 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2358
2359 lose = 1;
2360
2361 done:
2362 ;
2363 }
2364 #endif
2365
2366 if (lose || GET_CODE (p1) != GET_CODE (p2)
2367 || ! rtx_renumbered_equal_p (p1, p2))
2368 {
2369 /* The following code helps take care of G++ cleanups. */
2370 rtx equiv1;
2371 rtx equiv2;
2372
2373 if (!lose && GET_CODE (p1) == GET_CODE (p2)
2374 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2375 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2376 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2377 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
2378 /* If the equivalences are not to a constant, they may
2379 reference pseudos that no longer exist, so we can't
2380 use them. */
2381 && CONSTANT_P (XEXP (equiv1, 0))
2382 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2383 {
2384 rtx s1 = single_set (i1);
2385 rtx s2 = single_set (i2);
2386 if (s1 != 0 && s2 != 0
2387 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
2388 {
2389 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
2390 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
2391 if (! rtx_renumbered_equal_p (p1, p2))
2392 cancel_changes (0);
2393 else if (apply_change_group ())
2394 goto win;
2395 }
2396 }
2397
2398 /* Insns fail to match; cross jumping is limited to the following
2399 insns. */
2400
2401 #ifdef HAVE_cc0
2402 /* Don't allow the insn after a compare to be shared by
2403 cross-jumping unless the compare is also shared.
2404 Here, if either of these non-matching insns is a compare,
2405 exclude the following insn from possible cross-jumping. */
2406 if (sets_cc0_p (p1) || sets_cc0_p (p2))
2407 last1 = afterlast1, last2 = afterlast2, ++minimum;
2408 #endif
2409
2410 /* If cross-jumping here will feed a jump-around-jump
2411 optimization, this jump won't cost extra, so reduce
2412 the minimum. */
2413 if (GET_CODE (i1) == JUMP_INSN
2414 && JUMP_LABEL (i1)
2415 && prev_real_insn (JUMP_LABEL (i1)) == e1)
2416 --minimum;
2417 break;
2418 }
2419
2420 win:
2421 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
2422 {
2423 /* Ok, this insn is potentially includable in a cross-jump here. */
2424 afterlast1 = last1, afterlast2 = last2;
2425 last1 = i1, last2 = i2, --minimum;
2426 }
2427 }
2428
2429 if (minimum <= 0 && last1 != 0 && last1 != e1)
2430 *f1 = last1, *f2 = last2;
2431 }
2432
2433 static void
2434 do_cross_jump (insn, newjpos, newlpos)
2435 rtx insn, newjpos, newlpos;
2436 {
2437 /* Find an existing label at this point
2438 or make a new one if there is none. */
2439 register rtx label = get_label_before (newlpos);
2440
2441 /* Make the same jump insn jump to the new point. */
2442 if (GET_CODE (PATTERN (insn)) == RETURN)
2443 {
2444 /* Remove from jump chain of returns. */
2445 delete_from_jump_chain (insn);
2446 /* Change the insn. */
2447 PATTERN (insn) = gen_jump (label);
2448 INSN_CODE (insn) = -1;
2449 JUMP_LABEL (insn) = label;
2450 LABEL_NUSES (label)++;
2451 /* Add it to the jump chain of the new label. */
2452 if (INSN_UID (label) < max_jump_chain
2453 && INSN_UID (insn) < max_jump_chain)
2454 {
2455 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
2456 jump_chain[INSN_UID (label)] = insn;
2457 }
2458 }
2459 else
2460 redirect_jump (insn, label);
2461
2462 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
2463 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
2464 the NEWJPOS stream. */
2465
2466 while (newjpos != insn)
2467 {
2468 rtx lnote;
2469
2470 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
2471 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
2472 || REG_NOTE_KIND (lnote) == REG_EQUIV)
2473 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
2474 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
2475 remove_note (newlpos, lnote);
2476
2477 delete_insn (newjpos);
2478 newjpos = next_real_insn (newjpos);
2479 newlpos = next_real_insn (newlpos);
2480 }
2481 }
2482 \f
2483 /* Return the label before INSN, or put a new label there. */
2484
2485 rtx
2486 get_label_before (insn)
2487 rtx insn;
2488 {
2489 rtx label;
2490
2491 /* Find an existing label at this point
2492 or make a new one if there is none. */
2493 label = prev_nonnote_insn (insn);
2494
2495 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2496 {
2497 rtx prev = PREV_INSN (insn);
2498
2499 label = gen_label_rtx ();
2500 emit_label_after (label, prev);
2501 LABEL_NUSES (label) = 0;
2502 }
2503 return label;
2504 }
2505
2506 /* Return the label after INSN, or put a new label there. */
2507
2508 rtx
2509 get_label_after (insn)
2510 rtx insn;
2511 {
2512 rtx label;
2513
2514 /* Find an existing label at this point
2515 or make a new one if there is none. */
2516 label = next_nonnote_insn (insn);
2517
2518 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2519 {
2520 label = gen_label_rtx ();
2521 emit_label_after (label, insn);
2522 LABEL_NUSES (label) = 0;
2523 }
2524 return label;
2525 }
2526 \f
2527 /* Return 1 if INSN is a jump that jumps to right after TARGET
2528 only on the condition that TARGET itself would drop through.
2529 Assumes that TARGET is a conditional jump. */
2530
2531 static int
2532 jump_back_p (insn, target)
2533 rtx insn, target;
2534 {
2535 rtx cinsn, ctarget;
2536 enum rtx_code codei, codet;
2537
2538 if (simplejump_p (insn) || ! condjump_p (insn)
2539 || simplejump_p (target)
2540 || target != prev_real_insn (JUMP_LABEL (insn)))
2541 return 0;
2542
2543 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
2544 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
2545
2546 codei = GET_CODE (cinsn);
2547 codet = GET_CODE (ctarget);
2548
2549 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
2550 {
2551 if (! can_reverse_comparison_p (cinsn, insn))
2552 return 0;
2553 codei = reverse_condition (codei);
2554 }
2555
2556 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
2557 {
2558 if (! can_reverse_comparison_p (ctarget, target))
2559 return 0;
2560 codet = reverse_condition (codet);
2561 }
2562
2563 return (codei == codet
2564 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
2565 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
2566 }
2567 \f
2568 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
2569 return non-zero if it is safe to reverse this comparison. It is safe
2570 if our floating-point format is not IEEE, if this is an NE or EQ
2571 comparison, or if this is known to be an integer comparison. */
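/* E.g. under IEEE floating point, (gt x y) must not be reversed to
   (le x y): if either operand is a NaN, GT and LE are both false,
   so the reversal would change the jump's behavior. */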
2572
2573 int
2574 can_reverse_comparison_p (comparison, insn)
2575 rtx comparison;
2576 rtx insn;
2577 {
2578 rtx arg0;
2579
2580 /* If this is not actually a comparison, we can't reverse it. */
2581 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
2582 return 0;
2583
2584 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
2585 /* If this is an NE comparison, it is safe to reverse it to an EQ
2586 comparison and vice versa, even for floating point. If no operands
2587 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
2588 always false and NE is always true, so the reversal is also valid. */
2589 || flag_fast_math
2590 || GET_CODE (comparison) == NE
2591 || GET_CODE (comparison) == EQ)
2592 return 1;
2593
2594 arg0 = XEXP (comparison, 0);
2595
2596 /* Make sure ARG0 is one of the actual objects being compared. If we
2597 can't do this, we can't be sure the comparison can be reversed.
2598
2599 Handle cc0 and a MODE_CC register. */
2600 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
2601 #ifdef HAVE_cc0
2602 || arg0 == cc0_rtx
2603 #endif
2604 )
2605 {
2606 rtx prev = prev_nonnote_insn (insn);
2607 rtx set = single_set (prev);
2608
2609 if (set == 0 || SET_DEST (set) != arg0)
2610 return 0;
2611
2612 arg0 = SET_SRC (set);
2613
2614 if (GET_CODE (arg0) == COMPARE)
2615 arg0 = XEXP (arg0, 0);
2616 }
2617
2618 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
2619 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
2620 return (GET_CODE (arg0) == CONST_INT
2621 || (GET_MODE (arg0) != VOIDmode
2622 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
2623 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
2624 }
2625
2626 /* Given an rtx-code for a comparison, return the code
2627 for the negated comparison.
2628 WATCH OUT! reverse_condition is not safe to use on a jump
2629 that might be acting on the results of an IEEE floating point comparison,
2630 because of the special treatment of non-signaling NaNs in comparisons.
2631 Use can_reverse_comparison_p to be sure. */
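/* For example, reverse_condition (GT) is LE, which negates the
   predicate, whereas swap_condition (GT), defined below, is LT,
   which exchanges the operands: x > y is equivalent to y < x. */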
2632
2633 enum rtx_code
2634 reverse_condition (code)
2635 enum rtx_code code;
2636 {
2637 switch (code)
2638 {
2639 case EQ:
2640 return NE;
2641
2642 case NE:
2643 return EQ;
2644
2645 case GT:
2646 return LE;
2647
2648 case GE:
2649 return LT;
2650
2651 case LT:
2652 return GE;
2653
2654 case LE:
2655 return GT;
2656
2657 case GTU:
2658 return LEU;
2659
2660 case GEU:
2661 return LTU;
2662
2663 case LTU:
2664 return GEU;
2665
2666 case LEU:
2667 return GTU;
2668
2669 default:
2670 abort ();
2671 return UNKNOWN;
2672 }
2673 }
2674
2675 /* Similar, but return the code when two operands of a comparison are swapped.
2676 This IS safe for IEEE floating-point. */
2677
2678 enum rtx_code
2679 swap_condition (code)
2680 enum rtx_code code;
2681 {
2682 switch (code)
2683 {
2684 case EQ:
2685 case NE:
2686 return code;
2687
2688 case GT:
2689 return LT;
2690
2691 case GE:
2692 return LE;
2693
2694 case LT:
2695 return GT;
2696
2697 case LE:
2698 return GE;
2699
2700 case GTU:
2701 return LTU;
2702
2703 case GEU:
2704 return LEU;
2705
2706 case LTU:
2707 return GTU;
2708
2709 case LEU:
2710 return GEU;
2711
2712 default:
2713 abort ();
2714 return UNKNOWN;
2715 }
2716 }
2717
2718 /* Given a comparison CODE, return the corresponding unsigned comparison.
2719 If CODE is an equality comparison or already an unsigned comparison,
2720 CODE is returned. */
2721
2722 enum rtx_code
2723 unsigned_condition (code)
2724 enum rtx_code code;
2725 {
2726 switch (code)
2727 {
2728 case EQ:
2729 case NE:
2730 case GTU:
2731 case GEU:
2732 case LTU:
2733 case LEU:
2734 return code;
2735
2736 case GT:
2737 return GTU;
2738
2739 case GE:
2740 return GEU;
2741
2742 case LT:
2743 return LTU;
2744
2745 case LE:
2746 return LEU;
2747
2748 default:
2749 abort ();
2750 }
2751 }
2752
2753 /* Similarly, return the signed version of a comparison. */
2754
2755 enum rtx_code
2756 signed_condition (code)
2757 enum rtx_code code;
2758 {
2759 switch (code)
2760 {
2761 case EQ:
2762 case NE:
2763 case GT:
2764 case GE:
2765 case LT:
2766 case LE:
2767 return code;
2768
2769 case GTU:
2770 return GT;
2771
2772 case GEU:
2773 return GE;
2774
2775 case LTU:
2776 return LT;
2777
2778 case LEU:
2779 return LE;
2780
2781 default:
2782 abort ();
2783 }
2784 }
2785 \f
2786 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2787 truth of CODE1 implies the truth of CODE2. */
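/* For instance, comparison_dominates_p (EQ, GE) is 1, since x == y
   implies x >= y, while comparison_dominates_p (LE, LT) is 0, since
   x <= y does not imply x < y. */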
2788
2789 int
2790 comparison_dominates_p (code1, code2)
2791 enum rtx_code code1, code2;
2792 {
2793 if (code1 == code2)
2794 return 1;
2795
2796 switch (code1)
2797 {
2798 case EQ:
2799 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
2800 return 1;
2801 break;
2802
2803 case LT:
2804 if (code2 == LE || code2 == NE)
2805 return 1;
2806 break;
2807
2808 case GT:
2809 if (code2 == GE || code2 == NE)
2810 return 1;
2811 break;
2812
2813 case LTU:
2814 if (code2 == LEU || code2 == NE)
2815 return 1;
2816 break;
2817
2818 case GTU:
2819 if (code2 == GEU || code2 == NE)
2820 return 1;
2821 break;
2822 }
2823
2824 return 0;
2825 }
2826 \f
2827 /* Return 1 if INSN is an unconditional jump and nothing else. */
2828
2829 int
2830 simplejump_p (insn)
2831 rtx insn;
2832 {
2833 return (GET_CODE (insn) == JUMP_INSN
2834 && GET_CODE (PATTERN (insn)) == SET
2835 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2836 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2837 }
2838
2839 /* Return nonzero if INSN is a (possibly) conditional jump
2840 and nothing more. */
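/* E.g. it accepts (set (pc) (label_ref L)) and
   (set (pc) (if_then_else (eq ...) (label_ref L) (pc))),
   but not a jump wrapped in a PARALLEL; for those see
   condjump_in_parallel_p below. */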
2841
2842 int
2843 condjump_p (insn)
2844 rtx insn;
2845 {
2846 register rtx x = PATTERN (insn);
2847 if (GET_CODE (x) != SET)
2848 return 0;
2849 if (GET_CODE (SET_DEST (x)) != PC)
2850 return 0;
2851 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2852 return 1;
2853 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2854 return 0;
2855 if (XEXP (SET_SRC (x), 2) == pc_rtx
2856 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2857 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2858 return 1;
2859 if (XEXP (SET_SRC (x), 1) == pc_rtx
2860 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2861 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2862 return 1;
2863 return 0;
2864 }
2865
2866 /* Return nonzero if INSN is a (possibly) conditional jump
2867 within a PARALLEL and nothing more. */
2868
2869 int
2870 condjump_in_parallel_p (insn)
2871 rtx insn;
2872 {
2873 register rtx x = PATTERN (insn);
2874
2875 if (GET_CODE (x) != PARALLEL)
2876 return 0;
2877 else
2878 x = XVECEXP (x, 0, 0);
2879
2880 if (GET_CODE (x) != SET)
2881 return 0;
2882 if (GET_CODE (SET_DEST (x)) != PC)
2883 return 0;
2884 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2885 return 1;
2886 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2887 return 0;
2888 if (XEXP (SET_SRC (x), 2) == pc_rtx
2889 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2890 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2891 return 1;
2892 if (XEXP (SET_SRC (x), 1) == pc_rtx
2893 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2894 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2895 return 1;
2896 return 0;
2897 }
2898
2899 /* Return 1 if X is an RTX that does nothing but set the condition codes
2900 and CLOBBER or USE registers.
2901 Return -1 if X does explicitly set the condition codes,
2902 but also does other things. */
2903
2904 int
2905 sets_cc0_p (x)
2906 rtx x;
2907 {
2908 #ifdef HAVE_cc0
2909 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
2910 return 1;
2911 if (GET_CODE (x) == PARALLEL)
2912 {
2913 int i;
2914 int sets_cc0 = 0;
2915 int other_things = 0;
2916 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2917 {
2918 if (GET_CODE (XVECEXP (x, 0, i)) == SET
2919 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
2920 sets_cc0 = 1;
2921 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
2922 other_things = 1;
2923 }
2924 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
2925 }
2926 return 0;
2927 #else
2928 abort ();
2929 #endif
2930 }
2931 \f
2932 /* Follow any unconditional jump at LABEL;
2933 return the ultimate label reached by any such chain of jumps.
2934 If LABEL is not followed by a jump, return LABEL.
2935 If the chain loops or we can't find the end, return LABEL,
2936 since that tells caller to avoid changing the insn.
2937
2938 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2939 a USE or CLOBBER. */
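/* E.g. (a hypothetical chain): if LABEL is followed by "jump L2" and
   L2 by "jump L3", follow_jumps (LABEL) returns L3, so a branch to
   LABEL can be retargeted to L3 directly. */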
2940
2941 rtx
2942 follow_jumps (label)
2943 rtx label;
2944 {
2945 register rtx insn;
2946 register rtx next;
2947 register rtx value = label;
2948 register int depth;
2949
2950 for (depth = 0;
2951 (depth < 10
2952 && (insn = next_active_insn (value)) != 0
2953 && GET_CODE (insn) == JUMP_INSN
2954 && (JUMP_LABEL (insn) != 0 || GET_CODE (PATTERN (insn)) == RETURN)
2955 && (next = NEXT_INSN (insn))
2956 && GET_CODE (next) == BARRIER);
2957 depth++)
2958 {
2959 /* Don't chain through the insn that jumps into a loop
2960 from outside the loop,
2961 since that would create multiple loop entry jumps
2962 and prevent loop optimization. */
2963 rtx tem;
2964 if (!reload_completed)
2965 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
2966 if (GET_CODE (tem) == NOTE
2967 && NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG)
2968 return value;
2969
2970 /* If we have found a cycle, return LABEL so the caller makes no change. */
2971 if (JUMP_LABEL (insn) == label)
2972 return label;
2973
2974 tem = next_active_insn (JUMP_LABEL (insn));
2975 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2976 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2977 break;
2978
2979 value = JUMP_LABEL (insn);
2980 }
2981 if (depth == 10)
2982 return label;
2983 return value;
2984 }
2985
2986 /* Assuming that field IDX of X is a vector of label_refs,
2987 replace each of them by the ultimate label reached by it.
2988 Return nonzero if a change is made.
2989 follow_jumps avoids chaining across a NOTE_INSN_LOOP_BEG before reload. */
2990
2991 static int
2992 tension_vector_labels (x, idx)
2993 register rtx x;
2994 register int idx;
2995 {
2996 int changed = 0;
2997 register int i;
2998 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
2999 {
3000 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3001 register rtx nlabel = follow_jumps (olabel);
3002 if (nlabel && nlabel != olabel)
3003 {
3004 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3005 ++LABEL_NUSES (nlabel);
3006 if (--LABEL_NUSES (olabel) == 0)
3007 delete_insn (olabel);
3008 changed = 1;
3009 }
3010 }
3011 return changed;
3012 }
3013 \f
3014 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3015 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3016 in INSN, then store one of them in JUMP_LABEL (INSN).
3017 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3018 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3019 Also, when there are consecutive labels, canonicalize on the last of them.
3020
3021 Note that two labels separated by a loop-beginning note
3022 must be kept distinct if we have not yet done loop-optimization,
3023 because the gap between them is where loop-optimize
3024 will want to move invariant code to. A nonzero CROSS_JUMP tells us
3025 that loop optimization has already been done.
3026
3027 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3028 two labels distinct if they are separated by only USE or CLOBBER insns. */
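/* E.g. (hypothetical labels): if label L1 is immediately followed by
   label L2, a (label_ref L1) inside X is canonicalized to
   (label_ref L2), L2's use count is incremented, and L2 is recorded
   in JUMP_LABEL or in a REG_LABEL note on INSN. */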
3029
3030 static void
3031 mark_jump_label (x, insn, cross_jump)
3032 register rtx x;
3033 rtx insn;
3034 int cross_jump;
3035 {
3036 register RTX_CODE code = GET_CODE (x);
3037 register int i;
3038 register char *fmt;
3039
3040 switch (code)
3041 {
3042 case PC:
3043 case CC0:
3044 case REG:
3045 case SUBREG:
3046 case CONST_INT:
3047 case SYMBOL_REF:
3048 case CONST_DOUBLE:
3049 case CLOBBER:
3050 case CALL:
3051 return;
3052
3053 case MEM:
3054 /* If this is a constant-pool reference, see if it is a label. */
3055 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3056 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3057 mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
3058 break;
3059
3060 case LABEL_REF:
3061 {
3062 rtx label = XEXP (x, 0);
3063 rtx olabel = label;
3064 rtx note;
3065 rtx next;
3066
3067 if (GET_CODE (label) != CODE_LABEL)
3068 abort ();
3069
3070 /* Ignore references to labels of containing functions. */
3071 if (LABEL_REF_NONLOCAL_P (x))
3072 break;
3073
3074 /* If there are other labels following this one,
3075 replace it with the last of the consecutive labels. */
3076 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3077 {
3078 if (GET_CODE (next) == CODE_LABEL)
3079 label = next;
3080 else if (cross_jump && GET_CODE (next) == INSN
3081 && (GET_CODE (PATTERN (next)) == USE
3082 || GET_CODE (PATTERN (next)) == CLOBBER))
3083 continue;
3084 else if (GET_CODE (next) != NOTE)
3085 break;
3086 else if (! cross_jump
3087 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3088 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END))
3089 break;
3090 }
3091
3092 XEXP (x, 0) = label;
3093 ++LABEL_NUSES (label);
3094
3095 if (insn)
3096 {
3097 if (GET_CODE (insn) == JUMP_INSN)
3098 JUMP_LABEL (insn) = label;
3099
3100 /* If we've changed OLABEL and we had a REG_LABEL note
3101 for it, update it as well. */
3102 else if (label != olabel
3103 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3104 XEXP (note, 0) = label;
3105
3106 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3107 is one. */
3108 else if (! find_reg_note (insn, REG_LABEL, label))
3109 {
3110 rtx next = next_real_insn (label);
3111 /* Don't record labels that refer to dispatch tables.
3112 This is not necessary, since the tablejump
3113 references the same label.
3114 And if we did record them, flow.c would make worse code. */
3115 if (next == 0
3116 || ! (GET_CODE (next) == JUMP_INSN
3117 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3118 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)))
3119 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_LABEL, label,
3120 REG_NOTES (insn));
3121 }
3122 }
3123 return;
3124 }
3125
3126 /* Do walk the labels in a vector, but not the first operand of an
3127 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3128 case ADDR_VEC:
3129 case ADDR_DIFF_VEC:
3130 {
3131 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
3132
3133 for (i = 0; i < XVECLEN (x, eltnum); i++)
3134 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
3135 return;
3136 }
3137 }
3138
3139 fmt = GET_RTX_FORMAT (code);
3140 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3141 {
3142 if (fmt[i] == 'e')
3143 mark_jump_label (XEXP (x, i), insn, cross_jump);
3144 else if (fmt[i] == 'E')
3145 {
3146 register int j;
3147 for (j = 0; j < XVECLEN (x, i); j++)
3148 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
3149 }
3150 }
3151 }
3152
3153 /* If all INSN does is set the pc, delete it,
3154 and delete the insn that set the condition codes for it
3155 if that's what the previous thing was. */
3156
3157 void
3158 delete_jump (insn)
3159 rtx insn;
3160 {
3161 register rtx set = single_set (insn);
3162
3163 if (set && GET_CODE (SET_DEST (set)) == PC)
3164 delete_computation (insn);
3165 }
3166
3167 /* Delete INSN and recursively delete insns that compute values used only
3168 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3169 If we are running before flow.c, we need do nothing since flow.c will
3170 delete dead code. We also can't know if the registers being used are
3171 dead or not at this point.
3172
3173 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3174 nothing other than set a register that dies in this insn, we can delete
3175 that insn as well.
3176
3177 On machines with CC0, if CC0 is used in this insn, we may be able to
3178 delete the insn that set it. */
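/* E.g. (invented registers): if INSN is "if (r5) goto L" and carries
   a REG_DEAD note for r5, and the previous insn does nothing but
   (set (reg r5) ...), that previous insn is deleted too, and so on
   recursively. */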
3179
3180 static void
3181 delete_computation (insn)
3182 rtx insn;
3183 {
3184 rtx note, next;
3185
3186 #ifdef HAVE_cc0
3187 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3188 {
3189 rtx prev = prev_nonnote_insn (insn);
3190 /* We assume that at this stage
3191 CC's are always set explicitly
3192 and always immediately before the jump that
3193 will use them. So if the previous insn
3194 exists to set the CC's, delete it
3195 (unless it performs auto-increments, etc.). */
3196 if (prev && GET_CODE (prev) == INSN
3197 && sets_cc0_p (PATTERN (prev)))
3198 {
3199 if (sets_cc0_p (PATTERN (prev)) > 0
3200 && !FIND_REG_INC_NOTE (prev, NULL_RTX))
3201 delete_computation (prev);
3202 else
3203 /* Otherwise, show that cc0 won't be used. */
3204 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_UNUSED,
3205 cc0_rtx, REG_NOTES (prev));
3206 }
3207 }
3208 #endif
3209
3210 for (note = REG_NOTES (insn); note; note = next)
3211 {
3212 rtx our_prev;
3213
3214 next = XEXP (note, 1);
3215
3216 if (REG_NOTE_KIND (note) != REG_DEAD
3217 /* Verify that the REG_NOTE is legitimate. */
3218 || GET_CODE (XEXP (note, 0)) != REG)
3219 continue;
3220
3221 for (our_prev = prev_nonnote_insn (insn);
3222 our_prev && GET_CODE (our_prev) == INSN;
3223 our_prev = prev_nonnote_insn (our_prev))
3224 {
3225 /* If we reach a SEQUENCE, it is too complex to try to
3226 do anything with it, so give up. */
3227 if (GET_CODE (PATTERN (our_prev)) == SEQUENCE)
3228 break;
3229
3230 if (GET_CODE (PATTERN (our_prev)) == USE
3231 && GET_CODE (XEXP (PATTERN (our_prev), 0)) == INSN)
3232 /* reorg creates USEs that look like this. We leave them
3233 alone because reorg needs them for its own purposes. */
3234 break;
3235
3236 if (reg_set_p (XEXP (note, 0), PATTERN (our_prev)))
3237 {
3238 if (FIND_REG_INC_NOTE (our_prev, NULL_RTX))
3239 break;
3240
3241 if (GET_CODE (PATTERN (our_prev)) == PARALLEL)
3242 {
3243 /* If we find a SET of something else, we can't
3244 delete the insn. */
3245
3246 int i;
3247
3248 for (i = 0; i < XVECLEN (PATTERN (our_prev), 0); i++)
3249 {
3250 rtx part = XVECEXP (PATTERN (our_prev), 0, i);
3251
3252 if (GET_CODE (part) == SET
3253 && SET_DEST (part) != XEXP (note, 0))
3254 break;
3255 }
3256
3257 if (i == XVECLEN (PATTERN (our_prev), 0))
3258 delete_computation (our_prev);
3259 }
3260 else if (GET_CODE (PATTERN (our_prev)) == SET
3261 && SET_DEST (PATTERN (our_prev)) == XEXP (note, 0))
3262 delete_computation (our_prev);
3263
3264 break;
3265 }
3266
3267 /* If OUR_PREV references the register that dies here, it is an
3268 additional use. Hence any prior SET isn't dead. However, this
3269 insn becomes the new place for the REG_DEAD note. */
3270 if (reg_overlap_mentioned_p (XEXP (note, 0),
3271 PATTERN (our_prev)))
3272 {
3273 XEXP (note, 1) = REG_NOTES (our_prev);
3274 REG_NOTES (our_prev) = note;
3275 break;
3276 }
3277 }
3278 }
3279
3280 delete_insn (insn);
3281 }
3282 \f
3283 /* Delete insn INSN from the chain of insns and update label ref counts.
3284 May delete some following insns as a consequence; may even delete
3285 a label elsewhere and insns that follow it.
3286
3287 Returns the first insn after INSN that was not deleted. */
3288
3289 rtx
3290 delete_insn (insn)
3291 register rtx insn;
3292 {
3293 register rtx next = NEXT_INSN (insn);
3294 register rtx prev = PREV_INSN (insn);
3295 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
3296 register int dont_really_delete = 0;
3297
3298 while (next && INSN_DELETED_P (next))
3299 next = NEXT_INSN (next);
3300
3301 /* This insn is already deleted => return first following nondeleted. */
3302 if (INSN_DELETED_P (insn))
3303 return next;
3304
3305 /* Don't delete user-declared labels. Convert them to special NOTEs
3306 instead. */
3307 if (was_code_label && LABEL_NAME (insn) != 0
3308 && optimize && ! dont_really_delete)
3309 {
3310 PUT_CODE (insn, NOTE);
3311 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
3312 NOTE_SOURCE_FILE (insn) = 0;
3313 dont_really_delete = 1;
3314 }
3315 else
3316 /* Mark this insn as deleted. */
3317 INSN_DELETED_P (insn) = 1;
3318
3319 /* If this is an unconditional jump, delete it from the jump chain. */
3320 if (simplejump_p (insn))
3321 delete_from_jump_chain (insn);
3322
3323 /* If instruction is followed by a barrier,
3324 delete the barrier too. */
3325
3326 if (next != 0 && GET_CODE (next) == BARRIER)
3327 {
3328 INSN_DELETED_P (next) = 1;
3329 next = NEXT_INSN (next);
3330 }
3331
3332 /* Patch out INSN (and the barrier, if any). */
3333
3334 if (optimize && ! dont_really_delete)
3335 {
3336 if (prev)
3337 {
3338 NEXT_INSN (prev) = next;
3339 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3340 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
3341 XVECLEN (PATTERN (prev), 0) - 1)) = next;
3342 }
3343
3344 if (next)
3345 {
3346 PREV_INSN (next) = prev;
3347 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3348 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3349 }
3350
3351 if (prev && NEXT_INSN (prev) == 0)
3352 set_last_insn (prev);
3353 }
3354
3355 /* If deleting a jump, decrement the count of the label,
3356 and delete the label if it is now unused. */
3357
3358 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
3359 if (--LABEL_NUSES (JUMP_LABEL (insn)) == 0)
3360 {
3361 /* This can delete NEXT or PREV,
3362 either directly if NEXT is JUMP_LABEL (INSN),
3363 or indirectly through more levels of jumps. */
3364 delete_insn (JUMP_LABEL (insn));
3365 /* I feel a little doubtful about this loop,
3366 but I see no clean and sure alternative way
3367 to find the first insn after INSN that is not now deleted.
3368 I hope this works. */
3369 while (next && INSN_DELETED_P (next))
3370 next = NEXT_INSN (next);
3371 return next;
3372 }
3373
3374 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
3375 prev = PREV_INSN (prev);
3376
3377 /* If INSN was a label and a dispatch table follows it,
3378 delete the dispatch table. The tablejump must have gone already.
3379 It isn't useful to fall through into a table. */
3380
3381 if (was_code_label
3382 && NEXT_INSN (insn) != 0
3383 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
3384 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
3385 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
3386 next = delete_insn (NEXT_INSN (insn));
3387
3388 /* If INSN was a label, delete insns following it if now unreachable. */
3389
3390 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
3391 {
3392 register RTX_CODE code;
3393 while (next != 0
3394 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
3395 || code == NOTE
3396 || (code == CODE_LABEL && INSN_DELETED_P (next))))
3397 {
3398 if (code == NOTE
3399 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
3400 next = NEXT_INSN (next);
3401 /* Keep going past other deleted labels to delete what follows. */
3402 else if (code == CODE_LABEL && INSN_DELETED_P (next))
3403 next = NEXT_INSN (next);
3404 else
3405 /* Note: if this deletes a jump, it can cause more
3406 deletion of unreachable code, after a different label.
3407 As long as the value from this recursive call is correct,
3408 this invocation functions correctly. */
3409 next = delete_insn (next);
3410 }
3411 }
3412
3413 return next;
3414 }
3415
3416 /* Advance from INSN till reaching something not deleted
3417 then return that. May return INSN itself. */
3418
3419 rtx
3420 next_nondeleted_insn (insn)
3421 rtx insn;
3422 {
3423 while (INSN_DELETED_P (insn))
3424 insn = NEXT_INSN (insn);
3425 return insn;
3426 }
3427 \f
3428 /* Delete a range of insns from FROM to TO, inclusive.
3429 This is for the sake of peephole optimization, so assume
3430 that whatever these insns do will still be done by a new
3431 peephole insn that will replace them. */
3432
3433 void
3434 delete_for_peephole (from, to)
3435 register rtx from, to;
3436 {
3437 register rtx insn = from;
3438
3439 while (1)
3440 {
3441 register rtx next = NEXT_INSN (insn);
3442 register rtx prev = PREV_INSN (insn);
3443
3444 if (GET_CODE (insn) != NOTE)
3445 {
3446 INSN_DELETED_P (insn) = 1;
3447
3448 /* Patch this insn out of the chain. */
3449 /* We don't do this all at once, because we
3450 must preserve all NOTEs. */
3451 if (prev)
3452 NEXT_INSN (prev) = next;
3453
3454 if (next)
3455 PREV_INSN (next) = prev;
3456 }
3457
3458 if (insn == to)
3459 break;
3460 insn = next;
3461 }
3462
3463 /* Note that if TO is an unconditional jump
3464 we *do not* delete the BARRIER that follows,
3465 since the peephole that replaces this sequence
3466 is also an unconditional jump in that case. */
3467 }
3468 \f
3469 /* Invert the condition of the jump JUMP, and make it jump
3470 to label NLABEL instead of where it jumps now. */
3471
3472 int
3473 invert_jump (jump, nlabel)
3474 rtx jump, nlabel;
3475 {
3476 /* We have to either invert the condition and change the label or
3477 do neither. Either operation could fail. We first try to invert
3478 the jump. If that succeeds, we try changing the label. If that fails,
3479 we invert the jump back to what it was. */
3480
3481 if (! invert_exp (PATTERN (jump), jump))
3482 return 0;
3483
3484 if (redirect_jump (jump, nlabel))
3485 return 1;
3486
3487 if (! invert_exp (PATTERN (jump), jump))
3488 /* This should just be putting it back the way it was. */
3489 abort ();
3490
3491 return 0;
3492 }
3493
3494 /* Invert the jump condition of rtx X contained in jump insn, INSN.
3495
3496 Return 1 if we can do so, 0 if we cannot find a way to do so that
3497 matches a pattern. */
3498
3499 int
3500 invert_exp (x, insn)
3501 rtx x;
3502 rtx insn;
3503 {
3504 register RTX_CODE code;
3505 register int i;
3506 register char *fmt;
3507
3508 code = GET_CODE (x);
3509
3510 if (code == IF_THEN_ELSE)
3511 {
3512 register rtx comp = XEXP (x, 0);
3513 register rtx tem;
3514
3515 /* We can do this in two ways: The preferable way, which works
3516 unless the comparison is irreversible (see can_reverse_comparison_p),
3517 is to reverse the comparison code. Otherwise, swap the THEN-part
3518 and ELSE-part of the IF_THEN_ELSE. If we can't do either, fail. */
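/* A hypothetical sketch: (if_then_else (gt x y) A B) becomes
   (if_then_else (le x y) A B) when the reversal is safe, and
   (if_then_else (gt x y) B A) otherwise. */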
3519
3520 if (can_reverse_comparison_p (comp, insn)
3521 && validate_change (insn, &XEXP (x, 0),
3522 gen_rtx (reverse_condition (GET_CODE (comp)),
3523 GET_MODE (comp), XEXP (comp, 0),
3524 XEXP (comp, 1)), 0))
3525 return 1;
3526
3527 tem = XEXP (x, 1);
3528 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
3529 validate_change (insn, &XEXP (x, 2), tem, 1);
3530 return apply_change_group ();
3531 }
3532
3533 fmt = GET_RTX_FORMAT (code);
3534 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3535 {
3536 if (fmt[i] == 'e')
3537 if (! invert_exp (XEXP (x, i), insn))
3538 return 0;
3539 if (fmt[i] == 'E')
3540 {
3541 register int j;
3542 for (j = 0; j < XVECLEN (x, i); j++)
3543 if (!invert_exp (XVECEXP (x, i, j), insn))
3544 return 0;
3545 }
3546 }
3547
3548 return 1;
3549 }
3550 \f
3551 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
3552 If the old jump target label is unused as a result,
3553 it and the code following it may be deleted.
3554
3555 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3556 RETURN insn.
3557
3558 The return value will be 1 if the change was made, 0 if it wasn't (this
3559 can only occur for NLABEL == 0). */
3560
3561 int
3562 redirect_jump (jump, nlabel)
3563 rtx jump, nlabel;
3564 {
3565 register rtx olabel = JUMP_LABEL (jump);
3566
3567 if (nlabel == olabel)
3568 return 1;
3569
3570 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
3571 return 0;
3572
3573 /* If this is an unconditional branch, delete it from the jump_chain of
3574 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3575 have UID's in range and JUMP_CHAIN is valid). */
3576 if (jump_chain && (simplejump_p (jump)
3577 || GET_CODE (PATTERN (jump)) == RETURN))
3578 {
3579 int label_index = nlabel ? INSN_UID (nlabel) : 0;
3580
3581 delete_from_jump_chain (jump);
3582 if (label_index < max_jump_chain
3583 && INSN_UID (jump) < max_jump_chain)
3584 {
3585 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
3586 jump_chain[label_index] = jump;
3587 }
3588 }
3589
3590 JUMP_LABEL (jump) = nlabel;
3591 if (nlabel)
3592 ++LABEL_NUSES (nlabel);
3593
3594 if (olabel && --LABEL_NUSES (olabel) == 0)
3595 delete_insn (olabel);
3596
3597 return 1;
3598 }
3599
3600 /* Delete the instruction JUMP from any jump chain it might be on. */
3601
3602 static void
3603 delete_from_jump_chain (jump)
3604 rtx jump;
3605 {
3606 int index;
3607 rtx olabel = JUMP_LABEL (jump);
3608
3609 /* Handle unconditional jumps. */
3610 if (jump_chain && olabel != 0
3611 && INSN_UID (olabel) < max_jump_chain
3612 && simplejump_p (jump))
3613 index = INSN_UID (olabel);
3614 /* Handle return insns. */
3615 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3616 index = 0;
3617 else return;
3618
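/* Unlink JUMP from the singly-linked chain headed at
   jump_chain[index]: either JUMP is the chain head itself, or some
   earlier element's link points at it.  */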
3619 if (jump_chain[index] == jump)
3620 jump_chain[index] = jump_chain[INSN_UID (jump)];
3621 else
3622 {
3623 rtx insn;
3624
3625 for (insn = jump_chain[index];
3626 insn != 0;
3627 insn = jump_chain[INSN_UID (insn)])
3628 if (jump_chain[INSN_UID (insn)] == jump)
3629 {
3630 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3631 break;
3632 }
3633 }
3634 }
3635
3636 /* If NLABEL is nonzero, throughout the rtx at LOC,
3637 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
3638 zero, alter (RETURN) to (LABEL_REF NLABEL).
3639
3640 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
3641 validity with validate_change. Convert (set (pc) (label_ref olabel))
3642 to (return).
3643
3644 Return 0 if we found a change we would like to make but it is invalid.
3645 Otherwise, return 1. */
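/* For illustration, with NLABEL a hypothetical label: when the whole
   pattern of INSN is (return) and NLABEL is nonzero, the replacement
   is
     (set (pc) (label_ref NLABEL))
   since a bare LABEL_REF is not a valid insn pattern on its own.  */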
3646
3647 int
3648 redirect_exp (loc, olabel, nlabel, insn)
3649 rtx *loc;
3650 rtx olabel, nlabel;
3651 rtx insn;
3652 {
3653 register rtx x = *loc;
3654 register RTX_CODE code = GET_CODE (x);
3655 register int i;
3656 register char *fmt;
3657
3658 if (code == LABEL_REF)
3659 {
3660 if (XEXP (x, 0) == olabel)
3661 {
3662 if (nlabel)
3663 XEXP (x, 0) = nlabel;
3664 else
3665 return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
3666 return 1;
3667 }
3668 }
3669 else if (code == RETURN && olabel == 0)
3670 {
3671 x = gen_rtx (LABEL_REF, VOIDmode, nlabel);
3672 if (loc == &PATTERN (insn))
3673 x = gen_rtx (SET, VOIDmode, pc_rtx, x);
3674 return validate_change (insn, loc, x, 0);
3675 }
3676
3677 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
3678 && GET_CODE (SET_SRC (x)) == LABEL_REF
3679 && XEXP (SET_SRC (x), 0) == olabel)
3680 return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
3681
3682 fmt = GET_RTX_FORMAT (code);
3683 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3684 {
3685 if (fmt[i] == 'e')
3686 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
3687 return 0;
3688 if (fmt[i] == 'E')
3689 {
3690 register int j;
3691 for (j = 0; j < XVECLEN (x, i); j++)
3692 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
3693 return 0;
3694 }
3695 }
3696
3697 return 1;
3698 }
3699 \f
3700 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3701
3702 If the old jump target label (before the dispatch table) becomes unused,
3703 it and the dispatch table may be deleted. In that case, find the insn
3704 before the jump that references that label, and delete it and its
3705 logical successors too. */
3706
3707 static void
3708 redirect_tablejump (jump, nlabel)
3709 rtx jump, nlabel;
3710 {
3711 register rtx olabel = JUMP_LABEL (jump);
3712
3713 /* Add this jump to the jump_chain of NLABEL. */
3714 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
3715 && INSN_UID (jump) < max_jump_chain)
3716 {
3717 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
3718 jump_chain[INSN_UID (nlabel)] = jump;
3719 }
3720
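/* Turn the tablejump into an ordinary jump to NLABEL.  Clearing
   INSN_CODE discards the cached recognition of the old pattern so
   that the new pattern will be re-recognized.  */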
3721 PATTERN (jump) = gen_jump (nlabel);
3722 JUMP_LABEL (jump) = nlabel;
3723 ++LABEL_NUSES (nlabel);
3724 INSN_CODE (jump) = -1;
3725
3726 if (--LABEL_NUSES (olabel) == 0)
3727 {
3728 delete_labelref_insn (jump, olabel, 0);
3729 delete_insn (olabel);
3730 }
3731 }
3732
3733 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3734 If we found one, delete it and then delete this insn if DELETE_THIS is
3735 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3736
3737 static int
3738 delete_labelref_insn (insn, label, delete_this)
3739 rtx insn, label;
3740 int delete_this;
3741 {
3742 int deleted = 0;
3743 rtx link;
3744
3745 if (GET_CODE (insn) != NOTE
3746 && reg_mentioned_p (label, PATTERN (insn)))
3747 {
3748 if (delete_this)
3749 {
3750 delete_insn (insn);
3751 deleted = 1;
3752 }
3753 else
3754 return 1;
3755 }
3756
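/* Otherwise search the logical predecessors recorded in LOG_LINKS
   recursively; every insn on a chain that reaches a reference to
   LABEL is deleted on the way back out.  */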
3757 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
3758 if (delete_labelref_insn (XEXP (link, 0), label, 1))
3759 {
3760 if (delete_this)
3761 {
3762 delete_insn (insn);
3763 deleted = 1;
3764 }
3765 else
3766 return 1;
3767 }
3768
3769 return deleted;
3770 }
3771 \f
3772 /* Like rtx_equal_p except that it considers two REGs as equal
3773 if they renumber to the same value and considers two commutative
3774 operations to be the same if the order of the operands has been
3775 reversed. */
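/* Example with illustrative register numbers: if reg_renumber[104]
   is 3, then (reg:SI 104) compares equal to (reg:SI 3), and
   (plus:SI (reg:SI 104) (reg:SI 2)) compares equal to
   (plus:SI (reg:SI 2) (reg:SI 3)) by commutativity.  */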
3776
3777 int
3778 rtx_renumbered_equal_p (x, y)
3779 rtx x, y;
3780 {
3781 register int i;
3782 register RTX_CODE code = GET_CODE (x);
3783 register char *fmt;
3784
3785 if (x == y)
3786 return 1;
3787
3788 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3789 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3790 && GET_CODE (SUBREG_REG (y)) == REG)))
3791 {
3792 int reg_x = -1, reg_y = -1;
3793 int word_x = 0, word_y = 0;
3794
3795 if (GET_MODE (x) != GET_MODE (y))
3796 return 0;
3797
3798 /* If we haven't done any renumbering, don't
3799 make any assumptions. */
3800 if (reg_renumber == 0)
3801 return rtx_equal_p (x, y);
3802
3803 if (code == SUBREG)
3804 {
3805 reg_x = REGNO (SUBREG_REG (x));
3806 word_x = SUBREG_WORD (x);
3807
3808 if (reg_renumber[reg_x] >= 0)
3809 {
3810 reg_x = reg_renumber[reg_x] + word_x;
3811 word_x = 0;
3812 }
3813 }
3814
3815 else
3816 {
3817 reg_x = REGNO (x);
3818 if (reg_renumber[reg_x] >= 0)
3819 reg_x = reg_renumber[reg_x];
3820 }
3821
3822 if (GET_CODE (y) == SUBREG)
3823 {
3824 reg_y = REGNO (SUBREG_REG (y));
3825 word_y = SUBREG_WORD (y);
3826
3827 if (reg_renumber[reg_y] >= 0)
3828 {
3829 reg_y = reg_renumber[reg_y] + word_y;
3830 word_y = 0;
3831 }
3832 }
3833
3834 else
3835 {
3836 reg_y = REGNO (y);
3837 if (reg_renumber[reg_y] >= 0)
3838 reg_y = reg_renumber[reg_y];
3839 }
3840
3841 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
3842 }
3843
3844 /* Now we have disposed of all the cases
3845 in which different rtx codes can match. */
3846 if (code != GET_CODE (y))
3847 return 0;
3848
3849 switch (code)
3850 {
3851 case PC:
3852 case CC0:
3853 case ADDR_VEC:
3854 case ADDR_DIFF_VEC:
3855 return 0;
3856
3857 case CONST_INT:
3858 return INTVAL (x) == INTVAL (y);
3859
3860 case LABEL_REF:
3861 /* We can't assume nonlocal labels have their following insns yet. */
3862 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3863 return XEXP (x, 0) == XEXP (y, 0);
3864
3865 /* Two label-refs are equivalent if they point at labels
3866 in the same position in the instruction stream. */
3867 return (next_real_insn (XEXP (x, 0))
3868 == next_real_insn (XEXP (y, 0)));
3869
3870 case SYMBOL_REF:
3871 return XSTR (x, 0) == XSTR (y, 0);
3872 }
3873
3874 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3875
3876 if (GET_MODE (x) != GET_MODE (y))
3877 return 0;
3878
3879 /* For commutative operations, the RTXs match if the operands match in
3880 either order. Also handle the simple binary and unary cases without a loop. */
3881 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3882 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3883 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
3884 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
3885 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
3886 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
3887 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3888 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
3889 else if (GET_RTX_CLASS (code) == '1')
3890 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
3891
3892 /* Compare the elements. If any pair of corresponding elements
3893 fails to match, return 0 for the whole thing. */
3894
3895 fmt = GET_RTX_FORMAT (code);
3896 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3897 {
3898 register int j;
3899 switch (fmt[i])
3900 {
3901 case 'w':
3902 if (XWINT (x, i) != XWINT (y, i))
3903 return 0;
3904 break;
3905
3906 case 'i':
3907 if (XINT (x, i) != XINT (y, i))
3908 return 0;
3909 break;
3910
3911 case 's':
3912 if (strcmp (XSTR (x, i), XSTR (y, i)))
3913 return 0;
3914 break;
3915
3916 case 'e':
3917 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
3918 return 0;
3919 break;
3920
3921 case 'u':
3922 if (XEXP (x, i) != XEXP (y, i))
3923 return 0;
3924 /* fall through. */
3925 case '0':
3926 break;
3927
3928 case 'E':
3929 if (XVECLEN (x, i) != XVECLEN (y, i))
3930 return 0;
3931 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3932 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
3933 return 0;
3934 break;
3935
3936 default:
3937 abort ();
3938 }
3939 }
3940 return 1;
3941 }
3942 \f
3943 /* If X is a hard register or equivalent to one or a subregister of one,
3944 return the hard register number. If X is a pseudo register that was not
3945 assigned a hard register, return the pseudo register number. Otherwise,
3946 return -1. Any rtx is valid for X. */
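/* Example with illustrative numbers: if pseudo 100 was assigned hard
   register 4, true_regnum of (subreg:SI (reg:DI 100) 1) is 5, the
   subreg's word offset added to the hard register number.  */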
3947
3948 int
3949 true_regnum (x)
3950 rtx x;
3951 {
3952 if (GET_CODE (x) == REG)
3953 {
3954 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3955 return reg_renumber[REGNO (x)];
3956 return REGNO (x);
3957 }
3958 if (GET_CODE (x) == SUBREG)
3959 {
3960 int base = true_regnum (SUBREG_REG (x));
3961 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3962 return SUBREG_WORD (x) + base;
3963 }
3964 return -1;
3965 }
3966 \f
3967 /* Optimize code of the form:
3968
3969 for (x = a[i]; x; ...)
3970 ...
3971 for (x = a[i]; x; ...)
3972 ...
3973 foo:
3974
3975 Loop optimize will change the above code into
3976
3977 if (x = a[i])
3978 for (;;)
3979 { ...; if (! (x = ...)) break; }
3980 if (x = a[i])
3981 for (;;)
3982 { ...; if (! (x = ...)) break; }
3983 foo:
3984
3985 In general, if the first test fails, the program can branch
3986 directly to `foo' and skip the second try which is doomed to fail.
3987 We run this after loop optimization and before flow analysis. */
3988
3989 /* When comparing the insn patterns, we track the fact that different
3990 pseudo-register numbers may have been used in each computation.
3991 The following array stores an equivalence -- same_regs[I] == J means
3992 that pseudo register I was used in the first set of tests in a context
3993 where J was used in the second set. We also count the number of such
3994 pending equivalences. If nonzero, the expressions really aren't the
3995 same. */
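/* A worked example with hypothetical register numbers: if the first
   sequence computes (set (reg 101) (mem ...)) where the second
   computes (set (reg 107) (mem ...)), comparing uses of the two
   registers records same_regs[101] = 107.  The entry is cancelled
   when the matching pair of SETs is reached (the walk runs backward),
   and any equivalence still pending at the first branch target
   defeats the transformation.  */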
3996
3997 static int *same_regs;
3998
3999 static int num_same_regs;
4000
4001 /* Track any registers modified between the target of the first jump and
4002 the second jump. They never compare equal. */
4003
4004 static char *modified_regs;
4005
4006 /* Record if memory was modified. */
4007
4008 static int modified_mem;
4009
4010 /* Called via note_stores on each insn between the target of the first
4011 branch and the second branch. It marks any changed registers. */
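/* DEST is the location stored into; X is the SET or CLOBBER rtx
   performing the store, as passed by note_stores, and is not
   examined here.  */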
4012
4013 static void
4014 mark_modified_reg (dest, x)
4015 rtx dest;
4016 rtx x;
4017 {
4018 int regno, i;
4019
4020 if (GET_CODE (dest) == SUBREG)
4021 dest = SUBREG_REG (dest);
4022
4023 if (GET_CODE (dest) == MEM)
4024 modified_mem = 1;
4025
4026 if (GET_CODE (dest) != REG)
4027 return;
4028
4029 regno = REGNO (dest);
4030 if (regno >= FIRST_PSEUDO_REGISTER)
4031 modified_regs[regno] = 1;
4032 else
4033 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
4034 modified_regs[regno + i] = 1;
4035 }
4036
4037 /* F is the first insn in the chain of insns. */
4038
4039 void
4040 thread_jumps (f, max_reg, flag_before_loop)
4041 rtx f;
4042 int max_reg;
4043 int flag_before_loop;
4044 {
4045 /* Basic algorithm is to find a conditional branch,
4046 the label it may branch to, and the branch after
4047 that label. If the two branches test the same condition,
4048 walk back from both branch paths until the insn patterns
4049 differ, or code labels are hit. If we make it back to
4050 the target of the first branch, then we know that the first branch
4051 will either always succeed or always fail depending on the relative
4052 senses of the two branches. So adjust the first branch accordingly
4053 in this case. */
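/* Shape of the code this looks for (illustrative, with hypothetical
   labels):

	b1:  if (x == 0) goto L1;
	     ...
	L1:  ... insns matching those just before b1 ...
	b2:  if (x == 0) goto L2;

   If nothing the matched insns use changes in between, taking b1
   implies taking b2, so b1 can jump straight to L2 (or past b2 when
   the branch senses are opposite).  */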
4054
4055 rtx label, b1, b2, t1, t2;
4056 enum rtx_code code1, code2;
4057 rtx b1op0, b1op1, b2op0, b2op1;
4058 int changed = 1;
4059 int i;
4060 int *all_reset;
4061
4062 /* Allocate register tables and quick-reset table. */
4063 modified_regs = (char *) alloca (max_reg * sizeof (char));
4064 same_regs = (int *) alloca (max_reg * sizeof (int));
4065 all_reset = (int *) alloca (max_reg * sizeof (int));
4066 for (i = 0; i < max_reg; i++)
4067 all_reset[i] = -1;
4068
4069 while (changed)
4070 {
4071 changed = 0;
4072
4073 for (b1 = f; b1; b1 = NEXT_INSN (b1))
4074 {
4075 /* Get to a candidate branch insn. */
4076 if (GET_CODE (b1) != JUMP_INSN
4077 || ! condjump_p (b1) || simplejump_p (b1)
4078 || JUMP_LABEL (b1) == 0)
4079 continue;
4080
4081 bzero (modified_regs, max_reg * sizeof (char));
4082 modified_mem = 0;
4083
4084 bcopy ((char *) all_reset, (char *) same_regs,
4085 max_reg * sizeof (int));
4086 num_same_regs = 0;
4087
4088 label = JUMP_LABEL (b1);
4089
4090 /* Look for a branch after the target. Record any registers and
4091 memory modified between the target and the branch. Stop when we
4092 get to a label since we can't know what was changed there. */
4093 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
4094 {
4095 if (GET_CODE (b2) == CODE_LABEL)
4096 break;
4097
4098 else if (GET_CODE (b2) == JUMP_INSN)
4099 {
4100 /* If this is an unconditional jump and is the only use of
4101 its target label, we can follow it. */
4102 if (simplejump_p (b2)
4103 && JUMP_LABEL (b2) != 0
4104 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
4105 {
4106 b2 = JUMP_LABEL (b2);
4107 continue;
4108 }
4109 else
4110 break;
4111 }
4112
4113 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
4114 continue;
4115
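/* A call can change memory and any call-used register other than
   the stack, frame, and argument pointers.  */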
4116 if (GET_CODE (b2) == CALL_INSN)
4117 {
4118 modified_mem = 1;
4119 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4120 if (call_used_regs[i] && ! fixed_regs[i]
4121 && i != STACK_POINTER_REGNUM
4122 && i != FRAME_POINTER_REGNUM
4123 && i != HARD_FRAME_POINTER_REGNUM
4124 && i != ARG_POINTER_REGNUM)
4125 modified_regs[i] = 1;
4126 }
4127
4128 note_stores (PATTERN (b2), mark_modified_reg);
4129 }
4130
4131 /* Check the next candidate branch insn from the label
4132 of the first. */
4133 if (b2 == 0
4134 || GET_CODE (b2) != JUMP_INSN
4135 || b2 == b1
4136 || ! condjump_p (b2)
4137 || simplejump_p (b2))
4138 continue;
4139
4140 /* Get the comparison codes and operands, reversing the
4141 codes if appropriate. If we don't have comparison codes,
4142 we can't do anything. */
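/* A condjump pattern has the form (set (pc) (if_then_else COND
   (label_ref L) (pc))), possibly with the two arms interchanged;
   when the label sits in the ELSE arm, the branch is taken when
   COND is false, hence the reversal below.  */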
4143 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
4144 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
4145 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
4146 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
4147 code1 = reverse_condition (code1);
4148
4149 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
4150 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
4151 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
4152 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
4153 code2 = reverse_condition (code2);
4154
4155 /* If they test the same things and knowing that B1 branches
4156 tells us whether or not B2 branches, check if we
4157 can thread the branch. */
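/* For instance, comparison_dominates_p (EQ, LE) holds because
   x == y implies x <= y; so if B1's EQ branch is taken, B2's LE
   branch over matching operands must be taken as well.  */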
4158 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
4159 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
4160 && (comparison_dominates_p (code1, code2)
4161 || comparison_dominates_p (code1, reverse_condition (code2))))
4162 {
4163 t1 = prev_nonnote_insn (b1);
4164 t2 = prev_nonnote_insn (b2);
4165
4166 while (t1 != 0 && t2 != 0)
4167 {
4168 if (t2 == label)
4169 {
4170 /* We have reached the target of the first branch.
4171 If there are no pending register equivalents,
4172 we know that this branch will either always
4173 succeed (if the senses of the two branches are
4174 the same) or always fail (if not). */
4175 rtx new_label;
4176
4177 if (num_same_regs != 0)
4178 break;
4179
4180 if (comparison_dominates_p (code1, code2))
4181 new_label = JUMP_LABEL (b2);
4182 else
4183 new_label = get_label_after (b2);
4184
4185 if (JUMP_LABEL (b1) != new_label)
4186 {
4187 rtx prev = PREV_INSN (new_label);
4188
4189 if (flag_before_loop && GET_CODE (prev) == NOTE
4190 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
4191 {
4192 /* Don't thread to the loop label. If a loop
4193 label is reused, loop optimization will
4194 be disabled for that loop. */
4195 new_label = gen_label_rtx ();
4196 emit_label_after (new_label, PREV_INSN (prev));
4197 }
4198 changed |= redirect_jump (b1, new_label);
4199 }
4200 break;
4201 }
4202
4203 /* If either of these is not a normal insn (it might be
4204 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4205 have already been skipped above.) Similarly, fail
4206 if the insns are different. */
4207 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
4208 || recog_memoized (t1) != recog_memoized (t2)
4209 || ! rtx_equal_for_thread_p (PATTERN (t1),
4210 PATTERN (t2), t2))
4211 break;
4212
4213 t1 = prev_nonnote_insn (t1);
4214 t2 = prev_nonnote_insn (t2);
4215 }
4216 }
4217 }
4218 }
4219 }
4220 \f
4221 /* This is like RTX_EQUAL_P except that it knows about our handling of
4222 possibly equivalent registers and knows to consider volatile and
4223 modified objects as not equal.
4224
4225 YINSN is the insn containing Y. */
4226
4227 int
4228 rtx_equal_for_thread_p (x, y, yinsn)
4229 rtx x, y;
4230 rtx yinsn;
4231 {
4232 register int i;
4233 register int j;
4234 register enum rtx_code code;
4235 register char *fmt;
4236
4237 code = GET_CODE (x);
4238 /* Rtx's of different codes cannot be equal. */
4239 if (code != GET_CODE (y))
4240 return 0;
4241
4242 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
4243 (REG:SI x) and (REG:HI x) are NOT equivalent. */
4244
4245 if (GET_MODE (x) != GET_MODE (y))
4246 return 0;
4247
4248 /* For commutative operations, the RTXs match if the operands match in
4249 either order. Also handle the simple binary and unary cases without a loop. */
4250 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4251 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4252 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
4253 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
4254 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
4255 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4256 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4257 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
4258 else if (GET_RTX_CLASS (code) == '1')
4259 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4260
4261 /* Handle special cases first. */
4262 switch (code)
4263 {
4264 case REG:
4265 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
4266 return 1;
4267
4268 /* If neither is a user variable or a hard register, check for
4269 possible equivalence. */
4270 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
4271 || REGNO (x) < FIRST_PSEUDO_REGISTER
4272 || REGNO (y) < FIRST_PSEUDO_REGISTER)
4273 return 0;
4274
4275 if (same_regs[REGNO (x)] == -1)
4276 {
4277 same_regs[REGNO (x)] = REGNO (y);
4278 num_same_regs++;
4279
4280 /* If this is the first time we are seeing a register on the `Y'
4281 side, see if it is the last use. If not, we can't thread the
4282 jump, so mark it as not equivalent. */
4283 if (regno_last_uid[REGNO (y)] != INSN_UID (yinsn))
4284 return 0;
4285
4286 return 1;
4287 }
4288 else
4289 return (same_regs[REGNO (x)] == REGNO (y));
4290
4291 break;
4292
4293 case MEM:
4294 /* If memory modified or either volatile, not equivalent.
4295 Else, check address. */
4296 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4297 return 0;
4298
4299 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4300
4301 case ASM_INPUT:
4302 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4303 return 0;
4304
4305 break;
4306
4307 case SET:
4308 /* Cancel a pending `same_regs' if setting equivalenced registers.
4309 Then process source. */
4310 if (GET_CODE (SET_DEST (x)) == REG
4311 && GET_CODE (SET_DEST (y)) == REG)
4312 {
4313 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
4314 {
4315 same_regs[REGNO (SET_DEST (x))] = -1;
4316 num_same_regs--;
4317 }
4318 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
4319 return 0;
4320 }
4321 else
4322 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
4323 return 0;
4324
4325 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
4326
4327 case LABEL_REF:
4328 return XEXP (x, 0) == XEXP (y, 0);
4329
4330 case SYMBOL_REF:
4331 return XSTR (x, 0) == XSTR (y, 0);
4332 }
4333
4334 if (x == y)
4335 return 1;
4336
4337 fmt = GET_RTX_FORMAT (code);
4338 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4339 {
4340 switch (fmt[i])
4341 {
4342 case 'w':
4343 if (XWINT (x, i) != XWINT (y, i))
4344 return 0;
4345 break;
4346
4347 case 'n':
4348 case 'i':
4349 if (XINT (x, i) != XINT (y, i))
4350 return 0;
4351 break;
4352
4353 case 'V':
4354 case 'E':
4355 /* Two vectors must have the same length. */
4356 if (XVECLEN (x, i) != XVECLEN (y, i))
4357 return 0;
4358
4359 /* And the corresponding elements must match. */
4360 for (j = 0; j < XVECLEN (x, i); j++)
4361 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
4362 XVECEXP (y, i, j), yinsn) == 0)
4363 return 0;
4364 break;
4365
4366 case 'e':
4367 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
4368 return 0;
4369 break;
4370
4371 case 'S':
4372 case 's':
4373 if (strcmp (XSTR (x, i), XSTR (y, i)))
4374 return 0;
4375 break;
4376
4377 case 'u':
4378 /* These are just backpointers, so they don't matter. */
4379 break;
4380
4381 case '0':
4382 break;
4383
4384 /* It is believed that rtx's at this level will never
4385 contain anything but integers and other rtx's,
4386 except for within LABEL_REFs and SYMBOL_REFs. */
4387 default:
4388 abort ();
4389 }
4390 }
4391 return 1;
4392 }