]> gcc.gnu.org Git - gcc.git/blame - gcc/jump.c
Bring in final gcc-2.8.0 changes.
[gcc.git] / gcc / jump.c
CommitLineData
15a63be1 1/* Optimize jump instructions, for GNU compiler.
c85f7c16 2 Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.
15a63be1
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
a35311b0
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
15a63be1
RK
20
21
22/* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
25
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
30
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
38
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
43
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
46
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
50
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
53
54#include "config.h"
e9a25f70 55#include <stdio.h>
15a63be1
RK
56#include "rtl.h"
57#include "flags.h"
58#include "hard-reg-set.h"
59#include "regs.h"
15a63be1
RK
60#include "insn-config.h"
61#include "insn-flags.h"
e9a25f70 62#include "recog.h"
3c86a619 63#include "expr.h"
15a63be1 64#include "real.h"
6adb4e3a 65#include "except.h"
15a63be1
RK
66
67/* ??? Eventually must record somehow the labels used by jumps
68 from nested functions. */
69/* Pre-record the next or previous real insn for each label?
70 No, this pass is very fast anyway. */
71/* Condense consecutive labels?
72 This would make life analysis faster, maybe. */
73/* Optimize jump y; x: ... y: jumpif... x?
74 Don't know if it is worth bothering with. */
75/* Optimize two cases of conditional jump to conditional jump?
76 This can never delete any instruction or make anything dead,
77 or even change what is live at any point.
78 So perhaps let combiner do it. */
79
80/* Vector indexed by uid.
81 For each CODE_LABEL, index by its uid to get first unconditional jump
82 that jumps to the label.
83 For each JUMP_INSN, index by its uid to get the next unconditional jump
84 that jumps to the same label.
85 Element 0 is the start of a chain of all return insns.
 86 (It is safe to use element 0 because insn uid 0 is not used.) */
87
88static rtx *jump_chain;
89
90/* List of labels referred to from initializers.
91 These can never be deleted. */
92rtx forced_labels;
93
94/* Maximum index in jump_chain. */
95
96static int max_jump_chain;
97
98/* Set nonzero by jump_optimize if control can fall through
99 to the end of the function. */
100int can_reach_end;
101
102/* Indicates whether death notes are significant in cross jump analysis.
 103 Normally they are not significant, because if A and B jump to C,
 104 and R dies in A, it must die in B. But this might not be true after
105 stack register conversion, and we must compare death notes in that
0f41302f 106 case. */
15a63be1
RK
107
108static int cross_jump_death_matters = 0;
109
8cd2aff2
RK
110static int duplicate_loop_exit_test PROTO((rtx));
111static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
112static void do_cross_jump PROTO((rtx, rtx, rtx));
113static int jump_back_p PROTO((rtx, rtx));
114static int tension_vector_labels PROTO((rtx, int));
115static void mark_jump_label PROTO((rtx, rtx, int));
116static void delete_computation PROTO((rtx));
117static void delete_from_jump_chain PROTO((rtx));
118static int delete_labelref_insn PROTO((rtx, rtx, int));
119static void redirect_tablejump PROTO((rtx, rtx));
956d6950 120static rtx find_insert_position PROTO((rtx, rtx));
15a63be1
RK
121\f
122/* Delete no-op jumps and optimize jumps to jumps
123 and jumps around jumps.
124 Delete unused labels and unreachable code.
125
126 If CROSS_JUMP is 1, detect matching code
127 before a jump and its destination and unify them.
128 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
129
130 If NOOP_MOVES is nonzero, delete no-op move insns.
131
132 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
133 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
134
135 If `optimize' is zero, don't change any code,
136 just determine whether control drops off the end of the function.
137 This case occurs when we have -W and not -O.
138 It works because `delete_insn' checks the value of `optimize'
139 and refrains from actually deleting when that is 0. */
140
141void
142jump_optimize (f, cross_jump, noop_moves, after_regscan)
143 rtx f;
144 int cross_jump;
145 int noop_moves;
146 int after_regscan;
147{
484c3924 148 register rtx insn, next, note;
15a63be1
RK
149 int changed;
150 int first = 1;
151 int max_uid = 0;
152 rtx last_insn;
153
154 cross_jump_death_matters = (cross_jump == 2);
155
484c3924
RK
156 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
157 notes whose labels don't occur in the insn any more. */
15a63be1
RK
158
159 for (insn = f; insn; insn = NEXT_INSN (insn))
160 {
161 if (GET_CODE (insn) == CODE_LABEL)
162 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
163 else if (GET_CODE (insn) == JUMP_INSN)
164 JUMP_LABEL (insn) = 0;
484c3924
RK
165 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
166 for (note = REG_NOTES (insn); note; note = next)
167 {
168 next = XEXP (note, 1);
169 if (REG_NOTE_KIND (note) == REG_LABEL
170 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
171 remove_note (insn, note);
172 }
173
15a63be1
RK
174 if (INSN_UID (insn) > max_uid)
175 max_uid = INSN_UID (insn);
176 }
177
178 max_uid++;
179
180 /* Delete insns following barriers, up to next label. */
181
182 for (insn = f; insn;)
183 {
184 if (GET_CODE (insn) == BARRIER)
185 {
186 insn = NEXT_INSN (insn);
187 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
188 {
189 if (GET_CODE (insn) == NOTE
190 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
191 insn = NEXT_INSN (insn);
192 else
193 insn = delete_insn (insn);
194 }
195 /* INSN is now the code_label. */
196 }
197 else
198 insn = NEXT_INSN (insn);
199 }
200
201 /* Leave some extra room for labels and duplicate exit test insns
202 we make. */
203 max_jump_chain = max_uid * 14 / 10;
204 jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
4c9a05bc 205 bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));
15a63be1
RK
206
207 /* Mark the label each jump jumps to.
208 Combine consecutive labels, and count uses of labels.
209
210 For each label, make a chain (using `jump_chain')
211 of all the *unconditional* jumps that jump to it;
212 also make a chain of all returns. */
213
214 for (insn = f; insn; insn = NEXT_INSN (insn))
8cd2aff2 215 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
15a63be1
RK
216 && ! INSN_DELETED_P (insn))
217 {
218 mark_jump_label (PATTERN (insn), insn, cross_jump);
219 if (GET_CODE (insn) == JUMP_INSN)
220 {
221 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
222 {
223 jump_chain[INSN_UID (insn)]
224 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
225 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
226 }
227 if (GET_CODE (PATTERN (insn)) == RETURN)
228 {
229 jump_chain[INSN_UID (insn)] = jump_chain[0];
230 jump_chain[0] = insn;
231 }
232 }
233 }
234
235 /* Keep track of labels used from static data;
236 they cannot ever be deleted. */
237
238 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
239 LABEL_NUSES (XEXP (insn, 0))++;
240
6adb4e3a
MS
241 check_exception_handler_labels ();
242
243 /* Keep track of labels used for marking handlers for exception
244 regions; they cannot usually be deleted. */
245
246 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
247 LABEL_NUSES (XEXP (insn, 0))++;
248
249 exception_optimize ();
250
15a63be1
RK
251 /* Delete all labels already not referenced.
252 Also find the last insn. */
253
254 last_insn = 0;
255 for (insn = f; insn; )
256 {
257 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
258 insn = delete_insn (insn);
259 else
260 {
261 last_insn = insn;
262 insn = NEXT_INSN (insn);
263 }
264 }
265
266 if (!optimize)
267 {
268 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
269 If so record that this function can drop off the end. */
270
271 insn = last_insn;
272 {
273 int n_labels = 1;
274 while (insn
275 /* One label can follow the end-note: the return label. */
276 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
277 /* Ordinary insns can follow it if returning a structure. */
278 || GET_CODE (insn) == INSN
279 /* If machine uses explicit RETURN insns, no epilogue,
280 then one of them follows the note. */
281 || (GET_CODE (insn) == JUMP_INSN
282 && GET_CODE (PATTERN (insn)) == RETURN)
60374599
DE
283 /* A barrier can follow the return insn. */
284 || GET_CODE (insn) == BARRIER
15a63be1
RK
285 /* Other kinds of notes can follow also. */
286 || (GET_CODE (insn) == NOTE
287 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
288 insn = PREV_INSN (insn);
289 }
290
291 /* Report if control can fall through at the end of the function. */
292 if (insn && GET_CODE (insn) == NOTE
293 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
294 && ! INSN_DELETED_P (insn))
295 can_reach_end = 1;
296
297 /* Zero the "deleted" flag of all the "deleted" insns. */
298 for (insn = f; insn; insn = NEXT_INSN (insn))
299 INSN_DELETED_P (insn) = 0;
300 return;
301 }
302
303#ifdef HAVE_return
304 if (HAVE_return)
305 {
306 /* If we fall through to the epilogue, see if we can insert a RETURN insn
307 in front of it. If the machine allows it at this point (we might be
308 after reload for a leaf routine), it will improve optimization for it
309 to be there. */
310 insn = get_last_insn ();
311 while (insn && GET_CODE (insn) == NOTE)
312 insn = PREV_INSN (insn);
313
314 if (insn && GET_CODE (insn) != BARRIER)
315 {
316 emit_jump_insn (gen_return ());
317 emit_barrier ();
318 }
319 }
320#endif
321
322 if (noop_moves)
323 for (insn = f; insn; )
324 {
2156dfe3 325 next = NEXT_INSN (insn);
15a63be1
RK
326
327 if (GET_CODE (insn) == INSN)
328 {
329 register rtx body = PATTERN (insn);
330
331/* Combine stack_adjusts with following push_insns. */
332#ifdef PUSH_ROUNDING
333 if (GET_CODE (body) == SET
334 && SET_DEST (body) == stack_pointer_rtx
335 && GET_CODE (SET_SRC (body)) == PLUS
336 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
337 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
338 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
339 {
340 rtx p;
341 rtx stack_adjust_insn = insn;
342 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
343 int total_pushed = 0;
344 int pushes = 0;
345
346 /* Find all successive push insns. */
347 p = insn;
348 /* Don't convert more than three pushes;
349 that starts adding too many displaced addresses
350 and the whole thing starts becoming a losing
351 proposition. */
352 while (pushes < 3)
353 {
354 rtx pbody, dest;
355 p = next_nonnote_insn (p);
356 if (p == 0 || GET_CODE (p) != INSN)
357 break;
358 pbody = PATTERN (p);
359 if (GET_CODE (pbody) != SET)
360 break;
361 dest = SET_DEST (pbody);
362 /* Allow a no-op move between the adjust and the push. */
363 if (GET_CODE (dest) == REG
364 && GET_CODE (SET_SRC (pbody)) == REG
365 && REGNO (dest) == REGNO (SET_SRC (pbody)))
366 continue;
367 if (! (GET_CODE (dest) == MEM
368 && GET_CODE (XEXP (dest, 0)) == POST_INC
369 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
370 break;
371 pushes++;
1ad4c71a 372 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
15a63be1
RK
373 > stack_adjust_amount)
374 break;
1ad4c71a 375 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
15a63be1
RK
376 }
377
378 /* Discard the amount pushed from the stack adjust;
379 maybe eliminate it entirely. */
380 if (total_pushed >= stack_adjust_amount)
381 {
344a8220 382 delete_computation (stack_adjust_insn);
15a63be1
RK
383 total_pushed = stack_adjust_amount;
384 }
385 else
386 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
5f4f0e22 387 = GEN_INT (stack_adjust_amount - total_pushed);
15a63be1
RK
388
389 /* Change the appropriate push insns to ordinary stores. */
390 p = insn;
391 while (total_pushed > 0)
392 {
393 rtx pbody, dest;
394 p = next_nonnote_insn (p);
395 if (GET_CODE (p) != INSN)
396 break;
397 pbody = PATTERN (p);
398 if (GET_CODE (pbody) == SET)
399 break;
400 dest = SET_DEST (pbody);
401 if (! (GET_CODE (dest) == MEM
402 && GET_CODE (XEXP (dest, 0)) == POST_INC
403 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
404 break;
1ad4c71a 405 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
15a63be1
RK
406 /* If this push doesn't fully fit in the space
407 of the stack adjust that we deleted,
408 make another stack adjust here for what we
409 didn't use up. There should be peepholes
410 to recognize the resulting sequence of insns. */
411 if (total_pushed < 0)
412 {
413 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
5f4f0e22 414 GEN_INT (- total_pushed)),
15a63be1
RK
415 p);
416 break;
417 }
418 XEXP (dest, 0)
419 = plus_constant (stack_pointer_rtx, total_pushed);
420 }
421 }
422#endif
423
424 /* Detect and delete no-op move instructions
425 resulting from not allocating a parameter in a register. */
426
427 if (GET_CODE (body) == SET
428 && (SET_DEST (body) == SET_SRC (body)
429 || (GET_CODE (SET_DEST (body)) == MEM
430 && GET_CODE (SET_SRC (body)) == MEM
431 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
432 && ! (GET_CODE (SET_DEST (body)) == MEM
433 && MEM_VOLATILE_P (SET_DEST (body)))
434 && ! (GET_CODE (SET_SRC (body)) == MEM
435 && MEM_VOLATILE_P (SET_SRC (body))))
344a8220 436 delete_computation (insn);
15a63be1
RK
437
438 /* Detect and ignore no-op move instructions
439 resulting from smart or fortuitous register allocation. */
440
441 else if (GET_CODE (body) == SET)
442 {
443 int sreg = true_regnum (SET_SRC (body));
444 int dreg = true_regnum (SET_DEST (body));
445
446 if (sreg == dreg && sreg >= 0)
447 delete_insn (insn);
448 else if (sreg >= 0 && dreg >= 0)
449 {
450 rtx trial;
5f4f0e22
CH
451 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
452 sreg, NULL_PTR, dreg,
15a63be1
RK
453 GET_MODE (SET_SRC (body)));
454
db3cf6fb
MS
455 if (tem != 0
456 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
15a63be1
RK
457 {
458 /* DREG may have been the target of a REG_DEAD note in
459 the insn which makes INSN redundant. If so, reorg
460 would still think it is dead. So search for such a
461 note and delete it if we find it. */
59df2b2b
RK
462 if (! find_regno_note (insn, REG_UNUSED, dreg))
463 for (trial = prev_nonnote_insn (insn);
464 trial && GET_CODE (trial) != CODE_LABEL;
465 trial = prev_nonnote_insn (trial))
466 if (find_regno_note (trial, REG_DEAD, dreg))
467 {
468 remove_death (dreg, trial);
469 break;
470 }
471#ifdef PRESERVE_DEATH_INFO_REGNO_P
472 /* Deleting insn could lose a death-note for SREG
473 so don't do it if final needs accurate
474 death-notes. */
475 if (PRESERVE_DEATH_INFO_REGNO_P (sreg)
476 && (trial = find_regno_note (insn, REG_DEAD, sreg)))
477 {
478 /* Change this into a USE so that we won't emit
479 code for it, but still can keep the note. */
480 PATTERN (insn)
481 = gen_rtx (USE, VOIDmode, XEXP (trial, 0));
d8db8192 482 INSN_CODE (insn) = -1;
59df2b2b
RK
483 /* Remove all reg notes but the REG_DEAD one. */
484 REG_NOTES (insn) = trial;
485 XEXP (trial, 1) = NULL_RTX;
486 }
487 else
488#endif
15a63be1
RK
489 delete_insn (insn);
490 }
491 }
492 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
5f4f0e22
CH
493 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
494 NULL_PTR, 0,
495 GET_MODE (SET_DEST (body))))
15a63be1
RK
496 {
497 /* This handles the case where we have two consecutive
498 assignments of the same constant to pseudos that didn't
499 get a hard reg. Each SET from the constant will be
500 converted into a SET of the spill register and an
501 output reload will be made following it. This produces
502 two loads of the same constant into the same spill
503 register. */
504
505 rtx in_insn = insn;
506
507 /* Look back for a death note for the first reg.
508 If there is one, it is no longer accurate. */
509 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
510 {
511 if ((GET_CODE (in_insn) == INSN
512 || GET_CODE (in_insn) == JUMP_INSN)
513 && find_regno_note (in_insn, REG_DEAD, dreg))
514 {
515 remove_death (dreg, in_insn);
516 break;
517 }
518 in_insn = PREV_INSN (in_insn);
519 }
520
521 /* Delete the second load of the value. */
522 delete_insn (insn);
523 }
524 }
525 else if (GET_CODE (body) == PARALLEL)
526 {
527 /* If each part is a set between two identical registers or
0f41302f 528 a USE or CLOBBER, delete the insn. */
15a63be1
RK
529 int i, sreg, dreg;
530 rtx tem;
531
532 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
533 {
534 tem = XVECEXP (body, 0, i);
535 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
536 continue;
537
538 if (GET_CODE (tem) != SET
539 || (sreg = true_regnum (SET_SRC (tem))) < 0
540 || (dreg = true_regnum (SET_DEST (tem))) < 0
541 || dreg != sreg)
542 break;
543 }
544
545 if (i < 0)
546 delete_insn (insn);
547 }
15a63be1 548 /* Also delete insns to store bit fields if they are no-ops. */
f76b9db2
ILT
549 /* Not worth the hair to detect this in the big-endian case. */
550 else if (! BYTES_BIG_ENDIAN
551 && GET_CODE (body) == SET
15a63be1
RK
552 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
553 && XEXP (SET_DEST (body), 2) == const0_rtx
554 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
555 && ! (GET_CODE (SET_SRC (body)) == MEM
556 && MEM_VOLATILE_P (SET_SRC (body))))
557 delete_insn (insn);
15a63be1
RK
558 }
559 insn = next;
560 }
561
2156dfe3
RK
562 /* If we haven't yet gotten to reload and we have just run regscan,
563 delete any insn that sets a register that isn't used elsewhere.
564 This helps some of the optimizations below by having less insns
565 being jumped around. */
566
567 if (! reload_completed && after_regscan)
568 for (insn = f; insn; insn = next)
569 {
570 rtx set = single_set (insn);
571
572 next = NEXT_INSN (insn);
573
574 if (set && GET_CODE (SET_DEST (set)) == REG
575 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
b1f21e0a 576 && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
b803fb99
RS
577 /* We use regno_last_note_uid so as not to delete the setting
578 of a reg that's used in notes. A subsequent optimization
579 might arrange to use that reg for real. */
b1f21e0a 580 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
d008e26c
RK
581 && ! side_effects_p (SET_SRC (set))
582 && ! find_reg_note (insn, REG_RETVAL, 0))
2156dfe3
RK
583 delete_insn (insn);
584 }
585
15a63be1
RK
586 /* Now iterate optimizing jumps until nothing changes over one pass. */
587 changed = 1;
588 while (changed)
589 {
15a63be1
RK
590 changed = 0;
591
592 for (insn = f; insn; insn = next)
593 {
594 rtx reallabelprev;
2156dfe3 595 rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
15a63be1 596 rtx nlabel;
3915de94 597 int this_is_simplejump, this_is_condjump, reversep;
3480bb98 598 int this_is_condjump_in_parallel;
15a63be1
RK
599#if 0
600 /* If NOT the first iteration, if this is the last jump pass
601 (just before final), do the special peephole optimizations.
602 Avoiding the first iteration gives ordinary jump opts
603 a chance to work before peephole opts. */
604
605 if (reload_completed && !first && !flag_no_peephole)
606 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
607 peephole (insn);
608#endif
609
610 /* That could have deleted some insns after INSN, so check now
611 what the following insn is. */
612
613 next = NEXT_INSN (insn);
614
615 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
616 jump. Try to optimize by duplicating the loop exit test if so.
617 This is only safe immediately after regscan, because it uses
618 the values of regno_first_uid and regno_last_uid. */
619 if (after_regscan && GET_CODE (insn) == NOTE
620 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
621 && (temp1 = next_nonnote_insn (insn)) != 0
622 && simplejump_p (temp1))
623 {
624 temp = PREV_INSN (insn);
625 if (duplicate_loop_exit_test (insn))
626 {
627 changed = 1;
628 next = NEXT_INSN (temp);
629 continue;
630 }
631 }
632
633 if (GET_CODE (insn) != JUMP_INSN)
634 continue;
635
636 this_is_simplejump = simplejump_p (insn);
637 this_is_condjump = condjump_p (insn);
3480bb98 638 this_is_condjump_in_parallel = condjump_in_parallel_p (insn);
15a63be1
RK
639
640 /* Tension the labels in dispatch tables. */
641
642 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
643 changed |= tension_vector_labels (PATTERN (insn), 0);
644 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
645 changed |= tension_vector_labels (PATTERN (insn), 1);
646
647 /* If a dispatch table always goes to the same place,
648 get rid of it and replace the insn that uses it. */
649
650 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
651 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
652 {
653 int i;
654 rtx pat = PATTERN (insn);
655 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
656 int len = XVECLEN (pat, diff_vec_p);
657 rtx dispatch = prev_real_insn (insn);
658
659 for (i = 0; i < len; i++)
660 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
661 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
662 break;
663 if (i == len
0546e268 664 && dispatch != 0
15a63be1
RK
665 && GET_CODE (dispatch) == JUMP_INSN
666 && JUMP_LABEL (dispatch) != 0
667 /* Don't mess with a casesi insn. */
668 && !(GET_CODE (PATTERN (dispatch)) == SET
669 && (GET_CODE (SET_SRC (PATTERN (dispatch)))
670 == IF_THEN_ELSE))
671 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
672 {
673 redirect_tablejump (dispatch,
674 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
675 changed = 1;
676 }
677 }
678
679 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
680
681 /* If a jump references the end of the function, try to turn
682 it into a RETURN insn, possibly a conditional one. */
683 if (JUMP_LABEL (insn)
8e318904
JL
684 && (next_active_insn (JUMP_LABEL (insn)) == 0
685 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
686 == RETURN))
5f4f0e22 687 changed |= redirect_jump (insn, NULL_RTX);
15a63be1
RK
688
689 /* Detect jump to following insn. */
690 if (reallabelprev == insn && condjump_p (insn))
691 {
cd423ead 692 next = next_real_insn (JUMP_LABEL (insn));
15a63be1
RK
693 delete_jump (insn);
694 changed = 1;
695 continue;
696 }
697
d45cf215 698 /* If we have an unconditional jump preceded by a USE, try to put
15a63be1
RK
699 the USE before the target and jump there. This simplifies many
700 of the optimizations below since we don't have to worry about
701 dealing with these USE insns. We only do this if the label
 702 being branched to already has the identical USE or if code
703 never falls through to that label. */
704
705 if (this_is_simplejump
706 && (temp = prev_nonnote_insn (insn)) != 0
707 && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
708 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
709 && (GET_CODE (temp1) == BARRIER
710 || (GET_CODE (temp1) == INSN
9740123d
JW
711 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
712 /* Don't do this optimization if we have a loop containing only
713 the USE instruction, and the loop start label has a usage
714 count of 1. This is because we will redo this optimization
 715 every time through the outer loop, and jump opt will never
716 exit. */
717 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
718 && temp2 == JUMP_LABEL (insn)
719 && LABEL_NUSES (temp2) == 1))
15a63be1
RK
720 {
721 if (GET_CODE (temp1) == BARRIER)
722 {
2156dfe3 723 emit_insn_after (PATTERN (temp), temp1);
15a63be1
RK
724 temp1 = NEXT_INSN (temp1);
725 }
15a63be1 726
2156dfe3 727 delete_insn (temp);
15a63be1
RK
728 redirect_jump (insn, get_label_before (temp1));
729 reallabelprev = prev_real_insn (temp1);
730 changed = 1;
731 }
732
733 /* Simplify if (...) x = a; else x = b; by converting it
734 to x = b; if (...) x = a;
735 if B is sufficiently simple, the test doesn't involve X,
736 and nothing in the test modifies B or X.
737
738 If we have small register classes, we also can't do this if X
739 is a hard register.
740
741 If the "x = b;" insn has any REG_NOTES, we don't do this because
742 of the possibility that we are running after CSE and there is a
743 REG_EQUAL note that is only valid if the branch has already been
744 taken. If we move the insn with the REG_EQUAL note, we may
745 fold the comparison to always be false in a later CSE pass.
746 (We could also delete the REG_NOTES when moving the insn, but it
747 seems simpler to not move it.) An exception is that we can move
748 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
749 value is the same as "b".
750
751 INSN is the branch over the `else' part.
752
753 We set:
754
d45cf215 755 TEMP to the jump insn preceding "x = a;"
15a63be1
RK
756 TEMP1 to X
757 TEMP2 to the insn that sets "x = b;"
2156dfe3
RK
758 TEMP3 to the insn that sets "x = a;"
759 TEMP4 to the set of "x = b"; */
15a63be1
RK
760
761 if (this_is_simplejump
762 && (temp3 = prev_active_insn (insn)) != 0
763 && GET_CODE (temp3) == INSN
2156dfe3
RK
764 && (temp4 = single_set (temp3)) != 0
765 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
f95182a4
ILT
766 && (! SMALL_REGISTER_CLASSES
767 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
15a63be1
RK
768 && (temp2 = next_active_insn (insn)) != 0
769 && GET_CODE (temp2) == INSN
2156dfe3
RK
770 && (temp4 = single_set (temp2)) != 0
771 && rtx_equal_p (SET_DEST (temp4), temp1)
772 && (GET_CODE (SET_SRC (temp4)) == REG
773 || GET_CODE (SET_SRC (temp4)) == SUBREG
774 || CONSTANT_P (SET_SRC (temp4)))
15a63be1
RK
775 && (REG_NOTES (temp2) == 0
776 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
777 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
778 && XEXP (REG_NOTES (temp2), 1) == 0
779 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
2156dfe3 780 SET_SRC (temp4))))
15a63be1
RK
781 && (temp = prev_active_insn (temp3)) != 0
782 && condjump_p (temp) && ! simplejump_p (temp)
783 /* TEMP must skip over the "x = a;" insn */
784 && prev_real_insn (JUMP_LABEL (temp)) == insn
785 && no_labels_between_p (insn, JUMP_LABEL (temp))
786 /* There must be no other entries to the "x = b;" insn. */
787 && no_labels_between_p (JUMP_LABEL (temp), temp2)
788 /* INSN must either branch to the insn after TEMP2 or the insn
789 after TEMP2 must branch to the same place as INSN. */
790 && (reallabelprev == temp2
2156dfe3
RK
791 || ((temp5 = next_active_insn (temp2)) != 0
792 && simplejump_p (temp5)
793 && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
15a63be1
RK
794 {
795 /* The test expression, X, may be a complicated test with
796 multiple branches. See if we can find all the uses of
797 the label that TEMP branches to without hitting a CALL_INSN
798 or a jump to somewhere else. */
799 rtx target = JUMP_LABEL (temp);
800 int nuses = LABEL_NUSES (target);
801 rtx p, q;
802
803 /* Set P to the first jump insn that goes around "x = a;". */
804 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
805 {
806 if (GET_CODE (p) == JUMP_INSN)
807 {
808 if (condjump_p (p) && ! simplejump_p (p)
809 && JUMP_LABEL (p) == target)
810 {
811 nuses--;
812 if (nuses == 0)
813 break;
814 }
815 else
816 break;
817 }
818 else if (GET_CODE (p) == CALL_INSN)
819 break;
820 }
821
822#ifdef HAVE_cc0
823 /* We cannot insert anything between a set of cc and its use
824 so if P uses cc0, we must back up to the previous insn. */
825 q = prev_nonnote_insn (p);
826 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
827 && sets_cc0_p (PATTERN (q)))
828 p = q;
829#endif
830
831 if (p)
832 p = PREV_INSN (p);
833
834 /* If we found all the uses and there was no data conflict, we
835 can move the assignment unless we can branch into the middle
836 from somewhere. */
837 if (nuses == 0 && p
838 && no_labels_between_p (p, insn)
839 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
840 && ! reg_set_between_p (temp1, p, temp3)
2156dfe3
RK
841 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
842 || ! reg_set_between_p (SET_SRC (temp4), p, temp2)))
15a63be1 843 {
2156dfe3
RK
844 emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
845 delete_insn (temp2);
15a63be1
RK
846
847 /* Set NEXT to an insn that we know won't go away. */
848 next = next_active_insn (insn);
849
850 /* Delete the jump around the set. Note that we must do
851 this before we redirect the test jumps so that it won't
852 delete the code immediately following the assignment
853 we moved (which might be a jump). */
854
855 delete_insn (insn);
856
857 /* We either have two consecutive labels or a jump to
858 a jump, so adjust all the JUMP_INSNs to branch to where
859 INSN branches to. */
860 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
861 if (GET_CODE (p) == JUMP_INSN)
862 redirect_jump (p, target);
863
864 changed = 1;
865 continue;
866 }
867 }
868
66bd9361
JW
869 /* Simplify if (...) { x = a; goto l; } x = b; by converting it
870 to x = a; if (...) goto l; x = b;
871 if A is sufficiently simple, the test doesn't involve X,
872 and nothing in the test modifies A or X.
873
874 If we have small register classes, we also can't do this if X
875 is a hard register.
876
877 If the "x = a;" insn has any REG_NOTES, we don't do this because
878 of the possibility that we are running after CSE and there is a
879 REG_EQUAL note that is only valid if the branch has already been
880 taken. If we move the insn with the REG_EQUAL note, we may
881 fold the comparison to always be false in a later CSE pass.
882 (We could also delete the REG_NOTES when moving the insn, but it
883 seems simpler to not move it.) An exception is that we can move
884 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
885 value is the same as "a".
886
887 INSN is the goto.
888
889 We set:
890
891 TEMP to the jump insn preceding "x = a;"
892 TEMP1 to X
893 TEMP2 to the insn that sets "x = b;"
894 TEMP3 to the insn that sets "x = a;"
895 TEMP4 to the set of "x = a"; */
896
897 if (this_is_simplejump
898 && (temp2 = next_active_insn (insn)) != 0
899 && GET_CODE (temp2) == INSN
900 && (temp4 = single_set (temp2)) != 0
901 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
f95182a4
ILT
902 && (! SMALL_REGISTER_CLASSES
903 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
66bd9361
JW
904 && (temp3 = prev_active_insn (insn)) != 0
905 && GET_CODE (temp3) == INSN
906 && (temp4 = single_set (temp3)) != 0
907 && rtx_equal_p (SET_DEST (temp4), temp1)
908 && (GET_CODE (SET_SRC (temp4)) == REG
909 || GET_CODE (SET_SRC (temp4)) == SUBREG
910 || CONSTANT_P (SET_SRC (temp4)))
911 && (REG_NOTES (temp3) == 0
912 || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
913 || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
914 && XEXP (REG_NOTES (temp3), 1) == 0
915 && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
916 SET_SRC (temp4))))
917 && (temp = prev_active_insn (temp3)) != 0
918 && condjump_p (temp) && ! simplejump_p (temp)
919 /* TEMP must skip over the "x = a;" insn */
920 && prev_real_insn (JUMP_LABEL (temp)) == insn
921 && no_labels_between_p (temp, insn))
922 {
923 rtx prev_label = JUMP_LABEL (temp);
924 rtx insert_after = prev_nonnote_insn (temp);
925
926#ifdef HAVE_cc0
927 /* We cannot insert anything between a set of cc and its use. */
928 if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
929 && sets_cc0_p (PATTERN (insert_after)))
930 insert_after = prev_nonnote_insn (insert_after);
931#endif
932 ++LABEL_NUSES (prev_label);
933
934 if (insert_after
935 && no_labels_between_p (insert_after, temp)
7a7233ff 936 && ! reg_referenced_between_p (temp1, insert_after, temp3)
04bd0246
DE
937 && ! reg_referenced_between_p (temp1, temp3,
938 NEXT_INSN (temp2))
66bd9361
JW
939 && ! reg_set_between_p (temp1, insert_after, temp)
940 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
941 || ! reg_set_between_p (SET_SRC (temp4),
942 insert_after, temp))
943 && invert_jump (temp, JUMP_LABEL (insn)))
944 {
945 emit_insn_after_with_line_notes (PATTERN (temp3),
946 insert_after, temp3);
947 delete_insn (temp3);
948 delete_insn (insn);
949 /* Set NEXT to an insn that we know won't go away. */
950 next = temp2;
951 changed = 1;
952 }
953 if (prev_label && --LABEL_NUSES (prev_label) == 0)
954 delete_insn (prev_label);
955 if (changed)
956 continue;
957 }
958
2156dfe3
RK
959#ifndef HAVE_cc0
960 /* If we have if (...) x = exp; and branches are expensive,
961 EXP is a single insn, does not have any side effects, cannot
962 trap, and is not too costly, convert this to
963 t = exp; if (...) x = t;
964
965 Don't do this when we have CC0 because it is unlikely to help
966 and we'd need to worry about where to place the new insn and
967 the potential for conflicts. We also can't do this when we have
968 notes on the insn for the same reason as above.
969
970 We set:
971
972 TEMP to the "x = exp;" insn.
973 TEMP1 to the single set in the "x = exp; insn.
974 TEMP2 to "x". */
975
976 if (! reload_completed
977 && this_is_condjump && ! this_is_simplejump
978 && BRANCH_COST >= 3
979 && (temp = next_nonnote_insn (insn)) != 0
a8fc41af 980 && GET_CODE (temp) == INSN
2156dfe3
RK
981 && REG_NOTES (temp) == 0
982 && (reallabelprev == temp
983 || ((temp2 = next_active_insn (temp)) != 0
984 && simplejump_p (temp2)
985 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
986 && (temp1 = single_set (temp)) != 0
987 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
988 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
f95182a4
ILT
989 && (! SMALL_REGISTER_CLASSES
990 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
2156dfe3
RK
991 && GET_CODE (SET_SRC (temp1)) != REG
992 && GET_CODE (SET_SRC (temp1)) != SUBREG
993 && GET_CODE (SET_SRC (temp1)) != CONST_INT
994 && ! side_effects_p (SET_SRC (temp1))
995 && ! may_trap_p (SET_SRC (temp1))
97fa962f 996 && rtx_cost (SET_SRC (temp1), SET) < 10)
2156dfe3
RK
997 {
998 rtx new = gen_reg_rtx (GET_MODE (temp2));
999
956d6950
JL
1000 if ((temp3 = find_insert_position (insn, temp))
1001 && validate_change (temp, &SET_DEST (temp1), new, 0))
2156dfe3
RK
1002 {
1003 next = emit_insn_after (gen_move_insn (temp2, new), insn);
1004 emit_insn_after_with_line_notes (PATTERN (temp),
956d6950 1005 PREV_INSN (temp3), temp);
2156dfe3 1006 delete_insn (temp);
5954c8a8 1007 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
2156dfe3
RK
1008 }
1009 }
1010
1011 /* Similarly, if it takes two insns to compute EXP but they
1012 have the same destination. Here TEMP3 will be the second
1013 insn and TEMP4 the SET from that insn. */
1014
1015 if (! reload_completed
1016 && this_is_condjump && ! this_is_simplejump
1017 && BRANCH_COST >= 4
1018 && (temp = next_nonnote_insn (insn)) != 0
a8fc41af 1019 && GET_CODE (temp) == INSN
2156dfe3
RK
1020 && REG_NOTES (temp) == 0
1021 && (temp3 = next_nonnote_insn (temp)) != 0
a8fc41af 1022 && GET_CODE (temp3) == INSN
2156dfe3
RK
1023 && REG_NOTES (temp3) == 0
1024 && (reallabelprev == temp3
1025 || ((temp2 = next_active_insn (temp3)) != 0
1026 && simplejump_p (temp2)
1027 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
1028 && (temp1 = single_set (temp)) != 0
1029 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
1030 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
f95182a4
ILT
1031 && (! SMALL_REGISTER_CLASSES
1032 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
2156dfe3
RK
1033 && ! side_effects_p (SET_SRC (temp1))
1034 && ! may_trap_p (SET_SRC (temp1))
97fa962f 1035 && rtx_cost (SET_SRC (temp1), SET) < 10
2156dfe3
RK
1036 && (temp4 = single_set (temp3)) != 0
1037 && rtx_equal_p (SET_DEST (temp4), temp2)
1038 && ! side_effects_p (SET_SRC (temp4))
1039 && ! may_trap_p (SET_SRC (temp4))
97fa962f 1040 && rtx_cost (SET_SRC (temp4), SET) < 10)
2156dfe3
RK
1041 {
1042 rtx new = gen_reg_rtx (GET_MODE (temp2));
1043
956d6950
JL
1044 if ((temp5 = find_insert_position (insn, temp))
1045 && (temp6 = find_insert_position (insn, temp3))
1046 && validate_change (temp, &SET_DEST (temp1), new, 0))
2156dfe3 1047 {
956d6950
JL
1048 /* Use the earliest of temp5 and temp6. */
1049 if (temp5 != insn)
1050 temp6 = temp5;
2156dfe3
RK
1051 next = emit_insn_after (gen_move_insn (temp2, new), insn);
1052 emit_insn_after_with_line_notes (PATTERN (temp),
956d6950 1053 PREV_INSN (temp6), temp);
2156dfe3
RK
1054 emit_insn_after_with_line_notes
1055 (replace_rtx (PATTERN (temp3), temp2, new),
956d6950 1056 PREV_INSN (temp6), temp3);
2156dfe3
RK
1057 delete_insn (temp);
1058 delete_insn (temp3);
5954c8a8 1059 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
2156dfe3
RK
1060 }
1061 }
1062
1063 /* Finally, handle the case where two insns are used to
1064 compute EXP but a temporary register is used. Here we must
0f41302f 1065 ensure that the temporary register is not used anywhere else. */
2156dfe3
RK
1066
1067 if (! reload_completed
1068 && after_regscan
1069 && this_is_condjump && ! this_is_simplejump
1070 && BRANCH_COST >= 4
1071 && (temp = next_nonnote_insn (insn)) != 0
a8fc41af 1072 && GET_CODE (temp) == INSN
2156dfe3
RK
1073 && REG_NOTES (temp) == 0
1074 && (temp3 = next_nonnote_insn (temp)) != 0
a8fc41af 1075 && GET_CODE (temp3) == INSN
2156dfe3
RK
1076 && REG_NOTES (temp3) == 0
1077 && (reallabelprev == temp3
1078 || ((temp2 = next_active_insn (temp3)) != 0
1079 && simplejump_p (temp2)
1080 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
1081 && (temp1 = single_set (temp)) != 0
c03c4711
RK
1082 && (temp5 = SET_DEST (temp1),
1083 (GET_CODE (temp5) == REG
1084 || (GET_CODE (temp5) == SUBREG
1085 && (temp5 = SUBREG_REG (temp5),
1086 GET_CODE (temp5) == REG))))
2156dfe3 1087 && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
b1f21e0a
MM
1088 && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
1089 && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
2156dfe3
RK
1090 && ! side_effects_p (SET_SRC (temp1))
1091 && ! may_trap_p (SET_SRC (temp1))
97fa962f 1092 && rtx_cost (SET_SRC (temp1), SET) < 10
2156dfe3
RK
1093 && (temp4 = single_set (temp3)) != 0
1094 && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
1095 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
f95182a4
ILT
1096 && (! SMALL_REGISTER_CLASSES
1097 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
2156dfe3
RK
1098 && rtx_equal_p (SET_DEST (temp4), temp2)
1099 && ! side_effects_p (SET_SRC (temp4))
1100 && ! may_trap_p (SET_SRC (temp4))
97fa962f 1101 && rtx_cost (SET_SRC (temp4), SET) < 10)
2156dfe3
RK
1102 {
1103 rtx new = gen_reg_rtx (GET_MODE (temp2));
1104
956d6950
JL
1105 if ((temp5 = find_insert_position (insn, temp))
1106 && (temp6 = find_insert_position (insn, temp3))
1107 && validate_change (temp3, &SET_DEST (temp4), new, 0))
2156dfe3 1108 {
956d6950
JL
1109 /* Use the earliest of temp5 and temp6. */
1110 if (temp5 != insn)
1111 temp6 = temp5;
2156dfe3
RK
1112 next = emit_insn_after (gen_move_insn (temp2, new), insn);
1113 emit_insn_after_with_line_notes (PATTERN (temp),
956d6950 1114 PREV_INSN (temp6), temp);
2156dfe3 1115 emit_insn_after_with_line_notes (PATTERN (temp3),
956d6950 1116 PREV_INSN (temp6), temp3);
2156dfe3
RK
1117 delete_insn (temp);
1118 delete_insn (temp3);
5954c8a8 1119 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
2156dfe3
RK
1120 }
1121 }
1122#endif /* HAVE_cc0 */
1123
7209f1f9
DE
1124 /* Try to use a conditional move (if the target has them), or a
1125 store-flag insn. The general case is:
1126
1127 1) x = a; if (...) x = b; and
1128 2) if (...) x = b;
1129
1130 If the jump would be faster, the machine should not have defined
1131 the movcc or scc insns!. These cases are often made by the
3915de94 1132 previous optimization.
15a63be1 1133
7209f1f9
DE
1134 The second case is treated as x = x; if (...) x = b;.
1135
15a63be1
RK
1136 INSN here is the jump around the store. We set:
1137
1138 TEMP to the "x = b;" insn.
1139 TEMP1 to X.
7209f1f9 1140 TEMP2 to B.
15a63be1
RK
1141 TEMP3 to A (X in the second case).
1142 TEMP4 to the condition being tested.
1143 TEMP5 to the earliest insn used to find the condition. */
1144
1145 if (/* We can't do this after reload has completed. */
1146 ! reload_completed
1147 && this_is_condjump && ! this_is_simplejump
1148 /* Set TEMP to the "x = b;" insn. */
1149 && (temp = next_nonnote_insn (insn)) != 0
1150 && GET_CODE (temp) == INSN
1151 && GET_CODE (PATTERN (temp)) == SET
1152 && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
f95182a4
ILT
1153 && (! SMALL_REGISTER_CLASSES
1154 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
15a63be1 1155 && (GET_CODE (temp2 = SET_SRC (PATTERN (temp))) == REG
a73f9fc9 1156 || GET_CODE (temp2) == SUBREG
7209f1f9 1157 /* ??? How about floating point constants? */
15a63be1 1158 || GET_CODE (temp2) == CONST_INT)
2156dfe3
RK
1159 /* Allow either form, but prefer the former if both apply.
1160 There is no point in using the old value of TEMP1 if
1161 it is a register, since cse will alias them. It can
1162 lose if the old value were a hard register since CSE
c85f7c16
JL
1163 won't replace hard registers. Avoid using TEMP3 if
1164 small register classes and it is a hard register. */
1165 && (((temp3 = reg_set_last (temp1, insn)) != 0
1166 && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
1167 && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
7209f1f9
DE
1168 /* Make the latter case look like x = x; if (...) x = b; */
1169 || (temp3 = temp1, 1))
15a63be1
RK
1170 /* INSN must either branch to the insn after TEMP or the insn
1171 after TEMP must branch to the same place as INSN. */
1172 && (reallabelprev == temp
1173 || ((temp4 = next_active_insn (temp)) != 0
1174 && simplejump_p (temp4)
1175 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
1176 && (temp4 = get_condition (insn, &temp5)) != 0
7124e1e5
RS
1177 /* We must be comparing objects whose modes imply the size.
1178 We could handle BLKmode if (1) emit_store_flag could
1179 and (2) we could find the size reliably. */
1180 && GET_MODE (XEXP (temp4, 0)) != BLKmode
04d23d7c
RK
1181 /* Even if branches are cheap, the store_flag optimization
1182 can win when the operation to be performed can be
1183 expressed directly. */
2156dfe3
RK
1184#ifdef HAVE_cc0
1185 /* If the previous insn sets CC0 and something else, we can't
1186 do this since we are going to delete that insn. */
1187
1188 && ! ((temp6 = prev_nonnote_insn (insn)) != 0
1189 && GET_CODE (temp6) == INSN
01ca1b91
RK
1190 && (sets_cc0_p (PATTERN (temp6)) == -1
1191 || (sets_cc0_p (PATTERN (temp6)) == 1
1192 && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
2156dfe3
RK
1193#endif
1194 )
15a63be1 1195 {
7209f1f9
DE
1196#ifdef HAVE_conditional_move
1197 /* First try a conditional move. */
1198 {
1199 enum rtx_code code = GET_CODE (temp4);
1200 rtx var = temp1;
1201 rtx cond0, cond1, aval, bval;
1202 rtx target;
1203
a99503b1
SC
1204 if (SMALL_REGISTER_CLASSES
1205 && FUNCTION_VALUE_REGNO_P (REGNO (temp3)))
1206 temp3 = temp1;
1207
7209f1f9
DE
1208 /* Copy the compared variables into cond0 and cond1, so that
1209 any side effects performed in or after the old comparison,
1210 will not affect our compare which will come later. */
1211 /* ??? Is it possible to just use the comparison in the jump
1212 insn? After all, we're going to delete it. We'd have
1213 to modify emit_conditional_move to take a comparison rtx
1214 instead or write a new function. */
1215 cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
1216 /* We want the target to be able to simplify comparisons with
1217 zero (and maybe other constants as well), so don't create
1218 pseudos for them. There's no need to either. */
1219 if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
1220 || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
1221 cond1 = XEXP (temp4, 1);
1222 else
1223 cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));
1224
1225 aval = temp3;
1226 bval = temp2;
1227
1228 start_sequence ();
1229 target = emit_conditional_move (var, code,
1230 cond0, cond1, VOIDmode,
1231 aval, bval, GET_MODE (var),
1232 (code == LTU || code == GEU
1233 || code == LEU || code == GTU));
1234
1235 if (target)
1236 {
1237 rtx seq1,seq2;
1238
1239 /* Save the conditional move sequence but don't emit it
1240 yet. On some machines, like the alpha, it is possible
1241 that temp5 == insn, so next generate the sequence that
1242 saves the compared values and then emit both
1243 sequences ensuring seq1 occurs before seq2. */
1244 seq2 = get_insns ();
1245 end_sequence ();
1246
1247 /* Now that we can't fail, generate the copy insns that
1248 preserve the compared values. */
1249 start_sequence ();
1250 emit_move_insn (cond0, XEXP (temp4, 0));
1251 if (cond1 != XEXP (temp4, 1))
1252 emit_move_insn (cond1, XEXP (temp4, 1));
1253 seq1 = get_insns ();
1254 end_sequence ();
1255
1256 emit_insns_before (seq1, temp5);
777e434c
RK
1257 /* Insert conditional move after insn, to be sure that
1258 the jump and a possible compare won't be separated */
1259 emit_insns_after (seq2, insn);
7209f1f9
DE
1260
1261 /* ??? We can also delete the insn that sets X to A.
1262 Flow will do it too though. */
1263 delete_insn (temp);
1264 next = NEXT_INSN (insn);
1265 delete_jump (insn);
1266 changed = 1;
1267 continue;
1268 }
1269 else
2156dfe3 1270 end_sequence ();
7209f1f9
DE
1271 }
1272#endif
2156dfe3 1273
7209f1f9
DE
1274 /* That didn't work, try a store-flag insn.
1275
1276 We further divide the cases into:
1277
1278 1) x = a; if (...) x = b; and either A or B is zero,
1279 2) if (...) x = 0; and jumps are expensive,
1280 3) x = a; if (...) x = b; and A and B are constants where all
1281 the set bits in A are also set in B and jumps are expensive,
1282 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
1283 more expensive, and
1284 5) if (...) x = b; if jumps are even more expensive. */
1285
1286 if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
1287 && ((GET_CODE (temp3) == CONST_INT)
1288 /* Make the latter case look like
1289 x = x; if (...) x = 0; */
1290 || (temp3 = temp1,
1291 ((BRANCH_COST >= 2
1292 && temp2 == const0_rtx)
1293 || BRANCH_COST >= 3)))
1294 /* If B is zero, OK; if A is zero, can only do (1) if we
1295 can reverse the condition. See if (3) applies possibly
1296 by reversing the condition. Prefer reversing to (4) when
1297 branches are very expensive. */
04d23d7c
RK
1298 && (((BRANCH_COST >= 2
1299 || STORE_FLAG_VALUE == -1
1300 || (STORE_FLAG_VALUE == 1
1301 /* Check that the mask is a power of two,
1302 so that it can probably be generated
1303 with a shift. */
1304 && exact_log2 (INTVAL (temp3)) >= 0))
1305 && (reversep = 0, temp2 == const0_rtx))
1306 || ((BRANCH_COST >= 2
1307 || STORE_FLAG_VALUE == -1
1308 || (STORE_FLAG_VALUE == 1
1309 && exact_log2 (INTVAL (temp2)) >= 0))
1310 && temp3 == const0_rtx
7209f1f9
DE
1311 && (reversep = can_reverse_comparison_p (temp4, insn)))
1312 || (BRANCH_COST >= 2
1313 && GET_CODE (temp2) == CONST_INT
1314 && GET_CODE (temp3) == CONST_INT
1315 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
1316 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
1317 && (reversep = can_reverse_comparison_p (temp4,
1318 insn)))))
1319 || BRANCH_COST >= 3)
1320 )
1321 {
1322 enum rtx_code code = GET_CODE (temp4);
1323 rtx uval, cval, var = temp1;
1324 int normalizep;
1325 rtx target;
1326
1327 /* If necessary, reverse the condition. */
1328 if (reversep)
1329 code = reverse_condition (code), uval = temp2, cval = temp3;
1330 else
1331 uval = temp3, cval = temp2;
1332
1333 /* If CVAL is non-zero, normalize to -1. Otherwise, if UVAL
1334 is the constant 1, it is best to just compute the result
1335 directly. If UVAL is constant and STORE_FLAG_VALUE
1336 includes all of its bits, it is best to compute the flag
1337 value unnormalized and `and' it with UVAL. Otherwise,
1338 normalize to -1 and `and' with UVAL. */
1339 normalizep = (cval != const0_rtx ? -1
1340 : (uval == const1_rtx ? 1
1341 : (GET_CODE (uval) == CONST_INT
1342 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
1343 ? 0 : -1));
1344
1345 /* We will be putting the store-flag insn immediately in
1346 front of the comparison that was originally being done,
1347 so we know all the variables in TEMP4 will be valid.
1348 However, this might be in front of the assignment of
1349 A to VAR. If it is, it would clobber the store-flag
1350 we will be emitting.
1351
1352 Therefore, emit into a temporary which will be copied to
1353 VAR immediately after TEMP. */
2156dfe3
RK
1354
1355 start_sequence ();
7209f1f9
DE
1356 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
1357 XEXP (temp4, 0), XEXP (temp4, 1),
1358 VOIDmode,
1359 (code == LTU || code == LEU
1360 || code == GEU || code == GTU),
1361 normalizep);
1362 if (target)
3915de94 1363 {
7209f1f9
DE
1364 rtx seq;
1365 rtx before = insn;
3915de94 1366
7209f1f9
DE
1367 seq = get_insns ();
1368 end_sequence ();
3915de94 1369
7209f1f9
DE
1370 /* Put the store-flag insns in front of the first insn
1371 used to compute the condition to ensure that we
1372 use the same values of them as the current
1373 comparison. However, the remainder of the insns we
1374 generate will be placed directly in front of the
1375 jump insn, in case any of the pseudos we use
1376 are modified earlier. */
3915de94 1377
7209f1f9 1378 emit_insns_before (seq, temp5);
3915de94 1379
7209f1f9
DE
1380 start_sequence ();
1381
1382 /* Both CVAL and UVAL are non-zero. */
1383 if (cval != const0_rtx && uval != const0_rtx)
c71ebae3 1384 {
7209f1f9
DE
1385 rtx tem1, tem2;
1386
1387 tem1 = expand_and (uval, target, NULL_RTX);
1388 if (GET_CODE (cval) == CONST_INT
1389 && GET_CODE (uval) == CONST_INT
1390 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
1391 tem2 = cval;
1392 else
1393 {
1394 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
1395 target, NULL_RTX, 0);
1396 tem2 = expand_and (cval, tem2,
1397 (GET_CODE (tem2) == REG
1398 ? tem2 : 0));
1399 }
1400
1401 /* If we usually make new pseudos, do so here. This
1402 turns out to help machines that have conditional
1403 move insns. */
1404 /* ??? Conditional moves have already been handled.
1405 This may be obsolete. */
1406
1407 if (flag_expensive_optimizations)
1408 target = 0;
1409
1410 target = expand_binop (GET_MODE (var), ior_optab,
1411 tem1, tem2, target,
1412 1, OPTAB_WIDEN);
c71ebae3 1413 }
7209f1f9
DE
1414 else if (normalizep != 1)
1415 {
1416 /* We know that either CVAL or UVAL is zero. If
1417 UVAL is zero, negate TARGET and `and' with CVAL.
1418 Otherwise, `and' with UVAL. */
1419 if (uval == const0_rtx)
1420 {
1421 target = expand_unop (GET_MODE (var), one_cmpl_optab,
1422 target, NULL_RTX, 0);
1423 uval = cval;
1424 }
c71ebae3 1425
7209f1f9
DE
1426 target = expand_and (uval, target,
1427 (GET_CODE (target) == REG
1428 && ! preserve_subexpressions_p ()
1429 ? target : NULL_RTX));
1430 }
3915de94 1431
7209f1f9
DE
1432 emit_move_insn (var, target);
1433 seq = get_insns ();
1434 end_sequence ();
01ca1b91 1435#ifdef HAVE_cc0
7209f1f9
DE
1436 /* If INSN uses CC0, we must not separate it from the
1437 insn that sets cc0. */
1438 if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
1439 before = prev_nonnote_insn (before);
01ca1b91 1440#endif
7209f1f9 1441 emit_insns_before (seq, before);
01ca1b91 1442
7209f1f9
DE
1443 delete_insn (temp);
1444 next = NEXT_INSN (insn);
1445 delete_jump (insn);
1446 changed = 1;
1447 continue;
1448 }
1449 else
1450 end_sequence ();
15a63be1 1451 }
15a63be1
RK
1452 }
1453
1454 /* If branches are expensive, convert
1455 if (foo) bar++; to bar += (foo != 0);
1456 and similarly for "bar--;"
1457
1458 INSN is the conditional branch around the arithmetic. We set:
1459
1460 TEMP is the arithmetic insn.
6dc42e49 1461 TEMP1 is the SET doing the arithmetic.
15a63be1
RK
1462 TEMP2 is the operand being incremented or decremented.
1463 TEMP3 to the condition being tested.
1464 TEMP4 to the earliest insn used to find the condition. */
1465
a8d916d3 1466 if ((BRANCH_COST >= 2
48f16828 1467#ifdef HAVE_incscc
a8d916d3 1468 || HAVE_incscc
48f16828
RK
1469#endif
1470#ifdef HAVE_decscc
d3907945 1471 || HAVE_decscc
a8d916d3
JL
1472#endif
1473 )
15a63be1
RK
1474 && ! reload_completed
1475 && this_is_condjump && ! this_is_simplejump
1476 && (temp = next_nonnote_insn (insn)) != 0
1477 && (temp1 = single_set (temp)) != 0
1478 && (temp2 = SET_DEST (temp1),
1479 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
1480 && GET_CODE (SET_SRC (temp1)) == PLUS
1481 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
1482 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
1483 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
8743ede9
RK
1484 && ! side_effects_p (temp2)
1485 && ! may_trap_p (temp2)
15a63be1
RK
1486 /* INSN must either branch to the insn after TEMP or the insn
1487 after TEMP must branch to the same place as INSN. */
1488 && (reallabelprev == temp
1489 || ((temp3 = next_active_insn (temp)) != 0
1490 && simplejump_p (temp3)
1491 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
1492 && (temp3 = get_condition (insn, &temp4)) != 0
7124e1e5
RS
1493 /* We must be comparing objects whose modes imply the size.
1494 We could handle BLKmode if (1) emit_store_flag could
1495 and (2) we could find the size reliably. */
1496 && GET_MODE (XEXP (temp3, 0)) != BLKmode
15a63be1
RK
1497 && can_reverse_comparison_p (temp3, insn))
1498 {
626d0d1d 1499 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
15a63be1
RK
1500 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1501
1502 start_sequence ();
1503
626d0d1d
RK
1504 /* It must be the case that TEMP2 is not modified in the range
1505 [TEMP4, INSN). The one exception we make is if the insn
1506 before INSN sets TEMP2 to something which is also unchanged
1507 in that range. In that case, we can move the initialization
1508 into our sequence. */
1509
1510 if ((temp5 = prev_active_insn (insn)) != 0
19f5ce60 1511 && no_labels_between_p (temp5, insn)
626d0d1d
RK
1512 && GET_CODE (temp5) == INSN
1513 && (temp6 = single_set (temp5)) != 0
1514 && rtx_equal_p (temp2, SET_DEST (temp6))
1515 && (CONSTANT_P (SET_SRC (temp6))
1516 || GET_CODE (SET_SRC (temp6)) == REG
1517 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1518 {
1519 emit_insn (PATTERN (temp5));
1520 init_insn = temp5;
1521 init = SET_SRC (temp6);
1522 }
1523
1524 if (CONSTANT_P (init)
1525 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1526 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1527 XEXP (temp3, 0), XEXP (temp3, 1),
1528 VOIDmode,
1529 (code == LTU || code == LEU
1530 || code == GTU || code == GEU), 1);
15a63be1
RK
1531
1532 /* If we can do the store-flag, do the addition or
1533 subtraction. */
1534
1535 if (target)
1536 target = expand_binop (GET_MODE (temp2),
1537 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1538 ? add_optab : sub_optab),
58f066d1 1539 temp2, target, temp2, 0, OPTAB_WIDEN);
15a63be1
RK
1540
1541 if (target != 0)
1542 {
1543 /* Put the result back in temp2 in case it isn't already.
1544 Then replace the jump, possible a CC0-setting insn in
1545 front of the jump, and TEMP, with the sequence we have
1546 made. */
1547
1548 if (target != temp2)
1549 emit_move_insn (temp2, target);
1550
1551 seq = get_insns ();
1552 end_sequence ();
1553
1554 emit_insns_before (seq, temp4);
1555 delete_insn (temp);
626d0d1d
RK
1556
1557 if (init_insn)
1558 delete_insn (init_insn);
1559
15a63be1
RK
1560 next = NEXT_INSN (insn);
1561#ifdef HAVE_cc0
1562 delete_insn (prev_nonnote_insn (insn));
1563#endif
1564 delete_insn (insn);
1565 changed = 1;
1566 continue;
1567 }
1568 else
1569 end_sequence ();
1570 }
1571
1572 /* Simplify if (...) x = 1; else {...} if (x) ...
1573 We recognize this case scanning backwards as well.
1574
1575 TEMP is the assignment to x;
1576 TEMP1 is the label at the head of the second if. */
1577 /* ?? This should call get_condition to find the values being
1578 compared, instead of looking for a COMPARE insn when HAVE_cc0
1579 is not defined. This would allow it to work on the m88k. */
1580 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1581 is not defined and the condition is tested by a separate compare
1582 insn. This is because the code below assumes that the result
1583 of the compare dies in the following branch.
1584
1585 Not only that, but there might be other insns between the
1586 compare and branch whose results are live. Those insns need
1587 to be executed.
1588
1589 A way to fix this is to move the insns at JUMP_LABEL (insn)
1590 to before INSN. If we are running before flow, they will
1591 be deleted if they aren't needed. But this doesn't work
1592 well after flow.
1593
1594 This is really a special-case of jump threading, anyway. The
1595 right thing to do is to replace this and jump threading with
1596 much simpler code in cse.
1597
1598 This code has been turned off in the non-cc0 case in the
1599 meantime. */
1600
1601#ifdef HAVE_cc0
1602 else if (this_is_simplejump
1603 /* Safe to skip USE and CLOBBER insns here
1604 since they will not be deleted. */
1605 && (temp = prev_active_insn (insn))
1606 && no_labels_between_p (temp, insn)
1607 && GET_CODE (temp) == INSN
1608 && GET_CODE (PATTERN (temp)) == SET
1609 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1610 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1611 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1612 /* If we find that the next value tested is `x'
1613 (TEMP1 is the insn where this happens), win. */
1614 && GET_CODE (temp1) == INSN
1615 && GET_CODE (PATTERN (temp1)) == SET
1616#ifdef HAVE_cc0
1617 /* Does temp1 `tst' the value of x? */
1618 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1619 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1620 && (temp1 = next_nonnote_insn (temp1))
1621#else
1622 /* Does temp1 compare the value of x against zero? */
1623 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1624 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1625 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1626 == SET_DEST (PATTERN (temp)))
1627 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1628 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1629#endif
1630 && condjump_p (temp1))
1631 {
1632 /* Get the if_then_else from the condjump. */
1633 rtx choice = SET_SRC (PATTERN (temp1));
1634 if (GET_CODE (choice) == IF_THEN_ELSE)
1635 {
1636 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1637 rtx val = SET_SRC (PATTERN (temp));
1638 rtx cond
1639 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1640 val, const0_rtx);
1641 rtx ultimate;
1642
1643 if (cond == const_true_rtx)
1644 ultimate = XEXP (choice, 1);
1645 else if (cond == const0_rtx)
1646 ultimate = XEXP (choice, 2);
1647 else
1648 ultimate = 0;
1649
1650 if (ultimate == pc_rtx)
1651 ultimate = get_label_after (temp1);
1652 else if (ultimate && GET_CODE (ultimate) != RETURN)
1653 ultimate = XEXP (ultimate, 0);
1654
9e390837 1655 if (ultimate && JUMP_LABEL(insn) != ultimate)
15a63be1
RK
1656 changed |= redirect_jump (insn, ultimate);
1657 }
1658 }
1659#endif
1660
1661#if 0
1662 /* @@ This needs a bit of work before it will be right.
1663
1664 Any type of comparison can be accepted for the first and
1665 second compare. When rewriting the first jump, we must
1666 compute the what conditions can reach label3, and use the
1667 appropriate code. We can not simply reverse/swap the code
1668 of the first jump. In some cases, the second jump must be
1669 rewritten also.
1670
1671 For example,
1672 < == converts to > ==
1673 < != converts to == >
1674 etc.
1675
1676 If the code is written to only accept an '==' test for the second
1677 compare, then all that needs to be done is to swap the condition
1678 of the first branch.
1679
1680 It is questionable whether we want this optimization anyways,
1681 since if the user wrote code like this because he/she knew that
6dc42e49 1682 the jump to label1 is taken most of the time, then rewriting
15a63be1
RK
1683 this gives slower code. */
1684 /* @@ This should call get_condition to find the values being
1685 compared, instead of looking for a COMPARE insn when HAVE_cc0
1686 is not defined. This would allow it to work on the m88k. */
1687 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1688 is not defined and the condition is tested by a separate compare
1689 insn. This is because the code below assumes that the result
1690 of the compare dies in the following branch. */
1691
1692 /* Simplify test a ~= b
1693 condjump label1;
1694 test a == b
1695 condjump label2;
1696 jump label3;
1697 label1:
1698
1699 rewriting as
1700 test a ~~= b
1701 condjump label3
1702 test a == b
1703 condjump label2
1704 label1:
1705
1706 where ~= is an inequality, e.g. >, and ~~= is the swapped
1707 inequality, e.g. <.
1708
1709 We recognize this case scanning backwards.
1710
1711 TEMP is the conditional jump to `label2';
1712 TEMP1 is the test for `a == b';
1713 TEMP2 is the conditional jump to `label1';
1714 TEMP3 is the test for `a ~= b'. */
1715 else if (this_is_simplejump
1716 && (temp = prev_active_insn (insn))
1717 && no_labels_between_p (temp, insn)
1718 && condjump_p (temp)
1719 && (temp1 = prev_active_insn (temp))
1720 && no_labels_between_p (temp1, temp)
1721 && GET_CODE (temp1) == INSN
1722 && GET_CODE (PATTERN (temp1)) == SET
1723#ifdef HAVE_cc0
1724 && sets_cc0_p (PATTERN (temp1)) == 1
1725#else
1726 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1727 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1728 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1729#endif
1730 && (temp2 = prev_active_insn (temp1))
1731 && no_labels_between_p (temp2, temp1)
1732 && condjump_p (temp2)
1733 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1734 && (temp3 = prev_active_insn (temp2))
1735 && no_labels_between_p (temp3, temp2)
1736 && GET_CODE (PATTERN (temp3)) == SET
1737 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1738 SET_DEST (PATTERN (temp1)))
1739 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1740 SET_SRC (PATTERN (temp3)))
1741 && ! inequality_comparisons_p (PATTERN (temp))
1742 && inequality_comparisons_p (PATTERN (temp2)))
1743 {
1744 rtx fallthrough_label = JUMP_LABEL (temp2);
1745
1746 ++LABEL_NUSES (fallthrough_label);
1747 if (swap_jump (temp2, JUMP_LABEL (insn)))
1748 {
1749 delete_insn (insn);
1750 changed = 1;
1751 }
1752
1753 if (--LABEL_NUSES (fallthrough_label) == 0)
1754 delete_insn (fallthrough_label);
1755 }
1756#endif
1757 /* Simplify if (...) {... x = 1;} if (x) ...
1758
1759 We recognize this case backwards.
1760
1761 TEMP is the test of `x';
1762 TEMP1 is the assignment to `x' at the end of the
1763 previous statement. */
1764 /* @@ This should call get_condition to find the values being
1765 compared, instead of looking for a COMPARE insn when HAVE_cc0
1766 is not defined. This would allow it to work on the m88k. */
1767 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1768 is not defined and the condition is tested by a separate compare
1769 insn. This is because the code below assumes that the result
1770 of the compare dies in the following branch. */
2d20b9df
RS
1771
1772 /* ??? This has to be turned off. The problem is that the
1773 unconditional jump might indirectly end up branching to the
1774 label between TEMP1 and TEMP. We can't detect this, in general,
1775 since it may become a jump to there after further optimizations.
1776 If that jump is done, it will be deleted, so we will retry
1777 this optimization in the next pass, thus an infinite loop.
1778
1779 The present code prevents this by putting the jump after the
1780 label, but this is not logically correct. */
1781#if 0
15a63be1
RK
1782 else if (this_is_condjump
1783 /* Safe to skip USE and CLOBBER insns here
1784 since they will not be deleted. */
1785 && (temp = prev_active_insn (insn))
1786 && no_labels_between_p (temp, insn)
1787 && GET_CODE (temp) == INSN
1788 && GET_CODE (PATTERN (temp)) == SET
1789#ifdef HAVE_cc0
1790 && sets_cc0_p (PATTERN (temp)) == 1
1791 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1792#else
1793 /* Temp must be a compare insn, we can not accept a register
1794 to register move here, since it may not be simply a
1795 tst insn. */
1796 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1797 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1798 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1799 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1800 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1801#endif
1802 /* May skip USE or CLOBBER insns here
1803 for checking for opportunity, since we
1804 take care of them later. */
1805 && (temp1 = prev_active_insn (temp))
1806 && GET_CODE (temp1) == INSN
1807 && GET_CODE (PATTERN (temp1)) == SET
1808#ifdef HAVE_cc0
1809 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1810#else
1811 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1812 == SET_DEST (PATTERN (temp1)))
1813#endif
1814 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1815 /* If this isn't true, cse will do the job. */
1816 && ! no_labels_between_p (temp1, temp))
1817 {
1818 /* Get the if_then_else from the condjump. */
1819 rtx choice = SET_SRC (PATTERN (insn));
1820 if (GET_CODE (choice) == IF_THEN_ELSE
1821 && (GET_CODE (XEXP (choice, 0)) == EQ
1822 || GET_CODE (XEXP (choice, 0)) == NE))
1823 {
1824 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1825 rtx last_insn;
1826 rtx ultimate;
1827 rtx p;
1828
1829 /* Get the place that condjump will jump to
1830 if it is reached from here. */
1831 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1832 == want_nonzero)
1833 ultimate = XEXP (choice, 1);
1834 else
1835 ultimate = XEXP (choice, 2);
1836 /* Get it as a CODE_LABEL. */
1837 if (ultimate == pc_rtx)
1838 ultimate = get_label_after (insn);
1839 else
1840 /* Get the label out of the LABEL_REF. */
1841 ultimate = XEXP (ultimate, 0);
1842
2d20b9df
RS
1843 /* Insert the jump immediately before TEMP, specifically
1844 after the label that is between TEMP1 and TEMP. */
1845 last_insn = PREV_INSN (temp);
15a63be1
RK
1846
1847 /* If we would be branching to the next insn, the jump
1848 would immediately be deleted and the re-inserted in
1849 a subsequent pass over the code. So don't do anything
1850 in that case. */
1851 if (next_active_insn (last_insn)
1852 != next_active_insn (ultimate))
1853 {
1854 emit_barrier_after (last_insn);
1855 p = emit_jump_insn_after (gen_jump (ultimate),
1856 last_insn);
1857 JUMP_LABEL (p) = ultimate;
1858 ++LABEL_NUSES (ultimate);
1859 if (INSN_UID (ultimate) < max_jump_chain
1860 && INSN_CODE (p) < max_jump_chain)
1861 {
1862 jump_chain[INSN_UID (p)]
1863 = jump_chain[INSN_UID (ultimate)];
1864 jump_chain[INSN_UID (ultimate)] = p;
1865 }
1866 changed = 1;
1867 continue;
1868 }
1869 }
1870 }
2d20b9df 1871#endif
15a63be1
RK
1872 /* Detect a conditional jump going to the same place
1873 as an immediately following unconditional jump. */
1874 else if (this_is_condjump
1875 && (temp = next_active_insn (insn)) != 0
1876 && simplejump_p (temp)
1877 && (next_active_insn (JUMP_LABEL (insn))
1878 == next_active_insn (JUMP_LABEL (temp))))
1879 {
f6a6a1b3
DE
1880 rtx tem = temp;
1881
1882 /* ??? Optional. Disables some optimizations, but makes
1883 gcov output more accurate with -O. */
1884 if (flag_test_coverage && !reload_completed)
1885 for (tem = insn; tem != temp; tem = NEXT_INSN (tem))
1886 if (GET_CODE (tem) == NOTE && NOTE_LINE_NUMBER (tem) > 0)
1887 break;
1888
1889 if (tem == temp)
1890 {
1891 delete_jump (insn);
1892 changed = 1;
1893 continue;
1894 }
15a63be1
RK
1895 }
1896 /* Detect a conditional jump jumping over an unconditional jump. */
1897
3480bb98
JL
1898 else if ((this_is_condjump || this_is_condjump_in_parallel)
1899 && ! this_is_simplejump
15a63be1
RK
1900 && reallabelprev != 0
1901 && GET_CODE (reallabelprev) == JUMP_INSN
1902 && prev_active_insn (reallabelprev) == insn
1903 && no_labels_between_p (insn, reallabelprev)
1904 && simplejump_p (reallabelprev))
1905 {
1906 /* When we invert the unconditional jump, we will be
1907 decrementing the usage count of its old label.
1908 Make sure that we don't delete it now because that
1909 might cause the following code to be deleted. */
1910 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1911 rtx prev_label = JUMP_LABEL (insn);
1912
e26a82e4
JW
1913 if (prev_label)
1914 ++LABEL_NUSES (prev_label);
15a63be1
RK
1915
1916 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1917 {
1918 /* It is very likely that if there are USE insns before
1919 this jump, they hold REG_DEAD notes. These REG_DEAD
1920 notes are no longer valid due to this optimization,
1921 and will cause the life-analysis that following passes
1922 (notably delayed-branch scheduling) to think that
1923 these registers are dead when they are not.
1924
1925 To prevent this trouble, we just remove the USE insns
1926 from the insn chain. */
1927
1928 while (prev_uses && GET_CODE (prev_uses) == INSN
1929 && GET_CODE (PATTERN (prev_uses)) == USE)
1930 {
1931 rtx useless = prev_uses;
1932 prev_uses = prev_nonnote_insn (prev_uses);
1933 delete_insn (useless);
1934 }
1935
1936 delete_insn (reallabelprev);
1937 next = insn;
1938 changed = 1;
1939 }
1940
1941 /* We can now safely delete the label if it is unreferenced
1942 since the delete_insn above has deleted the BARRIER. */
e26a82e4 1943 if (prev_label && --LABEL_NUSES (prev_label) == 0)
15a63be1
RK
1944 delete_insn (prev_label);
1945 continue;
1946 }
1947 else
1948 {
1949 /* Detect a jump to a jump. */
1950
1951 nlabel = follow_jumps (JUMP_LABEL (insn));
1952 if (nlabel != JUMP_LABEL (insn)
1953 && redirect_jump (insn, nlabel))
1954 {
1955 changed = 1;
1956 next = insn;
1957 }
1958
1959 /* Look for if (foo) bar; else break; */
1960 /* The insns look like this:
1961 insn = condjump label1;
1962 ...range1 (some insns)...
1963 jump label2;
1964 label1:
1965 ...range2 (some insns)...
1966 jump somewhere unconditionally
1967 label2: */
1968 {
1969 rtx label1 = next_label (insn);
1970 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1971 /* Don't do this optimization on the first round, so that
1972 jump-around-a-jump gets simplified before we ask here
1973 whether a jump is unconditional.
1974
1975 Also don't do it when we are called after reload since
1976 it will confuse reorg. */
1977 if (! first
1978 && (reload_completed ? ! flag_delayed_branch : 1)
1979 /* Make sure INSN is something we can invert. */
1980 && condjump_p (insn)
1981 && label1 != 0
1982 && JUMP_LABEL (insn) == label1
1983 && LABEL_NUSES (label1) == 1
1984 && GET_CODE (range1end) == JUMP_INSN
1985 && simplejump_p (range1end))
1986 {
1987 rtx label2 = next_label (label1);
1988 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1989 if (range1end != range2end
1990 && JUMP_LABEL (range1end) == label2
1991 && GET_CODE (range2end) == JUMP_INSN
1992 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1993 /* Invert the jump condition, so we
1994 still execute the same insns in each case. */
1995 && invert_jump (insn, label1))
1996 {
1997 rtx range1beg = next_active_insn (insn);
1998 rtx range2beg = next_active_insn (label1);
1999 rtx range1after, range2after;
2000 rtx range1before, range2before;
f0e1b9a9 2001 rtx rangenext;
15a63be1 2002
1f109a14
JW
2003 /* Include in each range any notes before it, to be
2004 sure that we get the line number note if any, even
2005 if there are other notes here. */
0e690bdb 2006 while (PREV_INSN (range1beg)
1f109a14 2007 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
0e690bdb
RS
2008 range1beg = PREV_INSN (range1beg);
2009
2010 while (PREV_INSN (range2beg)
1f109a14 2011 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
0e690bdb
RS
2012 range2beg = PREV_INSN (range2beg);
2013
15a63be1
RK
2014 /* Don't move NOTEs for blocks or loops; shift them
2015 outside the ranges, where they'll stay put. */
915f619f
JW
2016 range1beg = squeeze_notes (range1beg, range1end);
2017 range2beg = squeeze_notes (range2beg, range2end);
15a63be1
RK
2018
2019 /* Get current surrounds of the 2 ranges. */
2020 range1before = PREV_INSN (range1beg);
2021 range2before = PREV_INSN (range2beg);
2022 range1after = NEXT_INSN (range1end);
2023 range2after = NEXT_INSN (range2end);
2024
2025 /* Splice range2 where range1 was. */
2026 NEXT_INSN (range1before) = range2beg;
2027 PREV_INSN (range2beg) = range1before;
2028 NEXT_INSN (range2end) = range1after;
2029 PREV_INSN (range1after) = range2end;
2030 /* Splice range1 where range2 was. */
2031 NEXT_INSN (range2before) = range1beg;
2032 PREV_INSN (range1beg) = range2before;
2033 NEXT_INSN (range1end) = range2after;
2034 PREV_INSN (range2after) = range1end;
f0e1b9a9
RE
2035
2036 /* Check for a loop end note between the end of
2037 range2, and the next code label. If there is one,
2038 then what we have really seen is
2039 if (foo) break; end_of_loop;
2040 and moved the break sequence outside the loop.
2041 We must move the LOOP_END note to where the
2042 loop really ends now, or we will confuse loop
ca188f16
JW
2043 optimization. Stop if we find a LOOP_BEG note
2044 first, since we don't want to move the LOOP_END
2045 note in that case. */
f0e1b9a9
RE
2046 for (;range2after != label2; range2after = rangenext)
2047 {
2048 rangenext = NEXT_INSN (range2after);
ca188f16 2049 if (GET_CODE (range2after) == NOTE)
f0e1b9a9 2050 {
ca188f16
JW
2051 if (NOTE_LINE_NUMBER (range2after)
2052 == NOTE_INSN_LOOP_END)
2053 {
2054 NEXT_INSN (PREV_INSN (range2after))
2055 = rangenext;
2056 PREV_INSN (rangenext)
2057 = PREV_INSN (range2after);
2058 PREV_INSN (range2after)
2059 = PREV_INSN (range1beg);
2060 NEXT_INSN (range2after) = range1beg;
2061 NEXT_INSN (PREV_INSN (range1beg))
2062 = range2after;
2063 PREV_INSN (range1beg) = range2after;
2064 }
2065 else if (NOTE_LINE_NUMBER (range2after)
2066 == NOTE_INSN_LOOP_BEG)
2067 break;
f0e1b9a9
RE
2068 }
2069 }
15a63be1
RK
2070 changed = 1;
2071 continue;
2072 }
2073 }
2074 }
2075
2076 /* Now that the jump has been tensioned,
2077 try cross jumping: check for identical code
0f41302f 2078 before the jump and before its target label. */
15a63be1
RK
2079
2080 /* First, cross jumping of conditional jumps: */
2081
2082 if (cross_jump && condjump_p (insn))
2083 {
2084 rtx newjpos, newlpos;
2085 rtx x = prev_real_insn (JUMP_LABEL (insn));
2086
2087 /* A conditional jump may be crossjumped
2088 only if the place it jumps to follows
2089 an opposing jump that comes back here. */
2090
2091 if (x != 0 && ! jump_back_p (x, insn))
2092 /* We have no opposing jump;
2093 cannot cross jump this insn. */
2094 x = 0;
2095
2096 newjpos = 0;
2097 /* TARGET is nonzero if it is ok to cross jump
2098 to code before TARGET. If so, see if matches. */
2099 if (x != 0)
2100 find_cross_jump (insn, x, 2,
2101 &newjpos, &newlpos);
2102
2103 if (newjpos != 0)
2104 {
2105 do_cross_jump (insn, newjpos, newlpos);
2106 /* Make the old conditional jump
2107 into an unconditional one. */
2108 SET_SRC (PATTERN (insn))
2109 = gen_rtx (LABEL_REF, VOIDmode, JUMP_LABEL (insn));
2110 INSN_CODE (insn) = -1;
2111 emit_barrier_after (insn);
2112 /* Add to jump_chain unless this is a new label
0f41302f 2113 whose UID is too large. */
15a63be1
RK
2114 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
2115 {
2116 jump_chain[INSN_UID (insn)]
2117 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2118 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2119 }
2120 changed = 1;
2121 next = insn;
2122 }
2123 }
2124
2125 /* Cross jumping of unconditional jumps:
2126 a few differences. */
2127
2128 if (cross_jump && simplejump_p (insn))
2129 {
2130 rtx newjpos, newlpos;
2131 rtx target;
2132
2133 newjpos = 0;
2134
2135 /* TARGET is nonzero if it is ok to cross jump
2136 to code before TARGET. If so, see if matches. */
2137 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2138 &newjpos, &newlpos);
2139
2140 /* If cannot cross jump to code before the label,
2141 see if we can cross jump to another jump to
2142 the same label. */
2143 /* Try each other jump to this label. */
2144 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2145 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2146 target != 0 && newjpos == 0;
2147 target = jump_chain[INSN_UID (target)])
2148 if (target != insn
2149 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2150 /* Ignore TARGET if it's deleted. */
2151 && ! INSN_DELETED_P (target))
2152 find_cross_jump (insn, target, 2,
2153 &newjpos, &newlpos);
2154
2155 if (newjpos != 0)
2156 {
2157 do_cross_jump (insn, newjpos, newlpos);
2158 changed = 1;
2159 next = insn;
2160 }
2161 }
2162
2163 /* This code was dead in the previous jump.c! */
2164 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2165 {
2166 /* Return insns all "jump to the same place"
2167 so we can cross-jump between any two of them. */
2168
2169 rtx newjpos, newlpos, target;
2170
2171 newjpos = 0;
2172
2173 /* If cannot cross jump to code before the label,
2174 see if we can cross jump to another jump to
2175 the same label. */
2176 /* Try each other jump to this label. */
2177 for (target = jump_chain[0];
2178 target != 0 && newjpos == 0;
2179 target = jump_chain[INSN_UID (target)])
2180 if (target != insn
2181 && ! INSN_DELETED_P (target)
2182 && GET_CODE (PATTERN (target)) == RETURN)
2183 find_cross_jump (insn, target, 2,
2184 &newjpos, &newlpos);
2185
2186 if (newjpos != 0)
2187 {
2188 do_cross_jump (insn, newjpos, newlpos);
2189 changed = 1;
2190 next = insn;
2191 }
2192 }
2193 }
2194 }
2195
2196 first = 0;
2197 }
2198
2199 /* Delete extraneous line number notes.
2200 Note that two consecutive notes for different lines are not really
2201 extraneous. There should be some indication where that line belonged,
2202 even if it became empty. */
2203
2204 {
2205 rtx last_note = 0;
2206
2207 for (insn = f; insn; insn = NEXT_INSN (insn))
2208 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2209 {
2210 /* Delete this note if it is identical to previous note. */
2211 if (last_note
2212 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2213 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2214 {
2215 delete_insn (insn);
2216 continue;
2217 }
2218
2219 last_note = insn;
2220 }
2221 }
2222
683e6ccd
RK
2223#ifdef HAVE_return
2224 if (HAVE_return)
2225 {
2226 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2227 in front of it. If the machine allows it at this point (we might be
2228 after reload for a leaf routine), it will improve optimization for it
2229 to be there. We do this both here and at the start of this pass since
2230 the RETURN might have been deleted by some of our optimizations. */
2231 insn = get_last_insn ();
2232 while (insn && GET_CODE (insn) == NOTE)
2233 insn = PREV_INSN (insn);
2234
2235 if (insn && GET_CODE (insn) != BARRIER)
2236 {
2237 emit_jump_insn (gen_return ());
2238 emit_barrier ();
2239 }
2240 }
2241#endif
2242
15a63be1
RK
2243 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2244 If so, delete it, and record that this function can drop off the end. */
2245
2246 insn = last_insn;
2247 {
2248 int n_labels = 1;
2249 while (insn
2250 /* One label can follow the end-note: the return label. */
2251 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2252 /* Ordinary insns can follow it if returning a structure. */
2253 || GET_CODE (insn) == INSN
2254 /* If machine uses explicit RETURN insns, no epilogue,
2255 then one of them follows the note. */
2256 || (GET_CODE (insn) == JUMP_INSN
2257 && GET_CODE (PATTERN (insn)) == RETURN)
60374599
DE
2258 /* A barrier can follow the return insn. */
2259 || GET_CODE (insn) == BARRIER
15a63be1
RK
2260 /* Other kinds of notes can follow also. */
2261 || (GET_CODE (insn) == NOTE
2262 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
2263 insn = PREV_INSN (insn);
2264 }
2265
2266 /* Report if control can fall through at the end of the function. */
2267 if (insn && GET_CODE (insn) == NOTE
2268 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
2269 {
2270 can_reach_end = 1;
2271 delete_insn (insn);
2272 }
2273
2274 /* Show JUMP_CHAIN no longer valid. */
2275 jump_chain = 0;
2276}
2277\f
/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
   jump.  Assume that this unconditional jump is to the exit test code.  If
   the code is sufficiently simple, make a copy of it before INSN,
   followed by a jump to the exit of the loop.  Then delete the unconditional
   jump after INSN.

   Return 1 if we made the change, else 0.

   This is only safe immediately after a regscan pass because it uses the
   values of regno_first_uid and regno_last_uid.  */

static int
duplicate_loop_exit_test (loop_start)
     rtx loop_start;
{
  rtx insn, set, reg, p, link;
  rtx copy = 0;		/* Last insn emitted by the copy loop below.  */
  int num_insns = 0;
  /* The insn after LOOP_START is an unconditional jump to the exit test;
     EXITCODE is the first insn following that jump's target label.  */
  rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
  rtx lastexit;
  int max_reg = max_reg_num ();
  rtx *reg_map = 0;	/* Allocated lazily, only if a reg gets renamed.  */

  /* Scan the exit code.  We do not perform this optimization if any insn:

         is a CALL_INSN
	 is a CODE_LABEL
	 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
	 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
	 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
	      are not valid

     Also, don't do this if the exit code is more than 20 insns.  */

  for (insn = exitcode;
       insn
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	case CALL_INSN:
	  return 0;
	case NOTE:
	  /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
	     a jump immediately after the loop start that branches outside
	     the loop but within an outer loop, near the exit test.
	     If we copied this exit test and created a phony
	     NOTE_INSN_LOOP_VTOP, this could make instructions immediately
	     before the exit test look like these could be safely moved
	     out of the loop even if they actually may be never executed.
	     This can be avoided by checking here for NOTE_INSN_LOOP_CONT.  */

	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
	    return 0;
	  break;
	case JUMP_INSN:
	case INSN:
	  if (++num_insns > 20
	      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
	      || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    return 0;
	  break;
	default:
	  break;
	}
    }

  /* Unless INSN is zero, we can do the optimization.
     (INSN == 0 means we ran off the end of the insn chain without
     finding the NOTE_INSN_LOOP_END.)  */
  if (insn == 0)
    return 0;

  lastexit = insn;

  /* See if any insn sets a register only used in the loop exit code and
     not a user variable.  If so, replace it with a new register.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
	&& (set = single_set (insn)) != 0
	&& ((reg = SET_DEST (set), GET_CODE (reg) == REG)
	    || (GET_CODE (reg) == SUBREG
		&& (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
	&& REGNO (reg) >= FIRST_PSEUDO_REGISTER
	&& REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
      {
	/* Confirm the register's last use also lies within the exit code;
	   otherwise it is live outside and must not be renamed.  */
	for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
	  if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
	    break;

	if (p != lastexit)
	  {
	    /* We can do the replacement.  Allocate reg_map if this is the
	       first replacement we found.  */
	    if (reg_map == 0)
	      {
		reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
		bzero ((char *) reg_map, max_reg * sizeof (rtx));
	      }

	    REG_LOOP_TEST_P (reg) = 1;

	    reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
	  }
      }

  /* Now copy each insn.  Each copy is emitted immediately before
     LOOP_START, so the whole exit test ends up duplicated there.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    switch (GET_CODE (insn))
      {
      case BARRIER:
	copy = emit_barrier_before (loop_start);
	break;
      case NOTE:
	/* Only copy line-number notes.  */
	if (NOTE_LINE_NUMBER (insn) >= 0)
	  {
	    copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
	    NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	  }
	break;

      case INSN:
	copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
	if (reg_map)
	  replace_regs (PATTERN (copy), reg_map, max_reg, 1);

	mark_jump_label (PATTERN (copy), copy, 0);

	/* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
	   make them.  */
	for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	  if (REG_NOTE_KIND (link) != REG_LABEL)
	    REG_NOTES (copy)
	      = copy_rtx (gen_rtx (EXPR_LIST, REG_NOTE_KIND (link),
				   XEXP (link, 0), REG_NOTES (copy)));
	if (reg_map && REG_NOTES (copy))
	  replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	break;

      case JUMP_INSN:
	copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
	if (reg_map)
	  replace_regs (PATTERN (copy), reg_map, max_reg, 1);
	mark_jump_label (PATTERN (copy), copy, 0);
	if (REG_NOTES (insn))
	  {
	    REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
	    if (reg_map)
	      replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	  }

	/* If this is a simple jump, add it to the jump chain.  */

	if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
	    && simplejump_p (copy))
	  {
	    jump_chain[INSN_UID (copy)]
	      = jump_chain[INSN_UID (JUMP_LABEL (copy))];
	    jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
	  }
	break;

      default:
	abort ();
      }

  /* Now clean up by emitting a jump to the end label and deleting the jump
     at the start of the loop.  If the last copied insn was not a barrier
     (i.e. the copied test can fall through), the fall-through must jump
     to the original exit code; INSN is the NOTE_INSN_LOOP_END here.  */
  if (! copy || GET_CODE (copy) != BARRIER)
    {
      copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
				    loop_start);
      mark_jump_label (PATTERN (copy), copy, 0);
      if (INSN_UID (copy) < max_jump_chain
	  && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
	{
	  jump_chain[INSN_UID (copy)]
	    = jump_chain[INSN_UID (JUMP_LABEL (copy))];
	  jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
	}
      emit_barrier_before (loop_start);
    }

  /* Mark the exit code as the virtual top of the converted loop.  */
  emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);

  /* Delete the unconditional jump that followed LOOP_START.  */
  delete_insn (next_nonnote_insn (loop_start));

  return 1;
}
2473\f
2474/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
915f619f
JW
2475 loop-end notes between START and END out before START. Assume that
2476 END is not such a note. START may be such a note. Returns the value
2477 of the new starting insn, which may be different if the original start
2478 was such a note. */
15a63be1 2479
915f619f 2480rtx
15a63be1
RK
2481squeeze_notes (start, end)
2482 rtx start, end;
2483{
2484 rtx insn;
2485 rtx next;
2486
2487 for (insn = start; insn != end; insn = next)
2488 {
2489 next = NEXT_INSN (insn);
2490 if (GET_CODE (insn) == NOTE
2491 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2492 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2493 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2494 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2495 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2496 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2497 {
915f619f
JW
2498 if (insn == start)
2499 start = next;
2500 else
2501 {
2502 rtx prev = PREV_INSN (insn);
2503 PREV_INSN (insn) = PREV_INSN (start);
2504 NEXT_INSN (insn) = start;
2505 NEXT_INSN (PREV_INSN (insn)) = insn;
2506 PREV_INSN (NEXT_INSN (insn)) = insn;
2507 NEXT_INSN (prev) = next;
2508 PREV_INSN (next) = prev;
2509 }
15a63be1
RK
2510 }
2511 }
915f619f
JW
2512
2513 return start;
15a63be1
RK
2514}
2515\f
/* Compare the instructions before insn E1 with those before E2
   to find an opportunity for cross jumping.
   (This means detecting identical sequences of insns followed by
   jumps to the same place, or followed by a label and a jump
   to that label, and replacing one with a jump to the other.)

   Assume E1 is a jump that jumps to label E2
   (that is not always true but it might as well be).
   Find the longest possible equivalent sequences
   and store the first insns of those sequences into *F1 and *F2.
   Store zero there if no equivalent preceding instructions are found.

   We give up if we find a label in stream 1.
   Actually we could transfer that label into stream 2.  */

static void
find_cross_jump (e1, e2, minimum, f1, f2)
     rtx e1, e2;
     int minimum;
     rtx *f1, *f2;
{
  register rtx i1 = e1, i2 = e2;
  register rtx p1, p2;
  int lose = 0;		/* Set when the current pair must not be merged.  */

  /* LAST1/LAST2 track the first insns of the matched sequences so far;
     AFTERLAST1/AFTERLAST2 remember the previous values so one matched
     pair can be retracted (see the HAVE_cc0 case below).  */
  rtx last1 = 0, last2 = 0;
  rtx afterlast1 = 0, afterlast2 = 0;
  rtx prev1;		/* NOTE(review): PREV1 appears to be unused here.  */

  *f1 = 0;
  *f2 = 0;

  /* Walk both streams backwards in lockstep, one real insn at a time.  */
  while (1)
    {
      i1 = prev_nonnote_insn (i1);

      /* In stream 2, additionally skip labels (stream 1 stops on them).  */
      i2 = PREV_INSN (i2);
      while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
	i2 = PREV_INSN (i2);

      if (i1 == 0)
	break;

      /* Don't allow the range of insns preceding E1 or E2
	 to include the other (E2 or E1).  */
      if (i2 == e1 || i1 == e2)
	break;

      /* If we will get to this code by jumping, those jumps will be
	 tensioned to go directly to the new label (before I2),
	 so this cross-jumping won't cost extra.  So reduce the minimum.  */
      if (GET_CODE (i1) == CODE_LABEL)
	{
	  --minimum;
	  break;
	}

      if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
	break;

      p1 = PATTERN (i1);
      p2 = PATTERN (i2);

      /* If this is a CALL_INSN, compare register usage information.
	 If we don't check this on stack register machines, the two
	 CALL_INSNs might be merged leaving reg-stack.c with mismatching
	 numbers of stack registers in the same basic block.
	 If we don't check this on machines with delay slots, a delay slot may
	 be filled that clobbers a parameter expected by the subroutine.

	 ??? We take the simple route for now and assume that if they're
	 equal, they were constructed identically.  */

      if (GET_CODE (i1) == CALL_INSN
	  && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
			    CALL_INSN_FUNCTION_USAGE (i2)))
	lose = 1;

#ifdef STACK_REGS
      /* If cross_jump_death_matters is not 0, the insn's mode
	 indicates whether or not the insn contains any stack-like
	 regs.  */

      if (!lose && cross_jump_death_matters && GET_MODE (i1) == QImode)
	{
	  /* If register stack conversion has already been done, then
	     death notes must also be compared before it is certain that
	     the two instruction streams match.  */

	  rtx note;
	  HARD_REG_SET i1_regset, i2_regset;

	  CLEAR_HARD_REG_SET (i1_regset);
	  CLEAR_HARD_REG_SET (i2_regset);

	  /* Collect the stack registers that die in each insn ...  */
	  for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

	  for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

	  /* ... and refuse the match unless the two death sets agree.  */
	  GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);

	  lose = 1;

	done:
	  ;
	}
#endif

      /* Don't allow old-style asm or volatile extended asms to be accepted
	 for cross jumping purposes.  It is conceptually correct to allow
	 them, since cross-jumping preserves the dynamic instruction order
	 even though it is changing the static instruction order.  However,
	 if an asm is being used to emit an assembler pseudo-op, such as
	 the MIPS `.set reorder' pseudo-op, then the static instruction order
	 matters and it must be preserved.  */
      if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
	  || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
	  || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
	lose = 1;

      if (lose || GET_CODE (p1) != GET_CODE (p2)
	  || ! rtx_renumbered_equal_p (p1, p2))
	{
	  /* The following code helps take care of G++ cleanups.
	     Two sets whose sources differ textually may still be
	     equivalent if both have REG_EQUAL/REG_EQUIV notes for the
	     same constant; try substituting that constant in both.  */
	  rtx equiv1;
	  rtx equiv2;

	  if (!lose && GET_CODE (p1) == GET_CODE (p2)
	      && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
		  || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
	      && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
		  || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
	      /* If the equivalences are not to a constant, they may
		 reference pseudos that no longer exist, so we can't
		 use them.  */
	      && CONSTANT_P (XEXP (equiv1, 0))
	      && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
	    {
	      rtx s1 = single_set (i1);
	      rtx s2 = single_set (i2);
	      if (s1 != 0 && s2 != 0
		  && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
		{
		  validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
		  validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
		  if (! rtx_renumbered_equal_p (p1, p2))
		    cancel_changes (0);
		  else if (apply_change_group ())
		    goto win;
		}
	    }

	  /* Insns fail to match; cross jumping is limited to the following
	     insns.  */

#ifdef HAVE_cc0
	  /* Don't allow the insn after a compare to be shared by
	     cross-jumping unless the compare is also shared.
	     Here, if either of these non-matching insns is a compare,
	     exclude the following insn from possible cross-jumping.  */
	  if (sets_cc0_p (p1) || sets_cc0_p (p2))
	    last1 = afterlast1, last2 = afterlast2, ++minimum;
#endif

	  /* If cross-jumping here will feed a jump-around-jump
	     optimization, this jump won't cost extra, so reduce
	     the minimum.  */
	  if (GET_CODE (i1) == JUMP_INSN
	      && JUMP_LABEL (i1)
	      && prev_real_insn (JUMP_LABEL (i1)) == e1)
	    --minimum;
	  break;
	}

    win:
      if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
	{
	  /* Ok, this insn is potentially includable in a cross-jump here.  */
	  afterlast1 = last1, afterlast2 = last2;
	  last1 = i1, last2 = i2, --minimum;
	}
    }

  /* Succeed only if enough insns matched (MINIMUM reached zero) and the
     matched range is non-empty and does not start at E1 itself.  */
  if (minimum <= 0 && last1 != 0 && last1 != e1)
    *f1 = last1, *f2 = last2;
}
2708
/* Commit a cross jump: redirect INSN to a label placed just before NEWLPOS,
   then delete the insns from NEWJPOS up to (but not including) INSN, since
   they match the stream starting at NEWLPOS.  NEWJPOS and NEWLPOS advance
   in lock step over the two matching streams.  */

static void
do_cross_jump (insn, newjpos, newlpos)
     rtx insn, newjpos, newlpos;
{
  /* Find an existing label at this point
     or make a new one if there is none.  */
  register rtx label = get_label_before (newlpos);

  /* Make the same jump insn jump to the new point.  */
  if (GET_CODE (PATTERN (insn)) == RETURN)
    {
      /* A RETURN cannot go through redirect_jump; rewrite it by hand
	 into a plain jump to LABEL.  */
      /* Remove from jump chain of returns.  */
      delete_from_jump_chain (insn);
      /* Change the insn.  */
      PATTERN (insn) = gen_jump (label);
      INSN_CODE (insn) = -1;	/* Force re-recognition.  */
      JUMP_LABEL (insn) = label;
      LABEL_NUSES (label)++;
      /* Add to new the jump chain.  */
      if (INSN_UID (label) < max_jump_chain
	  && INSN_UID (insn) < max_jump_chain)
	{
	  jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
	  jump_chain[INSN_UID (label)] = insn;
	}
    }
  else
    redirect_jump (insn, label);

  /* Delete the matching insns before the jump.  Also, remove any REG_EQUAL
     or REG_EQUIV note in the NEWLPOS stream that isn't also present in
     the NEWJPOS stream.  */

  while (newjpos != insn)
    {
      rtx lnote;

      for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
	if ((REG_NOTE_KIND (lnote) == REG_EQUAL
	     || REG_NOTE_KIND (lnote) == REG_EQUIV)
	    && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
	    && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
	  remove_note (newlpos, lnote);

      delete_insn (newjpos);
      newjpos = next_real_insn (newjpos);
      newlpos = next_real_insn (newlpos);
    }
}
2758\f
2759/* Return the label before INSN, or put a new label there. */
2760
2761rtx
2762get_label_before (insn)
2763 rtx insn;
2764{
2765 rtx label;
2766
2767 /* Find an existing label at this point
2768 or make a new one if there is none. */
2769 label = prev_nonnote_insn (insn);
2770
2771 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2772 {
2773 rtx prev = PREV_INSN (insn);
2774
15a63be1
RK
2775 label = gen_label_rtx ();
2776 emit_label_after (label, prev);
2777 LABEL_NUSES (label) = 0;
2778 }
2779 return label;
2780}
2781
2782/* Return the label after INSN, or put a new label there. */
2783
2784rtx
2785get_label_after (insn)
2786 rtx insn;
2787{
2788 rtx label;
2789
2790 /* Find an existing label at this point
2791 or make a new one if there is none. */
2792 label = next_nonnote_insn (insn);
2793
2794 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2795 {
15a63be1
RK
2796 label = gen_label_rtx ();
2797 emit_label_after (label, insn);
2798 LABEL_NUSES (label) = 0;
2799 }
2800 return label;
2801}
2802\f
/* Return 1 if INSN is a jump that jumps to right after TARGET
   only on the condition that TARGET itself would drop through.
   Assumes that TARGET is a conditional jump.  */

static int
jump_back_p (insn, target)
     rtx insn, target;
{
  rtx cinsn, ctarget;
  enum rtx_code codei, codet;

  /* INSN must be a conditional (not simple) jump whose label lands
     immediately after TARGET.  */
  if (simplejump_p (insn) || ! condjump_p (insn)
      || simplejump_p (target)
      || target != prev_real_insn (JUMP_LABEL (insn)))
    return 0;

  /* Extract the IF_THEN_ELSE condition of each jump.  */
  cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
  ctarget = XEXP (SET_SRC (PATTERN (target)), 0);

  codei = GET_CODE (cinsn);
  codet = GET_CODE (ctarget);

  /* If INSN branches in the ELSE arm (falls through when true),
     reverse its condition so both are compared "branch taken".  */
  if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
    {
      if (! can_reverse_comparison_p (cinsn, insn))
	return 0;
      codei = reverse_condition (codei);
    }

  /* Likewise normalize TARGET's condition to its drop-through sense.  */
  if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
    {
      if (! can_reverse_comparison_p (ctarget, target))
	return 0;
      codet = reverse_condition (codet);
    }

  /* Match: same comparison code on (renumber-)equal operands.  */
  return (codei == codet
	  && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
	  && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
}
2843\f
/* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
   return non-zero if it is safe to reverse this comparison.  It is if our
   floating-point is not IEEE, if this is an NE or EQ comparison, or if
   this is known to be an integer comparison.  */

int
can_reverse_comparison_p (comparison, insn)
     rtx comparison;
     rtx insn;
{
  rtx arg0;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
    return 0;

  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      /* If this is an NE comparison, it is safe to reverse it to an EQ
	 comparison and vice versa, even for floating point.  If no operands
	 are NaNs, the reversal is valid.  If some operand is a NaN, EQ is
	 always false and NE is always true, so the reversal is also valid.  */
      || flag_fast_math
      || GET_CODE (comparison) == NE
      || GET_CODE (comparison) == EQ)
    return 1;

  arg0 = XEXP (comparison, 0);

  /* Make sure ARG0 is one of the actual objects being compared.  If we
     can't do this, we can't be sure the comparison can be reversed.

     Handle cc0 and a MODE_CC register.  */
  if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
#ifdef HAVE_cc0
      || arg0 == cc0_rtx
#endif
      )
    {
      /* The condition-code register was set by the previous insn;
	 look there for the value actually being compared.  */
      rtx prev = prev_nonnote_insn (insn);
      rtx set = single_set (prev);

      if (set == 0 || SET_DEST (set) != arg0)
	return 0;

      arg0 = SET_SRC (set);

      if (GET_CODE (arg0) == COMPARE)
	arg0 = XEXP (arg0, 0);
    }

  /* We can reverse this if ARG0 is a CONST_INT or if its mode is
     not VOIDmode and neither a MODE_CC nor MODE_FLOAT type.  */
  return (GET_CODE (arg0) == CONST_INT
	  || (GET_MODE (arg0) != VOIDmode
	      && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
	      && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
}
2901
2902/* Given an rtx-code for a comparison, return the code
2903 for the negated comparison.
2904 WATCH OUT! reverse_condition is not safe to use on a jump
2905 that might be acting on the results of an IEEE floating point comparison,
2906 because of the special treatment of non-signaling nans in comparisons.
2907 Use can_reverse_comparison_p to be sure. */
2908
2909enum rtx_code
2910reverse_condition (code)
2911 enum rtx_code code;
2912{
2913 switch (code)
2914 {
2915 case EQ:
2916 return NE;
2917
2918 case NE:
2919 return EQ;
2920
2921 case GT:
2922 return LE;
2923
2924 case GE:
2925 return LT;
2926
2927 case LT:
2928 return GE;
2929
2930 case LE:
2931 return GT;
2932
2933 case GTU:
2934 return LEU;
2935
2936 case GEU:
2937 return LTU;
2938
2939 case LTU:
2940 return GEU;
2941
2942 case LEU:
2943 return GTU;
2944
2945 default:
2946 abort ();
2947 return UNKNOWN;
2948 }
2949}
2950
2951/* Similar, but return the code when two operands of a comparison are swapped.
2952 This IS safe for IEEE floating-point. */
2953
2954enum rtx_code
2955swap_condition (code)
2956 enum rtx_code code;
2957{
2958 switch (code)
2959 {
2960 case EQ:
2961 case NE:
2962 return code;
2963
2964 case GT:
2965 return LT;
2966
2967 case GE:
2968 return LE;
2969
2970 case LT:
2971 return GT;
2972
2973 case LE:
2974 return GE;
2975
2976 case GTU:
2977 return LTU;
2978
2979 case GEU:
2980 return LEU;
2981
2982 case LTU:
2983 return GTU;
2984
2985 case LEU:
2986 return GEU;
2987
2988 default:
2989 abort ();
2990 return UNKNOWN;
2991 }
2992}
2993
2994/* Given a comparison CODE, return the corresponding unsigned comparison.
2995 If CODE is an equality comparison or already an unsigned comparison,
2996 CODE is returned. */
2997
2998enum rtx_code
2999unsigned_condition (code)
3000 enum rtx_code code;
3001{
3002 switch (code)
3003 {
3004 case EQ:
3005 case NE:
3006 case GTU:
3007 case GEU:
3008 case LTU:
3009 case LEU:
3010 return code;
3011
3012 case GT:
3013 return GTU;
3014
3015 case GE:
3016 return GEU;
3017
3018 case LT:
3019 return LTU;
3020
3021 case LE:
3022 return LEU;
3023
3024 default:
3025 abort ();
3026 }
3027}
3028
3029/* Similarly, return the signed version of a comparison. */
3030
3031enum rtx_code
3032signed_condition (code)
3033 enum rtx_code code;
3034{
3035 switch (code)
3036 {
3037 case EQ:
3038 case NE:
3039 case GT:
3040 case GE:
3041 case LT:
3042 case LE:
3043 return code;
3044
3045 case GTU:
3046 return GT;
3047
3048 case GEU:
3049 return GE;
3050
3051 case LTU:
3052 return LT;
3053
3054 case LEU:
3055 return LE;
3056
3057 default:
3058 abort ();
3059 }
3060}
3061\f
3062/* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3063 truth of CODE1 implies the truth of CODE2. */
3064
3065int
3066comparison_dominates_p (code1, code2)
3067 enum rtx_code code1, code2;
3068{
3069 if (code1 == code2)
3070 return 1;
3071
3072 switch (code1)
3073 {
3074 case EQ:
3075 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
3076 return 1;
3077 break;
3078
3079 case LT:
b0c38416 3080 if (code2 == LE || code2 == NE)
15a63be1
RK
3081 return 1;
3082 break;
3083
3084 case GT:
b0c38416 3085 if (code2 == GE || code2 == NE)
15a63be1
RK
3086 return 1;
3087 break;
3088
3089 case LTU:
b0c38416 3090 if (code2 == LEU || code2 == NE)
15a63be1
RK
3091 return 1;
3092 break;
3093
3094 case GTU:
b0c38416 3095 if (code2 == GEU || code2 == NE)
15a63be1
RK
3096 return 1;
3097 break;
e9a25f70
JL
3098
3099 default:
3100 break;
15a63be1
RK
3101 }
3102
3103 return 0;
3104}
3105\f
3106/* Return 1 if INSN is an unconditional jump and nothing else. */
3107
3108int
3109simplejump_p (insn)
3110 rtx insn;
3111{
3112 return (GET_CODE (insn) == JUMP_INSN
3113 && GET_CODE (PATTERN (insn)) == SET
3114 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3115 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3116}
3117
3118/* Return nonzero if INSN is a (possibly) conditional jump
3119 and nothing more. */
3120
3121int
3122condjump_p (insn)
3123 rtx insn;
3124{
3125 register rtx x = PATTERN (insn);
3480bb98
JL
3126 if (GET_CODE (x) != SET)
3127 return 0;
3128 if (GET_CODE (SET_DEST (x)) != PC)
3129 return 0;
3130 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3131 return 1;
3132 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3133 return 0;
3134 if (XEXP (SET_SRC (x), 2) == pc_rtx
3135 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3136 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3137 return 1;
3138 if (XEXP (SET_SRC (x), 1) == pc_rtx
3139 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3140 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3141 return 1;
3142 return 0;
3143}
3144
3145/* Return nonzero if INSN is a (possibly) conditional jump
3146 and nothing more. */
3147
3148int
3149condjump_in_parallel_p (insn)
3150 rtx insn;
3151{
3152 register rtx x = PATTERN (insn);
3153
3154 if (GET_CODE (x) != PARALLEL)
3155 return 0;
3156 else
3157 x = XVECEXP (x, 0, 0);
3158
15a63be1
RK
3159 if (GET_CODE (x) != SET)
3160 return 0;
3161 if (GET_CODE (SET_DEST (x)) != PC)
3162 return 0;
3163 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3164 return 1;
3165 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3166 return 0;
3167 if (XEXP (SET_SRC (x), 2) == pc_rtx
3168 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3169 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3170 return 1;
3171 if (XEXP (SET_SRC (x), 1) == pc_rtx
3172 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3173 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3174 return 1;
3175 return 0;
3176}
3177
3178/* Return 1 if X is an RTX that does nothing but set the condition codes
3179 and CLOBBER or USE registers.
3180 Return -1 if X does explicitly set the condition codes,
3181 but also does other things. */
3182
3183int
3184sets_cc0_p (x)
3185 rtx x;
3186{
3187#ifdef HAVE_cc0
3188 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3189 return 1;
3190 if (GET_CODE (x) == PARALLEL)
3191 {
3192 int i;
3193 int sets_cc0 = 0;
3194 int other_things = 0;
3195 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3196 {
3197 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3198 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3199 sets_cc0 = 1;
3200 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3201 other_things = 1;
3202 }
3203 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3204 }
3205 return 0;
3206#else
3207 abort ();
3208#endif
3209}
3210\f
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */

rtx
follow_jumps (label)
     rtx label;
{
  register rtx insn;
  register rtx next;
  register rtx value = label;
  register int depth;

  /* Chase at most 10 links; each step requires that VALUE is followed by
     a simple jump (or return) that is itself followed by a barrier.  */
  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& GET_CODE (insn) == JUMP_INSN
	&& ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (GET_CODE (tem) == NOTE
	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
		  /* ??? Optional.  Disables some optimizations, but makes
		     gcov output more accurate with -O.  */
		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      /* Don't chain into a dispatch table: the tablejump needs its own
	 label right before the table.  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  /* Hitting the depth limit may mean an undetected cycle; play safe.  */
  if (depth == 10)
    return label;
  return value;
}
3268
3269/* Assuming that field IDX of X is a vector of label_refs,
3270 replace each of them by the ultimate label reached by it.
3271 Return nonzero if a change is made.
3272 If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG. */
3273
3274static int
3275tension_vector_labels (x, idx)
3276 register rtx x;
3277 register int idx;
3278{
3279 int changed = 0;
3280 register int i;
3281 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3282 {
3283 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3284 register rtx nlabel = follow_jumps (olabel);
3285 if (nlabel && nlabel != olabel)
3286 {
3287 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3288 ++LABEL_NUSES (nlabel);
3289 if (--LABEL_NUSES (olabel) == 0)
3290 delete_insn (olabel);
3291 changed = 1;
3292 }
3293 }
3294 return changed;
3295}
3296\f
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.

   Once reload has completed (CROSS_JUMP non-zero), we need not consider
   two labels distinct if they are separated by only USE or CLOBBER insns.  */

static void
mark_jump_label (x, insn, cross_jump)
     register rtx x;
     rtx insn;
     int cross_jump;
{
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  switch (code)
    {
    /* These codes can never contain a label reference.  */
    case PC:
    case CC0:
    case REG:
    case SUBREG:
    case CONST_INT:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* If this is a constant-pool reference, see if it is a label.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);
	rtx olabel = label;
	rtx note;
	rtx next;

	if (GET_CODE (label) != CODE_LABEL)
	  abort ();

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	/* If there are other labels following this one,
	   replace it with the last of the consecutive labels.  */
	for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
	  {
	    if (GET_CODE (next) == CODE_LABEL)
	      label = next;
	    else if (cross_jump && GET_CODE (next) == INSN
		     && (GET_CODE (PATTERN (next)) == USE
			 || GET_CODE (PATTERN (next)) == CLOBBER))
	      continue;
	    else if (GET_CODE (next) != NOTE)
	      break;
	    else if (! cross_jump
		     && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
			 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
			 /* ??? Optional.  Disables some optimizations, but
			    makes gcov output more accurate with -O.  */
			 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
	      break;
	  }

	/* Record the (possibly canonicalized) label and count the use.  */
	XEXP (x, 0) = label;
	++LABEL_NUSES (label);

	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;

	    /* If we've changed OLABEL and we had a REG_LABEL note
	       for it, update it as well.  */
	    else if (label != olabel
		     && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
	      XEXP (note, 0) = label;

	    /* Otherwise, add a REG_LABEL note for LABEL unless there already
	       is one.  */
	    else if (! find_reg_note (insn, REG_LABEL, label))
	      {
		rtx next = next_real_insn (label);
		/* Don't record labels that refer to dispatch tables.
		   This is not necessary, since the tablejump
		   references the same label.
		   And if we did record them, flow.c would make worse code.  */
		if (next == 0
		    || ! (GET_CODE (next) == JUMP_INSN
			  && (GET_CODE (PATTERN (next)) == ADDR_VEC
			      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)))
		  REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_LABEL, label,
					      REG_NOTES (insn));
	      }
	  }
	return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      {
	int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	for (i = 0; i < XVECLEN (x, eltnum); i++)
	  mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
      }
      return;

    default:
      break;
    }

  /* Recurse over the sub-expressions of any other rtx code.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, cross_jump);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
	}
    }
}
3441
3442/* If all INSN does is set the pc, delete it,
3443 and delete the insn that set the condition codes for it
3444 if that's what the previous thing was. */
3445
3446void
3447delete_jump (insn)
3448 rtx insn;
3449{
3e5478ea
RK
3450 register rtx set = single_set (insn);
3451
3452 if (set && GET_CODE (SET_DEST (set)) == PC)
3453 delete_computation (insn);
3454}
3455
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (insn)
     rtx insn;
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  /* sets_cc0_p > 0 means PREV does nothing but set cc0.  */
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && !FIND_REG_INC_NOTE (prev, NULL_RTX))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_UNUSED,
					cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  /* For each register that dies in INSN, see whether the insn that set
     it has now become dead.  NEXT is saved up front because NOTE may be
     re-threaded onto another insn's note list below.  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      rtx our_prev;

      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG)
	continue;

      /* Walk backward over plain INSNs looking for the setter.  */
      for (our_prev = prev_nonnote_insn (insn);
	   our_prev && GET_CODE (our_prev) == INSN;
	   our_prev = prev_nonnote_insn (our_prev))
	{
	  /* If we reach a SEQUENCE, it is too complex to try to
	     do anything with it, so give up.  */
	  if (GET_CODE (PATTERN (our_prev)) == SEQUENCE)
	    break;

	  if (GET_CODE (PATTERN (our_prev)) == USE
	      && GET_CODE (XEXP (PATTERN (our_prev), 0)) == INSN)
	    /* reorg creates USEs that look like this.  We leave them
	       alone because reorg needs them for its own purposes.  */
	    break;

	  if (reg_set_p (XEXP (note, 0), PATTERN (our_prev)))
	    {
	      /* An auto-increment makes the insn have other effects;
		 we cannot delete it.  */
	      if (FIND_REG_INC_NOTE (our_prev, NULL_RTX))
		break;

	      if (GET_CODE (PATTERN (our_prev)) == PARALLEL)
		{
		  /* If we find a SET of something else, we can't
		     delete the insn.  */

		  int i;

		  for (i = 0; i < XVECLEN (PATTERN (our_prev), 0); i++)
		    {
		      rtx part = XVECEXP (PATTERN (our_prev), 0, i);

		      if (GET_CODE (part) == SET
			  && SET_DEST (part) != XEXP (note, 0))
			break;
		    }

		  if (i == XVECLEN (PATTERN (our_prev), 0))
		    delete_computation (our_prev);
		}
	      else if (GET_CODE (PATTERN (our_prev)) == SET
		       && SET_DEST (PATTERN (our_prev)) == XEXP (note, 0))
		delete_computation (our_prev);

	      break;
	    }

	  /* If OUR_PREV references the register that dies here, it is an
	     additional use.  Hence any prior SET isn't dead.  However, this
	     insn becomes the new place for the REG_DEAD note.  */
	  if (reg_overlap_mentioned_p (XEXP (note, 0),
				       PATTERN (our_prev)))
	    {
	      XEXP (note, 1) = REG_NOTES (our_prev);
	      REG_NOTES (our_prev) = note;
	      break;
	    }
	}
    }

  delete_insn (insn);
}
3571\f
/* Delete insn INSN from the chain of insns and update label ref counts.
   May delete some following insns as a consequence; may even delete
   a label elsewhere and insns that follow it.

   Returns the first insn after INSN that was not deleted.  */

rtx
delete_insn (insn)
     register rtx insn;
{
  register rtx next = NEXT_INSN (insn);
  register rtx prev = PREV_INSN (insn);
  register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  register int dont_really_delete = 0;

  /* Skip over insns already marked deleted to find the true successor.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  /* Don't delete user-declared labels.  Convert them to special NOTEs
     instead.  */
  if (was_code_label && LABEL_NAME (insn) != 0
      && optimize && ! dont_really_delete)
    {
      PUT_CODE (insn, NOTE);
      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
      NOTE_SOURCE_FILE (insn) = 0;
      dont_really_delete = 1;
    }
  else
    /* Mark this insn as deleted.  */
    INSN_DELETED_P (insn) = 1;

  /* If this is an unconditional jump, delete it from the jump chain.  */
  if (simplejump_p (insn))
    delete_from_jump_chain (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    {
      INSN_DELETED_P (next) = 1;
      next = NEXT_INSN (next);
    }

  /* Patch out INSN (and the barrier if any): splice PREV and NEXT
     together, keeping any enclosing SEQUENCEs consistent.  */

  if (optimize && ! dont_really_delete)
    {
      if (prev)
	{
	  NEXT_INSN (prev) = next;
	  if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
	    NEXT_INSN (XVECEXP (PATTERN (prev), 0,
				XVECLEN (PATTERN (prev), 0) - 1)) = next;
	}

      if (next)
	{
	  PREV_INSN (next) = prev;
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
	    PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
	}

      if (prev && NEXT_INSN (prev) == 0)
	set_last_insn (prev);
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    if (--LABEL_NUSES (JUMP_LABEL (insn)) == 0)
      {
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_insn (JUMP_LABEL (insn));
	/* I feel a little doubtful about this loop,
	   but I see no clean and sure alternative way
	   to find the first insn after INSN that is not now deleted.
	   I hope this works.  */
	while (next && INSN_DELETED_P (next))
	  next = NEXT_INSN (next);
	return next;
      }

  /* Likewise if we're deleting a dispatch table: drop the use count of
     every label it references.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
	if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Back up over deleted insns and notes to the last "real" predecessor.  */
  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_insn (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      register RTX_CODE code;
      while (next != 0
	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
		 || code == NOTE || code == BARRIER
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_insn (next);
	}
    }

  return next;
}
3722
3723/* Advance from INSN till reaching something not deleted
3724 then return that. May return INSN itself. */
3725
3726rtx
3727next_nondeleted_insn (insn)
3728 rtx insn;
3729{
3730 while (INSN_DELETED_P (insn))
3731 insn = NEXT_INSN (insn);
3732 return insn;
3733}
3734\f
3735/* Delete a range of insns from FROM to TO, inclusive.
3736 This is for the sake of peephole optimization, so assume
3737 that whatever these insns do will still be done by a new
3738 peephole insn that will replace them. */
3739
3740void
3741delete_for_peephole (from, to)
3742 register rtx from, to;
3743{
3744 register rtx insn = from;
3745
3746 while (1)
3747 {
3748 register rtx next = NEXT_INSN (insn);
3749 register rtx prev = PREV_INSN (insn);
3750
3751 if (GET_CODE (insn) != NOTE)
3752 {
3753 INSN_DELETED_P (insn) = 1;
3754
3755 /* Patch this insn out of the chain. */
3756 /* We don't do this all at once, because we
3757 must preserve all NOTEs. */
3758 if (prev)
3759 NEXT_INSN (prev) = next;
3760
3761 if (next)
3762 PREV_INSN (next) = prev;
3763 }
3764
3765 if (insn == to)
3766 break;
3767 insn = next;
3768 }
3769
3770 /* Note that if TO is an unconditional jump
3771 we *do not* delete the BARRIER that follows,
3772 since the peephole that replaces this sequence
3773 is also an unconditional jump in that case. */
3774}
3775\f
/* Invert the condition of the jump JUMP, and make it jump
   to label NLABEL instead of where it jumps now.
   Return 1 on success, 0 on failure (JUMP is then unchanged).  */

int
invert_jump (jump, nlabel)
     rtx jump, nlabel;
{
  /* We have to either invert the condition and change the label or
     do neither.  Either operation could fail.  We first try to invert
     the jump.  If that succeeds, we try changing the label.  If that fails,
     we invert the jump back to what it was.  */

  if (! invert_exp (PATTERN (jump), jump))
    return 0;

  if (redirect_jump (jump, nlabel))
    {
      if (flag_branch_probabilities)
	{
	  rtx note = find_reg_note (jump, REG_BR_PROB, 0);

	  /* An inverted jump means that a probability taken becomes a
	     probability not taken.  Subtract the branch probability from the
	     probability base to convert it back to a taken probability.
	     (We don't flip the probability on a branch that's never taken.)  */
	  if (note && XINT (XEXP (note, 0), 0) >= 0)
	    XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
	}

      return 1;
    }

  /* Redirection failed; undo the inversion.  */
  if (! invert_exp (PATTERN (jump), jump))
    /* This should just be putting it back the way it was.  */
    abort ();

  return 0;
}
3814
/* Invert the jump condition of rtx X contained in jump insn, INSN.

   Return 1 if we can do so, 0 if we cannot find a way to do so that
   matches a pattern.  */

int
invert_exp (x, insn)
     rtx x;
     rtx insn;
{
  register RTX_CODE code;
  register int i;
  register char *fmt;

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      register rtx comp = XEXP (x, 0);
      register rtx tem;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      if (can_reverse_comparison_p (comp, insn)
	  && validate_change (insn, &XEXP (x, 0),
			      gen_rtx (reverse_condition (GET_CODE (comp)),
				       GET_MODE (comp), XEXP (comp, 0),
				       XEXP (comp, 1)), 0))
	return 1;

      /* Queue the arm swap as a change group so it is applied (or
	 abandoned) atomically.  */
      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return apply_change_group ();
    }

  /* Not at the IF_THEN_ELSE yet; recurse into the sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	if (! invert_exp (XEXP (x, i), insn))
	  return 0;
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (!invert_exp (XVECEXP (x, i, j), insn))
	      return 0;
	}
    }

  return 1;
}
3871\f
/* Make jump JUMP jump to label NLABEL instead of where it jumps now.
   If the old jump target label is unused as a result,
   it and the code following it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't (this
   can only occur for NLABEL == 0).  */

int
redirect_jump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  /* Redirecting to where we already go is trivially successful.  */
  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
    return 0;

  /* If this is an unconditional branch, delete it from the jump_chain of
     OLABEL and add it to the jump_chain of NLABEL (assuming both labels
     have UID's in range and JUMP_CHAIN is valid).  */
  if (jump_chain && (simplejump_p (jump)
		     || GET_CODE (PATTERN (jump)) == RETURN))
    {
      /* Chain slot 0 is reserved for RETURN jumps (NLABEL == 0).  */
      int label_index = nlabel ? INSN_UID (nlabel) : 0;

      delete_from_jump_chain (jump);
      if (label_index < max_jump_chain
	  && INSN_UID (jump) < max_jump_chain)
	{
	  jump_chain[INSN_UID (jump)] = jump_chain[label_index];
	  jump_chain[label_index] = jump;
	}
    }

  /* Update use counts; deleting OLABEL when its count drops to zero
     also deletes the now-unreachable code after it.  */
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0)
    delete_insn (olabel);

  return 1;
}
3920
3921/* Delete the instruction JUMP from any jump chain it might be on. */
3922
3923static void
3924delete_from_jump_chain (jump)
3925 rtx jump;
3926{
3927 int index;
3928 rtx olabel = JUMP_LABEL (jump);
3929
3930 /* Handle unconditional jumps. */
3931 if (jump_chain && olabel != 0
3932 && INSN_UID (olabel) < max_jump_chain
3933 && simplejump_p (jump))
3934 index = INSN_UID (olabel);
3935 /* Handle return insns. */
3936 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3937 index = 0;
3938 else return;
3939
3940 if (jump_chain[index] == jump)
3941 jump_chain[index] = jump_chain[INSN_UID (jump)];
3942 else
3943 {
3944 rtx insn;
3945
3946 for (insn = jump_chain[index];
3947 insn != 0;
3948 insn = jump_chain[INSN_UID (insn)])
3949 if (jump_chain[INSN_UID (insn)] == jump)
3950 {
3951 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3952 break;
3953 }
3954 }
3955}
3956
/* If NLABEL is nonzero, throughout the rtx at LOC,
   alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL).  If OLABEL is
   zero, alter (RETURN) to (LABEL_REF NLABEL).

   If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
   validity with validate_change.  Convert (set (pc) (label_ref olabel))
   to (return).

   Return 0 if we found a change we would like to make but it is invalid.
   Otherwise, return 1.  */

int
redirect_exp (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  register rtx x = *loc;
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  if (nlabel)
	    /* Label-to-label replacement is always valid; do it in place.  */
	    XEXP (x, 0) = nlabel;
	  else
	    /* Turning a label ref into RETURN must match an insn pattern.  */
	    return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
	  return 1;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      /* Converting RETURN to a jump to NLABEL; wrap in a SET of pc
	 when replacing the whole pattern.  */
      x = gen_rtx (LABEL_REF, VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx (SET, VOIDmode, pc_rtx, x);
      return validate_change (insn, loc, x, 0);
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);

  /* Recurse on all sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
	  return 0;
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
	      return 0;
	}
    }

  return 1;
}
4020\f
/* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.

   If the old jump target label (before the dispatch table) becomes unused,
   it and the dispatch table may be deleted.  In that case, find the insn
   before the jump references that label and delete it and logical successors
   too.  */

static void
redirect_tablejump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  /* Add this jump to the jump_chain of NLABEL.  */
  if (jump_chain && INSN_UID (nlabel) < max_jump_chain
      && INSN_UID (jump) < max_jump_chain)
    {
      jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
      jump_chain[INSN_UID (nlabel)] = jump;
    }

  /* Replace the tablejump with a plain unconditional jump to NLABEL;
     resetting INSN_CODE forces re-recognition of the new pattern.  */
  PATTERN (jump) = gen_jump (nlabel);
  JUMP_LABEL (jump) = nlabel;
  ++LABEL_NUSES (nlabel);
  INSN_CODE (jump) = -1;

  /* If the old label is now unused, delete the insn that fed the
     dispatch table (and its logical predecessors), then the label
     itself — which also removes the dead table after it.  */
  if (--LABEL_NUSES (olabel) == 0)
    {
      delete_labelref_insn (jump, olabel, 0);
      delete_insn (olabel);
    }
}
4053
4054/* Find the insn referencing LABEL that is a logical predecessor of INSN.
4055 If we found one, delete it and then delete this insn if DELETE_THIS is
4056 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4057
4058static int
4059delete_labelref_insn (insn, label, delete_this)
4060 rtx insn, label;
4061 int delete_this;
4062{
4063 int deleted = 0;
4064 rtx link;
4065
4066 if (GET_CODE (insn) != NOTE
4067 && reg_mentioned_p (label, PATTERN (insn)))
4068 {
4069 if (delete_this)
4070 {
4071 delete_insn (insn);
4072 deleted = 1;
4073 }
4074 else
4075 return 1;
4076 }
4077
4078 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4079 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4080 {
4081 if (delete_this)
4082 {
4083 delete_insn (insn);
4084 deleted = 1;
4085 }
4086 else
4087 return 1;
4088 }
4089
4090 return deleted;
4091}
4092\f
4093/* Like rtx_equal_p except that it considers two REGs as equal
4fe73cc1
RK
4094 if they renumber to the same value and considers two commutative
4095 operations to be the same if the order of the operands has been
4096 reversed. */
15a63be1
RK
4097
4098int
4099rtx_renumbered_equal_p (x, y)
4100 rtx x, y;
4101{
4102 register int i;
4103 register RTX_CODE code = GET_CODE (x);
4104 register char *fmt;
4105
4106 if (x == y)
4107 return 1;
4fe73cc1 4108
15a63be1
RK
4109 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4110 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4111 && GET_CODE (SUBREG_REG (y)) == REG)))
4112 {
4fe73cc1
RK
4113 int reg_x = -1, reg_y = -1;
4114 int word_x = 0, word_y = 0;
15a63be1
RK
4115
4116 if (GET_MODE (x) != GET_MODE (y))
4117 return 0;
4118
4119 /* If we haven't done any renumbering, don't
4120 make any assumptions. */
4121 if (reg_renumber == 0)
4122 return rtx_equal_p (x, y);
4123
4124 if (code == SUBREG)
4125 {
4fe73cc1
RK
4126 reg_x = REGNO (SUBREG_REG (x));
4127 word_x = SUBREG_WORD (x);
4128
4129 if (reg_renumber[reg_x] >= 0)
4130 {
4131 reg_x = reg_renumber[reg_x] + word_x;
4132 word_x = 0;
4133 }
15a63be1 4134 }
4fe73cc1 4135
15a63be1
RK
4136 else
4137 {
4fe73cc1
RK
4138 reg_x = REGNO (x);
4139 if (reg_renumber[reg_x] >= 0)
4140 reg_x = reg_renumber[reg_x];
15a63be1 4141 }
4fe73cc1 4142
15a63be1
RK
4143 if (GET_CODE (y) == SUBREG)
4144 {
4fe73cc1
RK
4145 reg_y = REGNO (SUBREG_REG (y));
4146 word_y = SUBREG_WORD (y);
4147
4148 if (reg_renumber[reg_y] >= 0)
4149 {
4150 reg_y = reg_renumber[reg_y];
4151 word_y = 0;
4152 }
15a63be1 4153 }
4fe73cc1 4154
15a63be1
RK
4155 else
4156 {
4fe73cc1
RK
4157 reg_y = REGNO (y);
4158 if (reg_renumber[reg_y] >= 0)
4159 reg_y = reg_renumber[reg_y];
15a63be1 4160 }
4fe73cc1
RK
4161
4162 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
15a63be1 4163 }
4fe73cc1 4164
15a63be1
RK
4165 /* Now we have disposed of all the cases
4166 in which different rtx codes can match. */
4167 if (code != GET_CODE (y))
4168 return 0;
4fe73cc1 4169
15a63be1
RK
4170 switch (code)
4171 {
4172 case PC:
4173 case CC0:
4174 case ADDR_VEC:
4175 case ADDR_DIFF_VEC:
4176 return 0;
4177
4178 case CONST_INT:
38b3167e 4179 return INTVAL (x) == INTVAL (y);
15a63be1
RK
4180
4181 case LABEL_REF:
705f26cf
RS
4182 /* We can't assume nonlocal labels have their following insns yet. */
4183 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4184 return XEXP (x, 0) == XEXP (y, 0);
4fe73cc1 4185
15a63be1
RK
4186 /* Two label-refs are equivalent if they point at labels
4187 in the same position in the instruction stream. */
4188 return (next_real_insn (XEXP (x, 0))
4189 == next_real_insn (XEXP (y, 0)));
4190
4191 case SYMBOL_REF:
4192 return XSTR (x, 0) == XSTR (y, 0);
e9a25f70
JL
4193
4194 default:
4195 break;
15a63be1
RK
4196 }
4197
4198 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4199
4200 if (GET_MODE (x) != GET_MODE (y))
4201 return 0;
4202
4fe73cc1
RK
4203 /* For commutative operations, the RTX match if the operand match in any
4204 order. Also handle the simple binary and unary cases without a loop. */
4205 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4206 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4207 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4208 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4209 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4210 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4211 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4212 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4213 else if (GET_RTX_CLASS (code) == '1')
4214 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4215
15a63be1
RK
4216 /* Compare the elements. If any pair of corresponding elements
4217 fail to match, return 0 for the whole things. */
4218
4219 fmt = GET_RTX_FORMAT (code);
4220 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4221 {
4222 register int j;
4223 switch (fmt[i])
4224 {
5f4f0e22
CH
4225 case 'w':
4226 if (XWINT (x, i) != XWINT (y, i))
4227 return 0;
4228 break;
4229
15a63be1
RK
4230 case 'i':
4231 if (XINT (x, i) != XINT (y, i))
4232 return 0;
4233 break;
4234
4235 case 's':
4236 if (strcmp (XSTR (x, i), XSTR (y, i)))
4237 return 0;
4238 break;
4239
4240 case 'e':
4241 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4242 return 0;
4243 break;
4244
4245 case 'u':
4246 if (XEXP (x, i) != XEXP (y, i))
4247 return 0;
4248 /* fall through. */
4249 case '0':
4250 break;
4251
4252 case 'E':
4253 if (XVECLEN (x, i) != XVECLEN (y, i))
4254 return 0;
4255 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4256 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4257 return 0;
4258 break;
4259
4260 default:
4261 abort ();
4262 }
4263 }
4264 return 1;
4265}
4266\f
4267/* If X is a hard register or equivalent to one or a subregister of one,
4268 return the hard register number. If X is a pseudo register that was not
4269 assigned a hard register, return the pseudo register number. Otherwise,
4270 return -1. Any rtx is valid for X. */
4271
4272int
4273true_regnum (x)
4274 rtx x;
4275{
4276 if (GET_CODE (x) == REG)
4277 {
4278 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4279 return reg_renumber[REGNO (x)];
4280 return REGNO (x);
4281 }
4282 if (GET_CODE (x) == SUBREG)
4283 {
4284 int base = true_regnum (SUBREG_REG (x));
4285 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
4286 return SUBREG_WORD (x) + base;
4287 }
4288 return -1;
4289}
4290\f
/* Optimize code of the form:

	for (x = a[i]; x; ...)
	  ...
	for (x = a[i]; x; ...)
	  ...
      foo:

   Loop optimize will change the above code into

	if (x = a[i])
	  for (;;)
	    { ...; if (! (x = ...)) break; }
	if (x = a[i])
	  for (;;)
	    { ...; if (! (x = ...)) break; }
      foo:

   In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try which is doomed to fail.
   We run this after loop optimization and before flow analysis.  */

/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

/* Pending pseudo-register equivalences; -1 means "no pairing yet".
   Allocated per call in thread_jumps via alloca.  */

static int *same_regs;

/* Count of pending (unresolved) entries in same_regs.  */

static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;
4333
4334/* Called via note_stores on each insn between the target of the first
4335 branch and the second branch. It marks any changed registers. */
4336
4337static void
4338mark_modified_reg (dest, x)
4339 rtx dest;
4340 rtx x;
4341{
4342 int regno, i;
4343
4344 if (GET_CODE (dest) == SUBREG)
4345 dest = SUBREG_REG (dest);
4346
4347 if (GET_CODE (dest) == MEM)
4348 modified_mem = 1;
4349
4350 if (GET_CODE (dest) != REG)
4351 return;
4352
4353 regno = REGNO (dest);
4354 if (regno >= FIRST_PSEUDO_REGISTER)
4355 modified_regs[regno] = 1;
4356 else
4357 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
4358 modified_regs[regno + i] = 1;
4359}
4360
4361/* F is the first insn in the chain of insns. */
4362
4363void
aa38b201 4364thread_jumps (f, max_reg, flag_before_loop)
15a63be1
RK
4365 rtx f;
4366 int max_reg;
aa38b201 4367 int flag_before_loop;
15a63be1
RK
4368{
4369 /* Basic algorithm is to find a conditional branch,
4370 the label it may branch to, and the branch after
4371 that label. If the two branches test the same condition,
4372 walk back from both branch paths until the insn patterns
4373 differ, or code labels are hit. If we make it back to
4374 the target of the first branch, then we know that the first branch
4375 will either always succeed or always fail depending on the relative
4376 senses of the two branches. So adjust the first branch accordingly
4377 in this case. */
4378
4379 rtx label, b1, b2, t1, t2;
4380 enum rtx_code code1, code2;
4381 rtx b1op0, b1op1, b2op0, b2op1;
4382 int changed = 1;
4383 int i;
7ee8a9d5 4384 int *all_reset;
15a63be1
RK
4385
4386 /* Allocate register tables and quick-reset table. */
4387 modified_regs = (char *) alloca (max_reg * sizeof (char));
7ee8a9d5
RK
4388 same_regs = (int *) alloca (max_reg * sizeof (int));
4389 all_reset = (int *) alloca (max_reg * sizeof (int));
15a63be1
RK
4390 for (i = 0; i < max_reg; i++)
4391 all_reset[i] = -1;
4392
4393 while (changed)
4394 {
4395 changed = 0;
4396
4397 for (b1 = f; b1; b1 = NEXT_INSN (b1))
4398 {
4399 /* Get to a candidate branch insn. */
4400 if (GET_CODE (b1) != JUMP_INSN
4401 || ! condjump_p (b1) || simplejump_p (b1)
4402 || JUMP_LABEL (b1) == 0)
4403 continue;
4404
4405 bzero (modified_regs, max_reg * sizeof (char));
4406 modified_mem = 0;
4407
4c9a05bc
RK
4408 bcopy ((char *) all_reset, (char *) same_regs,
4409 max_reg * sizeof (int));
15a63be1
RK
4410 num_same_regs = 0;
4411
4412 label = JUMP_LABEL (b1);
4413
4414 /* Look for a branch after the target. Record any registers and
4415 memory modified between the target and the branch. Stop when we
4416 get to a label since we can't know what was changed there. */
4417 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
4418 {
4419 if (GET_CODE (b2) == CODE_LABEL)
4420 break;
4421
4422 else if (GET_CODE (b2) == JUMP_INSN)
4423 {
4424 /* If this is an unconditional jump and is the only use of
4425 its target label, we can follow it. */
4426 if (simplejump_p (b2)
4427 && JUMP_LABEL (b2) != 0
4428 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
4429 {
4430 b2 = JUMP_LABEL (b2);
4431 continue;
4432 }
4433 else
4434 break;
4435 }
4436
4437 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
4438 continue;
4439
4440 if (GET_CODE (b2) == CALL_INSN)
4441 {
4442 modified_mem = 1;
4443 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4444 if (call_used_regs[i] && ! fixed_regs[i]
4445 && i != STACK_POINTER_REGNUM
4446 && i != FRAME_POINTER_REGNUM
cbe23927 4447 && i != HARD_FRAME_POINTER_REGNUM
15a63be1
RK
4448 && i != ARG_POINTER_REGNUM)
4449 modified_regs[i] = 1;
4450 }
4451
4452 note_stores (PATTERN (b2), mark_modified_reg);
4453 }
4454
4455 /* Check the next candidate branch insn from the label
4456 of the first. */
4457 if (b2 == 0
4458 || GET_CODE (b2) != JUMP_INSN
4459 || b2 == b1
4460 || ! condjump_p (b2)
4461 || simplejump_p (b2))
4462 continue;
4463
4464 /* Get the comparison codes and operands, reversing the
4465 codes if appropriate. If we don't have comparison codes,
4466 we can't do anything. */
4467 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
4468 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
4469 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
4470 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
4471 code1 = reverse_condition (code1);
4472
4473 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
4474 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
4475 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
4476 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
4477 code2 = reverse_condition (code2);
4478
4479 /* If they test the same things and knowing that B1 branches
4480 tells us whether or not B2 branches, check if we
4481 can thread the branch. */
4482 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
4483 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
4484 && (comparison_dominates_p (code1, code2)
cc5e1642
R
4485 || (comparison_dominates_p (code1, reverse_condition (code2))
4486 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
4487 0),
4488 b1))))
15a63be1
RK
4489 {
4490 t1 = prev_nonnote_insn (b1);
4491 t2 = prev_nonnote_insn (b2);
4492
4493 while (t1 != 0 && t2 != 0)
4494 {
15a63be1
RK
4495 if (t2 == label)
4496 {
4497 /* We have reached the target of the first branch.
4498 If there are no pending register equivalents,
4499 we know that this branch will either always
4500 succeed (if the senses of the two branches are
4501 the same) or always fail (if not). */
4502 rtx new_label;
4503
4504 if (num_same_regs != 0)
4505 break;
4506
4507 if (comparison_dominates_p (code1, code2))
4508 new_label = JUMP_LABEL (b2);
4509 else
4510 new_label = get_label_after (b2);
4511
aa38b201
TG
4512 if (JUMP_LABEL (b1) != new_label)
4513 {
4514 rtx prev = PREV_INSN (new_label);
4515
4516 if (flag_before_loop
4517 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
4518 {
4519 /* Don't thread to the loop label. If a loop
4520 label is reused, loop optimization will
4521 be disabled for that loop. */
4522 new_label = gen_label_rtx ();
4523 emit_label_after (new_label, PREV_INSN (prev));
4524 }
4525 changed |= redirect_jump (b1, new_label);
4526 }
15a63be1
RK
4527 break;
4528 }
4529
4530 /* If either of these is not a normal insn (it might be
4531 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4532 have already been skipped above.) Similarly, fail
4533 if the insns are different. */
4534 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
4535 || recog_memoized (t1) != recog_memoized (t2)
4536 || ! rtx_equal_for_thread_p (PATTERN (t1),
4537 PATTERN (t2), t2))
4538 break;
4539
4540 t1 = prev_nonnote_insn (t1);
4541 t2 = prev_nonnote_insn (t2);
4542 }
4543 }
4544 }
4545 }
4546}
4547\f
/* This is like RTX_EQUAL_P except that it knows about our handling of
   possibly equivalent registers and knows to consider volatile and
   modified objects as not equal.

   YINSN is the insn containing Y.

   NOTE(review): this predicate is stateful — it records pending
   pseudo-register pairings in same_regs/num_same_regs and cancels them
   in the SET case, so callers (thread_jumps) must reset those tables
   between candidate pairs.  */

int
rtx_equal_for_thread_p (x, y, yinsn)
     rtx x, y;
     rtx yinsn;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register char *fmt;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For floating-point, consider everything unequal.  This is a bit
     pessimistic, but this pass would only rarely do anything for FP
     anyway.  */
  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	     && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
	    || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
		&& rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	    && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

  /* Handle special-cases first.  */
  switch (code)
    {
    case REG:
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
	return 1;

      /* If neither is user variable or hard register, check for possible
	 equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  || REGNO (y) < FIRST_PSEUDO_REGISTER)
	return 0;

      if (same_regs[REGNO (x)] == -1)
	{
	  /* Record a tentative X<->Y pairing; it stays "pending" until
	     a SET of both registers cancels it.  */
	  same_regs[REGNO (x)] = REGNO (y);
	  num_same_regs++;

	  /* If this is the first time we are seeing a register on the `Y'
	     side, see if it is the last use.  If not, we can't thread the
	     jump, so mark it as not equivalent.  */
	  if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
	    return 0;

	  return 1;
	}
      else
	return (same_regs[REGNO (x)] == REGNO (y));

      break;

    case MEM:
      /* If memory modified or either volatile, not equivalent.
	 Else, check address.  */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

    case ASM_INPUT:
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' if setting equivalenced registers.
	 Then process source.  */
      if (GET_CODE (SET_DEST (x)) == REG
	  && GET_CODE (SET_DEST (y)) == REG)
	{
	  if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
	    {
	      same_regs[REGNO (SET_DEST (x))] = -1;
	      num_same_regs--;
	    }
	  else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
	    return 0;
	}
      else
	if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
	  return 0;

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  if (x == y)
    return 1;

  /* Structural comparison driven by the rtx format string.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
					XVECEXP (y, i, j), yinsn) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	  break;

	/* It is believed that rtx's at this level will never
	   contain anything but integers and other rtx's,
	   except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}
956d6950
JL
4730\f
4731
/* Return the insn that NEW can be safely inserted in front of starting at
   the jump insn INSN.  Return 0 if it is not safe to do this jump
   optimization.  Note that NEW must contain a single set.  */

static rtx
find_insert_position (insn, new)
     rtx insn;
     rtx new;
{
  int i;
  rtx prev;

  /* If NEW does not clobber, it is safe to insert NEW before INSN.  */
  if (GET_CODE (PATTERN (new)) != PARALLEL)
    return insn;

  /* Look for a CLOBBER in NEW whose clobbered register is live in INSN;
     if none, inserting directly before INSN is safe.  */
  for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
    if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
	&& reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
				    insn))
      break;

  if (i < 0)
    return insn;

  /* There is a good chance that the previous insn PREV sets the thing
     being clobbered (often the CC in a hard reg).  If PREV does not
     use what NEW sets, we can insert NEW before PREV.

     NOTE(review): prev_active_insn may return 0 when INSN has no active
     predecessor; modified_in_p/reg_mentioned_p would then be handed a
     null rtx — confirm callers guarantee an active predecessor here.  */

  prev = prev_active_insn (insn);
  for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
    if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
	&& reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
				    insn)
	&& ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
			    prev))
      return 0;

  return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
}
This page took 0.90288 seconds and 5 git commands to generate.