/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This is the jump-optimization pass of the compiler.
   It is run two or three times: once before cse, sometimes once after cse,
   and once after reload (before final).

   jump_optimize deletes unreachable code and labels that are not used.
   It also deletes jumps that jump to the following insn,
   and simplifies jumps around unconditional jumps and jumps
   to unconditional jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   Optionally, cross-jumping can be done.  Currently it is done
   only the last time (when after reload and before final).
   In fact, the code for cross-jumping now assumes that register
   allocation has been done, since it uses `rtx_renumbered_equal_p'.

   Jump optimization is done after cse when cse's constant-propagation
   causes jumps to become unconditional or to be deleted.

   Unreachable loops are not detected here, because the labels
   have references and the insns appear reachable from the labels.
   find_basic_blocks in flow.c finds and deletes such loops.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */
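
/* An illustrative sketch (comment only, not from the original source) of
   the simplest transformation here, a jump to a jump:

	goto L1;   ...   L1: goto L2;

   becomes

	goto L2;   ...   L1: goto L2;

   follow_jumps chases the chain from L1 to its final destination and
   redirect_jump rewires the first jump; if L1 thereby loses its last
   reference, the label and the jump after it become deletable.  */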

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"

/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */

static rtx *jump_chain;
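
/* An illustrative sketch of the chain layout (comment only; the uids are
   made up for the example): if simple jumps J1 (say, uid 10) and J2
   (uid 14) both target label L (uid 7), and J1 is marked first, then
   after mark_all_labels:

	jump_chain[7]  == J2	(the label's slot holds the head of the
				 chain, i.e. the jump marked last)
	jump_chain[14] == J1
	jump_chain[10] == 0	(end of chain)

   so walking jump_chain from a label's uid visits every unconditional
   jump to that label.  */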

/* Maximum index in jump_chain.  */

static int max_jump_chain;

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C,
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;

static int init_label_info		PARAMS ((rtx));
static void delete_barrier_successors	PARAMS ((rtx));
static void mark_all_labels		PARAMS ((rtx, int));
static rtx delete_unreferenced_labels	PARAMS ((rtx));
static void delete_noop_moves		PARAMS ((rtx));
static int duplicate_loop_exit_test	PARAMS ((rtx));
static void find_cross_jump		PARAMS ((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump		PARAMS ((rtx, rtx, rtx));
static int jump_back_p			PARAMS ((rtx, rtx));
static int tension_vector_labels	PARAMS ((rtx, int));
static void delete_computation		PARAMS ((rtx));
static void redirect_exp_1		PARAMS ((rtx *, rtx, rtx, rtx));
static int redirect_exp		PARAMS ((rtx, rtx, rtx));
static void invert_exp_1		PARAMS ((rtx));
static int invert_exp			PARAMS ((rtx));
static void delete_from_jump_chain	PARAMS ((rtx));
static int delete_labelref_insn	PARAMS ((rtx, rtx, int));
static void mark_modified_reg		PARAMS ((rtx, rtx, void *));
static void redirect_tablejump		PARAMS ((rtx, rtx));
static void jump_optimize_1		PARAMS ((rtx, int, int, int, int, int));
static int returnjump_p_1		PARAMS ((rtx *, void *));
static void delete_prior_computation	PARAMS ((rtx, rtx));
\f
/* Main external entry point into the jump optimizer.  See comments before
   jump_optimize_1 for descriptions of the arguments.  */
void
jump_optimize (f, cross_jump, noop_moves, after_regscan)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
{
  jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
}

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 1, 0);
}

/* Alternate entry into the jump optimizer.  Do only trivial optimizations.  */

void
jump_optimize_minimal (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 0, 1);
}
\f
/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is 1, detect matching code
   before a jump and its destination and unify them.
   If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

   If NOOP_MOVES is nonzero, delete no-op move insns.

   If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
   after regscan, and it is safe to use regno_first_uid and regno_last_uid.

   If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
   and JUMP_LABEL field for jumping insns.

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.

   If MINIMAL is nonzero, then we only perform trivial optimizations:

   * Removal of unreachable code after BARRIERs.
   * Removal of unreferenced CODE_LABELs.
   * Removal of a jump to the next instruction.
   * Removal of a conditional jump followed by an unconditional jump
     to the same target as the conditional jump.
   * Simplify a conditional jump around an unconditional jump.
   * Simplify a jump to a jump.
   * Delete extraneous line number notes.  */
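
/* An illustrative sketch of cross-jumping (comment only; the statements
   are made up for the example).  Given

	a = f (x); b = a + 1; goto L;
	...
	a = f (x); b = a + 1;
     L: ...

   the matching sequences before the jump and before its target label are
   found by find_cross_jump, and do_cross_jump deletes the first copy and
   redirects the jump to a new label placed before the surviving copy.  */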

static void
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
		 mark_labels_only, minimal)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
     int mark_labels_only;
     int minimal;
{
  register rtx insn, next;
  int changed;
  int old_max_reg;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;

  cross_jump_death_matters = (cross_jump == 2);
  max_uid = init_label_info (f) + 1;

  /* If we are performing cross jump optimizations, then initialize
     tables mapping UIDs to EH regions to avoid incorrect movement
     of insns from one EH region to another.  */
  if (flag_exceptions && cross_jump)
    init_insn_eh_region (f, max_uid);

  if (! mark_labels_only)
    delete_barrier_successors (f);

  /* Leave some extra room for labels and duplicate exit test insns
     we make.  */
  max_jump_chain = max_uid * 14 / 10;
  jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));

  mark_all_labels (f, cross_jump);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
      LABEL_NUSES (XEXP (insn, 0))++;

  check_exception_handler_labels ();

  /* Keep track of labels used for marking handlers for exception
     regions; they cannot usually be deleted.  */

  for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
    if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
      LABEL_NUSES (XEXP (insn, 0))++;

  /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
     notes and recompute LABEL_NUSES.  */
  if (mark_labels_only)
    goto end;

  if (! minimal)
    exception_optimize ();

  last_insn = delete_unreferenced_labels (f);

  if (noop_moves)
    delete_noop_moves (f);

  /* If we haven't yet gotten to reload and we have just run regscan,
     delete any insn that sets a register that isn't used elsewhere.
     This helps some of the optimizations below by leaving fewer insns
     to be jumped around.  */

  if (optimize && ! reload_completed && after_regscan)
    for (insn = f; insn; insn = next)
      {
	rtx set = single_set (insn);

	next = NEXT_INSN (insn);

	if (set && GET_CODE (SET_DEST (set)) == REG
	    && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
	    && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    /* We use regno_last_note_uid so as not to delete the setting
	       of a reg that's used in notes.  A subsequent optimization
	       might arrange to use that reg for real.  */
	    && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    && ! side_effects_p (SET_SRC (set))
	    && ! find_reg_note (insn, REG_RETVAL, 0)
	    /* An ADDRESSOF expression can turn into a use of the internal arg
	       pointer, so do not delete the initialization of the internal
	       arg pointer yet.  If it is truly dead, flow will delete the
	       initializing insn.  */
	    && SET_DEST (set) != current_function_internal_arg_pointer)
	  delete_insn (insn);
      }
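
  /* An illustrative sketch (comment only; the uids and register number
     are made up): an insn such as

	(insn 42 ... (set (reg 65) (const_int 0)))

     whose pseudo 65 is both first and last mentioned by insn 42 itself
     (REGNO_FIRST_UID == REGNO_LAST_NOTE_UID == 42) passes the tests
     above and is deleted outright.  */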

  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  changed = 1;
  old_max_reg = max_reg_num ();
  while (changed)
    {
      changed = 0;

      for (insn = f; insn; insn = next)
	{
	  rtx reallabelprev;
	  rtx temp, temp1, temp2 = NULL_RTX;
	  rtx temp4 ATTRIBUTE_UNUSED;
	  rtx nlabel;
	  int this_is_any_uncondjump;
	  int this_is_any_condjump;
	  int this_is_onlyjump;

	  next = NEXT_INSN (insn);

	  /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
	     jump.  Try to optimize by duplicating the loop exit test if so.
	     This is only safe immediately after regscan, because it uses
	     the values of regno_first_uid and regno_last_uid.  */
	  if (after_regscan && GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      && (temp1 = next_nonnote_insn (insn)) != 0
	      && any_uncondjump_p (temp1)
	      && onlyjump_p (temp1))
	    {
	      temp = PREV_INSN (insn);
	      if (duplicate_loop_exit_test (insn))
		{
		  changed = 1;
		  next = NEXT_INSN (temp);
		  continue;
		}
	    }

	  if (GET_CODE (insn) != JUMP_INSN)
	    continue;

	  this_is_any_condjump = any_condjump_p (insn);
	  this_is_any_uncondjump = any_uncondjump_p (insn);
	  this_is_onlyjump = onlyjump_p (insn);

	  /* Tension the labels in dispatch tables.  */

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 0);
	  if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 1);

	  /* See if this jump goes to another jump and redirect if so.  */
	  nlabel = follow_jumps (JUMP_LABEL (insn));
	  if (nlabel != JUMP_LABEL (insn))
	    changed |= redirect_jump (insn, nlabel, 1);

	  if (! optimize || minimal)
	    continue;

	  /* If a dispatch table always goes to the same place,
	     get rid of it and replace the insn that uses it.  */

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      int i;
	      rtx pat = PATTERN (insn);
	      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
	      int len = XVECLEN (pat, diff_vec_p);
	      rtx dispatch = prev_real_insn (insn);
	      rtx set;

	      for (i = 0; i < len; i++)
		if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
		    != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
		  break;

	      if (i == len
		  && dispatch != 0
		  && GET_CODE (dispatch) == JUMP_INSN
		  && JUMP_LABEL (dispatch) != 0
		  /* Don't mess with a casesi insn.
		     XXX according to the comment before computed_jump_p(),
		     all casesi insns should be a parallel of the jump
		     and a USE of a LABEL_REF.  */
		  && ! ((set = single_set (dispatch)) != NULL
			&& (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
		  && next_real_insn (JUMP_LABEL (dispatch)) == insn)
		{
		  redirect_tablejump (dispatch,
				      XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
		  changed = 1;
		}
	    }

	  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

	  /* Detect jump to following insn.  */
	  if (reallabelprev == insn
	      && (this_is_any_condjump || this_is_any_uncondjump)
	      && this_is_onlyjump)
	    {
	      next = next_real_insn (JUMP_LABEL (insn));
	      delete_jump (insn);

	      /* Remove the "inactive" but "real" insns (i.e. uses and
		 clobbers) in between here and there.  */
	      temp = insn;
	      while ((temp = next_real_insn (temp)) != next)
		delete_insn (temp);

	      changed = 1;
	      continue;
	    }
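
	  /* Illustrative RTL for the case just handled (comment only;
	     label number made up):

		(jump_insn (set (pc) (label_ref 12)))
		(code_label 12)

	     prev_active_insn of label 12 is the jump itself, so the jump
	     is deleted and control simply falls through.  */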

	  /* Detect a conditional jump going to the same place
	     as an immediately following unconditional jump.  */
	  else if (this_is_any_condjump && this_is_onlyjump
		   && (temp = next_active_insn (insn)) != 0
		   && simplejump_p (temp)
		   && (next_active_insn (JUMP_LABEL (insn))
		       == next_active_insn (JUMP_LABEL (temp))))
	    {
	      /* Don't mess up test coverage analysis.  */
	      temp2 = temp;
	      if (flag_test_coverage && !reload_completed)
		for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
		  if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
		    break;

	      if (temp2 == temp)
		{
		  delete_jump (insn);
		  changed = 1;
		  continue;
		}
	    }

	  /* Detect a conditional jump jumping over an unconditional jump.  */

	  else if (this_is_any_condjump
		   && reallabelprev != 0
		   && GET_CODE (reallabelprev) == JUMP_INSN
		   && prev_active_insn (reallabelprev) == insn
		   && no_labels_between_p (insn, reallabelprev)
		   && any_uncondjump_p (reallabelprev)
		   && onlyjump_p (reallabelprev))
	    {
	      /* When we invert the conditional jump, we will be
		 decrementing the usage count of its old label.
		 Make sure that we don't delete it now because that
		 might cause the following code to be deleted.  */
	      rtx prev_uses = prev_nonnote_insn (reallabelprev);
	      rtx prev_label = JUMP_LABEL (insn);

	      if (prev_label)
		++LABEL_NUSES (prev_label);

	      if (invert_jump (insn, JUMP_LABEL (reallabelprev), 1))
		{
		  /* It is very likely that if there are USE insns before
		     this jump, they hold REG_DEAD notes.  These REG_DEAD
		     notes are no longer valid due to this optimization,
		     and will cause the life analysis done by following
		     passes (notably delayed-branch scheduling) to think
		     that these registers are dead when they are not.

		     To prevent this trouble, we just remove the USE insns
		     from the insn chain.  */

		  while (prev_uses && GET_CODE (prev_uses) == INSN
			 && GET_CODE (PATTERN (prev_uses)) == USE)
		    {
		      rtx useless = prev_uses;
		      prev_uses = prev_nonnote_insn (prev_uses);
		      delete_insn (useless);
		    }

		  delete_insn (reallabelprev);
		  changed = 1;
		}

	      /* We can now safely delete the label if it is unreferenced
		 since the delete_insn above has deleted the BARRIER.  */
	      if (prev_label && --LABEL_NUSES (prev_label) == 0)
		delete_insn (prev_label);

	      next = NEXT_INSN (insn);
	    }
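
	  /* A sketch of the inversion just performed (comment only):

		if (a) goto L1;
		goto L2;
	     L1:

	     becomes

		if (! a) goto L2;
	     L1:

	     and the unconditional jump, and L1 itself if it is now
	     unreferenced, are deleted.  */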

	  /* If we have an unconditional jump preceded by a USE, try to put
	     the USE before the target and jump there.  This simplifies many
	     of the optimizations below since we don't have to worry about
	     dealing with these USE insns.  We only do this if the label
	     being branched to already has the identical USE or if code
	     never falls through to that label.  */

	  else if (this_is_any_uncondjump
		   && (temp = prev_nonnote_insn (insn)) != 0
		   && GET_CODE (temp) == INSN
		   && GET_CODE (PATTERN (temp)) == USE
		   && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
		   && (GET_CODE (temp1) == BARRIER
		       || (GET_CODE (temp1) == INSN
			   && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
		   /* Don't do this optimization if we have a loop containing
		      only the USE instruction, and the loop start label has
		      a usage count of 1.  This is because we will redo this
		      optimization every time through the outer loop, and jump
		      opt will never exit.  */
		   && ! ((temp2 = prev_nonnote_insn (temp)) != 0
			 && temp2 == JUMP_LABEL (insn)
			 && LABEL_NUSES (temp2) == 1))
	    {
	      if (GET_CODE (temp1) == BARRIER)
		{
		  emit_insn_after (PATTERN (temp), temp1);
		  temp1 = NEXT_INSN (temp1);
		}

	      delete_insn (temp);
	      redirect_jump (insn, get_label_before (temp1), 1);
	      reallabelprev = prev_real_insn (temp1);
	      changed = 1;
	      next = NEXT_INSN (insn);
	    }

#ifdef HAVE_trap
	  /* Detect a conditional jump jumping over an unconditional trap.  */
	  if (HAVE_trap
	      && this_is_any_condjump && this_is_onlyjump
	      && reallabelprev != 0
	      && GET_CODE (reallabelprev) == INSN
	      && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
	      && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
	      && prev_active_insn (reallabelprev) == insn
	      && no_labels_between_p (insn, reallabelprev)
	      && (temp2 = get_condition (insn, &temp4))
	      && can_reverse_comparison_p (temp2, insn))
	    {
	      rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
				       XEXP (temp2, 0), XEXP (temp2, 1),
				       TRAP_CODE (PATTERN (reallabelprev)));

	      if (new)
		{
		  emit_insn_before (new, temp4);
		  delete_insn (reallabelprev);
		  delete_jump (insn);
		  changed = 1;
		  continue;
		}
	    }
	  /* Detect a jump jumping to an unconditional trap.  */
	  else if (HAVE_trap && this_is_onlyjump
		   && (temp = next_active_insn (JUMP_LABEL (insn)))
		   && GET_CODE (temp) == INSN
		   && GET_CODE (PATTERN (temp)) == TRAP_IF
		   && (this_is_any_uncondjump
		       || (this_is_any_condjump
			   && (temp2 = get_condition (insn, &temp4)))))
	    {
	      rtx tc = TRAP_CONDITION (PATTERN (temp));

	      if (tc == const_true_rtx
		  || (! this_is_any_uncondjump && rtx_equal_p (temp2, tc)))
		{
		  rtx new;
		  /* Replace an unconditional jump to a trap with a trap.  */
		  if (this_is_any_uncondjump)
		    {
		      emit_barrier_after (emit_insn_before (gen_trap (), insn));
		      delete_jump (insn);
		      changed = 1;
		      continue;
		    }
		  new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
				       XEXP (temp2, 1),
				       TRAP_CODE (PATTERN (temp)));
		  if (new)
		    {
		      emit_insn_before (new, temp4);
		      delete_jump (insn);
		      changed = 1;
		      continue;
		    }
		}
	      /* If the trap condition and jump condition are mutually
		 exclusive, redirect the jump to the following insn.  */
	      else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
		       && this_is_any_condjump
		       && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
		       && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
		       && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
		       && redirect_jump (insn, get_label_after (temp), 1))
		{
		  changed = 1;
		  continue;
		}
	    }
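
	  /* An illustrative sketch of the first trap case above (comment
	     only), for a target providing conditional trap patterns:

		(jump_insn (set (pc) (if_then_else (ge (reg) (const_int 0))
						   (label_ref L) (pc))))
		(insn (trap_if (const_int 1) (const_int 0)))
	     L:

	     becomes a single (trap_if (lt (reg) (const_int 0)) ...) on the
	     reversed condition; the jump and the old trap both go away.  */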
#endif
	  else
	    {
	      /* Now that the jump has been tensioned,
		 try cross jumping: check for identical code
		 before the jump and before its target label.  */

	      /* First, cross jumping of conditional jumps:  */

	      if (cross_jump && condjump_p (insn))
		{
		  rtx newjpos, newlpos;
		  rtx x = prev_real_insn (JUMP_LABEL (insn));

		  /* A conditional jump may be crossjumped
		     only if the place it jumps to follows
		     an opposing jump that comes back here.  */

		  if (x != 0 && ! jump_back_p (x, insn))
		    /* We have no opposing jump;
		       cannot cross jump this insn.  */
		    x = 0;

		  newjpos = 0;
		  /* TARGET is nonzero if it is ok to cross jump
		     to code before TARGET.  If so, see if it matches.  */
		  if (x != 0)
		    find_cross_jump (insn, x, 2,
				     &newjpos, &newlpos);

		  if (newjpos != 0)
		    {
		      do_cross_jump (insn, newjpos, newlpos);
		      /* Make the old conditional jump
			 into an unconditional one.  */
		      SET_SRC (PATTERN (insn))
			= gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
		      INSN_CODE (insn) = -1;
		      emit_barrier_after (insn);
		      /* Add to jump_chain unless this is a new label
			 whose UID is too large.  */
		      if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
			{
			  jump_chain[INSN_UID (insn)]
			    = jump_chain[INSN_UID (JUMP_LABEL (insn))];
			  jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
			}
		      changed = 1;
		      next = insn;
		    }
		}

	      /* Cross jumping of unconditional jumps:
		 a few differences.  */

	      if (cross_jump && simplejump_p (insn))
		{
		  rtx newjpos, newlpos;
		  rtx target;

		  newjpos = 0;

		  /* TARGET is nonzero if it is ok to cross jump
		     to code before TARGET.  If so, see if it matches.  */
		  find_cross_jump (insn, JUMP_LABEL (insn), 1,
				   &newjpos, &newlpos);

		  /* If cannot cross jump to code before the label,
		     see if we can cross jump to another jump to
		     the same label.  */
		  /* Try each other jump to this label.  */
		  if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
		    for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
			 target != 0 && newjpos == 0;
			 target = jump_chain[INSN_UID (target)])
		      if (target != insn
			  && JUMP_LABEL (target) == JUMP_LABEL (insn)
			  /* Ignore TARGET if it's deleted.  */
			  && ! INSN_DELETED_P (target))
			find_cross_jump (insn, target, 2,
					 &newjpos, &newlpos);

		  if (newjpos != 0)
		    {
		      do_cross_jump (insn, newjpos, newlpos);
		      changed = 1;
		      next = insn;
		    }
		}

	      /* This code was dead in the previous jump.c!  */
	      if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
		{
		  /* Return insns all "jump to the same place"
		     so we can cross-jump between any two of them.  */

		  rtx newjpos, newlpos, target;

		  newjpos = 0;

		  /* If cannot cross jump to code before the label,
		     see if we can cross jump to another jump to
		     the same label.  */
		  /* Try each other jump to this label.  */
		  for (target = jump_chain[0];
		       target != 0 && newjpos == 0;
		       target = jump_chain[INSN_UID (target)])
		    if (target != insn
			&& ! INSN_DELETED_P (target)
			&& GET_CODE (PATTERN (target)) == RETURN)
		      find_cross_jump (insn, target, 2,
				       &newjpos, &newlpos);

		  if (newjpos != 0)
		    {
		      do_cross_jump (insn, newjpos, newlpos);
		      changed = 1;
		      next = insn;
		    }
		}
	    }
	}

      first = 0;
    }

  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  {
    rtx last_note = 0;

    for (insn = f; insn; insn = NEXT_INSN (insn))
      if (GET_CODE (insn) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
	    /* Any previous line note was for the prologue; gdb wants a new
	       note after the prologue even if it is for the same line.  */
	    last_note = NULL_RTX;
	  else if (NOTE_LINE_NUMBER (insn) >= 0)
	    {
	      /* Delete this note if it is identical to previous note.  */
	      if (last_note
		  && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
		  && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
		{
		  delete_insn (insn);
		  continue;
		}

	      last_note = insn;
	    }
	}
  }

end:
  /* Clean up.  */
  free (jump_chain);
  jump_chain = 0;
}
\f
/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  Returns the
   largest INSN_UID found.  */
static int
init_label_info (f)
     rtx f;
{
  int largest_uid = 0;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
      else if (GET_CODE (insn) == JUMP_INSN)
	JUMP_LABEL (insn) = 0;
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note, next;

	  for (note = REG_NOTES (insn); note; note = next)
	    {
	      next = XEXP (note, 1);
	      if (REG_NOTE_KIND (note) == REG_LABEL
		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
		remove_note (insn, note);
	    }
	}
      if (INSN_UID (insn) > largest_uid)
	largest_uid = INSN_UID (insn);
    }

  return largest_uid;
}

/* Delete insns following barriers, up to next label.

   Also delete no-op jumps created by gcse.  */

static void
delete_barrier_successors (f)
     rtx f;
{
  rtx insn;
  rtx set;

  for (insn = f; insn;)
    {
      if (GET_CODE (insn) == BARRIER)
	{
	  insn = NEXT_INSN (insn);

	  never_reached_warning (insn);

	  while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
	    {
	      if (GET_CODE (insn) == NOTE
		  && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
		insn = NEXT_INSN (insn);
	      else
		insn = delete_insn (insn);
	    }
	  /* INSN is now the code_label.  */
	}

      /* Also remove (set (pc) (pc)) insns which can be created by
	 gcse.  We eliminate such insns now to avoid having them
	 cause problems later.  */
      else if (GET_CODE (insn) == JUMP_INSN
	       && (set = pc_set (insn)) != NULL
	       && SET_SRC (set) == pc_rtx
	       && SET_DEST (set) == pc_rtx
	       && onlyjump_p (insn))
	insn = delete_insn (insn);

      else
	insn = NEXT_INSN (insn);
    }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.

   For each label, make a chain (using `jump_chain')
   of all the *unconditional* jumps that jump to it;
   also make a chain of all returns.

   CROSS_JUMP indicates whether we are doing cross jumping
   and if we are whether we will be paying attention to
   death notes or not.  */

static void
mark_all_labels (f, cross_jump)
     rtx f;
     int cross_jump;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	if (GET_CODE (insn) == CALL_INSN
	    && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	  {
	    mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
	    mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
	    mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);
	    continue;
	  }

	mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
	if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
	  {
	    /* When we know the LABEL_REF contained in a REG used in
	       an indirect jump, we'll have a REG_LABEL note so that
	       flow can tell where it's going.  */
	    if (JUMP_LABEL (insn) == 0)
	      {
		rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
		if (label_note)
		  {
		    /* Put a LABEL_REF around the REG_LABEL note, so
		       that we can canonicalize it.  */
		    rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
						       XEXP (label_note, 0));

		    mark_jump_label (label_ref, insn, cross_jump, 0);
		    XEXP (label_note, 0) = XEXP (label_ref, 0);
		    JUMP_LABEL (insn) = XEXP (label_note, 0);
		  }
	      }
	    if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
	      {
		jump_chain[INSN_UID (insn)]
		  = jump_chain[INSN_UID (JUMP_LABEL (insn))];
		jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
	      }
	    if (GET_CODE (PATTERN (insn)) == RETURN)
	      {
		jump_chain[INSN_UID (insn)] = jump_chain[0];
		jump_chain[0] = insn;
	      }
	  }
      }
}

/* Delete all labels that are no longer referenced.
   Also find and return the last insn.  */

static rtx
delete_unreferenced_labels (f)
     rtx f;
{
  rtx final = NULL_RTX;
  rtx insn;

  for (insn = f; insn;)
    {
      if (GET_CODE (insn) == CODE_LABEL
	  && LABEL_NUSES (insn) == 0
	  && LABEL_ALTERNATE_NAME (insn) == NULL)
	insn = delete_insn (insn);
      else
	{
	  final = insn;
	  insn = NEXT_INSN (insn);
	}
    }

  return final;
}

/* Delete various simple forms of moves which have no necessary
   side effect.  */

static void
delete_noop_moves (f)
     rtx f;
{
  rtx insn, next;

  for (insn = f; insn;)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN)
	{
	  register rtx body = PATTERN (insn);

	  /* Detect and delete no-op move instructions
	     resulting from not allocating a parameter in a register.  */

	  if (GET_CODE (body) == SET
	      && (SET_DEST (body) == SET_SRC (body)
		  || (GET_CODE (SET_DEST (body)) == MEM
		      && GET_CODE (SET_SRC (body)) == MEM
		      && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
	      && ! (GET_CODE (SET_DEST (body)) == MEM
		    && MEM_VOLATILE_P (SET_DEST (body)))
	      && ! (GET_CODE (SET_SRC (body)) == MEM
		    && MEM_VOLATILE_P (SET_SRC (body))))
	    delete_computation (insn);

	  /* Detect and ignore no-op move instructions
	     resulting from smart or fortuitous register allocation.  */

	  else if (GET_CODE (body) == SET)
	    {
	      int sreg = true_regnum (SET_SRC (body));
	      int dreg = true_regnum (SET_DEST (body));

	      if (sreg == dreg && sreg >= 0)
		delete_insn (insn);
	      else if (sreg >= 0 && dreg >= 0)
		{
		  rtx trial;
		  rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
					    sreg, NULL_PTR, dreg,
					    GET_MODE (SET_SRC (body)));

		  if (tem != 0
		      && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
		    {
		      /* DREG may have been the target of a REG_DEAD note in
			 the insn which makes INSN redundant.  If so, reorg
			 would still think it is dead.  So search for such a
			 note and delete it if we find it.  */
		      if (! find_regno_note (insn, REG_UNUSED, dreg))
			for (trial = prev_nonnote_insn (insn);
			     trial && GET_CODE (trial) != CODE_LABEL;
			     trial = prev_nonnote_insn (trial))
			  if (find_regno_note (trial, REG_DEAD, dreg))
			    {
			      remove_death (dreg, trial);
			      break;
			    }

		      /* Deleting insn could lose a death-note for SREG.  */
		      if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
			{
			  /* Change this into a USE so that we won't emit
			     code for it, but still can keep the note.  */
			  PATTERN (insn)
			    = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
			  INSN_CODE (insn) = -1;
			  /* Remove all reg notes but the REG_DEAD one.  */
			  REG_NOTES (insn) = trial;
			  XEXP (trial, 1) = NULL_RTX;
			}
		      else
			delete_insn (insn);
		    }
		}
	      else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
		       && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
					  NULL_PTR, 0,
					  GET_MODE (SET_DEST (body))))
		{
		  /* This handles the case where we have two consecutive
		     assignments of the same constant to pseudos that didn't
		     get a hard reg.  Each SET from the constant will be
		     converted into a SET of the spill register and an
		     output reload will be made following it.  This produces
		     two loads of the same constant into the same spill
		     register.  */

		  rtx in_insn = insn;

		  /* Look back for a death note for the first reg.
		     If there is one, it is no longer accurate.  */
		  while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
		    {
		      if ((GET_CODE (in_insn) == INSN
			   || GET_CODE (in_insn) == JUMP_INSN)
			  && find_regno_note (in_insn, REG_DEAD, dreg))
			{
			  remove_death (dreg, in_insn);
			  break;
			}
		      in_insn = PREV_INSN (in_insn);
		    }

		  /* Delete the second load of the value.  */
		  delete_insn (insn);
		}
	    }
	  else if (GET_CODE (body) == PARALLEL)
	    {
	      /* If each part is a set between two identical registers or
		 a USE or CLOBBER, delete the insn.  */
	      int i, sreg, dreg;
	      rtx tem;

	      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
		{
		  tem = XVECEXP (body, 0, i);
		  if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
		    continue;

		  if (GET_CODE (tem) != SET
		      || (sreg = true_regnum (SET_SRC (tem))) < 0
		      || (dreg = true_regnum (SET_DEST (tem))) < 0
		      || dreg != sreg)
		    break;
		}

	      if (i < 0)
		delete_insn (insn);
	    }
	  /* Also delete insns to store bit fields if they are no-ops.  */
	  /* Not worth the hair to detect this in the big-endian case.  */
	  else if (! BYTES_BIG_ENDIAN
		   && GET_CODE (body) == SET
		   && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
		   && XEXP (SET_DEST (body), 2) == const0_rtx
		   && XEXP (SET_DEST (body), 0) == SET_SRC (body)
		   && ! (GET_CODE (SET_SRC (body)) == MEM
			 && MEM_VOLATILE_P (SET_SRC (body))))
	    delete_insn (insn);
	}
      insn = next;
    }
}
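
/* An illustrative sketch (comment only): after register allocation an
   insn like (set (reg:SI 3) (reg:SI 3)) has sreg == dreg and is deleted
   immediately, while two consecutive loads of the same constant into the
   same spill register are caught by the find_equiv_reg case above.  */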

/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
   jump.  Assume that this unconditional jump is to the exit test code.  If
   the code is sufficiently simple, make a copy of it before INSN,
   followed by a jump to the exit of the loop.  Then delete the unconditional
   jump after INSN.

   Return 1 if we made the change, else 0.

   This is only safe immediately after a regscan pass because it uses the
   values of regno_first_uid and regno_last_uid.  */

static int
duplicate_loop_exit_test (loop_start)
     rtx loop_start;
{
  rtx insn, set, reg, p, link;
  rtx copy = 0, first_copy = 0;
  int num_insns = 0;
  rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
  rtx lastexit;
  int max_reg = max_reg_num ();
  rtx *reg_map = 0;

  /* Scan the exit code.  We do not perform this optimization if any insn:

	 is a CALL_INSN
	 is a CODE_LABEL
	 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
	 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
	 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
	      is not valid.

     We also do not do this if we find an insn with ASM_OPERANDS.  While
     this restriction should not be necessary, copying an insn with
     ASM_OPERANDS can confuse asm_noperands in some cases.

     Also, don't do this if the exit code is more than 20 insns.  */

  for (insn = exitcode;
       insn
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	case CALL_INSN:
	  return 0;
	case NOTE:
	  /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
	     a jump immediately after the loop start that branches outside
	     the loop but within an outer loop, near the exit test.
	     If we copied this exit test and created a phony
	     NOTE_INSN_LOOP_VTOP, this could make instructions immediately
	     before the exit test look like they could be safely moved out of
	     the loop even though they may in fact never be executed.
	     This can be avoided by checking here for NOTE_INSN_LOOP_CONT.  */

	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
	    return 0;

	  if (optimize < 2
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
	    /* If we were to duplicate this code, we would not move
	       the BLOCK notes, and so debugging the moved code would
	       be difficult.  Thus, we only move the code with -O2 or
	       higher.  */
	    return 0;

	  break;
	case JUMP_INSN:
	case INSN:
	  /* The code below would grossly mishandle REG_WAS_0 notes,
	     so get rid of them here.  */
	  while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
	    remove_note (insn, p);
	  if (++num_insns > 20
	      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
	      || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    return 0;
	  break;
	default:
	  break;
	}
    }

  /* Unless INSN is zero, we can do the optimization.  */
  if (insn == 0)
    return 0;

  lastexit = insn;

  /* See if any insn sets a register only used in the loop exit code and
     not a user variable.  If so, replace it with a new register.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
	&& (set = single_set (insn)) != 0
	&& ((reg = SET_DEST (set), GET_CODE (reg) == REG)
	    || (GET_CODE (reg) == SUBREG
		&& (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
	&& REGNO (reg) >= FIRST_PSEUDO_REGISTER
	&& REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
      {
	for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
	  if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
	    break;

	if (p != lastexit)
	  {
	    /* We can do the replacement.  Allocate reg_map if this is the
	       first replacement we found.  */
	    if (reg_map == 0)
	      reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));

	    REG_LOOP_TEST_P (reg) = 1;

	    reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
	  }
      }

  /* Now copy each insn.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case BARRIER:
	  copy = emit_barrier_before (loop_start);
	  break;
	case NOTE:
	  /* Only copy line-number notes.  */
	  if (NOTE_LINE_NUMBER (insn) >= 0)
	    {
	      copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
	      NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	    }
	  break;

	case INSN:
	  copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
	  if (reg_map)
	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);

	  mark_jump_label (PATTERN (copy), copy, 0, 0);

	  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
	     make them.  */
	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	    if (REG_NOTE_KIND (link) != REG_LABEL)
	      {
		if (GET_CODE (link) == EXPR_LIST)
		  REG_NOTES (copy)
		    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
						      XEXP (link, 0),
						      REG_NOTES (copy)));
		else
		  REG_NOTES (copy)
		    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
						      XEXP (link, 0),
						      REG_NOTES (copy)));
	      }

	  if (reg_map && REG_NOTES (copy))
	    replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	  break;

	case JUMP_INSN:
	  copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
					loop_start);
	  if (reg_map)
	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);
	  mark_jump_label (PATTERN (copy), copy, 0, 0);
	  if (REG_NOTES (insn))
	    {
	      REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
	      if (reg_map)
		replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	    }

	  /* If this is a simple jump, add it to the jump chain.  */

	  if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
	      && simplejump_p (copy))
	    {
	      jump_chain[INSN_UID (copy)]
		= jump_chain[INSN_UID (JUMP_LABEL (copy))];
	      jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
	    }
	  break;

	default:
	  abort ();
	}

      /* Record the first insn we copied.  We need it so that we can
	 scan the copied insns for new pseudo registers.  */
      if (! first_copy)
	first_copy = copy;
    }

  /* Now clean up by emitting a jump to the end label and deleting the jump
     at the start of the loop.  */
  if (! copy || GET_CODE (copy) != BARRIER)
    {
      copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
				    loop_start);

      /* Record the first insn we copied.  We need it so that we can
	 scan the copied insns for new pseudo registers.  This may not
	 be strictly necessary since we should have copied at least one
	 insn above.  But I am going to be safe.  */
      if (! first_copy)
	first_copy = copy;

      mark_jump_label (PATTERN (copy), copy, 0, 0);
      if (INSN_UID (copy) < max_jump_chain
	  && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
	{
	  jump_chain[INSN_UID (copy)]
	    = jump_chain[INSN_UID (JUMP_LABEL (copy))];
	  jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
	}
      emit_barrier_before (loop_start);
    }

  /* Now scan from the first insn we copied to the last insn we copied
     (copy) for new pseudo registers.  Do this after the code to jump to
     the end label since that might create a new pseudo too.  */
  reg_scan_update (first_copy, copy, max_reg);

  /* Mark the exit code as the virtual top of the converted loop.  */
  emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);

  delete_insn (next_nonnote_insn (loop_start));

  /* Clean up.  */
  if (reg_map)
    free (reg_map);

  return 1;
}
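
/* A sketch of the overall effect (comment only): a loop emitted as

	NOTE_INSN_LOOP_BEG
	goto TEST;
   TOP: body
   TEST: exit-test insns; if (cond) goto TOP;
	NOTE_INSN_LOOP_END

   gets a copy of the exit-test insns, plus a jump past the loop for the
   failing case, emitted just before NOTE_INSN_LOOP_BEG; the original
   unconditional jump is then deleted so the body is entered by falling
   through, and the exit code at the bottom is marked as the loop's
   virtual top with NOTE_INSN_LOOP_VTOP.  */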
\f
/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
   eh-beg, eh-end notes between START and END out before START.  Assume that
   END is not such a note.  START may be such a note.  Returns the value
   of the new starting insn, which may be different if the original start
   was such a note.  */

rtx
squeeze_notes (start, end)
     rtx start, end;
{
  rtx insn;
  rtx next;

  for (insn = start; insn != end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	{
	  if (insn == start)
	    start = next;
	  else
	    {
	      rtx prev = PREV_INSN (insn);
	      PREV_INSN (insn) = PREV_INSN (start);
	      NEXT_INSN (insn) = start;
	      NEXT_INSN (PREV_INSN (insn)) = insn;
	      PREV_INSN (NEXT_INSN (insn)) = insn;
	      NEXT_INSN (prev) = next;
	      PREV_INSN (next) = prev;
	    }
	}
    }

  return start;
}
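
/* An illustrative example (comment only): with START = insn A (not a
   note) and END = insn C,

	A; NOTE_INSN_BLOCK_END; B; NOTE_INSN_LOOP_END; C

   becomes

	NOTE_INSN_BLOCK_END; NOTE_INSN_LOOP_END; A; B; C

   and A, still the first insn of the region, is returned.  */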
\f
/* Compare the instructions before insn E1 with those before E2
   to find an opportunity for cross jumping.
   (This means detecting identical sequences of insns followed by
   jumps to the same place, or followed by a label and a jump
   to that label, and replacing one with a jump to the other.)

   Assume E1 is a jump that jumps to label E2
   (that is not always true but it might as well be).
   Find the longest possible equivalent sequences
   and store the first insns of those sequences into *F1 and *F2.
   Store zero there if no equivalent preceding instructions are found.

   We give up if we find a label in stream 1.
   Actually we could transfer that label into stream 2.  */

static void
find_cross_jump (e1, e2, minimum, f1, f2)
     rtx e1, e2;
     int minimum;
     rtx *f1, *f2;
{
  register rtx i1 = e1, i2 = e2;
  register rtx p1, p2;
  int lose = 0;

  rtx last1 = 0, last2 = 0;
  rtx afterlast1 = 0, afterlast2 = 0;

  *f1 = 0;
  *f2 = 0;

  while (1)
    {
      i1 = prev_nonnote_insn (i1);

      i2 = PREV_INSN (i2);
      while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
	i2 = PREV_INSN (i2);

      if (i1 == 0)
	break;

      /* Don't allow the range of insns preceding E1 or E2
	 to include the other (E2 or E1).  */
      if (i2 == e1 || i1 == e2)
	break;

      /* If we will get to this code by jumping, those jumps will be
	 tensioned to go directly to the new label (before I2),
	 so this cross-jumping won't cost extra.  So reduce the minimum.  */
      if (GET_CODE (i1) == CODE_LABEL)
	{
	  --minimum;
	  break;
	}

      if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
	break;

      /* Avoid moving insns across EH regions if either of the insns
	 can throw.  */
      if (flag_exceptions
	  && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
	  && !in_same_eh_region (i1, i2))
	break;

      p1 = PATTERN (i1);
      p2 = PATTERN (i2);

      /* If this is a CALL_INSN, compare register usage information.
	 If we don't check this on stack register machines, the two
	 CALL_INSNs might be merged leaving reg-stack.c with mismatching
	 numbers of stack registers in the same basic block.
	 If we don't check this on machines with delay slots, a delay slot may
	 be filled that clobbers a parameter expected by the subroutine.

	 ??? We take the simple route for now and assume that if they're
	 equal, they were constructed identically.  */

      if (GET_CODE (i1) == CALL_INSN
	  && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
			    CALL_INSN_FUNCTION_USAGE (i2)))
	lose = 1;

#ifdef STACK_REGS
      /* If cross_jump_death_matters is not 0, the insn's mode
	 indicates whether or not the insn contains any stack-like
	 regs.  */

      if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
	{
	  /* If register stack conversion has already been done, then
	     death notes must also be compared before it is certain that
	     the two instruction streams match.  */

	  rtx note;
	  HARD_REG_SET i1_regset, i2_regset;

	  CLEAR_HARD_REG_SET (i1_regset);
	  CLEAR_HARD_REG_SET (i2_regset);

	  for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

	  for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

	  GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);

	  lose = 1;

	done:
	  ;
	}
#endif

      /* Don't allow old-style asm or volatile extended asms to be accepted
	 for cross jumping purposes.  It is conceptually correct to allow
	 them, since cross-jumping preserves the dynamic instruction order
	 even though it is changing the static instruction order.  However,
	 if an asm is being used to emit an assembler pseudo-op, such as
	 the MIPS `.set reorder' pseudo-op, then the static instruction order
	 matters and it must be preserved.  */
      if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
	  || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
	  || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
	lose = 1;

      if (lose || GET_CODE (p1) != GET_CODE (p2)
	  || ! rtx_renumbered_equal_p (p1, p2))
	{
	  /* The following code helps take care of G++ cleanups.  */
	  rtx equiv1;
	  rtx equiv2;

	  if (!lose && GET_CODE (p1) == GET_CODE (p2)
	      && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
		  || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
	      && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
		  || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
	      /* If the equivalences are not to a constant, they may
		 reference pseudos that no longer exist, so we can't
		 use them.  */
	      && CONSTANT_P (XEXP (equiv1, 0))
	      && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
	    {
	      rtx s1 = single_set (i1);
	      rtx s2 = single_set (i2);
	      if (s1 != 0 && s2 != 0
		  && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
		{
		  validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
		  validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
		  if (! rtx_renumbered_equal_p (p1, p2))
		    cancel_changes (0);
		  else if (apply_change_group ())
		    goto win;
		}
	    }

	  /* Insns fail to match; cross jumping is limited to the following
	     insns.  */

#ifdef HAVE_cc0
	  /* Don't allow the insn after a compare to be shared by
	     cross-jumping unless the compare is also shared.
	     Here, if either of these non-matching insns is a compare,
	     exclude the following insn from possible cross-jumping.  */
	  if (sets_cc0_p (p1) || sets_cc0_p (p2))
	    last1 = afterlast1, last2 = afterlast2, ++minimum;
#endif

	  /* If cross-jumping here will feed a jump-around-jump
	     optimization, this jump won't cost extra, so reduce
	     the minimum.  */
	  if (GET_CODE (i1) == JUMP_INSN
	      && JUMP_LABEL (i1)
	      && prev_real_insn (JUMP_LABEL (i1)) == e1)
	    --minimum;
	  break;
	}

    win:
      if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
	{
	  /* Ok, this insn is potentially includable in a cross-jump here.  */
	  afterlast1 = last1, afterlast2 = last2;
	  last1 = i1, last2 = i2, --minimum;
	}
    }

  if (minimum <= 0 && last1 != 0 && last1 != e1)
    *f1 = last1, *f2 = last2;
}

static void
do_cross_jump (insn, newjpos, newlpos)
     rtx insn, newjpos, newlpos;
{
  /* Find an existing label at this point
     or make a new one if there is none.  */
  register rtx label = get_label_before (newlpos);

  /* Make the same jump insn jump to the new point.  */
  if (GET_CODE (PATTERN (insn)) == RETURN)
    {
      /* Remove from jump chain of returns.  */
      delete_from_jump_chain (insn);
      /* Change the insn.  */
      PATTERN (insn) = gen_jump (label);
      INSN_CODE (insn) = -1;
      JUMP_LABEL (insn) = label;
      LABEL_NUSES (label)++;
      /* Add to the jump chain of the new label.  */
      if (INSN_UID (label) < max_jump_chain
	  && INSN_UID (insn) < max_jump_chain)
	{
	  jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
	  jump_chain[INSN_UID (label)] = insn;
	}
    }
  else
    redirect_jump (insn, label, 1);

  /* Delete the matching insns before the jump.  Also, remove any REG_EQUAL
     or REG_EQUIV note in the NEWLPOS stream that isn't also present in
     the NEWJPOS stream.  */

  while (newjpos != insn)
    {
      rtx lnote;

      for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
	if ((REG_NOTE_KIND (lnote) == REG_EQUAL
	     || REG_NOTE_KIND (lnote) == REG_EQUIV)
	    && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
	    && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
	  remove_note (newlpos, lnote);

      delete_insn (newjpos);
      newjpos = next_real_insn (newjpos);
      newlpos = next_real_insn (newlpos);
    }
}
\f
/* Return the label before INSN, or put a new label there.  */

rtx
get_label_before (insn)
     rtx insn;
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      rtx prev = PREV_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
    }
  return label;
}

/* Return the label after INSN, or put a new label there.  */

rtx
get_label_after (insn)
     rtx insn;
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = next_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      label = gen_label_rtx ();
      emit_label_after (label, insn);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
\f
/* Return 1 if INSN is a jump that jumps to right after TARGET
   only on the condition that TARGET itself would drop through.
   Assumes that TARGET is a conditional jump.  */

static int
jump_back_p (insn, target)
     rtx insn, target;
{
  rtx cinsn, ctarget;
  enum rtx_code codei, codet;
  rtx set, tset;

  if (! any_condjump_p (insn)
      || any_uncondjump_p (target)
      || target != prev_real_insn (JUMP_LABEL (insn)))
    return 0;
  set = pc_set (insn);
  tset = pc_set (target);

  cinsn = XEXP (SET_SRC (set), 0);
  ctarget = XEXP (SET_SRC (tset), 0);

  codei = GET_CODE (cinsn);
  codet = GET_CODE (ctarget);

  if (XEXP (SET_SRC (set), 1) == pc_rtx)
    {
      if (! can_reverse_comparison_p (cinsn, insn))
	return 0;
      codei = reverse_condition (codei);
    }

  if (XEXP (SET_SRC (tset), 2) == pc_rtx)
    {
      if (! can_reverse_comparison_p (ctarget, target))
	return 0;
      codet = reverse_condition (codet);
    }

  return (codei == codet
	  && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
	  && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
}
1701 \f
1702 /* Given a comparison (CODE ARG0 ARG1), inside a insn, INSN, return an code
1703 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
1704 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
1705 know whether it's source is floating point or integer comparison. Machine
1706 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
1707 to help this function avoid overhead in these cases. */
1708 enum rtx_code
1709 reversed_comparison_code_parts (code, arg0, arg1, insn)
1710 rtx insn, arg0, arg1;
1711 enum rtx_code code;
1712 {
1713 enum machine_mode mode;
1714
1715 /* If this is not actually a comparison, we can't reverse it. */
1716 if (GET_RTX_CLASS (code) != '<')
1717 return UNKNOWN;
1718
1719 mode = GET_MODE (arg0);
1720 if (mode == VOIDmode)
1721 mode = GET_MODE (arg1);
1722
1723 /* First see if machine description supply us way to reverse the comparison.
1724 Give it priority over everything else to allow machine description to do
1725 tricks. */
1726 #ifdef REVERSIBLE_CC_MODE
1727 if (GET_MODE_CLASS (mode) == MODE_CC
1728 && REVERSIBLE_CC_MODE (mode))
1729 {
1730 #ifdef REVERSE_CONDITION
1731 return REVERSE_CONDITION (code, mode);
1732 #endif
1733 return reverse_condition (code);
1734 }
1735 #endif
1736
1737 /* Try few special cases based on the comparison code. */
1738 switch (code)
1739 {
1740 case GEU:
1741 case GTU:
1742 case LEU:
1743 case LTU:
1744 case NE:
1745 case EQ:
1746 /* It is always safe to reverse EQ and NE, even for the floating
1747 point. Similary the unsigned comparisons are never used for
1748 floating point so we can reverse them in the default way. */
1749 return reverse_condition (code);
1750 case ORDERED:
1751 case UNORDERED:
1752 case LTGT:
1753 case UNEQ:
1754 /* In case we already see unordered comparison, we can be sure to
1755 be dealing with floating point so we don't need any more tests. */
1756 return reverse_condition_maybe_unordered (code);
1757 case UNLT:
1758 case UNLE:
1759 case UNGT:
1760 case UNGE:
1761 /* We don't have safe way to reverse these yet. */
1762 return UNKNOWN;
1763 default:
1764 break;
1765 }
1766
1767 /* In case we give up IEEE compatibility, all comparisons are reversible. */
1768 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
1769 || flag_fast_math)
1770 return reverse_condition (code);
1771
1772 if (GET_MODE_CLASS (mode) == MODE_CC
1773 #ifdef HAVE_cc0
1774 || arg0 == cc0_rtx
1775 #endif
1776 )
1777 {
1778 rtx prev;
1779 /* Try to search for the comparison to determine the real mode.
1780 This code is expensive, but with sane machine description it
1781 will be never used, since REVERSIBLE_CC_MODE will return true
1782 in all cases. */
1783 if (! insn)
1784 return UNKNOWN;
1785
1786 for (prev = prev_nonnote_insn (insn);
1787 prev != 0 && GET_CODE (prev) != CODE_LABEL;
1788 prev = prev_nonnote_insn (prev))
1789 {
1790 rtx set = set_of (arg0, prev);
1791 if (set && GET_CODE (set) == SET
1792 && rtx_equal_p (SET_DEST (set), arg0))
1793 {
1794 rtx src = SET_SRC (set);
1795
1796 if (GET_CODE (src) == COMPARE)
1797 {
1798 rtx comparison = src;
1799 arg0 = XEXP (src, 0);
1800 mode = GET_MODE (arg0);
1801 if (mode == VOIDmode)
1802 mode = GET_MODE (XEXP (comparison, 1));
1803 break;
1804 }
1805 /* We can get past reg-reg moves. This may be useful for the
1806 model of i387 comparisons that first move flag registers around. */
1807 if (REG_P (src))
1808 {
1809 arg0 = src;
1810 continue;
1811 }
1812 }
1813 /* If the register is clobbered in some way we cannot
1814 understand, give up. */
1815 if (set)
1816 return UNKNOWN;
1817 }
1818 }
1819
1820 /* An integer condition. */
1821 if (GET_CODE (arg0) == CONST_INT
1822 || (GET_MODE (arg0) != VOIDmode
1823 && GET_MODE_CLASS (mode) != MODE_CC
1824 && ! FLOAT_MODE_P (mode)))
1825 return reverse_condition (code);
1826
1827 return UNKNOWN;
1828 }
1829
1830 /* A wrapper around the previous function that takes COMPARISON as an
1831 rtx expression. This simplifies many callers. */
1832 enum rtx_code
1833 reversed_comparison_code (comparison, insn)
1834 rtx comparison, insn;
1835 {
1836 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
1837 return UNKNOWN;
1838 return reversed_comparison_code_parts (GET_CODE (comparison),
1839 XEXP (comparison, 0),
1840 XEXP (comparison, 1), insn);
1841 }
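
/* An illustrative sketch, not part of jump.c: a caller that wants to
   rewrite a branch condition should ask reversed_comparison_code
   whether the reversal is safe, and only then build the reversed rtx.
   The helper name is hypothetical.  */

static rtx
try_reverse_comparison (comparison, insn)
     rtx comparison, insn;
{
  enum rtx_code reversed = reversed_comparison_code (comparison, insn);

  if (reversed == UNKNOWN)
    /* E.g. an IEEE float ordering test where NaNs are possible.  */
    return NULL_RTX;

  /* Build the reversed comparison with the same mode and operands.  */
  return gen_rtx_fmt_ee (reversed, GET_MODE (comparison),
			 XEXP (comparison, 0), XEXP (comparison, 1));
}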
1842 \f
1843 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
1844 return non-zero if it is safe to reverse this comparison. It is if our
1845 floating-point is not IEEE, if this is an NE or EQ comparison, or if
1846 this is known to be an integer comparison.
1847
1848 Use of this function is deprecated; you should use
1849 reversed_comparison_code instead. */
1851
1852 int
1853 can_reverse_comparison_p (comparison, insn)
1854 rtx comparison;
1855 rtx insn;
1856 {
1857 enum rtx_code code;
1858
1859 /* If this is not actually a comparison, we can't reverse it. */
1860 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
1861 return 0;
1862
1863 code = reversed_comparison_code (comparison, insn);
1864 if (code == UNKNOWN)
1865 return 0;
1866
1867 /* Callers follow can_reverse_comparison_p with reverse_condition,
1868 so check that this will give the proper result. */
1869 return (code == reverse_condition (GET_CODE (comparison)));
1870 }
1871
1872 /* Given an rtx-code for a comparison, return the code for the negated
1873 comparison. If no such code exists, return UNKNOWN.
1874
1875 WATCH OUT! reverse_condition is not safe to use on a jump that might
1876 be acting on the results of an IEEE floating point comparison, because
1877 of the special treatment of non-signaling nans in comparisons.
1878 Use reversed_comparison_code instead. */
1879
1880 enum rtx_code
1881 reverse_condition (code)
1882 enum rtx_code code;
1883 {
1884 switch (code)
1885 {
1886 case EQ:
1887 return NE;
1888 case NE:
1889 return EQ;
1890 case GT:
1891 return LE;
1892 case GE:
1893 return LT;
1894 case LT:
1895 return GE;
1896 case LE:
1897 return GT;
1898 case GTU:
1899 return LEU;
1900 case GEU:
1901 return LTU;
1902 case LTU:
1903 return GEU;
1904 case LEU:
1905 return GTU;
1906 case UNORDERED:
1907 return ORDERED;
1908 case ORDERED:
1909 return UNORDERED;
1910
1911 case UNLT:
1912 case UNLE:
1913 case UNGT:
1914 case UNGE:
1915 case UNEQ:
1916 case LTGT:
1917 return UNKNOWN;
1918
1919 default:
1920 abort ();
1921 }
1922 }
1923
1924 /* Similar, but we're allowed to generate unordered comparisons, which
1925 makes it safe for IEEE floating-point. Of course, we have to recognize
1926 that the target will support them too... */
1927
1928 enum rtx_code
1929 reverse_condition_maybe_unordered (code)
1930 enum rtx_code code;
1931 {
1932 /* Non-IEEE formats don't have unordered conditions. */
1933 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
1934 return reverse_condition (code);
1935
1936 switch (code)
1937 {
1938 case EQ:
1939 return NE;
1940 case NE:
1941 return EQ;
1942 case GT:
1943 return UNLE;
1944 case GE:
1945 return UNLT;
1946 case LT:
1947 return UNGE;
1948 case LE:
1949 return UNGT;
1950 case LTGT:
1951 return UNEQ;
1952 case UNORDERED:
1953 return ORDERED;
1954 case ORDERED:
1955 return UNORDERED;
1956 case UNLT:
1957 return GE;
1958 case UNLE:
1959 return GT;
1960 case UNGT:
1961 return LE;
1962 case UNGE:
1963 return LT;
1964 case UNEQ:
1965 return LTGT;
1966
1967 default:
1968 abort ();
1969 }
1970 }
1971
1972 /* Similar, but return the code when two operands of a comparison are swapped.
1973 This IS safe for IEEE floating-point. */
1974
1975 enum rtx_code
1976 swap_condition (code)
1977 enum rtx_code code;
1978 {
1979 switch (code)
1980 {
1981 case EQ:
1982 case NE:
1983 case UNORDERED:
1984 case ORDERED:
1985 case UNEQ:
1986 case LTGT:
1987 return code;
1988
1989 case GT:
1990 return LT;
1991 case GE:
1992 return LE;
1993 case LT:
1994 return GT;
1995 case LE:
1996 return GE;
1997 case GTU:
1998 return LTU;
1999 case GEU:
2000 return LEU;
2001 case LTU:
2002 return GTU;
2003 case LEU:
2004 return GEU;
2005 case UNLT:
2006 return UNGT;
2007 case UNLE:
2008 return UNGE;
2009 case UNGT:
2010 return UNLT;
2011 case UNGE:
2012 return UNLE;
2013
2014 default:
2015 abort ();
2016 }
2017 }
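
/* An illustrative sketch, not part of jump.c: swap_condition exchanges
   operands rather than negating the test, so it is safe even for IEEE
   floating point.  A typical use is canonicalizing a comparison so
   that a constant ends up as the second operand.  The helper name is
   hypothetical.  */

static enum rtx_code
canonicalize_comparison_operands (code, op0p, op1p)
     enum rtx_code code;
     rtx *op0p, *op1p;
{
  if (CONSTANT_P (*op0p) && ! CONSTANT_P (*op1p))
    {
      /* (LT 4 x) becomes (GT x 4): same truth value, swapped operands.  */
      rtx tem = *op0p;
      *op0p = *op1p;
      *op1p = tem;
      code = swap_condition (code);
    }
  return code;
}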
2018
2019 /* Given a comparison CODE, return the corresponding unsigned comparison.
2020 If CODE is an equality comparison or already an unsigned comparison,
2021 CODE is returned. */
2022
2023 enum rtx_code
2024 unsigned_condition (code)
2025 enum rtx_code code;
2026 {
2027 switch (code)
2028 {
2029 case EQ:
2030 case NE:
2031 case GTU:
2032 case GEU:
2033 case LTU:
2034 case LEU:
2035 return code;
2036
2037 case GT:
2038 return GTU;
2039 case GE:
2040 return GEU;
2041 case LT:
2042 return LTU;
2043 case LE:
2044 return LEU;
2045
2046 default:
2047 abort ();
2048 }
2049 }
2050
2051 /* Similarly, return the signed version of a comparison. */
2052
2053 enum rtx_code
2054 signed_condition (code)
2055 enum rtx_code code;
2056 {
2057 switch (code)
2058 {
2059 case EQ:
2060 case NE:
2061 case GT:
2062 case GE:
2063 case LT:
2064 case LE:
2065 return code;
2066
2067 case GTU:
2068 return GT;
2069 case GEU:
2070 return GE;
2071 case LTU:
2072 return LT;
2073 case LEU:
2074 return LE;
2075
2076 default:
2077 abort ();
2078 }
2079 }
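
/* An illustrative sketch, not part of jump.c: unsigned_condition and
   signed_condition translate only the code; the caller must know that
   the operands justify the change (e.g. both known non-negative).  A
   self-check of the mappings, using this file's abort convention:  */

static void
check_condition_mappings ()
{
  if (unsigned_condition (LT) != LTU
      || unsigned_condition (EQ) != EQ
      || signed_condition (GEU) != GE
      || signed_condition (NE) != NE)
    abort ();
}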
2080 \f
2081 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2082 truth of CODE1 implies the truth of CODE2. */
2083
2084 int
2085 comparison_dominates_p (code1, code2)
2086 enum rtx_code code1, code2;
2087 {
2088 if (code1 == code2)
2089 return 1;
2090
2091 switch (code1)
2092 {
2093 case UNEQ:
2094 if (code2 == UNLE || code2 == UNGE)
2095 return 1;
2096 break;
2097
2098 case EQ:
2099 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
2100 || code2 == ORDERED)
2101 return 1;
2102 break;
2103
2104 case UNLT:
2105 if (code2 == UNLE || code2 == NE)
2106 return 1;
2107 break;
2108
2109 case LT:
2110 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2111 return 1;
2112 break;
2113
2114 case UNGT:
2115 if (code2 == UNGE || code2 == NE)
2116 return 1;
2117 break;
2118
2119 case GT:
2120 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2121 return 1;
2122 break;
2123
2124 case GE:
2125 case LE:
2126 if (code2 == ORDERED)
2127 return 1;
2128 break;
2129
2130 case LTGT:
2131 if (code2 == NE || code2 == ORDERED)
2132 return 1;
2133 break;
2134
2135 case LTU:
2136 if (code2 == LEU || code2 == NE)
2137 return 1;
2138 break;
2139
2140 case GTU:
2141 if (code2 == GEU || code2 == NE)
2142 return 1;
2143 break;
2144
2145 case UNORDERED:
2146 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
2147 || code2 == UNGE || code2 == UNGT)
2148 return 1;
2149 break;
2150
2151 default:
2152 break;
2153 }
2154
2155 return 0;
2156 }
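
/* An illustrative sketch, not part of jump.c: given two branches over
   the same operands, classify the second once the first is known
   taken, the way thread_jumps below does.  Returns 1 for always
   taken, -1 for never taken, 0 for unknown.  The reverse_condition
   fallback is only valid when reversing CODE2 is safe (so not for an
   IEEE float ordering test).  */

static int
second_branch_outcome (code1, code2)
     enum rtx_code code1, code2;
{
  if (comparison_dominates_p (code1, code2))
    return 1;			/* E.g. EQ dominates GE.  */
  if (comparison_dominates_p (code1, reverse_condition (code2)))
    return -1;			/* CODE1 implies that CODE2 is false.  */
  return 0;
}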
2157 \f
2158 /* Return 1 if INSN is an unconditional jump and nothing else. */
2159
2160 int
2161 simplejump_p (insn)
2162 rtx insn;
2163 {
2164 rtx set;
2165
2166 if (GET_CODE (insn) != JUMP_INSN)
2167 return 0;
2168
2169 set = PATTERN (insn);
2170 if (GET_CODE (set) != SET)
2171 {
2172 set = single_set_1 (insn);
2173 if (set == NULL_RTX)
2174 return 0;
2175 }
2176
2177 return (GET_CODE (SET_DEST (set)) == PC
2178 && GET_CODE (SET_SRC (set)) == LABEL_REF);
2179 }
2180
2181 /* Return nonzero if INSN is a (possibly) conditional jump
2182 and nothing more.
2183
2184 Use of this function is deprecated, since we need to support combined
2185 branch and compare insns. Use any_condjump_p instead whenever possible. */
2186
2187 int
2188 condjump_p (insn)
2189 rtx insn;
2190 {
2191 register rtx x = PATTERN (insn);
2192
2193 if (GET_CODE (x) != SET
2194 || GET_CODE (SET_DEST (x)) != PC)
2195 return 0;
2196
2197 x = SET_SRC (x);
2198 if (GET_CODE (x) == LABEL_REF)
2199 return 1;
2200 else
2201 return (GET_CODE (x) == IF_THEN_ELSE
2202 && ((GET_CODE (XEXP (x, 2)) == PC
2203 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
2204 || GET_CODE (XEXP (x, 1)) == RETURN))
2205 || (GET_CODE (XEXP (x, 1)) == PC
2206 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
2207 || GET_CODE (XEXP (x, 2)) == RETURN))));
2210 }
2211
2212 /* Return nonzero if INSN is a (possibly) conditional jump inside a
2213 PARALLEL.
2214
2215 Use of this function is deprecated, since we need to support combined
2216 branch and compare insns. Use any_condjump_p instead whenever possible. */
2217
2218 int
2219 condjump_in_parallel_p (insn)
2220 rtx insn;
2221 {
2222 register rtx x = PATTERN (insn);
2223
2224 if (GET_CODE (x) != PARALLEL)
2225 return 0;
2226 else
2227 x = XVECEXP (x, 0, 0);
2228
2229 if (GET_CODE (x) != SET)
2230 return 0;
2231 if (GET_CODE (SET_DEST (x)) != PC)
2232 return 0;
2233 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2234 return 1;
2235 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2236 return 0;
2237 if (XEXP (SET_SRC (x), 2) == pc_rtx
2238 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2239 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2240 return 1;
2241 if (XEXP (SET_SRC (x), 1) == pc_rtx
2242 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2243 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2244 return 1;
2245 return 0;
2246 }
2247
2248 /* Return the SET of the pc in INSN, otherwise NULL_RTX. */
2249
2250 rtx
2251 pc_set (insn)
2252 rtx insn;
2253 {
2254 rtx pat;
2255 if (GET_CODE (insn) != JUMP_INSN)
2256 return NULL_RTX;
2257 pat = PATTERN (insn);
2258
2259 /* The set is allowed to appear either as the insn pattern or
2260 the first set in a PARALLEL. */
2261 if (GET_CODE (pat) == PARALLEL)
2262 pat = XVECEXP (pat, 0, 0);
2263 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
2264 return pat;
2265
2266 return NULL_RTX;
2267 }
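
/* An illustrative sketch, not part of jump.c: pc_set lets callers
   treat a bare (set (pc) ...) and a PARALLEL whose first element sets
   the pc uniformly, e.g. when fetching the branch-target expression.
   The helper name is hypothetical.  */

static rtx
branch_target_expr (insn)
     rtx insn;
{
  rtx set = pc_set (insn);

  /* SET_SRC is a LABEL_REF for a direct jump, an IF_THEN_ELSE for a
     conditional one, or something else such as a table reference.  */
  return set ? SET_SRC (set) : NULL_RTX;
}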
2268
2269 /* Return true when insn is an unconditional direct jump,
2270 possibly bundled inside a PARALLEL. */
2271
2272 int
2273 any_uncondjump_p (insn)
2274 rtx insn;
2275 {
2276 rtx x = pc_set (insn);
2277 if (!x)
2278 return 0;
2279 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
2280 return 0;
2281 return 1;
2282 }
2283
2284 /* Return true when insn is a conditional jump. This function works for
2285 instructions containing PC sets in PARALLELs. The instruction may have
2286 various other effects, so before removing the jump you must verify
2287 onlyjump_p.
2288
2289 Note that unlike condjump_p it returns false for unconditional jumps. */
2290
2291 int
2292 any_condjump_p (insn)
2293 rtx insn;
2294 {
2295 rtx x = pc_set (insn);
2296 enum rtx_code a, b;
2297
2298 if (!x)
2299 return 0;
2300 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2301 return 0;
2302
2303 a = GET_CODE (XEXP (SET_SRC (x), 1));
2304 b = GET_CODE (XEXP (SET_SRC (x), 2));
2305
2306 return ((b == PC && (a == LABEL_REF || a == RETURN))
2307 || (a == PC && (b == LABEL_REF || b == RETURN)));
2308 }
2309
2310 /* Return the label of a conditional jump. */
2311
2312 rtx
2313 condjump_label (insn)
2314 rtx insn;
2315 {
2316 rtx x = pc_set (insn);
2317
2318 if (!x)
2319 return NULL_RTX;
2320 x = SET_SRC (x);
2321 if (GET_CODE (x) == LABEL_REF)
2322 return x;
2323 if (GET_CODE (x) != IF_THEN_ELSE)
2324 return NULL_RTX;
2325 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
2326 return XEXP (x, 1);
2327 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
2328 return XEXP (x, 2);
2329 return NULL_RTX;
2330 }
2331
2332 /* Return true if INSN is a (possibly conditional) return insn. */
2333
2334 static int
2335 returnjump_p_1 (loc, data)
2336 rtx *loc;
2337 void *data ATTRIBUTE_UNUSED;
2338 {
2339 rtx x = *loc;
2340 return x && GET_CODE (x) == RETURN;
2341 }
2342
2343 int
2344 returnjump_p (insn)
2345 rtx insn;
2346 {
2347 if (GET_CODE (insn) != JUMP_INSN)
2348 return 0;
2349 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
2350 }
2351
2352 /* Return true if INSN is a jump that only transfers control and
2353 nothing more. */
2354
2355 int
2356 onlyjump_p (insn)
2357 rtx insn;
2358 {
2359 rtx set;
2360
2361 if (GET_CODE (insn) != JUMP_INSN)
2362 return 0;
2363
2364 set = single_set (insn);
2365 if (set == NULL)
2366 return 0;
2367 if (GET_CODE (SET_DEST (set)) != PC)
2368 return 0;
2369 if (side_effects_p (SET_SRC (set)))
2370 return 0;
2371
2372 return 1;
2373 }
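
/* An illustrative sketch, not part of jump.c: the safe idiom for
   removing a branch checks any_condjump_p or any_uncondjump_p for the
   shape and onlyjump_p for the absence of other effects, since a
   PARALLEL jump may do more than set the pc.  The helper name is
   hypothetical.  */

static int
try_remove_branch (insn)
     rtx insn;
{
  if (GET_CODE (insn) != JUMP_INSN
      || ! (any_condjump_p (insn) || any_uncondjump_p (insn))
      || ! onlyjump_p (insn))
    return 0;

  delete_jump (insn);
  return 1;
}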
2374
2375 #ifdef HAVE_cc0
2376
2377 /* Return 1 if X is an RTX that does nothing but set the condition codes
2378 and CLOBBER or USE registers.
2379 Return -1 if X does explicitly set the condition codes,
2380 but also does other things. */
2381
2382 int
2383 sets_cc0_p (x)
2384 rtx x;
2385 {
2386 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
2387 return 1;
2388 if (GET_CODE (x) == PARALLEL)
2389 {
2390 int i;
2391 int sets_cc0 = 0;
2392 int other_things = 0;
2393 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2394 {
2395 if (GET_CODE (XVECEXP (x, 0, i)) == SET
2396 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
2397 sets_cc0 = 1;
2398 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
2399 other_things = 1;
2400 }
2401 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
2402 }
2403 return 0;
2404 }
2405 #endif
2406 \f
2407 /* Follow any unconditional jump at LABEL;
2408 return the ultimate label reached by any such chain of jumps.
2409 If LABEL is not followed by a jump, return LABEL.
2410 If the chain loops or we can't find the end, return LABEL,
2411 since that tells the caller to avoid changing the insn.
2412
2413 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2414 a USE or CLOBBER. */
2415
2416 rtx
2417 follow_jumps (label)
2418 rtx label;
2419 {
2420 register rtx insn;
2421 register rtx next;
2422 register rtx value = label;
2423 register int depth;
2424
2425 for (depth = 0;
2426 (depth < 10
2427 && (insn = next_active_insn (value)) != 0
2428 && GET_CODE (insn) == JUMP_INSN
2429 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
2430 && onlyjump_p (insn))
2431 || GET_CODE (PATTERN (insn)) == RETURN)
2432 && (next = NEXT_INSN (insn))
2433 && GET_CODE (next) == BARRIER);
2434 depth++)
2435 {
2436 /* Don't chain through the insn that jumps into a loop
2437 from outside the loop,
2438 since that would create multiple loop entry jumps
2439 and prevent loop optimization. */
2440 rtx tem;
2441 if (!reload_completed)
2442 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
2443 if (GET_CODE (tem) == NOTE
2444 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
2445 /* ??? Optional. Disables some optimizations, but makes
2446 gcov output more accurate with -O. */
2447 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
2448 return value;
2449
2450 /* If we have found a cycle, make the insn jump to itself. */
2451 if (JUMP_LABEL (insn) == label)
2452 return label;
2453
2454 tem = next_active_insn (JUMP_LABEL (insn));
2455 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2456 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2457 break;
2458
2459 value = JUMP_LABEL (insn);
2460 }
2461 if (depth == 10)
2462 return label;
2463 return value;
2464 }
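
/* An illustrative sketch, not part of jump.c: the usual "tensioning"
   use of follow_jumps retargets a branch past chains of unconditional
   jumps.  Since follow_jumps returns the original label on cycles or
   over-long chains, a changed result is always safe to apply.  The
   helper name is hypothetical.  */

static void
tension_one_jump (jump)
     rtx jump;
{
  rtx olabel = JUMP_LABEL (jump);
  rtx nlabel = olabel ? follow_jumps (olabel) : NULL_RTX;

  if (nlabel && nlabel != olabel)
    /* Redirect, letting a now-unused old label be deleted.  */
    redirect_jump (jump, nlabel, 1);
}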
2465
2466 /* Assuming that field IDX of X is a vector of label_refs,
2467 replace each of them by the ultimate label reached by it.
2468 Return nonzero if a change is made.
2469 Before reload, we do not chain across a NOTE_INSN_LOOP_BEG. */
2470
2471 static int
2472 tension_vector_labels (x, idx)
2473 register rtx x;
2474 register int idx;
2475 {
2476 int changed = 0;
2477 register int i;
2478 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
2479 {
2480 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
2481 register rtx nlabel = follow_jumps (olabel);
2482 if (nlabel && nlabel != olabel)
2483 {
2484 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
2485 ++LABEL_NUSES (nlabel);
2486 if (--LABEL_NUSES (olabel) == 0)
2487 delete_insn (olabel);
2488 changed = 1;
2489 }
2490 }
2491 return changed;
2492 }
2493 \f
2494 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2495 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2496 in INSN, then store one of them in JUMP_LABEL (INSN).
2497 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2498 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2499 Also, when there are consecutive labels, canonicalize on the last of them.
2500
2501 Note that two labels separated by a loop-beginning note
2502 must be kept distinct if we have not yet done loop-optimization,
2503 because the gap between them is where loop-optimize
2504 will want to move invariant code to. CROSS_JUMP tells us
2505 that loop optimization is finished.
2506
2507 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2508 two labels distinct if they are separated by only USE or CLOBBER insns. */
2509
2510 void
2511 mark_jump_label (x, insn, cross_jump, in_mem)
2512 register rtx x;
2513 rtx insn;
2514 int cross_jump;
2515 int in_mem;
2516 {
2517 register RTX_CODE code = GET_CODE (x);
2518 register int i;
2519 register const char *fmt;
2520
2521 switch (code)
2522 {
2523 case PC:
2524 case CC0:
2525 case REG:
2526 case SUBREG:
2527 case CONST_INT:
2528 case CONST_DOUBLE:
2529 case CLOBBER:
2530 case CALL:
2531 return;
2532
2533 case MEM:
2534 in_mem = 1;
2535 break;
2536
2537 case SYMBOL_REF:
2538 if (!in_mem)
2539 return;
2540
2541 /* If this is a constant-pool reference, see if it is a label. */
2542 if (CONSTANT_POOL_ADDRESS_P (x))
2543 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
2544 break;
2545
2546 case LABEL_REF:
2547 {
2548 rtx label = XEXP (x, 0);
2549 rtx olabel = label;
2550 rtx note;
2551 rtx next;
2552
2553 /* Ignore remaining references to unreachable labels that
2554 have been deleted. */
2555 if (GET_CODE (label) == NOTE
2556 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
2557 break;
2558
2559 if (GET_CODE (label) != CODE_LABEL)
2560 abort ();
2561
2562 /* Ignore references to labels of containing functions. */
2563 if (LABEL_REF_NONLOCAL_P (x))
2564 break;
2565
2566 /* If there are other labels following this one,
2567 replace it with the last of the consecutive labels. */
2568 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
2569 {
2570 if (GET_CODE (next) == CODE_LABEL)
2571 label = next;
2572 else if (cross_jump && GET_CODE (next) == INSN
2573 && (GET_CODE (PATTERN (next)) == USE
2574 || GET_CODE (PATTERN (next)) == CLOBBER))
2575 continue;
2576 else if (GET_CODE (next) != NOTE)
2577 break;
2578 else if (! cross_jump
2579 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
2580 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
2581 /* ??? Optional. Disables some optimizations, but
2582 makes gcov output more accurate with -O. */
2583 || (flag_test_coverage
2584 && NOTE_LINE_NUMBER (next) > 0)))
2585 break;
2586 }
2587
2588 XEXP (x, 0) = label;
2589 if (! insn || ! INSN_DELETED_P (insn))
2590 ++LABEL_NUSES (label);
2591
2592 if (insn)
2593 {
2594 if (GET_CODE (insn) == JUMP_INSN)
2595 JUMP_LABEL (insn) = label;
2596
2597 /* If we've changed OLABEL and we had a REG_LABEL note
2598 for it, update it as well. */
2599 else if (label != olabel
2600 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
2601 XEXP (note, 0) = label;
2602
2603 /* Otherwise, add a REG_LABEL note for LABEL unless there already
2604 is one. */
2605 else if (! find_reg_note (insn, REG_LABEL, label))
2606 {
2607 /* This code used to ignore labels which referred to dispatch
2608 tables to avoid flow.c generating worse code.
2609
2610 However, in the presence of global optimizations like
2611 gcse which call find_basic_blocks without calling
2612 life_analysis, not recording such labels will lead
2613 to compiler aborts because of inconsistencies in the
2614 flow graph. So we go ahead and record the label.
2615
2616 It may also be the case that the optimization argument
2617 is no longer valid because of the more accurate cfg
2618 we build in find_basic_blocks -- it no longer pessimizes
2619 code when it finds a REG_LABEL note. */
2620 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
2621 REG_NOTES (insn));
2622 }
2623 }
2624 return;
2625 }
2626
2627 /* Do walk the labels in a vector, but not the first operand of an
2628 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
2629 case ADDR_VEC:
2630 case ADDR_DIFF_VEC:
2631 if (! INSN_DELETED_P (insn))
2632 {
2633 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
2634
2635 for (i = 0; i < XVECLEN (x, eltnum); i++)
2636 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
2637 cross_jump, in_mem);
2638 }
2639 return;
2640
2641 default:
2642 break;
2643 }
2644
2645 fmt = GET_RTX_FORMAT (code);
2646 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2647 {
2648 if (fmt[i] == 'e')
2649 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
2650 else if (fmt[i] == 'E')
2651 {
2652 register int j;
2653 for (j = 0; j < XVECLEN (x, i); j++)
2654 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
2655 }
2656 }
2657 }
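
/* An illustrative sketch, not part of jump.c: a pass that builds a
   jump insn by hand must establish the label bookkeeping itself,
   since JUMP_LABEL and LABEL_NUSES are not maintained as a side
   effect of emitting the insn.  The zero arguments mean cross-jumping
   has not happened and we are not inside a MEM.  */

static void
record_jump_labels (insn)
     rtx insn;
{
  mark_jump_label (PATTERN (insn), insn, 0, 0);
}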
2658
2659 /* If all INSN does is set the pc, delete it,
2660 and delete the insn that set the condition codes for it
2661 if that is what the previous insn was. */
2662
2663 void
2664 delete_jump (insn)
2665 rtx insn;
2666 {
2667 register rtx set = single_set (insn);
2668
2669 if (set && GET_CODE (SET_DEST (set)) == PC)
2670 delete_computation (insn);
2671 }
2672
2673 /* Verify INSN is a BARRIER and delete it. */
2674
2675 void
2676 delete_barrier (insn)
2677 rtx insn;
2678 {
2679 if (GET_CODE (insn) != BARRIER)
2680 abort ();
2681
2682 delete_insn (insn);
2683 }
2684
2685 /* Recursively delete prior insns that compute the value (used only by INSN
2686 which the caller is deleting) stored in the register mentioned by NOTE
2687 which is a REG_DEAD note associated with INSN. */
2688
2689 static void
2690 delete_prior_computation (note, insn)
2691 rtx note;
2692 rtx insn;
2693 {
2694 rtx our_prev;
2695 rtx reg = XEXP (note, 0);
2696
2697 for (our_prev = prev_nonnote_insn (insn);
2698 our_prev && (GET_CODE (our_prev) == INSN
2699 || GET_CODE (our_prev) == CALL_INSN);
2700 our_prev = prev_nonnote_insn (our_prev))
2701 {
2702 rtx pat = PATTERN (our_prev);
2703
2704 /* If we reach a CALL which is not calling a const function
2705 or the callee pops the arguments, then give up. */
2706 if (GET_CODE (our_prev) == CALL_INSN
2707 && (! CONST_CALL_P (our_prev)
2708 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
2709 break;
2710
2711 /* If we reach a SEQUENCE, it is too complex to try to
2712 do anything with it, so give up. */
2713 if (GET_CODE (pat) == SEQUENCE)
2714 break;
2715
2716 if (GET_CODE (pat) == USE
2717 && GET_CODE (XEXP (pat, 0)) == INSN)
2718 /* reorg creates USEs that look like this. We leave them
2719 alone because reorg needs them for its own purposes. */
2720 break;
2721
2722 if (reg_set_p (reg, pat))
2723 {
2724 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
2725 break;
2726
2727 if (GET_CODE (pat) == PARALLEL)
2728 {
2729 /* If we find a SET of something else, we can't
2730 delete the insn. */
2731
2732 int i;
2733
2734 for (i = 0; i < XVECLEN (pat, 0); i++)
2735 {
2736 rtx part = XVECEXP (pat, 0, i);
2737
2738 if (GET_CODE (part) == SET
2739 && SET_DEST (part) != reg)
2740 break;
2741 }
2742
2743 if (i == XVECLEN (pat, 0))
2744 delete_computation (our_prev);
2745 }
2746 else if (GET_CODE (pat) == SET
2747 && GET_CODE (SET_DEST (pat)) == REG)
2748 {
2749 int dest_regno = REGNO (SET_DEST (pat));
2750 int dest_endregno
2751 = (dest_regno
2752 + (dest_regno < FIRST_PSEUDO_REGISTER
2753 ? HARD_REGNO_NREGS (dest_regno,
2754 GET_MODE (SET_DEST (pat))) : 1));
2755 int regno = REGNO (reg);
2756 int endregno
2757 = (regno
2758 + (regno < FIRST_PSEUDO_REGISTER
2759 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
2760
2761 if (dest_regno >= regno
2762 && dest_endregno <= endregno)
2763 delete_computation (our_prev);
2764
2765 /* We may have a multi-word hard register and some, but not
2766 all, of the words of the register are needed in subsequent
2767 insns. Write REG_UNUSED notes for those parts that were not
2768 needed. */
2769 else if (dest_regno <= regno
2770 && dest_endregno >= endregno)
2771 {
2772 int i;
2773
2774 REG_NOTES (our_prev)
2775 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
2776 REG_NOTES (our_prev));
2777
2778 for (i = dest_regno; i < dest_endregno; i++)
2779 if (! find_regno_note (our_prev, REG_UNUSED, i))
2780 break;
2781
2782 if (i == dest_endregno)
2783 delete_computation (our_prev);
2784 }
2785 }
2786
2787 break;
2788 }
2789
2790 /* If PAT references the register that dies here, it is an
2791 additional use. Hence any prior SET isn't dead. However, this
2792 insn becomes the new place for the REG_DEAD note. */
2793 if (reg_overlap_mentioned_p (reg, pat))
2794 {
2795 XEXP (note, 1) = REG_NOTES (our_prev);
2796 REG_NOTES (our_prev) = note;
2797 break;
2798 }
2799 }
2800 }
2801
2802 /* Delete INSN and recursively delete insns that compute values used only
2803 by INSN. This uses the REG_DEAD notes computed during flow analysis.
2804 If we are running before flow.c, we need do nothing since flow.c will
2805 delete dead code. We also can't know if the registers being used are
2806 dead or not at this point.
2807
2808 Otherwise, look at all our REG_DEAD notes. If a previous insn does
2809 nothing other than set a register that dies in this insn, we can delete
2810 that insn as well.
2811
2812 On machines with CC0, if CC0 is used in this insn, we may be able to
2813 delete the insn that set it. */
2814
2815 static void
2816 delete_computation (insn)
2817 rtx insn;
2818 {
2819 rtx note, next;
2820
2821 #ifdef HAVE_cc0
2822 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2823 {
2824 rtx prev = prev_nonnote_insn (insn);
2825 /* We assume that at this stage
2826 CC's are always set explicitly
2827 and always immediately before the jump that
2828 will use them. So if the previous insn
2829 exists to set the CC's, delete it
2830 (unless it performs auto-increments, etc.). */
2831 if (prev && GET_CODE (prev) == INSN
2832 && sets_cc0_p (PATTERN (prev)))
2833 {
2834 if (sets_cc0_p (PATTERN (prev)) > 0
2835 && ! side_effects_p (PATTERN (prev)))
2836 delete_computation (prev);
2837 else
2838 /* Otherwise, show that cc0 won't be used. */
2839 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
2840 cc0_rtx, REG_NOTES (prev));
2841 }
2842 }
2843 #endif
2844
2845 for (note = REG_NOTES (insn); note; note = next)
2846 {
2847 next = XEXP (note, 1);
2848
2849 if (REG_NOTE_KIND (note) != REG_DEAD
2850 /* Verify that the REG_NOTE is legitimate. */
2851 || GET_CODE (XEXP (note, 0)) != REG)
2852 continue;
2853
2854 delete_prior_computation (note, insn);
2855 }
2856
2857 delete_insn (insn);
2858 }
2859 \f
2860 /* Delete insn INSN from the chain of insns and update label ref counts.
2861 May delete some following insns as a consequence; may even delete
2862 a label elsewhere and insns that follow it.
2863
2864 Returns the first insn after INSN that was not deleted. */
2865
2866 rtx
2867 delete_insn (insn)
2868 register rtx insn;
2869 {
2870 register rtx next = NEXT_INSN (insn);
2871 register rtx prev = PREV_INSN (insn);
2872 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
2873 register int dont_really_delete = 0;
2874 rtx note;
2875
2876 while (next && INSN_DELETED_P (next))
2877 next = NEXT_INSN (next);
2878
2879 /* This insn is already deleted => return first following nondeleted. */
2880 if (INSN_DELETED_P (insn))
2881 return next;
2882
2883 if (was_code_label)
2884 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2885
2886 /* Don't delete user-declared labels. When optimizing, convert them
2887 to special NOTEs instead. When not optimizing, leave them alone. */
2888 if (was_code_label && LABEL_NAME (insn) != 0)
2889 {
2890 if (! optimize)
2891 dont_really_delete = 1;
2892 else if (! dont_really_delete)
2893 {
2894 const char *name = LABEL_NAME (insn);
2895 PUT_CODE (insn, NOTE);
2896 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
2897 NOTE_SOURCE_FILE (insn) = name;
2898 dont_really_delete = 1;
2899 }
2900 }
2901 else
2902 /* Mark this insn as deleted. */
2903 INSN_DELETED_P (insn) = 1;
2904
2905 /* If this is an unconditional jump, delete it from the jump chain. */
2906 if (simplejump_p (insn))
2907 delete_from_jump_chain (insn);
2908
2909 /* If instruction is followed by a barrier,
2910 delete the barrier too. */
2911
2912 if (next != 0 && GET_CODE (next) == BARRIER)
2913 {
2914 INSN_DELETED_P (next) = 1;
2915 next = NEXT_INSN (next);
2916 }
2917
2918 /* Patch out INSN (and the barrier, if any). */
2919
2920 if (! dont_really_delete)
2921 {
2922 if (prev)
2923 {
2924 NEXT_INSN (prev) = next;
2925 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2926 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
2927 XVECLEN (PATTERN (prev), 0) - 1)) = next;
2928 }
2929
2930 if (next)
2931 {
2932 PREV_INSN (next) = prev;
2933 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2934 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2935 }
2936
2937 if (prev && NEXT_INSN (prev) == 0)
2938 set_last_insn (prev);
2939 }
2940
2941 /* If deleting a jump, decrement the count of the label,
2942 and delete the label if it is now unused. */
2943
2944 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2945 {
2946 rtx lab = JUMP_LABEL (insn), lab_next;
2947
2948 if (--LABEL_NUSES (lab) == 0)
2949 {
2950 /* This can delete NEXT or PREV,
2951 either directly if NEXT is JUMP_LABEL (INSN),
2952 or indirectly through more levels of jumps. */
2953 delete_insn (lab);
2954
2955 /* I feel a little doubtful about this loop,
2956 but I see no clean and sure alternative way
2957 to find the first insn after INSN that is not now deleted.
2958 I hope this works. */
2959 while (next && INSN_DELETED_P (next))
2960 next = NEXT_INSN (next);
2961 return next;
2962 }
2963 else if ((lab_next = next_nonnote_insn (lab)) != NULL
2964 && GET_CODE (lab_next) == JUMP_INSN
2965 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
2966 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
2967 {
2968 /* If we're deleting the tablejump, delete the dispatch table.
2969 We may not be able to kill the label immediately preceding
2970 just yet, as it might be referenced in code leading up to
2971 the tablejump. */
2972 delete_insn (lab_next);
2973 }
2974 }
2975
2976 /* Likewise if we're deleting a dispatch table. */
2977
2978 if (GET_CODE (insn) == JUMP_INSN
2979 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2980 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2981 {
2982 rtx pat = PATTERN (insn);
2983 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
2984 int len = XVECLEN (pat, diff_vec_p);
2985
2986 for (i = 0; i < len; i++)
2987 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
2988 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
2989 while (next && INSN_DELETED_P (next))
2990 next = NEXT_INSN (next);
2991 return next;
2992 }
2993
2994 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
2995 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2996 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2997 if (REG_NOTE_KIND (note) == REG_LABEL
2998 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
2999 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
3000 if (--LABEL_NUSES (XEXP (note, 0)) == 0)
3001 delete_insn (XEXP (note, 0));
3002
3003 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
3004 prev = PREV_INSN (prev);
3005
3006 /* If INSN was a label and a dispatch table follows it,
3007 delete the dispatch table. The tablejump must have gone already.
3008 It isn't useful to fall through into a table. */
3009
3010 if (was_code_label
3011 && NEXT_INSN (insn) != 0
3012 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
3013 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
3014 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
3015 next = delete_insn (NEXT_INSN (insn));
3016
3017 /* If INSN was a label, delete insns following it if now unreachable. */
3018
3019 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
3020 {
3021 register RTX_CODE code;
3022 while (next != 0
3023 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
3024 || code == NOTE || code == BARRIER
3025 || (code == CODE_LABEL && INSN_DELETED_P (next))))
3026 {
3027 if (code == NOTE
3028 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
3029 next = NEXT_INSN (next);
3030 /* Keep going past other deleted labels to delete what follows. */
3031 else if (code == CODE_LABEL && INSN_DELETED_P (next))
3032 next = NEXT_INSN (next);
3033 else
3034 /* Note: if this deletes a jump, it can cause more
3035 deletion of unreachable code, after a different label.
3036 As long as the value from this recursive call is correct,
3037 this invocation functions correctly. */
3038 next = delete_insn (next);
3039 }
3040 }
3041
3042 return next;
3043 }
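
/* An illustrative sketch, not part of jump.c: because delete_insn can
   cascade (removing a jump may free its label and the unreachable
   code after it), scans must resume from the returned insn, never
   from a NEXT_INSN captured before the call.  DEAD_P stands in for
   whatever deletion policy the caller has.  */

static void
delete_matching_insns (f, dead_p)
     rtx f;
     int (*dead_p) ();
{
  rtx insn = f;

  while (insn)
    {
      if (INSN_P (insn) && (*dead_p) (insn))
	/* NEXT_INSN (insn) itself may have been deleted, so continue
	   from the value delete_insn returns.  */
	insn = delete_insn (insn);
      else
	insn = NEXT_INSN (insn);
    }
}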
3044
3045 /* Advance from INSN till reaching something not deleted
3046 then return that. May return INSN itself. */
3047
3048 rtx
3049 next_nondeleted_insn (insn)
3050 rtx insn;
3051 {
3052 while (INSN_DELETED_P (insn))
3053 insn = NEXT_INSN (insn);
3054 return insn;
3055 }
3056 \f
3057 /* Delete a range of insns from FROM to TO, inclusive.
3058 This is for the sake of peephole optimization, so assume
3059 that whatever these insns do will still be done by a new
3060 peephole insn that will replace them. */
3061
3062 void
3063 delete_for_peephole (from, to)
3064 register rtx from, to;
3065 {
3066 register rtx insn = from;
3067
3068 while (1)
3069 {
3070 register rtx next = NEXT_INSN (insn);
3071 register rtx prev = PREV_INSN (insn);
3072
3073 if (GET_CODE (insn) != NOTE)
3074 {
3075 INSN_DELETED_P (insn) = 1;
3076
3077 /* Patch this insn out of the chain. */
3078 /* We don't do this all at once, because we
3079 must preserve all NOTEs. */
3080 if (prev)
3081 NEXT_INSN (prev) = next;
3082
3083 if (next)
3084 PREV_INSN (next) = prev;
3085 }
3086
3087 if (insn == to)
3088 break;
3089 insn = next;
3090 }
3091
3092 /* Note that if TO is an unconditional jump
3093 we *do not* delete the BARRIER that follows,
3094 since the peephole that replaces this sequence
3095 is also an unconditional jump in that case. */
3096 }
3097 \f
3098 /* We have determined that INSN is never reached, and are about to
3099 delete it. Print a warning if the user asked for one.
3100
3101 To try to make this warning more useful, this should only be called
3102 once per unreached basic block, and it only warns when the basic
3103 block contains more than one line from the current function and
3104 contains at least one operation. CSE and inlining can duplicate insns,
3105 so it's possible to get spurious warnings from this. */
3106
3107 void
3108 never_reached_warning (avoided_insn)
3109 rtx avoided_insn;
3110 {
3111 rtx insn;
3112 rtx a_line_note = NULL;
3113 int two_avoided_lines = 0;
3114 int contains_insn = 0;
3115
3116 if (! warn_notreached)
3117 return;
3118
3119 /* Scan forwards, looking at LINE_NUMBER notes, until
3120 we hit a LABEL or we run out of insns. */
3121
3122 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
3123 {
3124 if (GET_CODE (insn) == CODE_LABEL)
3125 break;
3126 else if (GET_CODE (insn) == NOTE /* A line number note? */
3127 && NOTE_LINE_NUMBER (insn) >= 0)
3128 {
3129 if (a_line_note == NULL)
3130 a_line_note = insn;
3131 else
3132 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
3133 != NOTE_LINE_NUMBER (insn));
3134 }
3135 else if (INSN_P (insn))
3136 contains_insn = 1;
3137 }
3138 if (two_avoided_lines && contains_insn)
3139 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
3140 NOTE_LINE_NUMBER (a_line_note),
3141 "will never be executed");
3142 }
3143 \f
3144 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
3145 NLABEL as a return. Accrue modifications into the change group. */
3146
3147 static void
3148 redirect_exp_1 (loc, olabel, nlabel, insn)
3149 rtx *loc;
3150 rtx olabel, nlabel;
3151 rtx insn;
3152 {
3153 register rtx x = *loc;
3154 register RTX_CODE code = GET_CODE (x);
3155 register int i;
3156 register const char *fmt;
3157
3158 if (code == LABEL_REF)
3159 {
3160 if (XEXP (x, 0) == olabel)
3161 {
3162 rtx n;
3163 if (nlabel)
3164 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3165 else
3166 n = gen_rtx_RETURN (VOIDmode);
3167
3168 validate_change (insn, loc, n, 1);
3169 return;
3170 }
3171 }
3172 else if (code == RETURN && olabel == 0)
3173 {
3174 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3175 if (loc == &PATTERN (insn))
3176 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3177 validate_change (insn, loc, x, 1);
3178 return;
3179 }
3180
3181 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
3182 && GET_CODE (SET_SRC (x)) == LABEL_REF
3183 && XEXP (SET_SRC (x), 0) == olabel)
3184 {
3185 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
3186 return;
3187 }
3188
3189 fmt = GET_RTX_FORMAT (code);
3190 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3191 {
3192 if (fmt[i] == 'e')
3193 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
3194 else if (fmt[i] == 'E')
3195 {
3196 register int j;
3197 for (j = 0; j < XVECLEN (x, i); j++)
3198 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
3199 }
3200 }
3201 }
3202
3203 /* Similar, but apply the change group and report success or failure. */
3204
3205 static int
3206 redirect_exp (olabel, nlabel, insn)
3207 rtx olabel, nlabel;
3208 rtx insn;
3209 {
3210 rtx *loc;
3211
3212 if (GET_CODE (PATTERN (insn)) == PARALLEL)
3213 loc = &XVECEXP (PATTERN (insn), 0, 0);
3214 else
3215 loc = &PATTERN (insn);
3216
3217 redirect_exp_1 (loc, olabel, nlabel, insn);
3218 if (num_validated_changes () == 0)
3219 return 0;
3220
3221 return apply_change_group ();
3222 }
3223
3224 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
3225 the modifications into the change group. Return false if we did
3226 not see how to do that. */
3227
3228 int
3229 redirect_jump_1 (jump, nlabel)
3230 rtx jump, nlabel;
3231 {
3232 int ochanges = num_validated_changes ();
3233 rtx *loc;
3234
3235 if (GET_CODE (PATTERN (jump)) == PARALLEL)
3236 loc = &XVECEXP (PATTERN (jump), 0, 0);
3237 else
3238 loc = &PATTERN (jump);
3239
3240 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
3241 return num_validated_changes () > ochanges;
3242 }
3243
3244 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
3245 jump target label is unused as a result, it and the code following
3246 it may be deleted.
3247
3248 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3249 RETURN insn.
3250
3251 The return value will be 1 if the change was made, 0 if it wasn't
3252 (this can only occur for NLABEL == 0). */
3253
3254 int
3255 redirect_jump (jump, nlabel, delete_unused)
3256 rtx jump, nlabel;
3257 int delete_unused;
3258 {
3259 register rtx olabel = JUMP_LABEL (jump);
3260
3261 if (nlabel == olabel)
3262 return 1;
3263
3264 if (! redirect_exp (olabel, nlabel, jump))
3265 return 0;
3266
3267 /* If this is an unconditional branch, delete it from the jump_chain of
3268 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3269 have UID's in range and JUMP_CHAIN is valid). */
3270 if (jump_chain && (simplejump_p (jump)
3271 || GET_CODE (PATTERN (jump)) == RETURN))
3272 {
3273 int label_index = nlabel ? INSN_UID (nlabel) : 0;
3274
3275 delete_from_jump_chain (jump);
3276 if (label_index < max_jump_chain
3277 && INSN_UID (jump) < max_jump_chain)
3278 {
3279 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
3280 jump_chain[label_index] = jump;
3281 }
3282 }
3283
3284 JUMP_LABEL (jump) = nlabel;
3285 if (nlabel)
3286 ++LABEL_NUSES (nlabel);
3287
3288 /* If we're eliding the jump over exception cleanups at the end of a
3289 function, move the function end note so that -Wreturn-type works. */
3290 if (olabel && nlabel
3291 && NEXT_INSN (olabel)
3292 && GET_CODE (NEXT_INSN (olabel)) == NOTE
3293 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
3294 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
3295
3296 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
3297 delete_insn (olabel);
3298
3299 return 1;
3300 }
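
/* An illustrative sketch, not part of jump.c: redirect_jump commits
   immediately, while redirect_jump_1 only queues changes, letting a
   caller retarget several insns atomically.  Note that redirect_jump_1
   leaves the JUMP_LABEL and LABEL_NUSES bookkeeping to the caller.
   The helper name is hypothetical.  */

static int
retarget_jump_pair (j1, j2, nlabel)
     rtx j1, j2, nlabel;
{
  if (! redirect_jump_1 (j1, nlabel)
      || ! redirect_jump_1 (j2, nlabel))
    {
      cancel_changes (0);
      return 0;
    }

  /* apply_change_group itself undoes everything if some queued change
     fails to match an insn pattern.  */
  return apply_change_group ();
}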
3301
3302 /* Invert the jump condition of jump insn INSN.
3303 Accrue the modifications into the change group. */
3304
3305 static void
3306 invert_exp_1 (insn)
3307 rtx insn;
3308 {
3309 register RTX_CODE code;
3310 rtx x = pc_set (insn);
3311
3312 if (!x)
3313 abort ();
3314 x = SET_SRC (x);
3315
3316 code = GET_CODE (x);
3317
3318 if (code == IF_THEN_ELSE)
3319 {
3320 register rtx comp = XEXP (x, 0);
3321 register rtx tem;
3322
3323 /* We can do this in two ways: The preferable way, which can only
3324 be done if this is not an integer comparison, is to reverse
3325 the comparison code. Otherwise, swap the THEN-part and ELSE-part
3326 of the IF_THEN_ELSE. If we can't do either, fail. */
3327
3328 if (can_reverse_comparison_p (comp, insn))
3329 {
3330 validate_change (insn, &XEXP (x, 0),
3331 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
3332 GET_MODE (comp), XEXP (comp, 0),
3333 XEXP (comp, 1)),
3334 1);
3335 return;
3336 }
3337
3338 tem = XEXP (x, 1);
3339 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
3340 validate_change (insn, &XEXP (x, 2), tem, 1);
3341 }
3342 else
3343 abort ();
3344 }
3345
3346 /* Invert the jump condition of conditional jump insn, INSN.
3347
3348 Return 1 if we can do so, 0 if we cannot find a way to do so that
3349 matches a pattern. */
3350
3351 static int
3352 invert_exp (insn)
3353 rtx insn;
3354 {
3355 invert_exp_1 (insn);
3356 if (num_validated_changes () == 0)
3357 return 0;
3358
3359 return apply_change_group ();
3360 }
3361
3362 /* Invert the condition of the jump JUMP, and make it jump to label
3363 NLABEL instead of where it jumps now. Accrue changes into the
3364 change group. Return false if we didn't see how to perform the
3365 inversion and redirection. */
3366
3367 int
3368 invert_jump_1 (jump, nlabel)
3369 rtx jump, nlabel;
3370 {
3371 int ochanges;
3372
3373 ochanges = num_validated_changes ();
3374 invert_exp_1 (jump);
3375 if (num_validated_changes () == ochanges)
3376 return 0;
3377
3378 return redirect_jump_1 (jump, nlabel);
3379 }
3380
3381 /* Invert the condition of the jump JUMP, and make it jump to label
3382 NLABEL instead of where it jumps now. Return true if successful. */
3383
3384 int
3385 invert_jump (jump, nlabel, delete_unused)
3386 rtx jump, nlabel;
3387 int delete_unused;
3388 {
3389 /* We have to either invert the condition and change the label or
3390 do neither. Either operation could fail. We first try to invert
3391 the jump. If that succeeds, we try changing the label. If that fails,
3392 we invert the jump back to what it was. */
3393
3394 if (! invert_exp (jump))
3395 return 0;
3396
3397 if (redirect_jump (jump, nlabel, delete_unused))
3398 {
3399 /* An inverted jump means that a probability taken becomes a
3400 probability not taken. Subtract the branch probability from the
3401 probability base to convert it back to a taken probability. */
3402
3403 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3404 if (note)
3405 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
3406
3407 return 1;
3408 }
3409
3410 if (! invert_exp (jump))
3411 /* This should just be putting it back the way it was. */
3412 abort ();
3413
3414 return 0;
3415 }
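
/* An illustrative sketch, not part of jump.c: eliding a "jump around a
   jump" by inverting the conditional straight to the unconditional
   jump's target.  invert_jump already flips any REG_BR_PROB note, so
   no extra probability fixup is needed.  The helper name is
   hypothetical.  */

static int
elide_jump_around_jump (condjump, uncondjump)
     rtx condjump, uncondjump;
{
  if (! any_condjump_p (condjump)
      || ! any_uncondjump_p (uncondjump)
      || ! invert_jump (condjump, JUMP_LABEL (uncondjump), 1))
    return 0;

  /* The unconditional jump is now bypassed; delete_insn also removes
     the barrier after it and its target label if that became unused.  */
  delete_insn (uncondjump);
  return 1;
}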
3416
3417 /* Delete the instruction JUMP from any jump chain it might be on. */
3418
3419 static void
3420 delete_from_jump_chain (jump)
3421 rtx jump;
3422 {
3423 int index;
3424 rtx olabel = JUMP_LABEL (jump);
3425
3426 /* Handle unconditional jumps. */
3427 if (jump_chain && olabel != 0
3428 && INSN_UID (olabel) < max_jump_chain
3429 && simplejump_p (jump))
3430 index = INSN_UID (olabel);
3431 /* Handle return insns. */
3432 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3433 index = 0;
3434 else
3435 return;
3436
3437 if (jump_chain[index] == jump)
3438 jump_chain[index] = jump_chain[INSN_UID (jump)];
3439 else
3440 {
3441 rtx insn;
3442
3443 for (insn = jump_chain[index];
3444 insn != 0;
3445 insn = jump_chain[INSN_UID (insn)])
3446 if (jump_chain[INSN_UID (insn)] == jump)
3447 {
3448 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3449 break;
3450 }
3451 }
3452 }
3453 \f
3454 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3455
3456 If the old jump target label (before the dispatch table) becomes unused,
3457 it and the dispatch table may be deleted. In that case, find the insn
3458 referencing that label that is a logical predecessor of the jump, and
3459 delete it and its logical successors too. */
3460
3461 static void
3462 redirect_tablejump (jump, nlabel)
3463 rtx jump, nlabel;
3464 {
3465 register rtx olabel = JUMP_LABEL (jump);
3466 rtx *notep, note, next;
3467
3468 /* Add this jump to the jump_chain of NLABEL. */
3469 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
3470 && INSN_UID (jump) < max_jump_chain)
3471 {
3472 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
3473 jump_chain[INSN_UID (nlabel)] = jump;
3474 }
3475
3476 for (notep = &REG_NOTES (jump), note = *notep; note; note = next)
3477 {
3478 next = XEXP (note, 1);
3479
3480 if (REG_NOTE_KIND (note) != REG_DEAD
3481 /* Verify that the REG_NOTE is legitimate. */
3482 || GET_CODE (XEXP (note, 0)) != REG
3483 || ! reg_mentioned_p (XEXP (note, 0), PATTERN (jump)))
3484 notep = &XEXP (note, 1);
3485 else
3486 {
3487 delete_prior_computation (note, jump);
3488 *notep = next;
3489 }
3490 }
3491
3492 PATTERN (jump) = gen_jump (nlabel);
3493 JUMP_LABEL (jump) = nlabel;
3494 ++LABEL_NUSES (nlabel);
3495 INSN_CODE (jump) = -1;
3496
3497 if (--LABEL_NUSES (olabel) == 0)
3498 {
3499 delete_labelref_insn (jump, olabel, 0);
3500 delete_insn (olabel);
3501 }
3502 }
3503
3504 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3505 If we found one, delete it and then delete this insn if DELETE_THIS is
3506 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3507
3508 static int
3509 delete_labelref_insn (insn, label, delete_this)
3510 rtx insn, label;
3511 int delete_this;
3512 {
3513 int deleted = 0;
3514 rtx link;
3515
3516 if (GET_CODE (insn) != NOTE
3517 && reg_mentioned_p (label, PATTERN (insn)))
3518 {
3519 if (delete_this)
3520 {
3521 delete_insn (insn);
3522 deleted = 1;
3523 }
3524 else
3525 return 1;
3526 }
3527
3528 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
3529 if (delete_labelref_insn (XEXP (link, 0), label, 1))
3530 {
3531 if (delete_this)
3532 {
3533 delete_insn (insn);
3534 deleted = 1;
3535 }
3536 else
3537 return 1;
3538 }
3539
3540 return deleted;
3541 }
3542 \f
3543 /* Like rtx_equal_p except that it considers two REGs as equal
3544 if they renumber to the same value and considers two commutative
3545 operations to be the same if the order of the operands has been
3546 reversed.
3547
3548 ??? Addition is not commutative on the PA due to the weird implicit
3549 space register selection rules for memory addresses. Therefore, we
3550 don't consider a + b == b + a.
3551
3552 We could/should make this test a little tighter. Possibly only
3553 disabling it on the PA via some backend macro or only disabling this
3554 case when the PLUS is inside a MEM. */
3555
3556 int
3557 rtx_renumbered_equal_p (x, y)
3558 rtx x, y;
3559 {
3560 register int i;
3561 register RTX_CODE code = GET_CODE (x);
3562 register const char *fmt;
3563
3564 if (x == y)
3565 return 1;
3566
3567 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3568 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3569 && GET_CODE (SUBREG_REG (y)) == REG)))
3570 {
3571 int reg_x = -1, reg_y = -1;
3572 int word_x = 0, word_y = 0;
3573
3574 if (GET_MODE (x) != GET_MODE (y))
3575 return 0;
3576
3577 /* If we haven't done any renumbering, don't
3578 make any assumptions. */
3579 if (reg_renumber == 0)
3580 return rtx_equal_p (x, y);
3581
3582 if (code == SUBREG)
3583 {
3584 reg_x = REGNO (SUBREG_REG (x));
3585 word_x = SUBREG_WORD (x);
3586
3587 if (reg_renumber[reg_x] >= 0)
3588 {
3589 reg_x = reg_renumber[reg_x] + word_x;
3590 word_x = 0;
3591 }
3592 }
3593
3594 else
3595 {
3596 reg_x = REGNO (x);
3597 if (reg_renumber[reg_x] >= 0)
3598 reg_x = reg_renumber[reg_x];
3599 }
3600
3601 if (GET_CODE (y) == SUBREG)
3602 {
3603 reg_y = REGNO (SUBREG_REG (y));
3604 word_y = SUBREG_WORD (y);
3605
3606 if (reg_renumber[reg_y] >= 0)
3607 {
3608 reg_y = reg_renumber[reg_y];
3609 word_y = 0;
3610 }
3611 }
3612
3613 else
3614 {
3615 reg_y = REGNO (y);
3616 if (reg_renumber[reg_y] >= 0)
3617 reg_y = reg_renumber[reg_y];
3618 }
3619
3620 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
3621 }
3622
3623 /* Now we have disposed of all the cases
3624 in which different rtx codes can match. */
3625 if (code != GET_CODE (y))
3626 return 0;
3627
3628 switch (code)
3629 {
3630 case PC:
3631 case CC0:
3632 case ADDR_VEC:
3633 case ADDR_DIFF_VEC:
3634 return 0;
3635
3636 case CONST_INT:
3637 return INTVAL (x) == INTVAL (y);
3638
3639 case LABEL_REF:
3640 /* We can't assume nonlocal labels have their following insns yet. */
3641 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3642 return XEXP (x, 0) == XEXP (y, 0);
3643
3644 /* Two label-refs are equivalent if they point at labels
3645 in the same position in the instruction stream. */
3646 return (next_real_insn (XEXP (x, 0))
3647 == next_real_insn (XEXP (y, 0)));
3648
3649 case SYMBOL_REF:
3650 return XSTR (x, 0) == XSTR (y, 0);
3651
3652 case CODE_LABEL:
3653 /* If we didn't match EQ equality above, they aren't the same. */
3654 return 0;
3655
3656 default:
3657 break;
3658 }
3659
3660 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3661
3662 if (GET_MODE (x) != GET_MODE (y))
3663 return 0;
3664
3665 /* For commutative operations, two RTXs match if their operands match in
3666 either order. Also handle the simple binary and unary cases without a loop.
3667
3668 ??? Don't consider PLUS a commutative operator; see comments above. */
3669 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3670 && code != PLUS)
3671 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3672 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
3673 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
3674 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
3675 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
3676 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3677 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
3678 else if (GET_RTX_CLASS (code) == '1')
3679 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
3680
3681 /* Compare the elements. If any pair of corresponding elements
3682 fails to match, return 0 for the whole thing. */
3683
3684 fmt = GET_RTX_FORMAT (code);
3685 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3686 {
3687 register int j;
3688 switch (fmt[i])
3689 {
3690 case 'w':
3691 if (XWINT (x, i) != XWINT (y, i))
3692 return 0;
3693 break;
3694
3695 case 'i':
3696 if (XINT (x, i) != XINT (y, i))
3697 return 0;
3698 break;
3699
3700 case 's':
3701 if (strcmp (XSTR (x, i), XSTR (y, i)))
3702 return 0;
3703 break;
3704
3705 case 'e':
3706 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
3707 return 0;
3708 break;
3709
3710 case 'u':
3711 if (XEXP (x, i) != XEXP (y, i))
3712 return 0;
3713 /* fall through. */
3714 case '0':
3715 break;
3716
3717 case 'E':
3718 if (XVECLEN (x, i) != XVECLEN (y, i))
3719 return 0;
3720 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3721 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
3722 return 0;
3723 break;
3724
3725 default:
3726 abort ();
3727 }
3728 }
3729 return 1;
3730 }
3731 \f
3732 /* If X is a hard register or equivalent to one or a subregister of one,
3733 return the hard register number. If X is a pseudo register that was not
3734 assigned a hard register, return the pseudo register number. Otherwise,
3735 return -1. Any rtx is valid for X. */
3736
3737 int
3738 true_regnum (x)
3739 rtx x;
3740 {
3741 if (GET_CODE (x) == REG)
3742 {
3743 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3744 return reg_renumber[REGNO (x)];
3745 return REGNO (x);
3746 }
3747 if (GET_CODE (x) == SUBREG)
3748 {
3749 int base = true_regnum (SUBREG_REG (x));
3750 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3751 return SUBREG_WORD (x) + base;
3752 }
3753 return -1;
3754 }
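
/* An illustrative sketch, not part of jump.c: with renumbering and
   word subregs folded away by true_regnum, a post-reload "same hard
   register" test reduces to an integer comparison.  The helper name
   is hypothetical.  */

static int
same_hard_reg_p (x, y)
     rtx x, y;
{
  int rx = true_regnum (x);
  int ry = true_regnum (y);

  return rx >= 0 && rx < FIRST_PSEUDO_REGISTER && rx == ry;
}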
3755 \f
3756 /* Optimize code of the form:
3757
3758 for (x = a[i]; x; ...)
3759 ...
3760 for (x = a[i]; x; ...)
3761 ...
3762 foo:
3763
3764 Loop optimize will change the above code into
3765
3766 if (x = a[i])
3767 for (;;)
3768 { ...; if (! (x = ...)) break; }
3769 if (x = a[i])
3770 for (;;)
3771 { ...; if (! (x = ...)) break; }
3772 foo:
3773
3774 In general, if the first test fails, the program can branch
3775 directly to `foo' and skip the second try which is doomed to fail.
3776 We run this after loop optimization and before flow analysis. */
3777
3778 /* When comparing the insn patterns, we track the fact that different
3779 pseudo-register numbers may have been used in each computation.
3780 The following array stores an equivalence -- same_regs[I] == J means
3781 that pseudo register I was used in the first set of tests in a context
3782 where J was used in the second set. We also count the number of such
3783 pending equivalences. If the count is nonzero at the end, the
3784 expressions really aren't the same. */
3785
3786 static int *same_regs;
3787
3788 static int num_same_regs;
3789
3790 /* Track any registers modified between the target of the first jump and
3791 the second jump. They never compare equal. */
3792
3793 static char *modified_regs;
3794
3795 /* Record if memory was modified. */
3796
3797 static int modified_mem;
3798
3799 /* Called via note_stores on each insn between the target of the first
3800 branch and the second branch. It marks any changed registers. */
3801
3802 static void
3803 mark_modified_reg (dest, x, data)
3804 rtx dest;
3805 rtx x ATTRIBUTE_UNUSED;
3806 void *data ATTRIBUTE_UNUSED;
3807 {
3808 int regno;
3809 unsigned int i;
3810
3811 if (GET_CODE (dest) == SUBREG)
3812 dest = SUBREG_REG (dest);
3813
3814 if (GET_CODE (dest) == MEM)
3815 modified_mem = 1;
3816
3817 if (GET_CODE (dest) != REG)
3818 return;
3819
3820 regno = REGNO (dest);
3821 if (regno >= FIRST_PSEUDO_REGISTER)
3822 modified_regs[regno] = 1;
3823 else
3824 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
3825 modified_regs[regno + i] = 1;
3826 }
3827
3828 /* F is the first insn in the chain of insns. */
3829
3830 void
3831 thread_jumps (f, max_reg, flag_before_loop)
3832 rtx f;
3833 int max_reg;
3834 int flag_before_loop;
3835 {
3836 /* Basic algorithm is to find a conditional branch,
3837 the label it may branch to, and the branch after
3838 that label. If the two branches test the same condition,
3839 walk back from both branch paths until the insn patterns
3840 differ, or code labels are hit. If we make it back to
3841 the target of the first branch, then we know that the first branch
3842 will either always succeed or always fail depending on the relative
3843 senses of the two branches. So adjust the first branch accordingly
3844 in this case. */
3845
3846 rtx label, b1, b2, t1, t2;
3847 enum rtx_code code1, code2;
3848 rtx b1op0, b1op1, b2op0, b2op1;
3849 int changed = 1;
3850 int i;
3851 int *all_reset;
3852
3853 /* Allocate register tables and quick-reset table. */
3854 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
3855 same_regs = (int *) xmalloc (max_reg * sizeof (int));
3856 all_reset = (int *) xmalloc (max_reg * sizeof (int));
3857 for (i = 0; i < max_reg; i++)
3858 all_reset[i] = -1;
3859
3860 while (changed)
3861 {
3862 changed = 0;
3863
3864 for (b1 = f; b1; b1 = NEXT_INSN (b1))
3865 {
3866 rtx set;
3867 rtx set2;
3868
3869 /* Get to a candidate branch insn. */
3870 if (GET_CODE (b1) != JUMP_INSN
3871 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
3872 continue;
3873
3874 memset (modified_regs, 0, max_reg * sizeof (char));
3875 modified_mem = 0;
3876
3877 memcpy (same_regs, all_reset, max_reg * sizeof (int));
3878 num_same_regs = 0;
3879
3880 label = JUMP_LABEL (b1);
3881
3882 /* Look for a branch after the target. Record any registers and
3883 memory modified between the target and the branch. Stop when we
3884 reach a label, since other paths may enter there and we can't know what they change. */
3885 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
3886 {
3887 if (GET_CODE (b2) == CODE_LABEL)
3888 break;
3889
3890 else if (GET_CODE (b2) == JUMP_INSN)
3891 {
3892 /* If this is an unconditional jump and is the only use of
3893 its target label, we can follow it. */
3894 if (any_uncondjump_p (b2)
3895 && onlyjump_p (b2)
3896 && JUMP_LABEL (b2) != 0
3897 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
3898 {
3899 b2 = JUMP_LABEL (b2);
3900 continue;
3901 }
3902 else
3903 break;
3904 }
3905
3906 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
3907 continue;
3908
3909 if (GET_CODE (b2) == CALL_INSN)
3910 {
3911 modified_mem = 1;
3912 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3913 if (call_used_regs[i] && ! fixed_regs[i]
3914 && i != STACK_POINTER_REGNUM
3915 && i != FRAME_POINTER_REGNUM
3916 && i != HARD_FRAME_POINTER_REGNUM
3917 && i != ARG_POINTER_REGNUM)
3918 modified_regs[i] = 1;
3919 }
3920
3921 note_stores (PATTERN (b2), mark_modified_reg, NULL);
3922 }
3923
3924 /* Check that what we found after the label of the first branch
3925 is itself a candidate conditional branch. */
3926 if (b2 == 0
3927 || GET_CODE (b2) != JUMP_INSN
3928 || b2 == b1
3929 || !any_condjump_p (b2)
3930 || !onlyjump_p (b2))
3931 continue;
3932 set = pc_set (b1);
3933 set2 = pc_set (b2);
3934
3935 /* Get the comparison codes and operands, reversing the
3936 codes if appropriate. If we don't have comparison codes,
3937 we can't do anything. */
3938 b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
3939 b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
3940 code1 = GET_CODE (XEXP (SET_SRC (set), 0));
3941 if (XEXP (SET_SRC (set), 1) == pc_rtx)
3942 code1 = reverse_condition (code1);
3943
3944 b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
3945 b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
3946 code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
3947 if (XEXP (SET_SRC (set2), 1) == pc_rtx)
3948 code2 = reverse_condition (code2);
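
/* The pc_set of a conditional jump has the form
(set (pc) (if_then_else (COND op0 op1) (label_ref L) (pc)))
or with the two arms swapped. When the THEN arm is (pc), the branch
is taken when COND is false, hence the reverse_condition calls above. */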
3949
3950 /* If they test the same things and knowing that B1 branches
3951 tells us whether or not B2 branches, check if we
3952 can thread the branch. */
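/* For example, comparison_dominates_p (EQ, LE) holds: if op0 == op1
made B1 branch, op0 <= op1 is certain to make B2 branch too. When
CODE1 instead dominates the reverse of CODE2, B1 branching guarantees
that B2 falls through. */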
3953 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
3954 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
3955 && (comparison_dominates_p (code1, code2)
3956 || (can_reverse_comparison_p (XEXP (SET_SRC (set), 0), b1)
3957 && comparison_dominates_p (code1,
3958 reverse_condition (code2)))))
3960 {
3961 t1 = prev_nonnote_insn (b1);
3962 t2 = prev_nonnote_insn (b2);
3963
3964 while (t1 != 0 && t2 != 0)
3965 {
3966 if (t2 == label)
3967 {
3968 /* We have reached the target of the first branch.
3969 If there are no pending register equivalents,
3970 we know that this branch will either always
3971 succeed (if the senses of the two branches are
3972 the same) or always fail (if not). */
3973 rtx new_label;
3974
3975 if (num_same_regs != 0)
3976 break;
3977
3978 if (comparison_dominates_p (code1, code2))
3979 new_label = JUMP_LABEL (b2);
3980 else
3981 new_label = get_label_after (b2);
3982
3983 if (JUMP_LABEL (b1) != new_label)
3984 {
3985 rtx prev = PREV_INSN (new_label);
3986
3987 if (flag_before_loop
3988 && GET_CODE (prev) == NOTE
3989 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
3990 {
3991 /* Don't thread to the loop label. If a loop
3992 label is reused, loop optimization will
3993 be disabled for that loop. */
3994 new_label = gen_label_rtx ();
3995 emit_label_after (new_label, PREV_INSN (prev));
3996 }
3997 changed |= redirect_jump (b1, new_label, 1);
3998 }
3999 break;
4000 }
4001
4002 /* If either of these is not a normal insn (it might be
4003 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4004 have already been skipped above.) Similarly, fail
4005 if the insns are different. */
4006 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
4007 || recog_memoized (t1) != recog_memoized (t2)
4008 || ! rtx_equal_for_thread_p (PATTERN (t1),
4009 PATTERN (t2), t2))
4010 break;
4011
4012 t1 = prev_nonnote_insn (t1);
4013 t2 = prev_nonnote_insn (t2);
4014 }
4015 }
4016 }
4017 }
4018
4019 /* Clean up. */
4020 free (modified_regs);
4021 free (same_regs);
4022 free (all_reset);
4023 }
4024 \f
4025 /* This is like RTX_EQUAL_P except that it knows about our handling of
4026 possibly equivalent registers and knows to consider volatile and
4027 modified objects as not equal.
4028
4029 YINSN is the insn containing Y. */
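
/* For example (illustrative numbers): (reg:SI 101) matches
(reg:SI 101) only while that register is unmodified between the two
branches, and can match (reg:SI 105) when the pairing is admissible
under same_regs; once modified_mem is set, no two MEMs compare equal
at all. */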
4030
4031 int
4032 rtx_equal_for_thread_p (x, y, yinsn)
4033 rtx x, y;
4034 rtx yinsn;
4035 {
4036 register int i;
4037 register int j;
4038 register enum rtx_code code;
4039 register const char *fmt;
4040
4041 code = GET_CODE (x);
4042 /* Rtx's of different codes cannot be equal. */
4043 if (code != GET_CODE (y))
4044 return 0;
4045
4046 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
4047 (REG:SI x) and (REG:HI x) are NOT equivalent. */
4048
4049 if (GET_MODE (x) != GET_MODE (y))
4050 return 0;
4051
4052 /* For IEEE floating-point modes, consider everything unequal unless
4053 -ffast-math. This is a bit pessimistic, but this pass would only
4054 rarely do anything for FP anyway. */
4055 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
4056 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
4057 return 0;
4058
4059 /* For commutative operations, the RTXs match if the operands match in
4060 either order. Also handle the simple binary and unary cases without a loop. */
4061 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4062 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4063 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
4064 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
4065 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
4066 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4067 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4068 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
4069 else if (GET_RTX_CLASS (code) == '1')
4070 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
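
/* E.g. (plus:SI (reg:SI 101) (const_int 4)) can match
(plus:SI (const_int 4) (reg:SI 105)) through the commutative case
above, provided the 101/105 pairing is admissible. (Again, the
register numbers are illustrative.) */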
4071
4072 /* Handle special cases first. */
4073 switch (code)
4074 {
4075 case REG:
4076 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
4077 return 1;
4078
4079 /* If neither is a user variable or a hard register, check for a
4080 possible equivalence. */
4081 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
4082 || REGNO (x) < FIRST_PSEUDO_REGISTER
4083 || REGNO (y) < FIRST_PSEUDO_REGISTER)
4084 return 0;
4085
4086 if (same_regs[REGNO (x)] == -1)
4087 {
4088 same_regs[REGNO (x)] = REGNO (y);
4089 num_same_regs++;
4090
4091 /* If this is the first time we are seeing a register on the `Y'
4092 side, check that this is its last use. If not, we can't thread
4093 the jump, so treat them as not equivalent. */
4094 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
4095 return 0;
4096
4097 return 1;
4098 }
4099 else
4100 return (same_regs[REGNO (x)] == (int) REGNO (y));
4101
4102 break;
4103
4104 case MEM:
4105 /* If memory was modified or either ref is volatile, they are not
4106 equivalent. Otherwise, compare the addresses. */
4107 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4108 return 0;
4109
4110 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4111
4112 case ASM_INPUT:
4113 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4114 return 0;
4115
4116 break;
4117
4118 case SET:
4119 /* Cancel a pending `same_regs' entry when both insns set the
4120 equivalenced registers. Then compare the sources. */
4121 if (GET_CODE (SET_DEST (x)) == REG
4122 && GET_CODE (SET_DEST (y)) == REG)
4123 {
4124 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
4125 {
4126 same_regs[REGNO (SET_DEST (x))] = -1;
4127 num_same_regs--;
4128 }
4129 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
4130 return 0;
4131 }
4132 else
4133 {
4134 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
4135 return 0;
4136 }
4137
4138 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
4139
4140 case LABEL_REF:
4141 return XEXP (x, 0) == XEXP (y, 0);
4142
4143 case SYMBOL_REF:
4144 return XSTR (x, 0) == XSTR (y, 0);
4145
4146 default:
4147 break;
4148 }
4149
4150 if (x == y)
4151 return 1;
4152
4153 fmt = GET_RTX_FORMAT (code);
4154 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4155 {
4156 switch (fmt[i])
4157 {
4158 case 'w':
4159 if (XWINT (x, i) != XWINT (y, i))
4160 return 0;
4161 break;
4162
4163 case 'n':
4164 case 'i':
4165 if (XINT (x, i) != XINT (y, i))
4166 return 0;
4167 break;
4168
4169 case 'V':
4170 case 'E':
4171 /* Two vectors must have the same length. */
4172 if (XVECLEN (x, i) != XVECLEN (y, i))
4173 return 0;
4174
4175 /* And the corresponding elements must match. */
4176 for (j = 0; j < XVECLEN (x, i); j++)
4177 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
4178 XVECEXP (y, i, j), yinsn) == 0)
4179 return 0;
4180 break;
4181
4182 case 'e':
4183 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
4184 return 0;
4185 break;
4186
4187 case 'S':
4188 case 's':
4189 if (strcmp (XSTR (x, i), XSTR (y, i)))
4190 return 0;
4191 break;
4192
4193 case 'u':
4194 /* These are just backpointers, so they don't matter. */
4195 break;
4196
4197 case '0':
4198 case 't':
4199 break;
4200
4201 /* It is believed that rtx's at this level will never
4202 contain anything but integers and other rtx's,
4203 except for within LABEL_REFs and SYMBOL_REFs. */
4204 default:
4205 abort ();
4206 }
4207 }
4208 return 1;
4209 }