1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
25
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
30
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
38
39 Optionally, cross-jumping can be done. Currently it is done
40 only on the last pass (after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
43
44 Jump optimization is rerun after cse because cse's constant propagation
45 can cause jumps to become unconditional or to be deleted.
46
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
50
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
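/* For illustration (a sketch, independent of any target), the rewrites
   described above look schematically like:

     goto L1; ... L1: goto L2;          =>  goto L2; ... L1: goto L2;
     if (c) goto L1; goto L2; L1: ...   =>  if (!c) goto L2; L1: ...
     goto L1; L1: ...                   =>  L1: ...

   where the first is a jump to a jump, the second a jump around an
   unconditional jump, and the third a jump to the following insn.  */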
53
54 #include "config.h"
55 #include "system.h"
56 #include "rtl.h"
57 #include "tm_p.h"
58 #include "flags.h"
59 #include "hard-reg-set.h"
60 #include "regs.h"
61 #include "insn-config.h"
62 #include "insn-flags.h"
63 #include "insn-attr.h"
64 #include "recog.h"
65 #include "function.h"
66 #include "expr.h"
67 #include "real.h"
68 #include "except.h"
69 #include "toplev.h"
70
71 /* ??? Eventually must record somehow the labels used by jumps
72 from nested functions. */
73 /* Pre-record the next or previous real insn for each label?
74 No, this pass is very fast anyway. */
75 /* Condense consecutive labels?
76 This would make life analysis faster, maybe. */
77 /* Optimize jump y; x: ... y: jumpif... x?
78 Don't know if it is worth bothering with. */
79 /* Optimize two cases of conditional jump to conditional jump?
80 This can never delete any instruction or make anything dead,
81 or even change what is live at any point.
82 So perhaps let combiner do it. */
83
84 /* Vector indexed by uid.
85 For each CODE_LABEL, index by its uid to get first unconditional jump
86 that jumps to the label.
87 For each JUMP_INSN, index by its uid to get the next unconditional jump
88 that jumps to the same label.
89 Element 0 is the start of a chain of all return insns.
90 (It is safe to use element 0 because insn uid 0 is not used.) */
91
92 static rtx *jump_chain;
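/* For example, the chain for a label LABEL is walked elsewhere in this
   file (see the cross-jump code below) essentially as:

     rtx j;
     for (j = jump_chain[INSN_UID (label)]; j != 0;
          j = jump_chain[INSN_UID (j)])
       ...visit the unconditional jump J...

   and the chain rooted at jump_chain[0] is walked the same way to visit
   all return insns.  */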
93
94 /* Maximum index in jump_chain. */
95
96 static int max_jump_chain;
97
98 Indicates whether death notes are significant in cross jump analysis.
99 Normally they are not significant, because if A and B both jump to C
100 and R dies in A, it must also die in B. But this might not be true after
101 stack register conversion, and we must compare death notes in that
102 case. */
103
104 static int cross_jump_death_matters = 0;
105
106 static int init_label_info PARAMS ((rtx));
107 static void delete_barrier_successors PARAMS ((rtx));
108 static void mark_all_labels PARAMS ((rtx, int));
109 static rtx delete_unreferenced_labels PARAMS ((rtx));
110 static void delete_noop_moves PARAMS ((rtx));
111 static int duplicate_loop_exit_test PARAMS ((rtx));
112 static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
113 static void do_cross_jump PARAMS ((rtx, rtx, rtx));
114 static int jump_back_p PARAMS ((rtx, rtx));
115 static int tension_vector_labels PARAMS ((rtx, int));
116 static void delete_computation PARAMS ((rtx));
117 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
118 static int redirect_exp PARAMS ((rtx, rtx, rtx));
119 static void invert_exp_1 PARAMS ((rtx));
120 static int invert_exp PARAMS ((rtx));
121 static void delete_from_jump_chain PARAMS ((rtx));
122 static int delete_labelref_insn PARAMS ((rtx, rtx, int));
123 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
124 static void redirect_tablejump PARAMS ((rtx, rtx));
125 static void jump_optimize_1 PARAMS ((rtx, int, int, int, int, int));
126 static int returnjump_p_1 PARAMS ((rtx *, void *));
127 static void delete_prior_computation PARAMS ((rtx, rtx));
128 \f
129 /* Main external entry point into the jump optimizer. See comments before
130 jump_optimize_1 for descriptions of the arguments. */
131 void
132 jump_optimize (f, cross_jump, noop_moves, after_regscan)
133 rtx f;
134 int cross_jump;
135 int noop_moves;
136 int after_regscan;
137 {
138 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
139 }
140
141 /* Alternate entry into the jump optimizer. This entry point only rebuilds
142 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
143 instructions. */
144 void
145 rebuild_jump_labels (f)
146 rtx f;
147 {
148 jump_optimize_1 (f, 0, 0, 0, 1, 0);
149 }
150
151 /* Alternate entry into the jump optimizer. Do only trivial optimizations. */
152
153 void
154 jump_optimize_minimal (f)
155 rtx f;
156 {
157 jump_optimize_1 (f, 0, 0, 0, 0, 1);
158 }
159 \f
160 /* Delete no-op jumps and optimize jumps to jumps
161 and jumps around jumps.
162 Delete unused labels and unreachable code.
163
164 If CROSS_JUMP is 1, detect matching code
165 before a jump and its destination and unify them.
166 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
167
168 If NOOP_MOVES is nonzero, delete no-op move insns.
169
170 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
171 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
172
173 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
174 and JUMP_LABEL field for jumping insns.
175
176 If `optimize' is zero, don't change any code,
177 just determine whether control drops off the end of the function.
178 This case occurs when we have -W and not -O.
179 It works because `delete_insn' checks the value of `optimize'
180 and refrains from actually deleting when that is 0.
181
182 If MINIMAL is nonzero, then we only perform trivial optimizations:
183
184 * Removal of unreachable code after BARRIERs.
185 * Removal of unreferenced CODE_LABELs.
186 * Removal of a jump to the next instruction.
187 * Removal of a conditional jump followed by an unconditional jump
188 to the same target as the conditional jump.
189 * Simplify a conditional jump around an unconditional jump.
190 * Simplify a jump to a jump.
191 * Delete extraneous line number notes.
192 */
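/* As a concrete instance of the MINIMAL cases above (a sketch):

     (jump_insn (set (pc) (label_ref L)))
     (code_label L)

   is a jump to the following insn, so the jump is deleted and the
   LABEL_NUSES count of L drops, possibly allowing an otherwise
   unreferenced L to be deleted as well.  */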
193
194 static void
195 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
196 mark_labels_only, minimal)
197 rtx f;
198 int cross_jump;
199 int noop_moves;
200 int after_regscan;
201 int mark_labels_only;
202 int minimal;
203 {
204 register rtx insn, next;
205 int changed;
206 int old_max_reg;
207 int first = 1;
208 int max_uid = 0;
209 rtx last_insn;
210 #ifdef HAVE_trap
211 enum rtx_code reversed_code;
212 #endif
213
214 cross_jump_death_matters = (cross_jump == 2);
215 max_uid = init_label_info (f) + 1;
216
217 /* If we are performing cross jump optimizations, then initialize
218 tables mapping UIDs to EH regions to avoid incorrect movement
219 of insns from one EH region to another. */
220 if (flag_exceptions && cross_jump)
221 init_insn_eh_region (f, max_uid);
222
223 if (! mark_labels_only)
224 delete_barrier_successors (f);
225
226 /* Leave some extra room for labels and duplicate exit test insns
227 we make. */
228 max_jump_chain = max_uid * 14 / 10;
229 jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));
230
231 mark_all_labels (f, cross_jump);
232
233 /* Keep track of labels used from static data; we don't track them
234 closely enough to delete them here, so make sure their reference
235 count doesn't drop to zero. */
236
237 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
238 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
239 LABEL_NUSES (XEXP (insn, 0))++;
240
241 check_exception_handler_labels ();
242
243 /* Keep track of labels used for marking handlers for exception
244 regions; they cannot usually be deleted. */
245
246 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
247 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
248 LABEL_NUSES (XEXP (insn, 0))++;
249
250 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
251 notes and recompute LABEL_NUSES. */
252 if (mark_labels_only)
253 goto end;
254
255 if (! minimal)
256 exception_optimize ();
257
258 last_insn = delete_unreferenced_labels (f);
259
260 if (noop_moves)
261 delete_noop_moves (f);
262
263 /* If we haven't yet gotten to reload and we have just run regscan,
264 delete any insn that sets a register that isn't used elsewhere.
265 This helps some of the optimizations below by leaving fewer insns
266 to be jumped around. */
267
268 if (optimize && ! reload_completed && after_regscan)
269 for (insn = f; insn; insn = next)
270 {
271 rtx set = single_set (insn);
272
273 next = NEXT_INSN (insn);
274
275 if (set && GET_CODE (SET_DEST (set)) == REG
276 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
277 && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
278 /* We use regno_last_note_uid so as not to delete the setting
279 of a reg that's used in notes. A subsequent optimization
280 might arrange to use that reg for real. */
281 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
282 && ! side_effects_p (SET_SRC (set))
283 && ! find_reg_note (insn, REG_RETVAL, 0)
284 /* An ADDRESSOF expression can turn into a use of the internal arg
285 pointer, so do not delete the initialization of the internal
286 arg pointer yet. If it is truly dead, flow will delete the
287 initializing insn. */
288 && SET_DEST (set) != current_function_internal_arg_pointer)
289 delete_insn (insn);
290 }
291
292 /* Now iterate optimizing jumps until nothing changes over one pass. */
293 changed = 1;
294 old_max_reg = max_reg_num ();
295 while (changed)
296 {
297 changed = 0;
298
299 for (insn = f; insn; insn = next)
300 {
301 rtx reallabelprev;
302 rtx temp, temp1, temp2 = NULL_RTX;
303 rtx temp4 ATTRIBUTE_UNUSED;
304 rtx nlabel;
305 int this_is_any_uncondjump;
306 int this_is_any_condjump;
307 int this_is_onlyjump;
308
309 next = NEXT_INSN (insn);
310
311 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
312 jump. Try to optimize by duplicating the loop exit test if so.
313 This is only safe immediately after regscan, because it uses
314 the values of regno_first_uid and regno_last_uid. */
315 if (after_regscan && GET_CODE (insn) == NOTE
316 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
317 && (temp1 = next_nonnote_insn (insn)) != 0
318 && any_uncondjump_p (temp1)
319 && onlyjump_p (temp1))
320 {
321 temp = PREV_INSN (insn);
322 if (duplicate_loop_exit_test (insn))
323 {
324 changed = 1;
325 next = NEXT_INSN (temp);
326 continue;
327 }
328 }
329
330 if (GET_CODE (insn) != JUMP_INSN)
331 continue;
332
333 this_is_any_condjump = any_condjump_p (insn);
334 this_is_any_uncondjump = any_uncondjump_p (insn);
335 this_is_onlyjump = onlyjump_p (insn);
336
337 /* Tension the labels in dispatch tables. */
338
339 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
340 changed |= tension_vector_labels (PATTERN (insn), 0);
341 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
342 changed |= tension_vector_labels (PATTERN (insn), 1);
343
344 /* See if this jump goes to another jump and redirect if so. */
345 nlabel = follow_jumps (JUMP_LABEL (insn));
346 if (nlabel != JUMP_LABEL (insn))
347 changed |= redirect_jump (insn, nlabel, 1);
348
349 if (! optimize || minimal)
350 continue;
351
352 /* If a dispatch table always goes to the same place,
353 get rid of it and replace the insn that uses it. */
354
355 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
356 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
357 {
358 int i;
359 rtx pat = PATTERN (insn);
360 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
361 int len = XVECLEN (pat, diff_vec_p);
362 rtx dispatch = prev_real_insn (insn);
363 rtx set;
364
365 for (i = 0; i < len; i++)
366 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
367 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
368 break;
369
370 if (i == len
371 && dispatch != 0
372 && GET_CODE (dispatch) == JUMP_INSN
373 && JUMP_LABEL (dispatch) != 0
374 /* Don't mess with a casesi insn.
375 XXX according to the comment before computed_jump_p(),
376 all casesi insns should be a parallel of the jump
377 and a USE of a LABEL_REF. */
378 && ! ((set = single_set (dispatch)) != NULL
379 && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
380 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
381 {
382 redirect_tablejump (dispatch,
383 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
384 changed = 1;
385 }
386 }
387
388 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
389
390 /* Detect jump to following insn. */
391 if (reallabelprev == insn
392 && (this_is_any_condjump || this_is_any_uncondjump)
393 && this_is_onlyjump)
394 {
395 next = next_real_insn (JUMP_LABEL (insn));
396 delete_jump (insn);
397
398 /* Remove the "inactive" but "real" insns (i.e. uses and
399 clobbers) in between here and there. */
400 temp = insn;
401 while ((temp = next_real_insn (temp)) != next)
402 delete_insn (temp);
403
404 changed = 1;
405 continue;
406 }
407
408 /* Detect a conditional jump going to the same place
409 as an immediately following unconditional jump. */
410 else if (this_is_any_condjump && this_is_onlyjump
411 && (temp = next_active_insn (insn)) != 0
412 && simplejump_p (temp)
413 && (next_active_insn (JUMP_LABEL (insn))
414 == next_active_insn (JUMP_LABEL (temp))))
415 {
416 /* Don't mess up test coverage analysis. */
417 temp2 = temp;
418 if (flag_test_coverage && !reload_completed)
419 for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
420 if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
421 break;
422
423 if (temp2 == temp)
424 {
425 /* Ensure that we jump to the later of the two labels.
426 Consider:
427
428 if (test) goto L2;
429 goto L1;
430 ...
431 L1:
432 (clobber return-reg)
433 L2:
434 (use return-reg)
435
436 If we leave the goto L1, we'll incorrectly leave
437 return-reg dead for TEST true. */
438
439 temp2 = next_active_insn (JUMP_LABEL (insn));
440 if (!temp2)
441 temp2 = get_last_insn ();
442 if (GET_CODE (temp2) != CODE_LABEL)
443 temp2 = prev_label (temp2);
444 if (temp2 != JUMP_LABEL (temp))
445 redirect_jump (temp, temp2, 1);
446
447 delete_jump (insn);
448 changed = 1;
449 continue;
450 }
451 }
452
453 /* Detect a conditional jump jumping over an unconditional jump. */
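/* That is (a sketch):

     if (c) goto L1;  goto L2;  L1: ...

   becomes

     if (!c) goto L2;  L1: ...

   by inverting the condition of the first jump and deleting the
   unconditional jump (and, if it becomes unreferenced, the label L1).  */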
454
455 else if (this_is_any_condjump
456 && reallabelprev != 0
457 && GET_CODE (reallabelprev) == JUMP_INSN
458 && prev_active_insn (reallabelprev) == insn
459 && no_labels_between_p (insn, reallabelprev)
460 && any_uncondjump_p (reallabelprev)
461 && onlyjump_p (reallabelprev))
462 {
463 /* When we invert the unconditional jump, we will be
464 decrementing the usage count of its old label.
465 Make sure that we don't delete it now because that
466 might cause the following code to be deleted. */
467 rtx prev_uses = prev_nonnote_insn (reallabelprev);
468 rtx prev_label = JUMP_LABEL (insn);
469
470 if (prev_label)
471 ++LABEL_NUSES (prev_label);
472
473 if (invert_jump (insn, JUMP_LABEL (reallabelprev), 1))
474 {
475 /* It is very likely that if there are USE insns before
476 this jump, they hold REG_DEAD notes. These REG_DEAD
477 notes are no longer valid due to this optimization,
478 and will cause the life analysis done by following passes
479 (notably delayed-branch scheduling) to think that
480 these registers are dead when they are not.
481
482 To prevent this trouble, we just remove the USE insns
483 from the insn chain. */
484
485 while (prev_uses && GET_CODE (prev_uses) == INSN
486 && GET_CODE (PATTERN (prev_uses)) == USE)
487 {
488 rtx useless = prev_uses;
489 prev_uses = prev_nonnote_insn (prev_uses);
490 delete_insn (useless);
491 }
492
493 delete_insn (reallabelprev);
494 changed = 1;
495 }
496
497 /* We can now safely delete the label if it is unreferenced
498 since the delete_insn above has deleted the BARRIER. */
499 if (prev_label && --LABEL_NUSES (prev_label) == 0)
500 delete_insn (prev_label);
501
502 next = NEXT_INSN (insn);
503 }
504
505 /* If we have an unconditional jump preceded by a USE, try to put
506 the USE before the target and jump there. This simplifies many
507 of the optimizations below since we don't have to worry about
508 dealing with these USE insns. We only do this if the label
509 being branched to already has the identical USE or if code
510 never falls through to that label. */
511
512 else if (this_is_any_uncondjump
513 && (temp = prev_nonnote_insn (insn)) != 0
514 && GET_CODE (temp) == INSN
515 && GET_CODE (PATTERN (temp)) == USE
516 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
517 && (GET_CODE (temp1) == BARRIER
518 || (GET_CODE (temp1) == INSN
519 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
520 /* Don't do this optimization if we have a loop containing
521 only the USE instruction, and the loop start label has
522 a usage count of 1. This is because we will redo this
523 optimization every time through the outer loop, and jump
524 opt will never exit. */
525 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
526 && temp2 == JUMP_LABEL (insn)
527 && LABEL_NUSES (temp2) == 1))
528 {
529 if (GET_CODE (temp1) == BARRIER)
530 {
531 emit_insn_after (PATTERN (temp), temp1);
532 temp1 = NEXT_INSN (temp1);
533 }
534
535 delete_insn (temp);
536 redirect_jump (insn, get_label_before (temp1), 1);
537 reallabelprev = prev_real_insn (temp1);
538 changed = 1;
539 next = NEXT_INSN (insn);
540 }
541
542 #ifdef HAVE_trap
543 /* Detect a conditional jump jumping over an unconditional trap. */
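/* That is (a sketch): `if (c) goto L; trap; L: ...' becomes a single
   conditional trap on the reversed condition, assuming the target
   provides a conditional trap and C is reversible.  */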
544 if (HAVE_trap
545 && this_is_any_condjump && this_is_onlyjump
546 && reallabelprev != 0
547 && GET_CODE (reallabelprev) == INSN
548 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
549 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
550 && prev_active_insn (reallabelprev) == insn
551 && no_labels_between_p (insn, reallabelprev)
552 && (temp2 = get_condition (insn, &temp4))
553 && ((reversed_code = reversed_comparison_code (temp2, insn))
554 != UNKNOWN))
555 {
556 rtx new = gen_cond_trap (reversed_code,
557 XEXP (temp2, 0), XEXP (temp2, 1),
558 TRAP_CODE (PATTERN (reallabelprev)));
559
560 if (new)
561 {
562 emit_insn_before (new, temp4);
563 delete_insn (reallabelprev);
564 delete_jump (insn);
565 changed = 1;
566 continue;
567 }
568 }
569 /* Detect a jump jumping to an unconditional trap. */
570 else if (HAVE_trap && this_is_onlyjump
571 && (temp = next_active_insn (JUMP_LABEL (insn)))
572 && GET_CODE (temp) == INSN
573 && GET_CODE (PATTERN (temp)) == TRAP_IF
574 && (this_is_any_uncondjump
575 || (this_is_any_condjump
576 && (temp2 = get_condition (insn, &temp4)))))
577 {
578 rtx tc = TRAP_CONDITION (PATTERN (temp));
579
580 if (tc == const_true_rtx
581 || (! this_is_any_uncondjump && rtx_equal_p (temp2, tc)))
582 {
583 rtx new;
584 /* Replace an unconditional jump to a trap with a trap. */
585 if (this_is_any_uncondjump)
586 {
587 emit_barrier_after (emit_insn_before (gen_trap (), insn));
588 delete_jump (insn);
589 changed = 1;
590 continue;
591 }
592 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
593 XEXP (temp2, 1),
594 TRAP_CODE (PATTERN (temp)));
595 if (new)
596 {
597 emit_insn_before (new, temp4);
598 delete_jump (insn);
599 changed = 1;
600 continue;
601 }
602 }
603 /* If the trap condition and jump condition are mutually
604 exclusive, redirect the jump to the following insn. */
605 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
606 && this_is_any_condjump
607 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
608 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
609 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
610 && redirect_jump (insn, get_label_after (temp), 1))
611 {
612 changed = 1;
613 continue;
614 }
615 }
616 #endif
617 else
618 {
619 /* Now that the jump has been tensioned,
620 try cross jumping: check for identical code
621 before the jump and before its target label. */
622
623 /* First, cross jumping of conditional jumps: */
624
625 if (cross_jump && condjump_p (insn))
626 {
627 rtx newjpos, newlpos;
628 rtx x = prev_real_insn (JUMP_LABEL (insn));
629
630 /* A conditional jump may be crossjumped
631 only if the place it jumps to follows
632 an opposing jump that comes back here. */
633
634 if (x != 0 && ! jump_back_p (x, insn))
635 /* We have no opposing jump;
636 cannot cross jump this insn. */
637 x = 0;
638
639 newjpos = 0;
640 /* X is nonzero if it is ok to cross jump
641 to code before X. If so, see if it matches. */
642 if (x != 0)
643 find_cross_jump (insn, x, 2,
644 &newjpos, &newlpos);
645
646 if (newjpos != 0)
647 {
648 do_cross_jump (insn, newjpos, newlpos);
649 /* Make the old conditional jump
650 into an unconditional one. */
651 SET_SRC (PATTERN (insn))
652 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
653 INSN_CODE (insn) = -1;
654 emit_barrier_after (insn);
655 /* Add to jump_chain unless this is a new label
656 whose UID is too large. */
657 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
658 {
659 jump_chain[INSN_UID (insn)]
660 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
661 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
662 }
663 changed = 1;
664 next = insn;
665 }
666 }
667
668 /* Cross jumping of unconditional jumps:
669 a few differences. */
670
671 if (cross_jump && simplejump_p (insn))
672 {
673 rtx newjpos, newlpos;
674 rtx target;
675
676 newjpos = 0;
677
678 /* TARGET is nonzero if it is ok to cross jump
679 to code before TARGET. If so, see if it matches. */
680 find_cross_jump (insn, JUMP_LABEL (insn), 1,
681 &newjpos, &newlpos);
682
683 /* If we cannot cross jump to code before the label,
684 see if we can cross jump to another jump to
685 the same label. */
686 /* Try each other jump to this label. */
687 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
688 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
689 target != 0 && newjpos == 0;
690 target = jump_chain[INSN_UID (target)])
691 if (target != insn
692 && JUMP_LABEL (target) == JUMP_LABEL (insn)
693 /* Ignore TARGET if it's deleted. */
694 && ! INSN_DELETED_P (target))
695 find_cross_jump (insn, target, 2,
696 &newjpos, &newlpos);
697
698 if (newjpos != 0)
699 {
700 do_cross_jump (insn, newjpos, newlpos);
701 changed = 1;
702 next = insn;
703 }
704 }
705
706 /* This code was dead in the previous jump.c! */
707 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
708 {
709 /* Return insns all "jump to the same place"
710 so we can cross-jump between any two of them. */
711
712 rtx newjpos, newlpos, target;
713
714 newjpos = 0;
715
716 /* If we cannot cross jump to code before the label,
717 see if we can cross jump to another jump to
718 the same label. */
719 /* Try each other jump to this label. */
720 for (target = jump_chain[0];
721 target != 0 && newjpos == 0;
722 target = jump_chain[INSN_UID (target)])
723 if (target != insn
724 && ! INSN_DELETED_P (target)
725 && GET_CODE (PATTERN (target)) == RETURN)
726 find_cross_jump (insn, target, 2,
727 &newjpos, &newlpos);
728
729 if (newjpos != 0)
730 {
731 do_cross_jump (insn, newjpos, newlpos);
732 changed = 1;
733 next = insn;
734 }
735 }
736 }
737 }
738
739 first = 0;
740 }
741
742 /* Delete extraneous line number notes.
743 Note that two consecutive notes for different lines are not really
744 extraneous. There should be some indication where that line belonged,
745 even if it became empty. */
746
747 {
748 rtx last_note = 0;
749
750 for (insn = f; insn; insn = NEXT_INSN (insn))
751 if (GET_CODE (insn) == NOTE)
752 {
753 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
754 /* Any previous line note was for the prologue; gdb wants a new
755 note after the prologue even if it is for the same line. */
756 last_note = NULL_RTX;
757 else if (NOTE_LINE_NUMBER (insn) >= 0)
758 {
759 /* Delete this note if it is identical to previous note. */
760 if (last_note
761 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
762 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
763 {
764 delete_insn (insn);
765 continue;
766 }
767
768 last_note = insn;
769 }
770 }
771 }
772
773 end:
774 /* Clean up. */
775 free (jump_chain);
776 jump_chain = 0;
777 }
778 \f
779 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
780 notes whose labels don't occur in the insn any more. Returns the
781 largest INSN_UID found. */
782 static int
783 init_label_info (f)
784 rtx f;
785 {
786 int largest_uid = 0;
787 rtx insn;
788
789 for (insn = f; insn; insn = NEXT_INSN (insn))
790 {
791 if (GET_CODE (insn) == CODE_LABEL)
792 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
793 else if (GET_CODE (insn) == JUMP_INSN)
794 JUMP_LABEL (insn) = 0;
795 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
796 {
797 rtx note, next;
798
799 for (note = REG_NOTES (insn); note; note = next)
800 {
801 next = XEXP (note, 1);
802 if (REG_NOTE_KIND (note) == REG_LABEL
803 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
804 remove_note (insn, note);
805 }
806 }
807 if (INSN_UID (insn) > largest_uid)
808 largest_uid = INSN_UID (insn);
809 }
810
811 return largest_uid;
812 }
813
814 /* Delete insns following barriers, up to next label.
815
816 Also delete no-op jumps created by gcse. */
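/* For example (a sketch):

     (barrier) (insn A) (insn B) (code_label L) ...

   becomes

     (barrier) (code_label L) ...

   since A and B follow a barrier and precede any label, and so can
   never be reached.  */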
817
818 static void
819 delete_barrier_successors (f)
820 rtx f;
821 {
822 rtx insn;
823 rtx set;
824
825 for (insn = f; insn;)
826 {
827 if (GET_CODE (insn) == BARRIER)
828 {
829 insn = NEXT_INSN (insn);
830
831 never_reached_warning (insn);
832
833 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
834 {
835 if (GET_CODE (insn) == NOTE
836 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
837 insn = NEXT_INSN (insn);
838 else
839 insn = delete_insn (insn);
840 }
841 /* INSN is now the CODE_LABEL (or zero, at the end of the function). */
842 }
843
844 /* Also remove (set (pc) (pc)) insns which can be created by
845 gcse. We eliminate such insns now to avoid having them
846 cause problems later. */
847 else if (GET_CODE (insn) == JUMP_INSN
848 && (set = pc_set (insn)) != NULL
849 && SET_SRC (set) == pc_rtx
850 && SET_DEST (set) == pc_rtx
851 && onlyjump_p (insn))
852 insn = delete_insn (insn);
853
854 else
855 insn = NEXT_INSN (insn);
856 }
857 }
858
859 /* Mark the label each jump jumps to.
860 Combine consecutive labels, and count uses of labels.
861
862 For each label, make a chain (using `jump_chain')
863 of all the *unconditional* jumps that jump to it;
864 also make a chain of all returns.
865
866 CROSS_JUMP indicates whether we are doing cross jumping
867 and, if we are, whether we will be paying attention to
868 death notes or not. */
869
870 static void
871 mark_all_labels (f, cross_jump)
872 rtx f;
873 int cross_jump;
874 {
875 rtx insn;
876
877 for (insn = f; insn; insn = NEXT_INSN (insn))
878 if (INSN_P (insn))
879 {
880 if (GET_CODE (insn) == CALL_INSN
881 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
882 {
883 mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
884 mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
885 mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);
886 continue;
887 }
888
889 mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
890 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
891 {
892 /* When we know the LABEL_REF contained in a REG used in
893 an indirect jump, we'll have a REG_LABEL note so that
894 flow can tell where it's going. */
895 if (JUMP_LABEL (insn) == 0)
896 {
897 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
898 if (label_note)
899 {
900 /* Put a LABEL_REF around the REG_LABEL note, so
901 that we can canonicalize it. */
902 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
903 XEXP (label_note, 0));
904
905 mark_jump_label (label_ref, insn, cross_jump, 0);
906 XEXP (label_note, 0) = XEXP (label_ref, 0);
907 JUMP_LABEL (insn) = XEXP (label_note, 0);
908 }
909 }
910 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
911 {
912 jump_chain[INSN_UID (insn)]
913 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
914 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
915 }
916 if (GET_CODE (PATTERN (insn)) == RETURN)
917 {
918 jump_chain[INSN_UID (insn)] = jump_chain[0];
919 jump_chain[0] = insn;
920 }
921 }
922 }
923 }
924
925 /* Delete all labels that are no longer referenced.
926 Also find and return the last insn. */
927
928 static rtx
929 delete_unreferenced_labels (f)
930 rtx f;
931 {
932 rtx final = NULL_RTX;
933 rtx insn;
934
935 for (insn = f; insn;)
936 {
937 if (GET_CODE (insn) == CODE_LABEL
938 && LABEL_NUSES (insn) == 0
939 && LABEL_ALTERNATE_NAME (insn) == NULL)
940 insn = delete_insn (insn);
941 else
942 {
943 final = insn;
944 insn = NEXT_INSN (insn);
945 }
946 }
947
948 return final;
949 }
950
951 /* Delete various simple forms of move insns that have no necessary
952 side effects. */
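/* For example (a sketch): after register allocation, both

     (set (reg:SI 3) (reg:SI 3))

   and a SET whose source and destination are distinct pseudos that were
   assigned the same hard register accomplish nothing; both are deleted
   below, taking care to preserve any REG_DEAD note that would otherwise
   be lost.  */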
953
954 static void
955 delete_noop_moves (f)
956 rtx f;
957 {
958 rtx insn, next;
959
960 for (insn = f; insn;)
961 {
962 next = NEXT_INSN (insn);
963
964 if (GET_CODE (insn) == INSN)
965 {
966 register rtx body = PATTERN (insn);
967
968 /* Detect and delete no-op move instructions
969 resulting from not allocating a parameter in a register. */
970
971 if (GET_CODE (body) == SET
972 && (SET_DEST (body) == SET_SRC (body)
973 || (GET_CODE (SET_DEST (body)) == MEM
974 && GET_CODE (SET_SRC (body)) == MEM
975 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
976 && ! (GET_CODE (SET_DEST (body)) == MEM
977 && MEM_VOLATILE_P (SET_DEST (body)))
978 && ! (GET_CODE (SET_SRC (body)) == MEM
979 && MEM_VOLATILE_P (SET_SRC (body))))
980 delete_computation (insn);
981
982 /* Detect and ignore no-op move instructions
983 resulting from smart or fortuitous register allocation. */
984
985 else if (GET_CODE (body) == SET)
986 {
987 int sreg = true_regnum (SET_SRC (body));
988 int dreg = true_regnum (SET_DEST (body));
989
990 if (sreg == dreg && sreg >= 0)
991 delete_insn (insn);
992 else if (sreg >= 0 && dreg >= 0)
993 {
994 rtx trial;
995 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
996 sreg, NULL_PTR, dreg,
997 GET_MODE (SET_SRC (body)));
998
999 if (tem != 0
1000 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
1001 {
1002 /* DREG may have been the target of a REG_DEAD note in
1003 the insn which makes INSN redundant. If so, reorg
1004 would still think it is dead. So search for such a
1005 note and delete it if we find it. */
1006 if (! find_regno_note (insn, REG_UNUSED, dreg))
1007 for (trial = prev_nonnote_insn (insn);
1008 trial && GET_CODE (trial) != CODE_LABEL;
1009 trial = prev_nonnote_insn (trial))
1010 if (find_regno_note (trial, REG_DEAD, dreg))
1011 {
1012 remove_death (dreg, trial);
1013 break;
1014 }
1015
1016 /* Deleting insn could lose a death-note for SREG. */
1017 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
1018 {
1019 /* Change this into a USE so that we won't emit
1020 code for it, but still can keep the note. */
1021 PATTERN (insn)
1022 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
1023 INSN_CODE (insn) = -1;
1024 /* Remove all reg notes but the REG_DEAD one. */
1025 REG_NOTES (insn) = trial;
1026 XEXP (trial, 1) = NULL_RTX;
1027 }
1028 else
1029 delete_insn (insn);
1030 }
1031 }
1032 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
1033 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
1034 NULL_PTR, 0,
1035 GET_MODE (SET_DEST (body))))
1036 {
1037 /* This handles the case where we have two consecutive
1038 assignments of the same constant to pseudos that didn't
1039 get a hard reg. Each SET from the constant will be
1040 converted into a SET of the spill register and an
1041 output reload will be made following it. This produces
1042 two loads of the same constant into the same spill
1043 register. */
1044
1045 rtx in_insn = insn;
1046
1047 /* Look back for a death note for the first reg.
1048 If there is one, it is no longer accurate. */
1049 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
1050 {
1051 if ((GET_CODE (in_insn) == INSN
1052 || GET_CODE (in_insn) == JUMP_INSN)
1053 && find_regno_note (in_insn, REG_DEAD, dreg))
1054 {
1055 remove_death (dreg, in_insn);
1056 break;
1057 }
1058 in_insn = PREV_INSN (in_insn);
1059 }
1060
1061 /* Delete the second load of the value. */
1062 delete_insn (insn);
1063 }
1064 }
1065 else if (GET_CODE (body) == PARALLEL)
1066 {
1067 /* If each part is a set between two identical registers or
1068 a USE or CLOBBER, delete the insn. */
1069 int i, sreg, dreg;
1070 rtx tem;
1071
1072 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1073 {
1074 tem = XVECEXP (body, 0, i);
1075 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
1076 continue;
1077
1078 if (GET_CODE (tem) != SET
1079 || (sreg = true_regnum (SET_SRC (tem))) < 0
1080 || (dreg = true_regnum (SET_DEST (tem))) < 0
1081 || dreg != sreg)
1082 break;
1083 }
1084
1085 if (i < 0)
1086 delete_insn (insn);
1087 }
1088 /* Also delete insns to store bit fields if they are no-ops. */
1089 /* Not worth the hair to detect this in the big-endian case. */
1090 else if (! BYTES_BIG_ENDIAN
1091 && GET_CODE (body) == SET
1092 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
1093 && XEXP (SET_DEST (body), 2) == const0_rtx
1094 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
1095 && ! (GET_CODE (SET_SRC (body)) == MEM
1096 && MEM_VOLATILE_P (SET_SRC (body))))
1097 delete_insn (insn);
1098 }
1099 insn = next;
1100 }
1101 }
1102
1103 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
1104 jump. Assume that this unconditional jump is to the exit test code. If
1105 the code is sufficiently simple, make a copy of it before LOOP_START,
1106 followed by a jump to the exit of the loop. Then delete the unconditional
1107 jump after LOOP_START.
1108
1109 Return 1 if we made the change, else 0.
1110
1111 This is only safe immediately after a regscan pass because it uses the
1112 values of regno_first_uid and regno_last_uid. */
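/* Schematically (a sketch), a loop emitted as

     LOOP_BEG; goto TEST; TOP: body; TEST: if (c) goto TOP; LOOP_END; END:

   is rewritten to

     if (c) goto TOP; goto END; LOOP_BEG; TOP: body;
     TEST: if (c) goto TOP; LOOP_END; END:

   so that entering the loop no longer requires jumping forward to the
   exit test.  */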
1113
1114 static int
1115 duplicate_loop_exit_test (loop_start)
1116 rtx loop_start;
1117 {
1118 rtx insn, set, reg, p, link;
1119 rtx copy = 0, first_copy = 0;
1120 int num_insns = 0;
1121 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
1122 rtx lastexit;
1123 int max_reg = max_reg_num ();
1124 rtx *reg_map = 0;
1125
1126 /* Scan the exit code. We do not perform this optimization if any insn:
1127
1128 is a CALL_INSN
1129 is a CODE_LABEL
1130 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
1131 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
1132 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
1133 is not valid.
1134
1135 We also do not do this if we find an insn with ASM_OPERANDS. While
1136 this restriction should not be necessary, copying an insn with
1137 ASM_OPERANDS can confuse asm_noperands in some cases.
1138
1139 Also, don't do this if the exit code is more than 20 insns. */
1140
1141 for (insn = exitcode;
1142 insn
1143 && ! (GET_CODE (insn) == NOTE
1144 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
1145 insn = NEXT_INSN (insn))
1146 {
1147 switch (GET_CODE (insn))
1148 {
1149 case CODE_LABEL:
1150 case CALL_INSN:
1151 return 0;
1152 case NOTE:
1153 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
1154 a jump immediately after the loop start that branches outside
1155 the loop but within an outer loop, near the exit test.
1156 If we copied this exit test and created a phony
1157 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
1158 before the exit test look as though they could safely be moved
1159 out of the loop even though they may never actually be executed.
1160 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
1161
1162 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1163 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
1164 return 0;
1165
1166 if (optimize < 2
1167 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1168 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1169 /* If we were to duplicate this code, we would not move
1170 the BLOCK notes, and so debugging the moved code would
1171 be difficult. Thus, we only move the code with -O2 or
1172 higher. */
1173 return 0;
1174
1175 break;
1176 case JUMP_INSN:
1177 case INSN:
1178 /* The code below would grossly mishandle REG_WAS_0 notes,
1179 so get rid of them here. */
1180 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
1181 remove_note (insn, p);
1182 if (++num_insns > 20
1183 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1184 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
1185 return 0;
1186 break;
1187 default:
1188 break;
1189 }
1190 }
1191
1192 /* Unless INSN is zero, we can do the optimization. */
1193 if (insn == 0)
1194 return 0;
1195
1196 lastexit = insn;
1197
1198 /* See if any insn sets a register only used in the loop exit code and
1199 not a user variable. If so, replace it with a new register. */
1200 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1201 if (GET_CODE (insn) == INSN
1202 && (set = single_set (insn)) != 0
1203 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
1204 || (GET_CODE (reg) == SUBREG
1205 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
1206 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1207 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
1208 {
1209 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
1210 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
1211 break;
1212
1213 if (p != lastexit)
1214 {
1215 /* We can do the replacement. Allocate reg_map if this is the
1216 first replacement we found. */
1217 if (reg_map == 0)
1218 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
1219
1220 REG_LOOP_TEST_P (reg) = 1;
1221
1222 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
1223 }
1224 }
1225
1226 /* Now copy each insn. */
1227 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1228 {
1229 switch (GET_CODE (insn))
1230 {
1231 case BARRIER:
1232 copy = emit_barrier_before (loop_start);
1233 break;
1234 case NOTE:
1235 /* Only copy line-number notes. */
1236 if (NOTE_LINE_NUMBER (insn) >= 0)
1237 {
1238 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
1239 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
1240 }
1241 break;
1242
1243 case INSN:
1244 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
1245 if (reg_map)
1246 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1247
1248 mark_jump_label (PATTERN (copy), copy, 0, 0);
1249
1250 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
1251 make them. */
1252 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1253 if (REG_NOTE_KIND (link) != REG_LABEL)
1254 {
1255 if (GET_CODE (link) == EXPR_LIST)
1256 REG_NOTES (copy)
1257 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
1258 XEXP (link, 0),
1259 REG_NOTES (copy)));
1260 else
1261 REG_NOTES (copy)
1262 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
1263 XEXP (link, 0),
1264 REG_NOTES (copy)));
1265 }
1266
1267 if (reg_map && REG_NOTES (copy))
1268 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1269 break;
1270
1271 case JUMP_INSN:
1272 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
1273 loop_start);
1274 if (reg_map)
1275 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1276 mark_jump_label (PATTERN (copy), copy, 0, 0);
1277 if (REG_NOTES (insn))
1278 {
1279 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
1280 if (reg_map)
1281 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1282 }
1283
1284 /* If this is a simple jump, add it to the jump chain. */
1285
1286 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
1287 && simplejump_p (copy))
1288 {
1289 jump_chain[INSN_UID (copy)]
1290 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1291 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1292 }
1293 break;
1294
1295 default:
1296 abort ();
1297 }
1298
1299 /* Record the first insn we copied. We need it so that we can
1300 scan the copied insns for new pseudo registers. */
1301 if (! first_copy)
1302 first_copy = copy;
1303 }
1304
1305 /* Now clean up by emitting a jump to the end label and deleting the jump
1306 at the start of the loop. */
1307 if (! copy || GET_CODE (copy) != BARRIER)
1308 {
1309 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
1310 loop_start);
1311
1312 /* Record the first insn we copied. We need it so that we can
1313 scan the copied insns for new pseudo registers. This may not
1314 be strictly necessary since we should have copied at least one
1315 insn above. But I am going to be safe. */
1316 if (! first_copy)
1317 first_copy = copy;
1318
1319 mark_jump_label (PATTERN (copy), copy, 0, 0);
1320 if (INSN_UID (copy) < max_jump_chain
1321 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
1322 {
1323 jump_chain[INSN_UID (copy)]
1324 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1325 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1326 }
1327 emit_barrier_before (loop_start);
1328 }
1329
1330 /* Now scan from the first insn we copied to the last insn we copied
1331 (copy) for new pseudo registers. Do this after the code to jump to
1332 the end label since that might create a new pseudo too. */
1333 reg_scan_update (first_copy, copy, max_reg);
1334
1335 /* Mark the exit code as the virtual top of the converted loop. */
1336 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
1337
1338 delete_insn (next_nonnote_insn (loop_start));
1339
1340 /* Clean up. */
1341 if (reg_map)
1342 free (reg_map);
1343
1344 return 1;
1345 }
1346 \f
1347 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and loop-end
1348 notes between START and END out before START. Assume that END is not
1349 such a note. START may be such a note. Returns the value of the new
1350 starting insn, which may be different if the original start was such a
1351 note. */
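/* For example (a sketch), with START at note N1:

     N1 (block-end); insn A; N2 (loop-beg); insn B; END

   becomes

     N1; N2; insn A; insn B; END

   and the returned starting insn is A.  */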
1352
1353 rtx
1354 squeeze_notes (start, end)
1355 rtx start, end;
1356 {
1357 rtx insn;
1358 rtx next;
1359
1360 for (insn = start; insn != end; insn = next)
1361 {
1362 next = NEXT_INSN (insn);
1363 if (GET_CODE (insn) == NOTE
1364 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
1365 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1366 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1367 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
1368 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
1369 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
1370 {
1371 if (insn == start)
1372 start = next;
1373 else
1374 {
1375 rtx prev = PREV_INSN (insn);
1376 PREV_INSN (insn) = PREV_INSN (start);
1377 NEXT_INSN (insn) = start;
1378 NEXT_INSN (PREV_INSN (insn)) = insn;
1379 PREV_INSN (NEXT_INSN (insn)) = insn;
1380 NEXT_INSN (prev) = next;
1381 PREV_INSN (next) = prev;
1382 }
1383 }
1384 }
1385
1386 return start;
1387 }
1388 \f
1389 /* Compare the instructions before insn E1 with those before E2
1390 to find an opportunity for cross jumping.
1391 (This means detecting identical sequences of insns followed by
1392 jumps to the same place, or followed by a label and a jump
1393 to that label, and replacing one with a jump to the other.)
1394
1395 Assume E1 is a jump that jumps to label E2
1396 (that is not always true but it might as well be).
1397 Find the longest possible equivalent sequences
1398 and store the first insns of those sequences into *F1 and *F2.
1399 Store zero there if no equivalent preceding instructions are found.
1400
1401 We give up if we find a label in stream 1.
1402 Actually we could transfer that label into stream 2. */
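/* For example (a sketch):

     insn P; insn Q; jump L   ...   insn P'; insn Q'; L: ...

   If P matches P' and Q matches Q' (by rtx_renumbered_equal_p), *F1 is
   set to P and *F2 to P'; do_cross_jump can then redirect the jump to a
   new label placed before P' and delete P and Q.  */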
1403
1404 static void
1405 find_cross_jump (e1, e2, minimum, f1, f2)
1406 rtx e1, e2;
1407 int minimum;
1408 rtx *f1, *f2;
1409 {
1410 register rtx i1 = e1, i2 = e2;
1411 register rtx p1, p2;
1412 int lose = 0;
1413
1414 rtx last1 = 0, last2 = 0;
1415 rtx afterlast1 = 0, afterlast2 = 0;
1416
1417 *f1 = 0;
1418 *f2 = 0;
1419
1420 while (1)
1421 {
1422 i1 = prev_nonnote_insn (i1);
1423
1424 i2 = PREV_INSN (i2);
1425 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
1426 i2 = PREV_INSN (i2);
1427
1428 if (i1 == 0)
1429 break;
1430
1431 /* Don't allow the range of insns preceding E1 or E2
1432 to include the other (E2 or E1). */
1433 if (i2 == e1 || i1 == e2)
1434 break;
1435
1436 /* If we will get to this code by jumping, those jumps will be
1437 tensioned to go directly to the new label (before I2),
1438 so this cross-jumping won't cost extra. So reduce the minimum. */
1439 if (GET_CODE (i1) == CODE_LABEL)
1440 {
1441 --minimum;
1442 break;
1443 }
1444
1445 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
1446 break;
1447
1448 /* Avoid moving insns across EH regions if either of the insns
1449 can throw. */
1450 if (flag_exceptions
1451 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
1452 && !in_same_eh_region (i1, i2))
1453 break;
1454
1455 p1 = PATTERN (i1);
1456 p2 = PATTERN (i2);
1457
1458 /* If this is a CALL_INSN, compare register usage information.
1459 If we don't check this on stack register machines, the two
1460 CALL_INSNs might be merged leaving reg-stack.c with mismatching
1461 numbers of stack registers in the same basic block.
1462 If we don't check this on machines with delay slots, a delay slot may
1463 be filled that clobbers a parameter expected by the subroutine.
1464
1465 ??? We take the simple route for now and assume that if they're
1466 equal, they were constructed identically. */
1467
1468 if (GET_CODE (i1) == CALL_INSN
1469 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
1470 CALL_INSN_FUNCTION_USAGE (i2)))
1471 lose = 1;
1472
1473 #ifdef STACK_REGS
1474 /* If cross_jump_death_matters is not 0, the insn's mode
1475 indicates whether or not the insn contains any stack-like
1476 regs. */
1477
1478 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
1479 {
1480 /* If register stack conversion has already been done, then
1481 death notes must also be compared before it is certain that
1482 the two instruction streams match. */
1483
1484 rtx note;
1485 HARD_REG_SET i1_regset, i2_regset;
1486
1487 CLEAR_HARD_REG_SET (i1_regset);
1488 CLEAR_HARD_REG_SET (i2_regset);
1489
1490 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
1491 if (REG_NOTE_KIND (note) == REG_DEAD
1492 && STACK_REG_P (XEXP (note, 0)))
1493 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
1494
1495 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
1496 if (REG_NOTE_KIND (note) == REG_DEAD
1497 && STACK_REG_P (XEXP (note, 0)))
1498 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
1499
1500 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
1501
1502 lose = 1;
1503
1504 done:
1505 ;
1506 }
1507 #endif
1508
1509 /* Don't allow old-style asm or volatile extended asms to be accepted
1510 for cross jumping purposes. It is conceptually correct to allow
1511 them, since cross-jumping preserves the dynamic instruction order
1512 even though it is changing the static instruction order. However,
1513 if an asm is being used to emit an assembler pseudo-op, such as
1514 the MIPS `.set reorder' pseudo-op, then the static instruction order
1515 matters and it must be preserved. */
1516 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
1517 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
1518 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
1519 lose = 1;
1520
1521 if (lose || GET_CODE (p1) != GET_CODE (p2)
1522 || ! rtx_renumbered_equal_p (p1, p2))
1523 {
1524 /* The following code helps take care of G++ cleanups. */
1525 rtx equiv1;
1526 rtx equiv2;
1527
1528 if (!lose && GET_CODE (p1) == GET_CODE (p2)
1529 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
1530 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
1531 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
1532 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
1533 /* If the equivalences are not to a constant, they may
1534 reference pseudos that no longer exist, so we can't
1535 use them. */
1536 && CONSTANT_P (XEXP (equiv1, 0))
1537 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
1538 {
1539 rtx s1 = single_set (i1);
1540 rtx s2 = single_set (i2);
1541 if (s1 != 0 && s2 != 0
1542 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
1543 {
1544 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
1545 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
1546 if (! rtx_renumbered_equal_p (p1, p2))
1547 cancel_changes (0);
1548 else if (apply_change_group ())
1549 goto win;
1550 }
1551 }
1552
1553 /* Insns fail to match; cross jumping is limited to the following
1554 insns. */
1555
1556 #ifdef HAVE_cc0
1557 /* Don't allow the insn after a compare to be shared by
1558 cross-jumping unless the compare is also shared.
1559 Here, if either of these non-matching insns is a compare,
1560 exclude the following insn from possible cross-jumping. */
1561 if (sets_cc0_p (p1) || sets_cc0_p (p2))
1562 last1 = afterlast1, last2 = afterlast2, ++minimum;
1563 #endif
1564
1565 /* If cross-jumping here will feed a jump-around-jump
1566 optimization, this jump won't cost extra, so reduce
1567 the minimum. */
1568 if (GET_CODE (i1) == JUMP_INSN
1569 && JUMP_LABEL (i1)
1570 && prev_real_insn (JUMP_LABEL (i1)) == e1)
1571 --minimum;
1572 break;
1573 }
1574
1575 win:
1576 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
1577 {
1578 /* Ok, this insn is potentially includable in a cross-jump here. */
1579 afterlast1 = last1, afterlast2 = last2;
1580 last1 = i1, last2 = i2, --minimum;
1581 }
1582 }
1583
1584 if (minimum <= 0 && last1 != 0 && last1 != e1)
1585 *f1 = last1, *f2 = last2;
1586 }
1587
1588 static void
1589 do_cross_jump (insn, newjpos, newlpos)
1590 rtx insn, newjpos, newlpos;
1591 {
1592 /* Find an existing label at this point
1593 or make a new one if there is none. */
1594 register rtx label = get_label_before (newlpos);
1595
1596 /* Make the same jump insn jump to the new point. */
1597 if (GET_CODE (PATTERN (insn)) == RETURN)
1598 {
1599 /* Remove from jump chain of returns. */
1600 delete_from_jump_chain (insn);
1601 /* Change the insn. */
1602 PATTERN (insn) = gen_jump (label);
1603 INSN_CODE (insn) = -1;
1604 JUMP_LABEL (insn) = label;
1605 LABEL_NUSES (label)++;
1606 /* Add to the new jump chain. */
1607 if (INSN_UID (label) < max_jump_chain
1608 && INSN_UID (insn) < max_jump_chain)
1609 {
1610 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
1611 jump_chain[INSN_UID (label)] = insn;
1612 }
1613 }
1614 else
1615 redirect_jump (insn, label, 1);
1616
1617 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
1618 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
1619 the NEWJPOS stream. */
1620
1621 while (newjpos != insn)
1622 {
1623 rtx lnote;
1624
1625 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
1626 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
1627 || REG_NOTE_KIND (lnote) == REG_EQUIV)
1628 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
1629 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
1630 remove_note (newlpos, lnote);
1631
1632 delete_insn (newjpos);
1633 newjpos = next_real_insn (newjpos);
1634 newlpos = next_real_insn (newlpos);
1635 }
1636 }
1637 \f
1638 /* Return the label before INSN, or put a new label there. */
1639
1640 rtx
1641 get_label_before (insn)
1642 rtx insn;
1643 {
1644 rtx label;
1645
1646 /* Find an existing label at this point
1647 or make a new one if there is none. */
1648 label = prev_nonnote_insn (insn);
1649
1650 if (label == 0 || GET_CODE (label) != CODE_LABEL)
1651 {
1652 rtx prev = PREV_INSN (insn);
1653
1654 label = gen_label_rtx ();
1655 emit_label_after (label, prev);
1656 LABEL_NUSES (label) = 0;
1657 }
1658 return label;
1659 }
1660
1661 /* Return the label after INSN, or put a new label there. */
1662
1663 rtx
1664 get_label_after (insn)
1665 rtx insn;
1666 {
1667 rtx label;
1668
1669 /* Find an existing label at this point
1670 or make a new one if there is none. */
1671 label = next_nonnote_insn (insn);
1672
1673 if (label == 0 || GET_CODE (label) != CODE_LABEL)
1674 {
1675 label = gen_label_rtx ();
1676 emit_label_after (label, insn);
1677 LABEL_NUSES (label) = 0;
1678 }
1679 return label;
1680 }
1681 \f
1682 /* Return 1 if INSN is a jump that jumps to right after TARGET
1683 only on the condition that TARGET itself would drop through.
1684 Assumes that TARGET is a conditional jump. */
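/* For example (a sketch):

     TARGET:  if (x == 0) goto L1;
     L2:      ...                    ; TARGET falls through to here
     ...
     INSN:    if (x != 0) goto L2;

   INSN reaches the insn just after TARGET exactly when TARGET would
   drop through, so jump_back_p returns 1 for this pair.  */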
1685
1686 static int
1687 jump_back_p (insn, target)
1688 rtx insn, target;
1689 {
1690 rtx cinsn, ctarget;
1691 enum rtx_code codei, codet;
1692 rtx set, tset;
1693
1694 if (! any_condjump_p (insn)
1695 || any_uncondjump_p (target)
1696 || target != prev_real_insn (JUMP_LABEL (insn)))
1697 return 0;
1698 set = pc_set (insn);
1699 tset = pc_set (target);
1700
1701 cinsn = XEXP (SET_SRC (set), 0);
1702 ctarget = XEXP (SET_SRC (tset), 0);
1703
1704 codei = GET_CODE (cinsn);
1705 codet = GET_CODE (ctarget);
1706
1707 if (XEXP (SET_SRC (set), 1) == pc_rtx)
1708 {
1709 codei = reversed_comparison_code (cinsn, insn);
1710 if (codei == UNKNOWN)
1711 return 0;
1712 }
1713
1714 if (XEXP (SET_SRC (tset), 2) == pc_rtx)
1715 {
1716 codet = reversed_comparison_code (ctarget, target);
1717 if (codet == UNKNOWN)
1718 return 0;
1719 }
1720
1721 return (codei == codet
1722 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
1723 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
1724 }
1725 \f
1726 /* Given a comparison (CODE ARG0 ARG1) inside insn INSN, return the code
1727 of the reversed comparison if it is possible to do so; otherwise return
1728 UNKNOWN. UNKNOWN may also be returned for a CC_MODE compare when we do
1729 not know whether its source is a floating point or an integer comparison.
1730 The machine description should define the REVERSIBLE_CC_MODE and
1731 REVERSE_CONDITION macros to help this function avoid overhead in these cases. */
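/* For example (a sketch): reversing (gt (reg:SI 1) (const_int 0)) yields
   LE, since the operands are clearly integral; reversing (unlt ...)
   yields UNKNOWN, because no IEEE-safe reversal is available for it
   below.  */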
1732 enum rtx_code
1733 reversed_comparison_code_parts (code, arg0, arg1, insn)
1734 rtx insn, arg0, arg1;
1735 enum rtx_code code;
1736 {
1737 enum machine_mode mode;
1738
1739 /* If this is not actually a comparison, we can't reverse it. */
1740 if (GET_RTX_CLASS (code) != '<')
1741 return UNKNOWN;
1742
1743 mode = GET_MODE (arg0);
1744 if (mode == VOIDmode)
1745 mode = GET_MODE (arg1);
1746
1747 /* First see if the machine description supplies us with a way to reverse
1748 the comparison. Give it priority over everything else to allow the
1749 machine description to do tricks. */
1750 #ifdef REVERSIBLE_CC_MODE
1751 if (GET_MODE_CLASS (mode) == MODE_CC
1752 && REVERSIBLE_CC_MODE (mode))
1753 {
1754 #ifdef REVERSE_CONDITION
1755 return REVERSE_CONDITION (code, mode);
1756 #endif
1757 return reverse_condition (code);
1758 }
1759 #endif
1760
1761 /* Try a few special cases based on the comparison code. */
1762 switch (code)
1763 {
1764 case GEU:
1765 case GTU:
1766 case LEU:
1767 case LTU:
1768 case NE:
1769 case EQ:
1770 /* It is always safe to reverse EQ and NE, even for floating
1771 point. Similarly, the unsigned comparisons are never used for
1772 floating point, so we can reverse them in the default way. */
1773 return reverse_condition (code);
1774 case ORDERED:
1775 case UNORDERED:
1776 case LTGT:
1777 case UNEQ:
1778 /* Once we see an unordered comparison, we know we are dealing with
1779 floating point, so we don't need any more tests. */
1780 return reverse_condition_maybe_unordered (code);
1781 case UNLT:
1782 case UNLE:
1783 case UNGT:
1784 case UNGE:
1785 /* We don't have a safe way to reverse these yet. */
1786 return UNKNOWN;
1787 default:
1788 break;
1789 }
1790
1791 /* If we give up IEEE compatibility, all comparisons are reversible. */
1792 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
1793 || flag_unsafe_math_optimizations)
1794 return reverse_condition (code);
1795
1796 if (GET_MODE_CLASS (mode) == MODE_CC
1797 #ifdef HAVE_cc0
1798 || arg0 == cc0_rtx
1799 #endif
1800 )
1801 {
1802 rtx prev;
1803 /* Try to search for the comparison to determine the real mode.
1804 This code is expensive, but with a sane machine description it
1805 will never be used, since REVERSIBLE_CC_MODE will return true
1806 in all cases. */
1807 if (! insn)
1808 return UNKNOWN;
1809
1810 for (prev = prev_nonnote_insn (insn);
1811 prev != 0 && GET_CODE (prev) != CODE_LABEL;
1812 prev = prev_nonnote_insn (prev))
1813 {
1814 rtx set = set_of (arg0, prev);
1815 if (set && GET_CODE (set) == SET
1816 && rtx_equal_p (SET_DEST (set), arg0))
1817 {
1818 rtx src = SET_SRC (set);
1819
1820 if (GET_CODE (src) == COMPARE)
1821 {
1822 rtx comparison = src;
1823 arg0 = XEXP (src, 0);
1824 mode = GET_MODE (arg0);
1825 if (mode == VOIDmode)
1826 mode = GET_MODE (XEXP (comparison, 1));
1827 break;
1828 }
1829 /* We can get past reg-reg moves. This may be useful for modeling
1830 i387 comparisons that first move flag registers around. */
1831 if (REG_P (src))
1832 {
1833 arg0 = src;
1834 continue;
1835 }
1836 }
1837 /* If the register is clobbered in some way we cannot understand,
1838 give up. */
1839 if (set)
1840 return UNKNOWN;
1841 }
1842 }
1843
1844 /* An integer condition. */
1845 if (GET_CODE (arg0) == CONST_INT
1846 || (GET_MODE (arg0) != VOIDmode
1847 && GET_MODE_CLASS (mode) != MODE_CC
1848 && ! FLOAT_MODE_P (mode)))
1849 return reverse_condition (code);
1850
1851 return UNKNOWN;
1852 }
1853
1854 /* A wrapper around the previous function that takes COMPARISON as an
1855 rtx expression. This simplifies many callers. */
1856 enum rtx_code
1857 reversed_comparison_code (comparison, insn)
1858 rtx comparison, insn;
1859 {
1860 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
1861 return UNKNOWN;
1862 return reversed_comparison_code_parts (GET_CODE (comparison),
1863 XEXP (comparison, 0),
1864 XEXP (comparison, 1), insn);
1865 }
1866 \f
1867 /* Given an rtx-code for a comparison, return the code for the negated
1868 comparison. If no such code exists, return UNKNOWN.
1869
1870 WATCH OUT! reverse_condition is not safe to use on a jump that might
1871 be acting on the results of an IEEE floating point comparison, because
1872 of the special treatment of non-signaling nans in comparisons.
1873 Use reversed_comparison_code instead. */
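/* For example: reverse_condition (GT) == LE and reverse_condition (LTU)
   == GEU, while the unordered codes such as UNLT have no safe reversal
   here and yield UNKNOWN.  */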
1874
1875 enum rtx_code
1876 reverse_condition (code)
1877 enum rtx_code code;
1878 {
1879 switch (code)
1880 {
1881 case EQ:
1882 return NE;
1883 case NE:
1884 return EQ;
1885 case GT:
1886 return LE;
1887 case GE:
1888 return LT;
1889 case LT:
1890 return GE;
1891 case LE:
1892 return GT;
1893 case GTU:
1894 return LEU;
1895 case GEU:
1896 return LTU;
1897 case LTU:
1898 return GEU;
1899 case LEU:
1900 return GTU;
1901 case UNORDERED:
1902 return ORDERED;
1903 case ORDERED:
1904 return UNORDERED;
1905
1906 case UNLT:
1907 case UNLE:
1908 case UNGT:
1909 case UNGE:
1910 case UNEQ:
1911 case LTGT:
1912 return UNKNOWN;
1913
1914 default:
1915 abort ();
1916 }
1917 }
1918
1919 /* Similar, but we're allowed to generate unordered comparisons, which
1920 makes it safe for IEEE floating-point. Of course, we have to recognize
1921 that the target will support them too... */
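/* E.g., under IEEE the negation of (gt x y) is "unordered or less than
   or equal", so GT maps to UNLE here rather than to plain LE.  */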
1922
1923 enum rtx_code
1924 reverse_condition_maybe_unordered (code)
1925 enum rtx_code code;
1926 {
1927 /* Non-IEEE formats don't have unordered conditions. */
1928 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
1929 return reverse_condition (code);
1930
1931 switch (code)
1932 {
1933 case EQ:
1934 return NE;
1935 case NE:
1936 return EQ;
1937 case GT:
1938 return UNLE;
1939 case GE:
1940 return UNLT;
1941 case LT:
1942 return UNGE;
1943 case LE:
1944 return UNGT;
1945 case LTGT:
1946 return UNEQ;
1947 case UNORDERED:
1948 return ORDERED;
1949 case ORDERED:
1950 return UNORDERED;
1951 case UNLT:
1952 return GE;
1953 case UNLE:
1954 return GT;
1955 case UNGT:
1956 return LE;
1957 case UNGE:
1958 return LT;
1959 case UNEQ:
1960 return LTGT;
1961
1962 default:
1963 abort ();
1964 }
1965 }
1966
1967 /* Similar, but return the code when two operands of a comparison are swapped.
1968 This IS safe for IEEE floating-point. */
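/* E.g., (lt a b) is equivalent to (gt b a), so swap_condition (LT)
   returns GT; symmetric codes such as EQ, NE and UNORDERED are returned
   unchanged.  */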
1969
1970 enum rtx_code
1971 swap_condition (code)
1972 enum rtx_code code;
1973 {
1974 switch (code)
1975 {
1976 case EQ:
1977 case NE:
1978 case UNORDERED:
1979 case ORDERED:
1980 case UNEQ:
1981 case LTGT:
1982 return code;
1983
1984 case GT:
1985 return LT;
1986 case GE:
1987 return LE;
1988 case LT:
1989 return GT;
1990 case LE:
1991 return GE;
1992 case GTU:
1993 return LTU;
1994 case GEU:
1995 return LEU;
1996 case LTU:
1997 return GTU;
1998 case LEU:
1999 return GEU;
2000 case UNLT:
2001 return UNGT;
2002 case UNLE:
2003 return UNGE;
2004 case UNGT:
2005 return UNLT;
2006 case UNGE:
2007 return UNLE;
2008
2009 default:
2010 abort ();
2011 }
2012 }
2013
2014 /* Given a comparison CODE, return the corresponding unsigned comparison.
2015 If CODE is an equality comparison or already an unsigned comparison,
2016 CODE is returned. */
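/* E.g., unsigned_condition (GT) == GTU, while unsigned_condition (EQ)
   returns EQ itself, since equality does not depend on signedness.  */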
2017
2018 enum rtx_code
2019 unsigned_condition (code)
2020 enum rtx_code code;
2021 {
2022 switch (code)
2023 {
2024 case EQ:
2025 case NE:
2026 case GTU:
2027 case GEU:
2028 case LTU:
2029 case LEU:
2030 return code;
2031
2032 case GT:
2033 return GTU;
2034 case GE:
2035 return GEU;
2036 case LT:
2037 return LTU;
2038 case LE:
2039 return LEU;
2040
2041 default:
2042 abort ();
2043 }
2044 }
2045
2046 /* Similarly, return the signed version of a comparison. */
2047
2048 enum rtx_code
2049 signed_condition (code)
2050 enum rtx_code code;
2051 {
2052 switch (code)
2053 {
2054 case EQ:
2055 case NE:
2056 case GT:
2057 case GE:
2058 case LT:
2059 case LE:
2060 return code;
2061
2062 case GTU:
2063 return GT;
2064 case GEU:
2065 return GE;
2066 case LTU:
2067 return LT;
2068 case LEU:
2069 return LE;
2070
2071 default:
2072 abort ();
2073 }
2074 }
2075 \f
2076 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2077 truth of CODE1 implies the truth of CODE2. */
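/* For example, EQ dominates GE (x == y implies x >= y), so
   comparison_dominates_p (EQ, GE) returns 1, while the converse call
   comparison_dominates_p (GE, EQ) returns 0.  */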
2078
2079 int
2080 comparison_dominates_p (code1, code2)
2081 enum rtx_code code1, code2;
2082 {
2083 /* UNKNOWN comparison codes can happen as a result of trying to reverse
2084 comparison codes.
2085 They can't match anything, so we have to reject them here. */
2086 if (code1 == UNKNOWN || code2 == UNKNOWN)
2087 return 0;
2088
2089 if (code1 == code2)
2090 return 1;
2091
2092 switch (code1)
2093 {
2094 case UNEQ:
2095 if (code2 == UNLE || code2 == UNGE)
2096 return 1;
2097 break;
2098
2099 case EQ:
2100 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
2101 || code2 == ORDERED)
2102 return 1;
2103 break;
2104
2105 case UNLT:
2106 if (code2 == UNLE || code2 == NE)
2107 return 1;
2108 break;
2109
2110 case LT:
2111 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2112 return 1;
2113 break;
2114
2115 case UNGT:
2116 if (code2 == UNGE || code2 == NE)
2117 return 1;
2118 break;
2119
2120 case GT:
2121 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2122 return 1;
2123 break;
2124
2125 case GE:
2126 case LE:
2127 if (code2 == ORDERED)
2128 return 1;
2129 break;
2130
2131 case LTGT:
2132 if (code2 == NE || code2 == ORDERED)
2133 return 1;
2134 break;
2135
2136 case LTU:
2137 if (code2 == LEU || code2 == NE)
2138 return 1;
2139 break;
2140
2141 case GTU:
2142 if (code2 == GEU || code2 == NE)
2143 return 1;
2144 break;
2145
2146 case UNORDERED:
2147 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
2148 || code2 == UNGE || code2 == UNGT)
2149 return 1;
2150 break;
2151
2152 default:
2153 break;
2154 }
2155
2156 return 0;
2157 }
2158 \f
2159 /* Return 1 if INSN is an unconditional jump and nothing else. */
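/* That is, the pattern must be exactly (set (pc) (label_ref L)), to give
   an illustrative form; unconditional jumps wrapped in a PARALLEL do not
   qualify.  */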
2160
2161 int
2162 simplejump_p (insn)
2163 rtx insn;
2164 {
2165 return (GET_CODE (insn) == JUMP_INSN
2166 && GET_CODE (PATTERN (insn)) == SET
2167 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2168 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2169 }
2170
2171 /* Return nonzero if INSN is a (possibly) conditional jump
2172 and nothing more.
2173
2174 Use of this function is deprecated, since we need to support combined
2175 branch and compare insns. Use any_condjump_p instead whenever possible. */
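/* A matching conditional jump looks like, for instance,
   (set (pc) (if_then_else (lt (reg 1) (const_int 0))
			   (label_ref L) (pc))).  */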
2176
2177 int
2178 condjump_p (insn)
2179 rtx insn;
2180 {
2181 register rtx x = PATTERN (insn);
2182
2183 if (GET_CODE (x) != SET
2184 || GET_CODE (SET_DEST (x)) != PC)
2185 return 0;
2186
2187 x = SET_SRC (x);
2188 if (GET_CODE (x) == LABEL_REF)
2189 return 1;
2190 else
2191 return (GET_CODE (x) == IF_THEN_ELSE
2192 && ((GET_CODE (XEXP (x, 2)) == PC
2193 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
2194 || GET_CODE (XEXP (x, 1)) == RETURN))
2195 || (GET_CODE (XEXP (x, 1)) == PC
2196 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
2197 || GET_CODE (XEXP (x, 2)) == RETURN))));
2200 }
2201
2202 /* Return nonzero if INSN is a (possibly) conditional jump inside a
2203 PARALLEL.
2204
2205 Use of this function is deprecated, since we need to support combined
2206 branch and compare insns. Use any_condjump_p instead whenever possible. */
2207
2208 int
2209 condjump_in_parallel_p (insn)
2210 rtx insn;
2211 {
2212 register rtx x = PATTERN (insn);
2213
2214 if (GET_CODE (x) != PARALLEL)
2215 return 0;
2216 else
2217 x = XVECEXP (x, 0, 0);
2218
2219 if (GET_CODE (x) != SET)
2220 return 0;
2221 if (GET_CODE (SET_DEST (x)) != PC)
2222 return 0;
2223 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2224 return 1;
2225 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2226 return 0;
2227 if (XEXP (SET_SRC (x), 2) == pc_rtx
2228 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2229 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2230 return 1;
2231 if (XEXP (SET_SRC (x), 1) == pc_rtx
2232 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2233 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2234 return 1;
2235 return 0;
2236 }
2237
2238 /* Return the SET of PC in INSN if there is one, otherwise NULL. */
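/* E.g., for (parallel [(set (pc) ...) (clobber ...)]) the inner SET is
   returned, and for an insn that is not a jump the result is NULL_RTX.  */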
2239
2240 rtx
2241 pc_set (insn)
2242 rtx insn;
2243 {
2244 rtx pat;
2245 if (GET_CODE (insn) != JUMP_INSN)
2246 return NULL_RTX;
2247 pat = PATTERN (insn);
2248
2249 /* The set is allowed to appear either as the insn pattern or
2250 the first set in a PARALLEL. */
2251 if (GET_CODE (pat) == PARALLEL)
2252 pat = XVECEXP (pat, 0, 0);
2253 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
2254 return pat;
2255
2256 return NULL_RTX;
2257 }
2258
2259 /* Return true when insn is an unconditional direct jump,
2260 possibly bundled inside a PARALLEL. */
2261
2262 int
2263 any_uncondjump_p (insn)
2264 rtx insn;
2265 {
2266 rtx x = pc_set (insn);
2267 if (!x)
2268 return 0;
2269 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
2270 return 0;
2271 return 1;
2272 }
2273
2274 /* Return true when insn is a conditional jump. This function works for
2275 instructions containing PC sets in PARALLELs. The instruction may have
2276 various other effects, so before removing the jump you must verify
2277 onlyjump_p.
2278
2279 Note that unlike condjump_p it returns false for unconditional jumps. */
2280
2281 int
2282 any_condjump_p (insn)
2283 rtx insn;
2284 {
2285 rtx x = pc_set (insn);
2286 enum rtx_code a, b;
2287
2288 if (!x)
2289 return 0;
2290 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2291 return 0;
2292
2293 a = GET_CODE (XEXP (SET_SRC (x), 1));
2294 b = GET_CODE (XEXP (SET_SRC (x), 2));
2295
2296 return ((b == PC && (a == LABEL_REF || a == RETURN))
2297 || (a == PC && (b == LABEL_REF || b == RETURN)));
2298 }
2299
2300 /* Return the label of a conditional jump. */
2301
2302 rtx
2303 condjump_label (insn)
2304 rtx insn;
2305 {
2306 rtx x = pc_set (insn);
2307
2308 if (!x)
2309 return NULL_RTX;
2310 x = SET_SRC (x);
2311 if (GET_CODE (x) == LABEL_REF)
2312 return x;
2313 if (GET_CODE (x) != IF_THEN_ELSE)
2314 return NULL_RTX;
2315 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
2316 return XEXP (x, 1);
2317 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
2318 return XEXP (x, 2);
2319 return NULL_RTX;
2320 }
2321
2322 /* Return true if INSN is a (possibly conditional) return insn. */
2323
2324 static int
2325 returnjump_p_1 (loc, data)
2326 rtx *loc;
2327 void *data ATTRIBUTE_UNUSED;
2328 {
2329 rtx x = *loc;
2330 return x && GET_CODE (x) == RETURN;
2331 }
2332
2333 int
2334 returnjump_p (insn)
2335 rtx insn;
2336 {
2337 if (GET_CODE (insn) != JUMP_INSN)
2338 return 0;
2339 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
2340 }
2341
2342 /* Return true if INSN is a jump that only transfers control and
2343 nothing more. */
2344
2345 int
2346 onlyjump_p (insn)
2347 rtx insn;
2348 {
2349 rtx set;
2350
2351 if (GET_CODE (insn) != JUMP_INSN)
2352 return 0;
2353
2354 set = single_set (insn);
2355 if (set == NULL)
2356 return 0;
2357 if (GET_CODE (SET_DEST (set)) != PC)
2358 return 0;
2359 if (side_effects_p (SET_SRC (set)))
2360 return 0;
2361
2362 return 1;
2363 }
2364
2365 #ifdef HAVE_cc0
2366
2367 /* Return 1 if X is an RTX that does nothing but set the condition codes
2368 and CLOBBER or USE registers.
2369 Return -1 if X does explicitly set the condition codes,
2370 but also does other things. */
2371
2372 int
2373 sets_cc0_p (x)
2374 rtx x;
2375 {
2376 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
2377 return 1;
2378 if (GET_CODE (x) == PARALLEL)
2379 {
2380 int i;
2381 int sets_cc0 = 0;
2382 int other_things = 0;
2383 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2384 {
2385 if (GET_CODE (XVECEXP (x, 0, i)) == SET
2386 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
2387 sets_cc0 = 1;
2388 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
2389 other_things = 1;
2390 }
2391 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
2392 }
2393 return 0;
2394 }
2395 #endif
2396 \f
2397 /* Follow any unconditional jump at LABEL;
2398 return the ultimate label reached by any such chain of jumps.
2399 If LABEL is not followed by a jump, return LABEL.
2400 If the chain loops or we can't find the end, return LABEL,
2401 since that tells the caller to avoid changing the insn.
2402
2403 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2404 a USE or CLOBBER. */
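/* For example (a sketch): if label L1 is followed only by "jump L2" and
   L2 only by "jump L3", follow_jumps (L1) returns L3.  At most ten links
   are followed, and a detected cycle returns the original label.  */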
2405
2406 rtx
2407 follow_jumps (label)
2408 rtx label;
2409 {
2410 register rtx insn;
2411 register rtx next;
2412 register rtx value = label;
2413 register int depth;
2414
2415 for (depth = 0;
2416 (depth < 10
2417 && (insn = next_active_insn (value)) != 0
2418 && GET_CODE (insn) == JUMP_INSN
2419 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
2420 && onlyjump_p (insn))
2421 || GET_CODE (PATTERN (insn)) == RETURN)
2422 && (next = NEXT_INSN (insn))
2423 && GET_CODE (next) == BARRIER);
2424 depth++)
2425 {
2426 /* Don't chain through the insn that jumps into a loop
2427 from outside the loop,
2428 since that would create multiple loop entry jumps
2429 and prevent loop optimization. */
2430 rtx tem;
2431 if (!reload_completed)
2432 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
2433 if (GET_CODE (tem) == NOTE
2434 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
2435 /* ??? Optional. Disables some optimizations, but makes
2436 gcov output more accurate with -O. */
2437 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
2438 return value;
2439
2440 /* If we have found a cycle, make the insn jump to itself. */
2441 if (JUMP_LABEL (insn) == label)
2442 return label;
2443
2444 tem = next_active_insn (JUMP_LABEL (insn));
2445 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2446 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2447 break;
2448
2449 value = JUMP_LABEL (insn);
2450 }
2451 if (depth == 10)
2452 return label;
2453 return value;
2454 }
2455
2456 /* Assuming that field IDX of X is a vector of label_refs,
2457 replace each of them by the ultimate label reached by it.
2458 Return nonzero if a change is made.
2459 Before reload, we do not chain across a NOTE_INSN_LOOP_BEG (see follow_jumps). */
2460
2461 static int
2462 tension_vector_labels (x, idx)
2463 register rtx x;
2464 register int idx;
2465 {
2466 int changed = 0;
2467 register int i;
2468 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
2469 {
2470 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
2471 register rtx nlabel = follow_jumps (olabel);
2472 if (nlabel && nlabel != olabel)
2473 {
2474 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
2475 ++LABEL_NUSES (nlabel);
2476 if (--LABEL_NUSES (olabel) == 0)
2477 delete_insn (olabel);
2478 changed = 1;
2479 }
2480 }
2481 return changed;
2482 }
2483 \f
2484 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2485 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2486 in INSN, then store one of them in JUMP_LABEL (INSN).
2487 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2488 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2489 Also, when there are consecutive labels, canonicalize on the last of them.
2490
2491 Note that two labels separated by a loop-beginning note
2492 must be kept distinct if we have not yet done loop-optimization,
2493 because the gap between them is where loop-optimize
2494 will want to move invariant code to. CROSS_JUMP tells us
2495 that loop optimization is finished.
2496
2497 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2498 two labels distinct if they are separated by only USE or CLOBBER insns. */
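/* For instance, if INSN is a jump whose pattern contains (label_ref L10),
   JUMP_LABEL (INSN) is set to L10 (or to the last of any labels
   immediately following it) and that label's LABEL_NUSES is incremented.  */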
2499
2500 void
2501 mark_jump_label (x, insn, cross_jump, in_mem)
2502 register rtx x;
2503 rtx insn;
2504 int cross_jump;
2505 int in_mem;
2506 {
2507 register RTX_CODE code = GET_CODE (x);
2508 register int i;
2509 register const char *fmt;
2510
2511 switch (code)
2512 {
2513 case PC:
2514 case CC0:
2515 case REG:
2516 case SUBREG:
2517 case CONST_INT:
2518 case CONST_DOUBLE:
2519 case CLOBBER:
2520 case CALL:
2521 return;
2522
2523 case MEM:
2524 in_mem = 1;
2525 break;
2526
2527 case SYMBOL_REF:
2528 if (!in_mem)
2529 return;
2530
2531 /* If this is a constant-pool reference, see if it is a label. */
2532 if (CONSTANT_POOL_ADDRESS_P (x))
2533 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
2534 break;
2535
2536 case LABEL_REF:
2537 {
2538 rtx label = XEXP (x, 0);
2539 rtx olabel = label;
2540 rtx note;
2541 rtx next;
2542
2543 /* Ignore remaining references to unreachable labels that
2544 have been deleted. */
2545 if (GET_CODE (label) == NOTE
2546 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
2547 break;
2548
2549 if (GET_CODE (label) != CODE_LABEL)
2550 abort ();
2551
2552 /* Ignore references to labels of containing functions. */
2553 if (LABEL_REF_NONLOCAL_P (x))
2554 break;
2555
2556 /* If there are other labels following this one,
2557 replace it with the last of the consecutive labels. */
2558 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
2559 {
2560 if (GET_CODE (next) == CODE_LABEL)
2561 label = next;
2562 else if (cross_jump && GET_CODE (next) == INSN
2563 && (GET_CODE (PATTERN (next)) == USE
2564 || GET_CODE (PATTERN (next)) == CLOBBER))
2565 continue;
2566 else if (GET_CODE (next) != NOTE)
2567 break;
2568 else if (! cross_jump
2569 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
2570 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
2571 /* ??? Optional. Disables some optimizations, but
2572 makes gcov output more accurate with -O. */
2573 || (flag_test_coverage
2574 && NOTE_LINE_NUMBER (next) > 0)))
2575 break;
2576 }
2577
2578 XEXP (x, 0) = label;
2579 if (! insn || ! INSN_DELETED_P (insn))
2580 ++LABEL_NUSES (label);
2581
2582 if (insn)
2583 {
2584 if (GET_CODE (insn) == JUMP_INSN)
2585 JUMP_LABEL (insn) = label;
2586
2587 /* If we've changed OLABEL and we had a REG_LABEL note
2588 for it, update it as well. */
2589 else if (label != olabel
2590 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
2591 XEXP (note, 0) = label;
2592
2593 /* Otherwise, add a REG_LABEL note for LABEL unless there already
2594 is one. */
2595 else if (! find_reg_note (insn, REG_LABEL, label))
2596 {
2597 /* This code used to ignore labels which referred to dispatch
2598 tables to avoid flow.c generating worse code.
2599 
2600 However, in the presence of global optimizations like
2601 gcse which call find_basic_blocks without calling
2602 life_analysis, not recording such labels will lead
2603 to compiler aborts because of inconsistencies in the
2604 flow graph. So we go ahead and record the label.
2605
2606 It may also be the case that the optimization argument
2607 is no longer valid because of the more accurate cfg
2608 we build in find_basic_blocks -- it no longer pessimizes
2609 code when it finds a REG_LABEL note. */
2610 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
2611 REG_NOTES (insn));
2612 }
2613 }
2614 return;
2615 }
2616
2617 /* Do walk the labels in a vector, but not the first operand of an
2618 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
2619 case ADDR_VEC:
2620 case ADDR_DIFF_VEC:
2621 if (! INSN_DELETED_P (insn))
2622 {
2623 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
2624
2625 for (i = 0; i < XVECLEN (x, eltnum); i++)
2626 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
2627 cross_jump, in_mem);
2628 }
2629 return;
2630
2631 default:
2632 break;
2633 }
2634
2635 fmt = GET_RTX_FORMAT (code);
2636 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2637 {
2638 if (fmt[i] == 'e')
2639 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
2640 else if (fmt[i] == 'E')
2641 {
2642 register int j;
2643 for (j = 0; j < XVECLEN (x, i); j++)
2644 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
2645 }
2646 }
2647 }
2648
2649 /* If all INSN does is set the pc, delete it,
2650 and delete the insn that set the condition codes for it
2651 if that's what the previous thing was. */
2652
2653 void
2654 delete_jump (insn)
2655 rtx insn;
2656 {
2657 register rtx set = single_set (insn);
2658
2659 if (set && GET_CODE (SET_DEST (set)) == PC)
2660 delete_computation (insn);
2661 }
2662
2663 /* Verify INSN is a BARRIER and delete it. */
2664
2665 void
2666 delete_barrier (insn)
2667 rtx insn;
2668 {
2669 if (GET_CODE (insn) != BARRIER)
2670 abort ();
2671
2672 delete_insn (insn);
2673 }
2674
2675 /* Recursively delete prior insns that compute the value (used only by INSN
2676 which the caller is deleting) stored in the register mentioned by NOTE
2677 which is a REG_DEAD note associated with INSN. */
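/* An illustrative case: if INSN has a REG_DEAD note for r5 and the
   previous real insn is simply "(set (reg r5) ...)" with no other side
   effects, that insn is deleted too, recursively via delete_computation.  */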
2678
2679 static void
2680 delete_prior_computation (note, insn)
2681 rtx note;
2682 rtx insn;
2683 {
2684 rtx our_prev;
2685 rtx reg = XEXP (note, 0);
2686
2687 for (our_prev = prev_nonnote_insn (insn);
2688 our_prev && (GET_CODE (our_prev) == INSN
2689 || GET_CODE (our_prev) == CALL_INSN);
2690 our_prev = prev_nonnote_insn (our_prev))
2691 {
2692 rtx pat = PATTERN (our_prev);
2693
2694 /* If we reach a CALL which is not calling a const function
2695 or the callee pops the arguments, then give up. */
2696 if (GET_CODE (our_prev) == CALL_INSN
2697 && (! CONST_CALL_P (our_prev)
2698 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
2699 break;
2700
2701 /* If we reach a SEQUENCE, it is too complex to try to
2702 do anything with it, so give up. */
2703 if (GET_CODE (pat) == SEQUENCE)
2704 break;
2705
2706 if (GET_CODE (pat) == USE
2707 && GET_CODE (XEXP (pat, 0)) == INSN)
2708 /* reorg creates USEs that look like this. We leave them
2709 alone because reorg needs them for its own purposes. */
2710 break;
2711
2712 if (reg_set_p (reg, pat))
2713 {
2714 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
2715 break;
2716
2717 if (GET_CODE (pat) == PARALLEL)
2718 {
2719 /* If we find a SET of something else, we can't
2720 delete the insn. */
2721
2722 int i;
2723
2724 for (i = 0; i < XVECLEN (pat, 0); i++)
2725 {
2726 rtx part = XVECEXP (pat, 0, i);
2727
2728 if (GET_CODE (part) == SET
2729 && SET_DEST (part) != reg)
2730 break;
2731 }
2732
2733 if (i == XVECLEN (pat, 0))
2734 delete_computation (our_prev);
2735 }
2736 else if (GET_CODE (pat) == SET
2737 && GET_CODE (SET_DEST (pat)) == REG)
2738 {
2739 int dest_regno = REGNO (SET_DEST (pat));
2740 int dest_endregno
2741 = (dest_regno
2742 + (dest_regno < FIRST_PSEUDO_REGISTER
2743 ? HARD_REGNO_NREGS (dest_regno,
2744 GET_MODE (SET_DEST (pat))) : 1));
2745 int regno = REGNO (reg);
2746 int endregno
2747 = (regno
2748 + (regno < FIRST_PSEUDO_REGISTER
2749 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
2750
2751 if (dest_regno >= regno
2752 && dest_endregno <= endregno)
2753 delete_computation (our_prev);
2754
2755 /* We may have a multi-word hard register and some, but not
2756 all, of the words of the register are needed in subsequent
2757 insns. Write REG_UNUSED notes for those parts that were not
2758 needed. */
2759 else if (dest_regno <= regno
2760 && dest_endregno >= endregno)
2761 {
2762 int i;
2763
2764 REG_NOTES (our_prev)
2765 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
2766 REG_NOTES (our_prev));
2767
2768 for (i = dest_regno; i < dest_endregno; i++)
2769 if (! find_regno_note (our_prev, REG_UNUSED, i))
2770 break;
2771
2772 if (i == dest_endregno)
2773 delete_computation (our_prev);
2774 }
2775 }
2776
2777 break;
2778 }
2779
2780 /* If PAT references the register that dies here, it is an
2781 additional use. Hence any prior SET isn't dead. However, this
2782 insn becomes the new place for the REG_DEAD note. */
2783 if (reg_overlap_mentioned_p (reg, pat))
2784 {
2785 XEXP (note, 1) = REG_NOTES (our_prev);
2786 REG_NOTES (our_prev) = note;
2787 break;
2788 }
2789 }
2790 }
2791
2792 /* Delete INSN and recursively delete insns that compute values used only
2793 by INSN. This uses the REG_DEAD notes computed during flow analysis.
2794 If we are running before flow.c, we need do nothing since flow.c will
2795 delete dead code. We also can't know if the registers being used are
2796 dead or not at this point.
2797
2798 Otherwise, look at all our REG_DEAD notes. If a previous insn does
2799 nothing other than set a register that dies in this insn, we can delete
2800 that insn as well.
2801
2802 On machines with CC0, if CC0 is used in this insn, we may be able to
2803 delete the insn that set it. */
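/* E.g., on a cc0 machine, when deleting a conditional jump whose pattern
   uses cc0, the "(set (cc0) (compare ...))" immediately before it is
   deleted as well, provided the compare has no other side effects.  */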
2804
2805 static void
2806 delete_computation (insn)
2807 rtx insn;
2808 {
2809 rtx note, next;
2810
2811 #ifdef HAVE_cc0
2812 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2813 {
2814 rtx prev = prev_nonnote_insn (insn);
2815 /* We assume that at this stage
2816 CC's are always set explicitly
2817 and always immediately before the jump that
2818 will use them. So if the previous insn
2819 exists to set the CC's, delete it
2820 (unless it performs auto-increments, etc.). */
2821 if (prev && GET_CODE (prev) == INSN
2822 && sets_cc0_p (PATTERN (prev)))
2823 {
2824 if (sets_cc0_p (PATTERN (prev)) > 0
2825 && ! side_effects_p (PATTERN (prev)))
2826 delete_computation (prev);
2827 else
2828 /* Otherwise, show that cc0 won't be used. */
2829 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
2830 cc0_rtx, REG_NOTES (prev));
2831 }
2832 }
2833 #endif
2834
2835 for (note = REG_NOTES (insn); note; note = next)
2836 {
2837 next = XEXP (note, 1);
2838
2839 if (REG_NOTE_KIND (note) != REG_DEAD
2840 /* Verify that the REG_NOTE is legitimate. */
2841 || GET_CODE (XEXP (note, 0)) != REG)
2842 continue;
2843
2844 delete_prior_computation (note, insn);
2845 }
2846
2847 delete_insn (insn);
2848 }
2849 \f
2850 /* Delete insn INSN from the chain of insns and update label ref counts.
2851 May delete some following insns as a consequence; may even delete
2852 a label elsewhere and insns that follow it.
2853
2854 Returns the first insn after INSN that was not deleted. */
2855
2856 rtx
2857 delete_insn (insn)
2858 register rtx insn;
2859 {
2860 register rtx next = NEXT_INSN (insn);
2861 register rtx prev = PREV_INSN (insn);
2862 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
2863 register int dont_really_delete = 0;
2864 rtx note;
2865
2866 while (next && INSN_DELETED_P (next))
2867 next = NEXT_INSN (next);
2868
2869 /* This insn is already deleted => return first following nondeleted. */
2870 if (INSN_DELETED_P (insn))
2871 return next;
2872
2873 if (was_code_label)
2874 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2875
2876 /* Don't delete user-declared labels. When optimizing, convert them
2877 to special NOTEs instead. When not optimizing, leave them alone. */
2878 if (was_code_label && LABEL_NAME (insn) != 0)
2879 {
2880 if (! optimize)
2881 dont_really_delete = 1;
2882 else if (! dont_really_delete)
2883 {
2884 const char *name = LABEL_NAME (insn);
2885 PUT_CODE (insn, NOTE);
2886 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
2887 NOTE_SOURCE_FILE (insn) = name;
2888 dont_really_delete = 1;
2889 }
2890 }
2891 else
2892 /* Mark this insn as deleted. */
2893 INSN_DELETED_P (insn) = 1;
2894
2895 /* If this is an unconditional jump, delete it from the jump chain. */
2896 if (simplejump_p (insn))
2897 delete_from_jump_chain (insn);
2898
2899 /* If instruction is followed by a barrier,
2900 delete the barrier too. */
2901
2902 if (next != 0 && GET_CODE (next) == BARRIER)
2903 {
2904 INSN_DELETED_P (next) = 1;
2905 next = NEXT_INSN (next);
2906 }
2907
2908 /* Patch out INSN (and the barrier if any) */
2909
2910 if (! dont_really_delete)
2911 {
2912 if (prev)
2913 {
2914 NEXT_INSN (prev) = next;
2915 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2916 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
2917 XVECLEN (PATTERN (prev), 0) - 1)) = next;
2918 }
2919
2920 if (next)
2921 {
2922 PREV_INSN (next) = prev;
2923 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2924 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2925 }
2926
2927 if (prev && NEXT_INSN (prev) == 0)
2928 set_last_insn (prev);
2929 }
2930
2931 /* If deleting a jump, decrement the count of the label,
2932 and delete the label if it is now unused. */
2933
2934 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2935 {
2936 rtx lab = JUMP_LABEL (insn), lab_next;
2937
2938 if (--LABEL_NUSES (lab) == 0)
2939 {
2940 /* This can delete NEXT or PREV,
2941 either directly if NEXT is JUMP_LABEL (INSN),
2942 or indirectly through more levels of jumps. */
2943 delete_insn (lab);
2944
2945 /* I feel a little doubtful about this loop,
2946 but I see no clean and sure alternative way
2947 to find the first insn after INSN that is not now deleted.
2948 I hope this works. */
2949 while (next && INSN_DELETED_P (next))
2950 next = NEXT_INSN (next);
2951 return next;
2952 }
2953 else if ((lab_next = next_nonnote_insn (lab)) != NULL
2954 && GET_CODE (lab_next) == JUMP_INSN
2955 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
2956 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
2957 {
2958 /* If we're deleting the tablejump, delete the dispatch table.
2959 We may not be able to kill the label immediately preceding
2960 just yet, as it might be referenced in code leading up to
2961 the tablejump. */
2962 delete_insn (lab_next);
2963 }
2964 }
2965
2966 /* Likewise if we're deleting a dispatch table. */
2967
2968 if (GET_CODE (insn) == JUMP_INSN
2969 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2970 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2971 {
2972 rtx pat = PATTERN (insn);
2973 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
2974 int len = XVECLEN (pat, diff_vec_p);
2975
2976 for (i = 0; i < len; i++)
2977 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
2978 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
2979 while (next && INSN_DELETED_P (next))
2980 next = NEXT_INSN (next);
2981 return next;
2982 }
2983
2984 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
2985 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2986 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2987 if (REG_NOTE_KIND (note) == REG_LABEL
2988 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
2989 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2990 if (--LABEL_NUSES (XEXP (note, 0)) == 0)
2991 delete_insn (XEXP (note, 0));
2992
2993 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
2994 prev = PREV_INSN (prev);
2995
2996 /* If INSN was a label and a dispatch table follows it,
2997 delete the dispatch table. The tablejump must have gone already.
2998 It isn't useful to fall through into a table. */
2999
3000 if (was_code_label
3001 && NEXT_INSN (insn) != 0
3002 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
3003 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
3004 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
3005 next = delete_insn (NEXT_INSN (insn));
3006
3007 /* If INSN was a label, delete insns following it if now unreachable. */
3008
3009 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
3010 {
3011 register RTX_CODE code;
3012 while (next != 0
3013 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
3014 || code == NOTE || code == BARRIER
3015 || (code == CODE_LABEL && INSN_DELETED_P (next))))
3016 {
3017 if (code == NOTE
3018 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
3019 next = NEXT_INSN (next);
3020 /* Keep going past other deleted labels to delete what follows. */
3021 else if (code == CODE_LABEL && INSN_DELETED_P (next))
3022 next = NEXT_INSN (next);
3023 else
3024 /* Note: if this deletes a jump, it can cause more
3025 deletion of unreachable code, after a different label.
3026 As long as the value from this recursive call is correct,
3027 this invocation functions correctly. */
3028 next = delete_insn (next);
3029 }
3030 }
3031
3032 return next;
3033 }
3034
3035 /* Advance from INSN until reaching something not deleted;
3036 then return it. May return INSN itself. */
3037
3038 rtx
3039 next_nondeleted_insn (insn)
3040 rtx insn;
3041 {
3042 while (INSN_DELETED_P (insn))
3043 insn = NEXT_INSN (insn);
3044 return insn;
3045 }
3046 \f
3047 /* Delete a range of insns from FROM to TO, inclusive.
3048 This is for the sake of peephole optimization, so assume
3049 that whatever these insns do will still be done by a new
3050 peephole insn that will replace them. */
3051
3052 void
3053 delete_for_peephole (from, to)
3054 register rtx from, to;
3055 {
3056 register rtx insn = from;
3057
3058 while (1)
3059 {
3060 register rtx next = NEXT_INSN (insn);
3061 register rtx prev = PREV_INSN (insn);
3062
3063 if (GET_CODE (insn) != NOTE)
3064 {
3065 INSN_DELETED_P (insn) = 1;
3066
3067 /* Patch this insn out of the chain. */
3068 /* We don't do this all at once, because we
3069 must preserve all NOTEs. */
3070 if (prev)
3071 NEXT_INSN (prev) = next;
3072
3073 if (next)
3074 PREV_INSN (next) = prev;
3075 }
3076
3077 if (insn == to)
3078 break;
3079 insn = next;
3080 }
3081
3082 /* Note that if TO is an unconditional jump
3083 we *do not* delete the BARRIER that follows,
3084 since the peephole that replaces this sequence
3085 is also an unconditional jump in that case. */
3086 }
3087 \f
3088 /* We have determined that INSN is never reached, and are about to
3089 delete it. Print a warning if the user asked for one.
3090
3091 To try to make this warning more useful, this should only be called
3092 once per basic block not reached, and it only warns when the basic
3093 block contains more than one line from the current function, and
3094 contains at least one operation. CSE and inlining can duplicate insns,
3095 so it's possible to get spurious warnings from this. */
3096
3097 void
3098 never_reached_warning (avoided_insn)
3099 rtx avoided_insn;
3100 {
3101 rtx insn;
3102 rtx a_line_note = NULL;
3103 int two_avoided_lines = 0;
3104 int contains_insn = 0;
3105
3106 if (! warn_notreached)
3107 return;
3108
3109 /* Scan forwards, looking at LINE_NUMBER notes, until
3110 we hit a LABEL or we run out of insns. */
3111
3112 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
3113 {
3114 if (GET_CODE (insn) == CODE_LABEL)
3115 break;
3116 else if (GET_CODE (insn) == NOTE /* A line number note? */
3117 && NOTE_LINE_NUMBER (insn) >= 0)
3118 {
3119 if (a_line_note == NULL)
3120 a_line_note = insn;
3121 else
3122 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
3123 != NOTE_LINE_NUMBER (insn));
3124 }
3125 else if (INSN_P (insn))
3126 contains_insn = 1;
3127 }
3128 if (two_avoided_lines && contains_insn)
3129 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
3130 NOTE_LINE_NUMBER (a_line_note),
3131 "will never be executed");
3132 }
3133 \f
3134 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
3135 NLABEL as a return. Accrue modifications into the change group. */
3136
3137 static void
3138 redirect_exp_1 (loc, olabel, nlabel, insn)
3139 rtx *loc;
3140 rtx olabel, nlabel;
3141 rtx insn;
3142 {
3143 register rtx x = *loc;
3144 register RTX_CODE code = GET_CODE (x);
3145 register int i;
3146 register const char *fmt;
3147
3148 if (code == LABEL_REF)
3149 {
3150 if (XEXP (x, 0) == olabel)
3151 {
3152 rtx n;
3153 if (nlabel)
3154 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3155 else
3156 n = gen_rtx_RETURN (VOIDmode);
3157
3158 validate_change (insn, loc, n, 1);
3159 return;
3160 }
3161 }
3162 else if (code == RETURN && olabel == 0)
3163 {
3164 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3165 if (loc == &PATTERN (insn))
3166 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3167 validate_change (insn, loc, x, 1);
3168 return;
3169 }
3170
3171 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
3172 && GET_CODE (SET_SRC (x)) == LABEL_REF
3173 && XEXP (SET_SRC (x), 0) == olabel)
3174 {
3175 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
3176 return;
3177 }
3178
3179 fmt = GET_RTX_FORMAT (code);
3180 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3181 {
3182 if (fmt[i] == 'e')
3183 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
3184 else if (fmt[i] == 'E')
3185 {
3186 register int j;
3187 for (j = 0; j < XVECLEN (x, i); j++)
3188 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
3189 }
3190 }
3191 }
3192
3193 /* Similar, but apply the change group and report success or failure. */
3194
3195 static int
3196 redirect_exp (olabel, nlabel, insn)
3197 rtx olabel, nlabel;
3198 rtx insn;
3199 {
3200 rtx *loc;
3201
3202 if (GET_CODE (PATTERN (insn)) == PARALLEL)
3203 loc = &XVECEXP (PATTERN (insn), 0, 0);
3204 else
3205 loc = &PATTERN (insn);
3206
3207 redirect_exp_1 (loc, olabel, nlabel, insn);
3208 if (num_validated_changes () == 0)
3209 return 0;
3210
3211 return apply_change_group ();
3212 }
3213
3214 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
3215 the modifications into the change group. Return false if we did
3216 not see how to do that. */
3217
3218 int
3219 redirect_jump_1 (jump, nlabel)
3220 rtx jump, nlabel;
3221 {
3222 int ochanges = num_validated_changes ();
3223 rtx *loc;
3224
3225 if (GET_CODE (PATTERN (jump)) == PARALLEL)
3226 loc = &XVECEXP (PATTERN (jump), 0, 0);
3227 else
3228 loc = &PATTERN (jump);
3229
3230 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
3231 return num_validated_changes () > ochanges;
3232 }
3233
3234 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
3235 jump target label is unused as a result, it and the code following
3236 it may be deleted.
3237
3238 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3239 RETURN insn.
3240
3241 The return value will be 1 if the change was made, 0 if it wasn't
3242 (this can only occur for NLABEL == 0). */
3243
3244 int
3245 redirect_jump (jump, nlabel, delete_unused)
3246 rtx jump, nlabel;
3247 int delete_unused;
3248 {
3249 register rtx olabel = JUMP_LABEL (jump);
3250
3251 if (nlabel == olabel)
3252 return 1;
3253
3254 if (! redirect_exp (olabel, nlabel, jump))
3255 return 0;
3256
3257 /* If this is an unconditional branch, delete it from the jump_chain of
3258 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3259 have UID's in range and JUMP_CHAIN is valid). */
3260 if (jump_chain && (simplejump_p (jump)
3261 || GET_CODE (PATTERN (jump)) == RETURN))
3262 {
3263 int label_index = nlabel ? INSN_UID (nlabel) : 0;
3264
3265 delete_from_jump_chain (jump);
3266 if (label_index < max_jump_chain
3267 && INSN_UID (jump) < max_jump_chain)
3268 {
3269 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
3270 jump_chain[label_index] = jump;
3271 }
3272 }
3273
3274 JUMP_LABEL (jump) = nlabel;
3275 if (nlabel)
3276 ++LABEL_NUSES (nlabel);
3277
3278 /* If we're eliding the jump over exception cleanups at the end of a
3279 function, move the function end note so that -Wreturn-type works. */
3280 if (olabel && nlabel
3281 && NEXT_INSN (olabel)
3282 && GET_CODE (NEXT_INSN (olabel)) == NOTE
3283 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
3284 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
3285
3286 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
3287 delete_insn (olabel);
3288
3289 return 1;
3290 }
3291
3292 /* Invert the jump condition of jump insn INSN.
3293 Accrue the modifications into the change group. */
3294
3295 static void
3296 invert_exp_1 (insn)
3297 rtx insn;
3298 {
3299 register RTX_CODE code;
3300 rtx x = pc_set (insn);
3301
3302 if (!x)
3303 abort ();
3304 x = SET_SRC (x);
3305
3306 code = GET_CODE (x);
3307
3308 if (code == IF_THEN_ELSE)
3309 {
3310 register rtx comp = XEXP (x, 0);
3311 register rtx tem;
3312 enum rtx_code reversed_code;
3313
3314 /* We can do this in two ways: the preferable way is to reverse
3315 the comparison code, which works whenever the comparison is
3316 reversible. Otherwise, swap the THEN-part and ELSE-part
3317 of the IF_THEN_ELSE. If we can't do either, fail. */
3318
3319 reversed_code = reversed_comparison_code (comp, insn);
3320
3321 if (reversed_code != UNKNOWN)
3322 {
3323 validate_change (insn, &XEXP (x, 0),
3324 gen_rtx_fmt_ee (reversed_code,
3325 GET_MODE (comp), XEXP (comp, 0),
3326 XEXP (comp, 1)),
3327 1);
3328 return;
3329 }
3330
3331 tem = XEXP (x, 1);
3332 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
3333 validate_change (insn, &XEXP (x, 2), tem, 1);
3334 }
3335 else
3336 abort ();
3337 }
3338
3339 /* Invert the jump condition of conditional jump insn, INSN.
3340
3341 Return 1 if we can do so, 0 if we cannot find a way to do so that
3342 matches a pattern. */
3343
3344 static int
3345 invert_exp (insn)
3346 rtx insn;
3347 {
3348 invert_exp_1 (insn);
3349 if (num_validated_changes () == 0)
3350 return 0;
3351
3352 return apply_change_group ();
3353 }
3354
3355 /* Invert the condition of the jump JUMP, and make it jump to label
3356 NLABEL instead of where it jumps now. Accrue changes into the
3357 change group. Return false if we didn't see how to perform the
3358 inversion and redirection. */
3359
3360 int
3361 invert_jump_1 (jump, nlabel)
3362 rtx jump, nlabel;
3363 {
3364 int ochanges;
3365
3366 ochanges = num_validated_changes ();
3367 invert_exp_1 (jump);
3368 if (num_validated_changes () == ochanges)
3369 return 0;
3370
3371 return redirect_jump_1 (jump, nlabel);
3372 }
3373
3374 /* Invert the condition of the jump JUMP, and make it jump to label
3375 NLABEL instead of where it jumps now. Return true if successful. */
3376
3377 int
3378 invert_jump (jump, nlabel, delete_unused)
3379 rtx jump, nlabel;
3380 int delete_unused;
3381 {
3382 /* We have to either invert the condition and change the label or
3383 do neither. Either operation could fail. We first try to invert
3384 the jump. If that succeeds, we try changing the label. If that fails,
3385 we invert the jump back to what it was. */
3386
3387 if (! invert_exp (jump))
3388 return 0;
3389
3390 if (redirect_jump (jump, nlabel, delete_unused))
3391 {
3392 /* An inverted jump means that a probability taken becomes a
3393 probability not taken. Subtract the branch probability from the
3394 probability base to convert it back to a taken probability. */
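      /* E.g., assuming the usual REG_BR_PROB_BASE of 10000, a REG_BR_PROB
	 note of 9000 becomes 1000.  */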
3395
3396 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3397 if (note)
3398 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
3399
3400 return 1;
3401 }
3402
3403 if (! invert_exp (jump))
3404 /* This should just be putting it back the way it was. */
3405 abort ();
3406
3407 return 0;
3408 }
3409
3410 /* Delete the instruction JUMP from any jump chain it might be on. */
3411
3412 static void
3413 delete_from_jump_chain (jump)
3414 rtx jump;
3415 {
3416 int index;
3417 rtx olabel = JUMP_LABEL (jump);
3418
3419 /* Handle unconditional jumps. */
3420 if (jump_chain && olabel != 0
3421 && INSN_UID (olabel) < max_jump_chain
3422 && simplejump_p (jump))
3423 index = INSN_UID (olabel);
3424 /* Handle return insns. */
3425 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3426 index = 0;
3427 else
3428 return;
3429
3430 if (jump_chain[index] == jump)
3431 jump_chain[index] = jump_chain[INSN_UID (jump)];
3432 else
3433 {
3434 rtx insn;
3435
3436 for (insn = jump_chain[index];
3437 insn != 0;
3438 insn = jump_chain[INSN_UID (insn)])
3439 if (jump_chain[INSN_UID (insn)] == jump)
3440 {
3441 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3442 break;
3443 }
3444 }
3445 }
3446 \f
3447 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3448
3449 If the old jump target label (before the dispatch table) becomes unused,
3450 it and the dispatch table may be deleted. In that case, find the insn
3451 before the jump that references that label, and delete it and its logical
3452 successors too. */
3453
3454 static void
3455 redirect_tablejump (jump, nlabel)
3456 rtx jump, nlabel;
3457 {
3458 register rtx olabel = JUMP_LABEL (jump);
3459 rtx *notep, note, next;
3460
3461 /* Add this jump to the jump_chain of NLABEL. */
3462 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
3463 && INSN_UID (jump) < max_jump_chain)
3464 {
3465 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
3466 jump_chain[INSN_UID (nlabel)] = jump;
3467 }
3468
3469 for (notep = &REG_NOTES (jump), note = *notep; note; note = next)
3470 {
3471 next = XEXP (note, 1);
3472
3473 if (REG_NOTE_KIND (note) != REG_DEAD
3474 /* Verify that the REG_NOTE is legitimate. */
3475 || GET_CODE (XEXP (note, 0)) != REG
3476 || ! reg_mentioned_p (XEXP (note, 0), PATTERN (jump)))
3477 notep = &XEXP (note, 1);
3478 else
3479 {
3480 delete_prior_computation (note, jump);
3481 *notep = next;
3482 }
3483 }
3484
3485 PATTERN (jump) = gen_jump (nlabel);
3486 JUMP_LABEL (jump) = nlabel;
3487 ++LABEL_NUSES (nlabel);
3488 INSN_CODE (jump) = -1;
3489
3490 if (--LABEL_NUSES (olabel) == 0)
3491 {
3492 delete_labelref_insn (jump, olabel, 0);
3493 delete_insn (olabel);
3494 }
3495 }
3496
3497 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3498 If we found one, delete it and then delete this insn if DELETE_THIS is
3499 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3500
3501 static int
3502 delete_labelref_insn (insn, label, delete_this)
3503 rtx insn, label;
3504 int delete_this;
3505 {
3506 int deleted = 0;
3507 rtx link;
3508
3509 if (GET_CODE (insn) != NOTE
3510 && reg_mentioned_p (label, PATTERN (insn)))
3511 {
3512 if (delete_this)
3513 {
3514 delete_insn (insn);
3515 deleted = 1;
3516 }
3517 else
3518 return 1;
3519 }
3520
3521 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
3522 if (delete_labelref_insn (XEXP (link, 0), label, 1))
3523 {
3524 if (delete_this)
3525 {
3526 delete_insn (insn);
3527 deleted = 1;
3528 }
3529 else
3530 return 1;
3531 }
3532
3533 return deleted;
3534 }
3535 \f
3536 /* Like rtx_equal_p except that it considers two REGs as equal
3537 if they renumber to the same value and considers two commutative
3538 operations to be the same if the order of the operands has been
3539 reversed.
3540
3541 ??? Addition is not commutative on the PA due to the weird implicit
3542 space register selection rules for memory addresses. Therefore, we
3543 don't consider a + b == b + a.
3544
3545 We could/should make this test a little tighter. Possibly only
3546 disabling it on the PA via some backend macro or only disabling this
3547 case when the PLUS is inside a MEM. */
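/* For example (a sketch): (plus:SI (reg 100) (reg 101)) and
   (plus:SI (reg 5) (reg 6)) compare equal when reg_renumber maps
   pseudo 100 to hard register 5 and pseudo 101 to hard register 6.  */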
3548
3549 int
3550 rtx_renumbered_equal_p (x, y)
3551 rtx x, y;
3552 {
3553 register int i;
3554 register RTX_CODE code = GET_CODE (x);
3555 register const char *fmt;
3556
3557 if (x == y)
3558 return 1;
3559
3560 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3561 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3562 && GET_CODE (SUBREG_REG (y)) == REG)))
3563 {
3564 int reg_x = -1, reg_y = -1;
3565 int word_x = 0, word_y = 0;
3566
3567 if (GET_MODE (x) != GET_MODE (y))
3568 return 0;
3569
3570 /* If we haven't done any renumbering, don't
3571 make any assumptions. */
3572 if (reg_renumber == 0)
3573 return rtx_equal_p (x, y);
3574
3575 if (code == SUBREG)
3576 {
3577 reg_x = REGNO (SUBREG_REG (x));
3578 word_x = SUBREG_WORD (x);
3579
3580 if (reg_renumber[reg_x] >= 0)
3581 {
3582 reg_x = reg_renumber[reg_x] + word_x;
3583 word_x = 0;
3584 }
3585 }
3586
3587 else
3588 {
3589 reg_x = REGNO (x);
3590 if (reg_renumber[reg_x] >= 0)
3591 reg_x = reg_renumber[reg_x];
3592 }
3593
3594 if (GET_CODE (y) == SUBREG)
3595 {
3596 reg_y = REGNO (SUBREG_REG (y));
3597 word_y = SUBREG_WORD (y);
3598
3599 if (reg_renumber[reg_y] >= 0)
3600 {
3601 reg_y = reg_renumber[reg_y];
3602 word_y = 0;
3603 }
3604 }
3605
3606 else
3607 {
3608 reg_y = REGNO (y);
3609 if (reg_renumber[reg_y] >= 0)
3610 reg_y = reg_renumber[reg_y];
3611 }
3612
3613 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
3614 }
3615
3616 /* Now we have disposed of all the cases
3617 in which different rtx codes can match. */
3618 if (code != GET_CODE (y))
3619 return 0;
3620
3621 switch (code)
3622 {
3623 case PC:
3624 case CC0:
3625 case ADDR_VEC:
3626 case ADDR_DIFF_VEC:
3627 return 0;
3628
3629 case CONST_INT:
3630 return INTVAL (x) == INTVAL (y);
3631
3632 case LABEL_REF:
3633 /* We can't assume nonlocal labels have their following insns yet. */
3634 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3635 return XEXP (x, 0) == XEXP (y, 0);
3636
3637 /* Two label-refs are equivalent if they point at labels
3638 in the same position in the instruction stream. */
3639 return (next_real_insn (XEXP (x, 0))
3640 == next_real_insn (XEXP (y, 0)));
3641
3642 case SYMBOL_REF:
3643 return XSTR (x, 0) == XSTR (y, 0);
3644
3645 case CODE_LABEL:
3646 /* If we didn't match EQ equality above, they aren't the same. */
3647 return 0;
3648
3649 default:
3650 break;
3651 }
3652
3653 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3654
3655 if (GET_MODE (x) != GET_MODE (y))
3656 return 0;
3657
3658 /* For commutative operations, the RTXs match if the operands match in
3659 either order. Also handle the simple binary and unary cases without a loop.
3660
3661 ??? Don't consider PLUS a commutative operator; see comments above. */
3662 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3663 && code != PLUS)
3664 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3665 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
3666 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
3667 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
3668 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
3669 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3670 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
3671 else if (GET_RTX_CLASS (code) == '1')
3672 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
3673
3674 /* Compare the elements. If any pair of corresponding elements
3675 fails to match, return 0 for the whole thing. */
3676
3677 fmt = GET_RTX_FORMAT (code);
3678 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3679 {
3680 register int j;
3681 switch (fmt[i])
3682 {
3683 case 'w':
3684 if (XWINT (x, i) != XWINT (y, i))
3685 return 0;
3686 break;
3687
3688 case 'i':
3689 if (XINT (x, i) != XINT (y, i))
3690 return 0;
3691 break;
3692
3693 case 's':
3694 if (strcmp (XSTR (x, i), XSTR (y, i)))
3695 return 0;
3696 break;
3697
3698 case 'e':
3699 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
3700 return 0;
3701 break;
3702
3703 case 'u':
3704 if (XEXP (x, i) != XEXP (y, i))
3705 return 0;
3706 /* fall through. */
3707 case '0':
3708 break;
3709
3710 case 'E':
3711 if (XVECLEN (x, i) != XVECLEN (y, i))
3712 return 0;
3713 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3714 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
3715 return 0;
3716 break;
3717
3718 default:
3719 abort ();
3720 }
3721 }
3722 return 1;
3723 }
3724 \f
3725 /* If X is a hard register or equivalent to one or a subregister of one,
3726 return the hard register number. If X is a pseudo register that was not
3727 assigned a hard register, return the pseudo register number. Otherwise,
3728 return -1. Any rtx is valid for X. */
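/* E.g., for (subreg:SI (reg:DI P) 1), where pseudo P was assigned hard
   register 8, the result is 9: the hard register number plus the subreg
   word.  */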
3729
3730 int
3731 true_regnum (x)
3732 rtx x;
3733 {
3734 if (GET_CODE (x) == REG)
3735 {
3736 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3737 return reg_renumber[REGNO (x)];
3738 return REGNO (x);
3739 }
3740 if (GET_CODE (x) == SUBREG)
3741 {
3742 int base = true_regnum (SUBREG_REG (x));
3743 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3744 return SUBREG_WORD (x) + base;
3745 }
3746 return -1;
3747 }
3748 \f
3749 /* Optimize code of the form:
3750
3751 for (x = a[i]; x; ...)
3752 ...
3753 for (x = a[i]; x; ...)
3754 ...
3755 foo:
3756
3757 Loop optimize will change the above code into
3758
3759 if (x = a[i])
3760 for (;;)
3761 { ...; if (! (x = ...)) break; }
3762 if (x = a[i])
3763 for (;;)
3764 { ...; if (! (x = ...)) break; }
3765 foo:
3766
3767 In general, if the first test fails, the program can branch
3768 directly to `foo' and skip the second try which is doomed to fail.
3769 We run this after loop optimization and before flow analysis. */
3770
3771 /* When comparing the insn patterns, we track the fact that different
3772 pseudo-register numbers may have been used in each computation.
3773 The following array stores an equivalence -- same_regs[I] == J means
3774 that pseudo register I was used in the first set of tests in a context
3775 where J was used in the second set. We also count the number of such
3776 pending equivalences. If nonzero, the expressions really aren't the
3777 same. */
3778
3779 static int *same_regs;
3780
3781 static int num_same_regs;
3782
3783 /* Track any registers modified between the target of the first jump and
3784 the second jump. They never compare equal. */
3785
3786 static char *modified_regs;
3787
3788 /* Record if memory was modified. */
3789
3790 static int modified_mem;
3791
3792 /* Called via note_stores on each insn between the target of the first
3793 branch and the second branch. It marks any changed registers. */
3794
3795 static void
3796 mark_modified_reg (dest, x, data)
3797 rtx dest;
3798 rtx x ATTRIBUTE_UNUSED;
3799 void *data ATTRIBUTE_UNUSED;
3800 {
3801 int regno;
3802 unsigned int i;
3803
3804 if (GET_CODE (dest) == SUBREG)
3805 dest = SUBREG_REG (dest);
3806
3807 if (GET_CODE (dest) == MEM)
3808 modified_mem = 1;
3809
3810 if (GET_CODE (dest) != REG)
3811 return;
3812
3813 regno = REGNO (dest);
3814 if (regno >= FIRST_PSEUDO_REGISTER)
3815 modified_regs[regno] = 1;
3816 else
3817 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
3818 modified_regs[regno + i] = 1;
3819 }
3820
3821 /* F is the first insn in the chain of insns. */
3822
3823 void
3824 thread_jumps (f, max_reg, flag_before_loop)
3825 rtx f;
3826 int max_reg;
3827 int flag_before_loop;
3828 {
3829 /* Basic algorithm is to find a conditional branch,
3830 the label it may branch to, and the branch after
3831 that label. If the two branches test the same condition,
3832 walk back from both branch paths until the insn patterns
3833 differ, or code labels are hit. If we make it back to
3834 the target of the first branch, then we know that the first branch
3835 will either always succeed or always fail depending on the relative
3836 senses of the two branches. So adjust the first branch accordingly
3837 in this case. */
3838
3839 rtx label, b1, b2, t1, t2;
3840 enum rtx_code code1, code2;
3841 rtx b1op0, b1op1, b2op0, b2op1;
3842 int changed = 1;
3843 int i;
3844 int *all_reset;
3845 enum rtx_code reversed_code1, reversed_code2;
3846
3847 /* Allocate register tables and quick-reset table. */
3848 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
3849 same_regs = (int *) xmalloc (max_reg * sizeof (int));
3850 all_reset = (int *) xmalloc (max_reg * sizeof (int));
3851 for (i = 0; i < max_reg; i++)
3852 all_reset[i] = -1;
3853
3854 while (changed)
3855 {
3856 changed = 0;
3857
3858 for (b1 = f; b1; b1 = NEXT_INSN (b1))
3859 {
3860 rtx set;
3861 rtx set2;
3862
3863 /* Get to a candidate branch insn. */
3864 if (GET_CODE (b1) != JUMP_INSN
3865 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
3866 continue;
3867
3868 memset (modified_regs, 0, max_reg * sizeof (char));
3869 modified_mem = 0;
3870
3871 memcpy (same_regs, all_reset, max_reg * sizeof (int));
3872 num_same_regs = 0;
3873
3874 label = JUMP_LABEL (b1);
3875
3876 /* Look for a branch after the target. Record any registers and
3877 memory modified between the target and the branch. Stop when we
3878 get to a label since we can't know what was changed there. */
3879 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
3880 {
3881 if (GET_CODE (b2) == CODE_LABEL)
3882 break;
3883
3884 else if (GET_CODE (b2) == JUMP_INSN)
3885 {
3886 /* If this is an unconditional jump and is the only use of
3887 its target label, we can follow it. */
3888 if (any_uncondjump_p (b2)
3889 && onlyjump_p (b2)
3890 && JUMP_LABEL (b2) != 0
3891 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
3892 {
3893 b2 = JUMP_LABEL (b2);
3894 continue;
3895 }
3896 else
3897 break;
3898 }
3899
3900 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
3901 continue;
3902
3903 if (GET_CODE (b2) == CALL_INSN)
3904 {
3905 modified_mem = 1;
3906 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3907 if (call_used_regs[i] && ! fixed_regs[i]
3908 && i != STACK_POINTER_REGNUM
3909 && i != FRAME_POINTER_REGNUM
3910 && i != HARD_FRAME_POINTER_REGNUM
3911 && i != ARG_POINTER_REGNUM)
3912 modified_regs[i] = 1;
3913 }
3914
3915 note_stores (PATTERN (b2), mark_modified_reg, NULL);
3916 }
3917
3918 /* Check the next candidate branch insn from the label
3919 of the first. */
3920 if (b2 == 0
3921 || GET_CODE (b2) != JUMP_INSN
3922 || b2 == b1
3923 || !any_condjump_p (b2)
3924 || !onlyjump_p (b2))
3925 continue;
3926 set = pc_set (b1);
3927 set2 = pc_set (b2);
3928
3929 /* Get the comparison codes and operands, reversing the
3930 codes if appropriate. If we don't have comparison codes,
3931 we can't do anything. */
3932 b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
3933 b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
3934 code1 = GET_CODE (XEXP (SET_SRC (set), 0));
3935 reversed_code1 = code1;
3936 if (XEXP (SET_SRC (set), 1) == pc_rtx)
3937 code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
3938 else
3939 reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
3940
3941 b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
3942 b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
3943 code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
3944 reversed_code2 = code2;
3945 if (XEXP (SET_SRC (set2), 1) == pc_rtx)
3946 code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
3947 else
3948 reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
3949
3950 /* If they test the same things, and knowing whether B1 branches
3951 tells us whether B2 branches, check if we
3952 can thread the branch. */
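/* For example, if CODE1 is LT and CODE2 is LE on the same operands,
   B1 branching implies B2 would branch too, so
   comparison_dominates_p (LT, LE) holds and B1 can be threaded to
   B2's target; if instead CODE1 implies REVERSED_CODE2, B1 branching
   means B2 falls through, and we thread to a label after B2.  */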
3953 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
3954 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
3955 && (comparison_dominates_p (code1, code2)
3956 || comparison_dominates_p (code1, reversed_code2)))
3957
3958 {
3959 t1 = prev_nonnote_insn (b1);
3960 t2 = prev_nonnote_insn (b2);
3961
3962 while (t1 != 0 && t2 != 0)
3963 {
3964 if (t2 == label)
3965 {
3966 /* We have reached the target of the first branch.
3967 If there are no pending register equivalents,
3968 we know that this branch will either always
3969 succeed (if the senses of the two branches are
3970 the same) or always fail (if not). */
3971 rtx new_label;
3972
3973 if (num_same_regs != 0)
3974 break;
3975
3976 if (comparison_dominates_p (code1, code2))
3977 new_label = JUMP_LABEL (b2);
3978 else
3979 new_label = get_label_after (b2);
3980
3981 if (JUMP_LABEL (b1) != new_label)
3982 {
3983 rtx prev = PREV_INSN (new_label);
3984
3985 if (flag_before_loop
3986 && GET_CODE (prev) == NOTE
3987 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
3988 {
3989 /* Don't thread to the loop label. If a loop
3990 label is reused, loop optimization will
3991 be disabled for that loop. */
3992 new_label = gen_label_rtx ();
3993 emit_label_after (new_label, PREV_INSN (prev));
3994 }
3995 changed |= redirect_jump (b1, new_label, 1);
3996 }
3997 break;
3998 }
3999
4000 /* If either of these is not a normal insn (it might be
4001 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4002 have already been skipped above.) Similarly, fail
4003 if the insns are different. */
4004 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
4005 || recog_memoized (t1) != recog_memoized (t2)
4006 || ! rtx_equal_for_thread_p (PATTERN (t1),
4007 PATTERN (t2), t2))
4008 break;
4009
4010 t1 = prev_nonnote_insn (t1);
4011 t2 = prev_nonnote_insn (t2);
4012 }
4013 }
4014 }
4015 }
4016
4017 /* Clean up. */
4018 free (modified_regs);
4019 free (same_regs);
4020 free (all_reset);
4021 }
4022 \f
4023 /* This is like RTX_EQUAL_P except that it knows about our handling of
4024 possibly equivalent registers and knows to consider volatile and
4025 modified objects as not equal.
4026
4027 YINSN is the insn containing Y. */
4028
4029 int
4030 rtx_equal_for_thread_p (x, y, yinsn)
4031 rtx x, y;
4032 rtx yinsn;
4033 {
4034 register int i;
4035 register int j;
4036 register enum rtx_code code;
4037 register const char *fmt;
4038
4039 code = GET_CODE (x);
4040 /* Rtx's of different codes cannot be equal. */
4041 if (code != GET_CODE (y))
4042 return 0;
4043
4044 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
4045 (REG:SI x) and (REG:HI x) are NOT equivalent. */
4046
4047 if (GET_MODE (x) != GET_MODE (y))
4048 return 0;
4049
4050 /* For IEEE floating-point, consider everything unequal unless unsafe
4051 math optimizations are enabled. This is a bit pessimistic, but this
4052 pass would only rarely do anything for FP anyway. */
4053 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
4054 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
4055 return 0;
4056
4057 /* For commutative operations, the RTXs match if the operands match in
4058 either order. Also handle the simple binary and unary cases without a loop. */
4059 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4060 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4061 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
4062 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
4063 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
4064 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4065 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4066 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
4067 else if (GET_RTX_CLASS (code) == '1')
4068 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
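/* E.g. (plus:SI (reg:SI 101) (const_int 4)) matches
   (plus:SI (const_int 4) (reg:SI 101)) here; the register number is
   only for illustration, and REG equality itself is subject to the
   pairing rules in the REG case below.  */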
4069
4070 /* Handle special-cases first. */
4071 switch (code)
4072 {
4073 case REG:
4074 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
4075 return 1;
4076
4077 /* If neither is a user variable nor a hard register, check for a possible
4078 equivalence. */
4079 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
4080 || REGNO (x) < FIRST_PSEUDO_REGISTER
4081 || REGNO (y) < FIRST_PSEUDO_REGISTER)
4082 return 0;
4083
4084 if (same_regs[REGNO (x)] == -1)
4085 {
4086 same_regs[REGNO (x)] = REGNO (y);
4087 num_same_regs++;
4088
4089 /* If this is the first time we are seeing a register on the `Y'
4090 side, see if it is the last use. If not, we can't thread the
4091 jump, so mark it as not equivalent. */
4092 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
4093 return 0;
4094
4095 return 1;
4096 }
4097 else
4098 return (same_regs[REGNO (x)] == (int) REGNO (y));
4099
4100 break;
4101
4102 case MEM:
4103 /* If memory was modified, or either reference is volatile, they are not
4104 equivalent. Otherwise, compare the addresses. */
4105 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4106 return 0;
4107
4108 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4109
4110 case ASM_INPUT:
4111 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4112 return 0;
4113
4114 break;
4115
4116 case SET:
4117 /* Cancel a pending `same_regs' entry if we are setting registers already
4118 recorded as equivalent. Then compare the sources. */
4119 if (GET_CODE (SET_DEST (x)) == REG
4120 && GET_CODE (SET_DEST (y)) == REG)
4121 {
4122 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
4123 {
4124 same_regs[REGNO (SET_DEST (x))] = -1;
4125 num_same_regs--;
4126 }
4127 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
4128 return 0;
4129 }
4130 else
4131 {
4132 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
4133 return 0;
4134 }
4135
4136 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
4137
4138 case LABEL_REF:
4139 return XEXP (x, 0) == XEXP (y, 0);
4140
4141 case SYMBOL_REF:
4142 return XSTR (x, 0) == XSTR (y, 0);
4143
4144 default:
4145 break;
4146 }
4147
4148 if (x == y)
4149 return 1;
4150
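/* Fall back to a field-by-field walk.  GET_RTX_FORMAT yields one
   letter per field of this rtx code -- a PARALLEL, for instance, has
   format "E", a single vector of subexpressions -- and each letter
   selects a comparison below.  */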
4151 fmt = GET_RTX_FORMAT (code);
4152 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4153 {
4154 switch (fmt[i])
4155 {
4156 case 'w':
4157 if (XWINT (x, i) != XWINT (y, i))
4158 return 0;
4159 break;
4160
4161 case 'n':
4162 case 'i':
4163 if (XINT (x, i) != XINT (y, i))
4164 return 0;
4165 break;
4166
4167 case 'V':
4168 case 'E':
4169 /* Two vectors must have the same length. */
4170 if (XVECLEN (x, i) != XVECLEN (y, i))
4171 return 0;
4172
4173 /* And the corresponding elements must match. */
4174 for (j = 0; j < XVECLEN (x, i); j++)
4175 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
4176 XVECEXP (y, i, j), yinsn) == 0)
4177 return 0;
4178 break;
4179
4180 case 'e':
4181 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
4182 return 0;
4183 break;
4184
4185 case 'S':
4186 case 's':
4187 if (strcmp (XSTR (x, i), XSTR (y, i)))
4188 return 0;
4189 break;
4190
4191 case 'u':
4192 /* These are just backpointers, so they don't matter. */
4193 break;
4194
4195 case '0':
4196 case 't':
4197 break;
4198
4199 /* It is believed that rtx's at this level will never
4200 contain anything but integers and other rtx's,
4201 except for within LABEL_REFs and SYMBOL_REFs. */
4202 default:
4203 abort ();
4204 }
4205 }
4206 return 1;
4207 }