]>
Commit | Line | Data |
---|---|---|
15a63be1 | 1 | /* Optimize jump instructions, for GNU compiler. |
3b708058 | 2 | Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997 |
25f99665 KH |
3 | 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 |
4 | Free Software Foundation, Inc. | |
15a63be1 | 5 | |
1322177d | 6 | This file is part of GCC. |
15a63be1 | 7 | |
1322177d LB |
8 | GCC is free software; you can redistribute it and/or modify it under |
9 | the terms of the GNU General Public License as published by the Free | |
10 | Software Foundation; either version 2, or (at your option) any later | |
11 | version. | |
15a63be1 | 12 | |
1322177d LB |
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
15a63be1 RK |
17 | |
18 | You should have received a copy of the GNU General Public License | |
1322177d LB |
19 | along with GCC; see the file COPYING. If not, write to the Free |
20 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
21 | 02111-1307, USA. */ | |
15a63be1 | 22 | |
0045d504 JH |
23 | /* This is the pathetic reminder of old fame of the jump-optimization pass |
24 | of the compiler. Now it contains basically set of utility function to | |
25 | operate with jumps. | |
15a63be1 RK |
26 | |
27 | Each CODE_LABEL has a count of the times it is used | |
28 | stored in the LABEL_NUSES internal field, and each JUMP_INSN | |
29 | has one label that it refers to stored in the | |
30 | JUMP_LABEL internal field. With this we can detect labels that | |
31 | become unused because of the deletion of all the jumps that | |
32 | formerly used them. The JUMP_LABEL info is sometimes looked | |
33 | at by later passes. | |
34 | ||
9a5a17f3 | 35 | The subroutines redirect_jump and invert_jump are used |
15a63be1 RK |
36 | from other passes as well. */ |
37 | ||
38 | #include "config.h" | |
670ee920 | 39 | #include "system.h" |
4977bab6 ZW |
40 | #include "coretypes.h" |
41 | #include "tm.h" | |
15a63be1 | 42 | #include "rtl.h" |
6baf1cc8 | 43 | #include "tm_p.h" |
15a63be1 RK |
44 | #include "flags.h" |
45 | #include "hard-reg-set.h" | |
46 | #include "regs.h" | |
15a63be1 | 47 | #include "insn-config.h" |
0c63f729 | 48 | #include "insn-attr.h" |
e9a25f70 | 49 | #include "recog.h" |
49ad7cfa | 50 | #include "function.h" |
3c86a619 | 51 | #include "expr.h" |
15a63be1 | 52 | #include "real.h" |
6adb4e3a | 53 | #include "except.h" |
5f1989e6 | 54 | #include "diagnostic.h" |
2e107e9e | 55 | #include "toplev.h" |
8461e984 | 56 | #include "reload.h" |
4db384c9 | 57 | #include "predict.h" |
0d446150 | 58 | #include "timevar.h" |
15a63be1 | 59 | |
15a63be1 RK |
60 | /* Optimize jump y; x: ... y: jumpif... x? |
61 | Don't know if it is worth bothering with. */ | |
62 | /* Optimize two cases of conditional jump to conditional jump? | |
63 | This can never delete any instruction or make anything dead, | |
64 | or even change what is live at any point. | |
65 | So perhaps let combiner do it. */ | |
66 | ||
0c20a65f AJ |
67 | static void init_label_info (rtx); |
68 | static void mark_all_labels (rtx); | |
0c20a65f AJ |
69 | static void delete_computation (rtx); |
70 | static void redirect_exp_1 (rtx *, rtx, rtx, rtx); | |
0a634832 | 71 | static int invert_exp_1 (rtx, rtx); |
0c20a65f AJ |
72 | static int returnjump_p_1 (rtx *, void *); |
73 | static void delete_prior_computation (rtx, rtx); | |
0a1c58a2 | 74 | \f |
/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  F is the first insn of the chain to process.  */
void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  /* Reset LABEL_NUSES/JUMP_LABEL, then recompute them over the chain.  */
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  /* forced_labels appears to be a list chain walked via XEXP (x, 1)
     with the label in XEXP (x, 0) -- NOTE(review): declared elsewhere,
     confirm it is an EXPR_LIST/INSN_LIST.  */
  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
01f62f01 JH |
97 | /* Some old code expects exactly one BARRIER as the NEXT_INSN of a |
98 | non-fallthru insn. This is not generally true, as multiple barriers | |
99 | may have crept in, or the BARRIER may be separated from the last | |
100 | real insn by one or more NOTEs. | |
101 | ||
102 | This simple pass moves barriers and removes duplicates so that the | |
103 | old code is happy. | |
104 | */ | |
105 | void | |
0c20a65f | 106 | cleanup_barriers (void) |
01f62f01 JH |
107 | { |
108 | rtx insn, next, prev; | |
109 | for (insn = get_insns (); insn; insn = next) | |
110 | { | |
111 | next = NEXT_INSN (insn); | |
4b4bf941 | 112 | if (BARRIER_P (insn)) |
01f62f01 JH |
113 | { |
114 | prev = prev_nonnote_insn (insn); | |
4b4bf941 | 115 | if (BARRIER_P (prev)) |
f014fc47 | 116 | delete_insn (insn); |
01f62f01 JH |
117 | else if (prev != PREV_INSN (insn)) |
118 | reorder_insns (insn, insn, prev); | |
119 | } | |
120 | } | |
121 | } | |
/* Remove redundant source-line notes from the insn chain starting at F:
   a line note identical to the previous live line note is deleted.  */
void
purge_line_number_notes (rtx f)
{
  rtx last_note = 0;	/* Most recent line note still considered live.  */
  rtx insn;
  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (NOTE_P (insn))
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
	  /* Any previous line note was for the prologue; gdb wants a new
	     note after the prologue even if it is for the same line.  */
	  last_note = NULL_RTX;
	else if (NOTE_LINE_NUMBER (insn) >= 0)
	  {
	    /* Delete this note if it is identical to previous note.
	       Identity is by source location (mapped-location builds)
	       or by file pointer plus line number otherwise.  */
	    if (last_note
#ifdef USE_MAPPED_LOCATION
		&& NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last_note)
#else
		&& NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
		&& NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note)
#endif
		)
	      {
		delete_related_insns (insn);
		continue;
	      }

	    last_note = insn;
	  }
      }
}
/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  F is the first
   insn of the chain.  */
static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      /* Preserved labels start with one implicit use so they are never
	 deleted; all others start at zero.  */
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    else if (JUMP_P (insn))
      JUMP_LABEL (insn) = 0;
    else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
      {
	rtx note, next;

	/* Fetch NEXT before a possible remove_note unlinks NOTE.  */
	for (note = REG_NOTES (insn); note; note = next)
	  {
	    next = XEXP (note, 1);
	    if (REG_NOTE_KIND (note) == REG_LABEL
		&& ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
	      remove_note (insn, note);
	  }
      }
}
187 | ||
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	mark_jump_label (PATTERN (insn), insn, 0);
	/* mark_jump_label may have deleted the insn; re-check.  */
	if (! INSN_DELETED_P (insn) && JUMP_P (insn))
	  {
	    /* When we know the LABEL_REF contained in a REG used in
	       an indirect jump, we'll have a REG_LABEL note so that
	       flow can tell where it's going.  */
	    if (JUMP_LABEL (insn) == 0)
	      {
		rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
		if (label_note)
		  {
		    /* Put a LABEL_REF around the REG_LABEL note, so
		       that we can canonicalize it.  */
		    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
						       XEXP (label_note, 0));

		    mark_jump_label (label_ref, insn, 0);
		    /* Copy the (possibly canonicalized) label back into
		       the note and into JUMP_LABEL.  */
		    XEXP (label_note, 0) = XEXP (label_ref, 0);
		    JUMP_LABEL (insn) = XEXP (label_note, 0);
		  }
	      }
	  }
      }
}
15a63be1 | 223 | \f |
/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
   notes between START and END out before START.  START and END may be such
   notes.  Returns the values of the new starting and ending insns, which
   may be different if the original ones were such notes.
   Return true if there were only such notes and no real instructions.  */

bool
squeeze_notes (rtx* startp, rtx* endp)
{
  rtx start = *startp;
  rtx end = *endp;

  rtx insn;
  rtx next;
  rtx last = NULL;		/* Last non-note insn seen, becomes new END.  */
  rtx past_end = NEXT_INSN (end);

  for (insn = start; insn != past_end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END))
	{
	  /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass.  */
	  gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
		      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);

	  if (insn == start)
	    /* Note at the head: just advance START past it.  */
	    start = next;
	  else
	    {
	      /* Unlink INSN and splice it in immediately before START.
		 The order of these pointer updates is exact.  */
	      rtx prev = PREV_INSN (insn);
	      PREV_INSN (insn) = PREV_INSN (start);
	      NEXT_INSN (insn) = start;
	      NEXT_INSN (PREV_INSN (insn)) = insn;
	      PREV_INSN (NEXT_INSN (insn)) = insn;
	      NEXT_INSN (prev) = next;
	      PREV_INSN (next) = prev;
	    }
	}
      else
	last = insn;
    }

  /* There were no real instructions.  */
  if (start == past_end)
    return true;

  end = last;

  *startp = start;
  *endp = end;
  return false;
}
281 | \f | |
15a63be1 RK |
282 | /* Return the label before INSN, or put a new label there. */ |
283 | ||
284 | rtx | |
0c20a65f | 285 | get_label_before (rtx insn) |
15a63be1 RK |
286 | { |
287 | rtx label; | |
288 | ||
289 | /* Find an existing label at this point | |
290 | or make a new one if there is none. */ | |
291 | label = prev_nonnote_insn (insn); | |
292 | ||
4b4bf941 | 293 | if (label == 0 || !LABEL_P (label)) |
15a63be1 RK |
294 | { |
295 | rtx prev = PREV_INSN (insn); | |
296 | ||
15a63be1 RK |
297 | label = gen_label_rtx (); |
298 | emit_label_after (label, prev); | |
299 | LABEL_NUSES (label) = 0; | |
300 | } | |
301 | return label; | |
302 | } | |
303 | ||
304 | /* Return the label after INSN, or put a new label there. */ | |
305 | ||
306 | rtx | |
0c20a65f | 307 | get_label_after (rtx insn) |
15a63be1 RK |
308 | { |
309 | rtx label; | |
310 | ||
311 | /* Find an existing label at this point | |
312 | or make a new one if there is none. */ | |
313 | label = next_nonnote_insn (insn); | |
314 | ||
4b4bf941 | 315 | if (label == 0 || !LABEL_P (label)) |
15a63be1 | 316 | { |
15a63be1 RK |
317 | label = gen_label_rtx (); |
318 | emit_label_after (label, insn); | |
319 | LABEL_NUSES (label) = 0; | |
320 | } | |
321 | return label; | |
322 | } | |
323 | \f | |
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we are having CC_MODE compare and we don't
   know whether its source is floating point or integer comparison.  Machine
   description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
   to help this function avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if machine description supplies us way to reverse the
     comparison.  Give it priority over everything else to allow
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
	 This code is expensive, but with sane machine description it
	 will be never used, since REVERSIBLE_CC_MODE will return true
	 in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* Walk backwards from INSN (stopping at a label) looking for the
	 SET that produced the flags value in ARG0.  */
      for (prev = prev_nonnote_insn (insn);
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (prev))
	{
	  rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  /* Found the real comparison: take the mode from its
		     first operand (or second if the first is VOIDmode).  */
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for model
		 of i387 comparisons that first move flag registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If register is clobbered in some ununderstandable way,
	     give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
440 | ||
b20b352b | 441 | /* A wrapper around the previous function to take COMPARISON as rtx |
ab94bc48 JH |
442 | expression. This simplifies many callers. */ |
443 | enum rtx_code | |
0c20a65f | 444 | reversed_comparison_code (rtx comparison, rtx insn) |
ab94bc48 | 445 | { |
ec8e098d | 446 | if (!COMPARISON_P (comparison)) |
ab94bc48 JH |
447 | return UNKNOWN; |
448 | return reversed_comparison_code_parts (GET_CODE (comparison), | |
449 | XEXP (comparison, 0), | |
450 | XEXP (comparison, 1), insn); | |
451 | } | |
14f02e73 PB |
452 | |
453 | /* Return comparison with reversed code of EXP. | |
454 | Return NULL_RTX in case we fail to do the reversal. */ | |
455 | rtx | |
456 | reversed_comparison (rtx exp, enum machine_mode mode) | |
457 | { | |
458 | enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX); | |
459 | if (reversed_code == UNKNOWN) | |
460 | return NULL_RTX; | |
461 | else | |
462 | return simplify_gen_relational (reversed_code, mode, VOIDmode, | |
463 | XEXP (exp, 0), XEXP (exp, 1)); | |
464 | } | |
465 | ||
ab94bc48 | 466 | \f |
1eb8759b RH |
467 | /* Given an rtx-code for a comparison, return the code for the negated |
468 | comparison. If no such code exists, return UNKNOWN. | |
469 | ||
470 | WATCH OUT! reverse_condition is not safe to use on a jump that might | |
471 | be acting on the results of an IEEE floating point comparison, because | |
48b881a3 | 472 | of the special treatment of non-signaling nans in comparisons. |
ab94bc48 | 473 | Use reversed_comparison_code instead. */ |
15a63be1 RK |
474 | |
475 | enum rtx_code | |
0c20a65f | 476 | reverse_condition (enum rtx_code code) |
15a63be1 RK |
477 | { |
478 | switch (code) | |
479 | { | |
480 | case EQ: | |
481 | return NE; | |
15a63be1 RK |
482 | case NE: |
483 | return EQ; | |
15a63be1 RK |
484 | case GT: |
485 | return LE; | |
15a63be1 RK |
486 | case GE: |
487 | return LT; | |
15a63be1 RK |
488 | case LT: |
489 | return GE; | |
15a63be1 RK |
490 | case LE: |
491 | return GT; | |
15a63be1 RK |
492 | case GTU: |
493 | return LEU; | |
15a63be1 RK |
494 | case GEU: |
495 | return LTU; | |
15a63be1 RK |
496 | case LTU: |
497 | return GEU; | |
15a63be1 RK |
498 | case LEU: |
499 | return GTU; | |
1eb8759b RH |
500 | case UNORDERED: |
501 | return ORDERED; | |
502 | case ORDERED: | |
503 | return UNORDERED; | |
504 | ||
505 | case UNLT: | |
506 | case UNLE: | |
507 | case UNGT: | |
508 | case UNGE: | |
509 | case UNEQ: | |
7913f3d0 | 510 | case LTGT: |
1eb8759b | 511 | return UNKNOWN; |
15a63be1 RK |
512 | |
513 | default: | |
41806d92 | 514 | gcc_unreachable (); |
15a63be1 RK |
515 | } |
516 | } | |
517 | ||
7913f3d0 RH |
518 | /* Similar, but we're allowed to generate unordered comparisons, which |
519 | makes it safe for IEEE floating-point. Of course, we have to recognize | |
520 | that the target will support them too... */ | |
521 | ||
522 | enum rtx_code | |
0c20a65f | 523 | reverse_condition_maybe_unordered (enum rtx_code code) |
7913f3d0 | 524 | { |
7913f3d0 RH |
525 | switch (code) |
526 | { | |
527 | case EQ: | |
528 | return NE; | |
529 | case NE: | |
530 | return EQ; | |
531 | case GT: | |
532 | return UNLE; | |
533 | case GE: | |
534 | return UNLT; | |
535 | case LT: | |
536 | return UNGE; | |
537 | case LE: | |
538 | return UNGT; | |
539 | case LTGT: | |
540 | return UNEQ; | |
7913f3d0 RH |
541 | case UNORDERED: |
542 | return ORDERED; | |
543 | case ORDERED: | |
544 | return UNORDERED; | |
545 | case UNLT: | |
546 | return GE; | |
547 | case UNLE: | |
548 | return GT; | |
549 | case UNGT: | |
550 | return LE; | |
551 | case UNGE: | |
552 | return LT; | |
553 | case UNEQ: | |
554 | return LTGT; | |
555 | ||
556 | default: | |
41806d92 | 557 | gcc_unreachable (); |
7913f3d0 RH |
558 | } |
559 | } | |
560 | ||
15a63be1 RK |
561 | /* Similar, but return the code when two operands of a comparison are swapped. |
562 | This IS safe for IEEE floating-point. */ | |
563 | ||
564 | enum rtx_code | |
0c20a65f | 565 | swap_condition (enum rtx_code code) |
15a63be1 RK |
566 | { |
567 | switch (code) | |
568 | { | |
569 | case EQ: | |
570 | case NE: | |
1eb8759b RH |
571 | case UNORDERED: |
572 | case ORDERED: | |
573 | case UNEQ: | |
7913f3d0 | 574 | case LTGT: |
15a63be1 RK |
575 | return code; |
576 | ||
577 | case GT: | |
578 | return LT; | |
15a63be1 RK |
579 | case GE: |
580 | return LE; | |
15a63be1 RK |
581 | case LT: |
582 | return GT; | |
15a63be1 RK |
583 | case LE: |
584 | return GE; | |
15a63be1 RK |
585 | case GTU: |
586 | return LTU; | |
15a63be1 RK |
587 | case GEU: |
588 | return LEU; | |
15a63be1 RK |
589 | case LTU: |
590 | return GTU; | |
15a63be1 RK |
591 | case LEU: |
592 | return GEU; | |
1eb8759b RH |
593 | case UNLT: |
594 | return UNGT; | |
595 | case UNLE: | |
596 | return UNGE; | |
597 | case UNGT: | |
598 | return UNLT; | |
599 | case UNGE: | |
600 | return UNLE; | |
601 | ||
15a63be1 | 602 | default: |
41806d92 | 603 | gcc_unreachable (); |
15a63be1 RK |
604 | } |
605 | } | |
606 | ||
607 | /* Given a comparison CODE, return the corresponding unsigned comparison. | |
608 | If CODE is an equality comparison or already an unsigned comparison, | |
609 | CODE is returned. */ | |
610 | ||
611 | enum rtx_code | |
0c20a65f | 612 | unsigned_condition (enum rtx_code code) |
15a63be1 RK |
613 | { |
614 | switch (code) | |
615 | { | |
616 | case EQ: | |
617 | case NE: | |
618 | case GTU: | |
619 | case GEU: | |
620 | case LTU: | |
621 | case LEU: | |
622 | return code; | |
623 | ||
624 | case GT: | |
625 | return GTU; | |
15a63be1 RK |
626 | case GE: |
627 | return GEU; | |
15a63be1 RK |
628 | case LT: |
629 | return LTU; | |
15a63be1 RK |
630 | case LE: |
631 | return LEU; | |
632 | ||
633 | default: | |
41806d92 | 634 | gcc_unreachable (); |
15a63be1 RK |
635 | } |
636 | } | |
637 | ||
638 | /* Similarly, return the signed version of a comparison. */ | |
639 | ||
640 | enum rtx_code | |
0c20a65f | 641 | signed_condition (enum rtx_code code) |
15a63be1 RK |
642 | { |
643 | switch (code) | |
644 | { | |
645 | case EQ: | |
646 | case NE: | |
647 | case GT: | |
648 | case GE: | |
649 | case LT: | |
650 | case LE: | |
651 | return code; | |
652 | ||
653 | case GTU: | |
654 | return GT; | |
15a63be1 RK |
655 | case GEU: |
656 | return GE; | |
15a63be1 RK |
657 | case LTU: |
658 | return LT; | |
15a63be1 RK |
659 | case LEU: |
660 | return LE; | |
661 | ||
662 | default: | |
41806d92 | 663 | gcc_unreachable (); |
15a63be1 RK |
664 | } |
665 | } | |
666 | \f | |
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to revert
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  /* For each CODE1, list every strictly weaker CODE2 it implies.
     Note the table is IEEE-aware: e.g. LT implies ORDERED, while LTU
     implies NE but not ORDERED.  */
  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
	return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
	return 1;
      break;

    default:
      break;
    }

  return 0;
}
748 | \f | |
749 | /* Return 1 if INSN is an unconditional jump and nothing else. */ | |
750 | ||
751 | int | |
0c20a65f | 752 | simplejump_p (rtx insn) |
15a63be1 | 753 | { |
4b4bf941 | 754 | return (JUMP_P (insn) |
3c74f8f9 RH |
755 | && GET_CODE (PATTERN (insn)) == SET |
756 | && GET_CODE (SET_DEST (PATTERN (insn))) == PC | |
757 | && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF); | |
15a63be1 RK |
758 | } |
759 | ||
760 | /* Return nonzero if INSN is a (possibly) conditional jump | |
48b881a3 KH |
761 | and nothing more. |
762 | ||
1f52178b | 763 | Use of this function is deprecated, since we need to support combined |
d781a164 | 764 | branch and compare insns. Use any_condjump_p instead whenever possible. */ |
15a63be1 RK |
765 | |
766 | int | |
0c20a65f | 767 | condjump_p (rtx insn) |
15a63be1 | 768 | { |
b3694847 | 769 | rtx x = PATTERN (insn); |
c5c76735 JL |
770 | |
771 | if (GET_CODE (x) != SET | |
772 | || GET_CODE (SET_DEST (x)) != PC) | |
3480bb98 | 773 | return 0; |
c5c76735 JL |
774 | |
775 | x = SET_SRC (x); | |
776 | if (GET_CODE (x) == LABEL_REF) | |
3480bb98 | 777 | return 1; |
48b881a3 KH |
778 | else |
779 | return (GET_CODE (x) == IF_THEN_ELSE | |
780 | && ((GET_CODE (XEXP (x, 2)) == PC | |
781 | && (GET_CODE (XEXP (x, 1)) == LABEL_REF | |
782 | || GET_CODE (XEXP (x, 1)) == RETURN)) | |
783 | || (GET_CODE (XEXP (x, 1)) == PC | |
784 | && (GET_CODE (XEXP (x, 2)) == LABEL_REF | |
785 | || GET_CODE (XEXP (x, 2)) == RETURN)))); | |
3480bb98 JL |
786 | } |
787 | ||
c5c76735 | 788 | /* Return nonzero if INSN is a (possibly) conditional jump inside a |
e4c85816 | 789 | PARALLEL. |
48b881a3 | 790 | |
d781a164 RH |
791 | Use this function is deprecated, since we need to support combined |
792 | branch and compare insns. Use any_condjump_p instead whenever possible. */ | |
3480bb98 JL |
793 | |
794 | int | |
0c20a65f | 795 | condjump_in_parallel_p (rtx insn) |
3480bb98 | 796 | { |
b3694847 | 797 | rtx x = PATTERN (insn); |
3480bb98 JL |
798 | |
799 | if (GET_CODE (x) != PARALLEL) | |
800 | return 0; | |
801 | else | |
802 | x = XVECEXP (x, 0, 0); | |
803 | ||
15a63be1 RK |
804 | if (GET_CODE (x) != SET) |
805 | return 0; | |
806 | if (GET_CODE (SET_DEST (x)) != PC) | |
807 | return 0; | |
808 | if (GET_CODE (SET_SRC (x)) == LABEL_REF) | |
809 | return 1; | |
810 | if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE) | |
811 | return 0; | |
812 | if (XEXP (SET_SRC (x), 2) == pc_rtx | |
813 | && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF | |
814 | || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN)) | |
815 | return 1; | |
816 | if (XEXP (SET_SRC (x), 1) == pc_rtx | |
817 | && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF | |
818 | || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN)) | |
819 | return 1; | |
820 | return 0; | |
821 | } | |
822 | ||
d781a164 RH |
823 | /* Return set of PC, otherwise NULL. */ |
824 | ||
e4c85816 | 825 | rtx |
0c20a65f | 826 | pc_set (rtx insn) |
e4c85816 JH |
827 | { |
828 | rtx pat; | |
4b4bf941 | 829 | if (!JUMP_P (insn)) |
d781a164 | 830 | return NULL_RTX; |
e4c85816 | 831 | pat = PATTERN (insn); |
d781a164 RH |
832 | |
833 | /* The set is allowed to appear either as the insn pattern or | |
834 | the first set in a PARALLEL. */ | |
835 | if (GET_CODE (pat) == PARALLEL) | |
836 | pat = XVECEXP (pat, 0, 0); | |
e4c85816 JH |
837 | if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC) |
838 | return pat; | |
d781a164 RH |
839 | |
840 | return NULL_RTX; | |
e4c85816 JH |
841 | } |
842 | ||
d781a164 RH |
843 | /* Return true when insn is an unconditional direct jump, |
844 | possibly bundled inside a PARALLEL. */ | |
845 | ||
e4c85816 | 846 | int |
0c20a65f | 847 | any_uncondjump_p (rtx insn) |
e4c85816 JH |
848 | { |
849 | rtx x = pc_set (insn); | |
850 | if (!x) | |
851 | return 0; | |
852 | if (GET_CODE (SET_SRC (x)) != LABEL_REF) | |
853 | return 0; | |
6de9cd9a DN |
854 | if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX)) |
855 | return 0; | |
e4c85816 JH |
856 | return 1; |
857 | } | |
858 | ||
d781a164 | 859 | /* Return true when insn is a conditional jump. This function works for |
e4c85816 JH |
860 | instructions containing PC sets in PARALLELs. The instruction may have |
861 | various other effects so before removing the jump you must verify | |
5527bf14 | 862 | onlyjump_p. |
e4c85816 | 863 | |
d781a164 RH |
864 | Note that unlike condjump_p it returns false for unconditional jumps. */ |
865 | ||
e4c85816 | 866 | int |
0c20a65f | 867 | any_condjump_p (rtx insn) |
e4c85816 JH |
868 | { |
869 | rtx x = pc_set (insn); | |
d781a164 RH |
870 | enum rtx_code a, b; |
871 | ||
e4c85816 JH |
872 | if (!x) |
873 | return 0; | |
d781a164 RH |
874 | if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE) |
875 | return 0; | |
e4c85816 | 876 | |
d781a164 RH |
877 | a = GET_CODE (XEXP (SET_SRC (x), 1)); |
878 | b = GET_CODE (XEXP (SET_SRC (x), 2)); | |
e4c85816 | 879 | |
d781a164 | 880 | return ((b == PC && (a == LABEL_REF || a == RETURN)) |
48b881a3 | 881 | || (a == PC && (b == LABEL_REF || b == RETURN))); |
e4c85816 JH |
882 | } |
883 | ||
d804ed43 RH |
884 | /* Return the label of a conditional jump. */ |
885 | ||
886 | rtx | |
0c20a65f | 887 | condjump_label (rtx insn) |
d804ed43 | 888 | { |
d781a164 | 889 | rtx x = pc_set (insn); |
d804ed43 | 890 | |
d781a164 | 891 | if (!x) |
d804ed43 RH |
892 | return NULL_RTX; |
893 | x = SET_SRC (x); | |
894 | if (GET_CODE (x) == LABEL_REF) | |
895 | return x; | |
896 | if (GET_CODE (x) != IF_THEN_ELSE) | |
897 | return NULL_RTX; | |
898 | if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF) | |
899 | return XEXP (x, 1); | |
900 | if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF) | |
901 | return XEXP (x, 2); | |
902 | return NULL_RTX; | |
903 | } | |
904 | ||
e881bb1b RH |
905 | /* Return true if INSN is a (possibly conditional) return insn. */ |
906 | ||
907 | static int | |
0c20a65f | 908 | returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED) |
e881bb1b RH |
909 | { |
910 | rtx x = *loc; | |
3258e996 RK |
911 | |
912 | return x && (GET_CODE (x) == RETURN | |
913 | || (GET_CODE (x) == SET && SET_IS_RETURN_P (x))); | |
e881bb1b RH |
914 | } |
915 | ||
916 | int | |
0c20a65f | 917 | returnjump_p (rtx insn) |
e881bb1b | 918 | { |
4b4bf941 | 919 | if (!JUMP_P (insn)) |
f5540cd4 | 920 | return 0; |
e881bb1b RH |
921 | return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL); |
922 | } | |
923 | ||
d0e80719 RH |
924 | /* Return true if INSN is a jump that only transfers control and |
925 | nothing more. */ | |
926 | ||
927 | int | |
0c20a65f | 928 | onlyjump_p (rtx insn) |
d0e80719 RH |
929 | { |
930 | rtx set; | |
931 | ||
4b4bf941 | 932 | if (!JUMP_P (insn)) |
d0e80719 RH |
933 | return 0; |
934 | ||
935 | set = single_set (insn); | |
936 | if (set == NULL) | |
937 | return 0; | |
938 | if (GET_CODE (SET_DEST (set)) != PC) | |
939 | return 0; | |
940 | if (side_effects_p (SET_SRC (set))) | |
941 | return 0; | |
942 | ||
943 | return 1; | |
944 | } | |
945 | ||
51d87cd9 BS |
946 | #ifdef HAVE_cc0 |
947 | ||
cc2902df | 948 | /* Return nonzero if X is an RTX that only sets the condition codes |
44ce0063 JW |
949 | and has no side effects. */ |
950 | ||
951 | int | |
0c20a65f | 952 | only_sets_cc0_p (rtx x) |
44ce0063 | 953 | { |
44ce0063 JW |
954 | if (! x) |
955 | return 0; | |
956 | ||
957 | if (INSN_P (x)) | |
958 | x = PATTERN (x); | |
959 | ||
960 | return sets_cc0_p (x) == 1 && ! side_effects_p (x); | |
961 | } | |
962 | ||
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.
   Return 0 if X does not set cc0 at all.  X may be an insn or a
   bare pattern.  */

int
sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  /* Simple case: a lone SET of cc0.  */
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      /* Scan all elements; note both whether cc0 is set and whether
	 any other SET is present (USE/CLOBBER do not count as
	 "other things").  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif

/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   Return null if the chain ultimately leads to a return instruction.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */

rtx
follow_jumps (rtx label)
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  /* The loop condition performs the real work: keep following while
     the next active insn is a bare unconditional jump (or a return)
     followed by a BARRIER.  Depth is capped at 10 to guarantee
     termination even on pathological chains.  */
  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& JUMP_P (insn)
	&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
	     && onlyjump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& BARRIER_P (next));
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (NOTE_P (tem)
	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
		  /* ??? Optional.  Disables some optimizations, but makes
		     gcov output more accurate with -O.  */
		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      /* Stop short of a jump table; jumping into one is not useful.  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  /* Hitting the depth cap means "couldn't find the end"; report LABEL
     so the caller leaves the insn alone.  */
  if (depth == 10)
    return label;
  return value;
}
1056 | ||

/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   IN_MEM is nonzero while recursing inside a MEM; only then are
   SYMBOL_REFs examined (for constant-pool label references).

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* These codes can never contain a label reference; prune the
       recursion here.  */
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	XEXP (x, 0) = label;
	/* Deleted insns do not count toward the label's use count.  */
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (JUMP_P (insn))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		/* Add a REG_LABEL note for LABEL unless there already
		   is one.  All uses of a label, except for labels
		   that are the targets of jumps, must have a
		   REG_LABEL note.  */
		if (! find_reg_note (insn, REG_LABEL, label))
		  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
							REG_NOTES (insn));
	      }
	  }
	return;
      }

      /* Do walk the labels in a vector, but not the first operand of an
	 ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  /* Pass NULL_RTX so vector entries update label counts only,
	     without touching JUMP_LABEL or REG_NOTES.  */
	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
	}
      return;

    default:
      break;
    }

  /* Generic recursion over all subexpressions of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
	}
    }
}
1170 | ||
1171 | /* If all INSN does is set the pc, delete it, | |
1172 | and delete the insn that set the condition codes for it | |
1173 | if that's what the previous thing was. */ | |
1174 | ||
1175 | void | |
0c20a65f | 1176 | delete_jump (rtx insn) |
15a63be1 | 1177 | { |
b3694847 | 1178 | rtx set = single_set (insn); |
3e5478ea RK |
1179 | |
1180 | if (set && GET_CODE (SET_DEST (set)) == PC) | |
1181 | delete_computation (insn); | |
1182 | } | |
1183 | ||
/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.

   Walks backwards over non-note insns; stops at anything it cannot
   analyze (complex calls, SEQUENCEs, reorg USEs).  */

static void
delete_prior_computation (rtx note, rtx insn)
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (NONJUMP_INSN_P (our_prev)
		    || CALL_P (our_prev));
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
	 or the callee pops the arguments, then give up.  */
      if (CALL_P (our_prev)
	  && (! CONST_OR_PURE_CALL_P (our_prev)
	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
	break;

      /* If we reach a SEQUENCE, it is too complex to try to
	 do anything with it, so give up.  We can be run during
	 and after reorg, so SEQUENCE rtl can legitimately show
	 up here.  */
      if (GET_CODE (pat) == SEQUENCE)
	break;

      if (GET_CODE (pat) == USE
	  && NONJUMP_INSN_P (XEXP (pat, 0)))
	/* reorg creates USEs that look like this.  We leave them
	   alone because reorg needs them for its own purposes.  */
	break;

      if (reg_set_p (reg, pat))
	{
	  /* Side effects (other than a call's) make the setter
	     undeletable.  */
	  if (side_effects_p (pat) && !CALL_P (our_prev))
	    break;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* If we find a SET of something else, we can't
		 delete the insn.  */

	      int i;

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		{
		  rtx part = XVECEXP (pat, 0, i);

		  if (GET_CODE (part) == SET
		      && SET_DEST (part) != reg)
		    break;
		}

	      if (i == XVECLEN (pat, 0))
		delete_computation (our_prev);
	    }
	  else if (GET_CODE (pat) == SET
		   && REG_P (SET_DEST (pat)))
	    {
	      /* Compute hard-register ranges; pseudos occupy a single
		 "slot" (nregs of 1).  */
	      int dest_regno = REGNO (SET_DEST (pat));
	      int dest_endregno
		= (dest_regno
		   + (dest_regno < FIRST_PSEUDO_REGISTER
		      ? hard_regno_nregs[dest_regno]
					[GET_MODE (SET_DEST (pat))] : 1));
	      int regno = REGNO (reg);
	      int endregno
		= (regno
		   + (regno < FIRST_PSEUDO_REGISTER
		      ? hard_regno_nregs[regno][GET_MODE (reg)] : 1));

	      /* Setter's range fully inside the dying range: dead.  */
	      if (dest_regno >= regno
		  && dest_endregno <= endregno)
		delete_computation (our_prev);

	      /* We may have a multi-word hard register and some, but not
		 all, of the words of the register are needed in subsequent
		 insns.  Write REG_UNUSED notes for those parts that were not
		 needed.  */
	      else if (dest_regno <= regno
		       && dest_endregno >= endregno)
		{
		  int i;

		  REG_NOTES (our_prev)
		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
					 REG_NOTES (our_prev));

		  /* Only delete once every word of the destination is
		     known unused.  */
		  for (i = dest_regno; i < dest_endregno; i++)
		    if (! find_regno_note (our_prev, REG_UNUSED, i))
		      break;

		  if (i == dest_endregno)
		    delete_computation (our_prev);
		}
	    }

	  break;
	}

      /* If PAT references the register that dies here, it is an
	 additional use.  Hence any prior SET isn't dead.  However, this
	 insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
	{
	  XEXP (note, 1) = REG_NOTES (our_prev);
	  REG_NOTES (our_prev) = note;
	  break;
	}
    }
}
1300 | ||
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (rtx insn)
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && NONJUMP_INSN_P (prev)
	  && sets_cc0_p (PATTERN (prev)))
	{
	  /* sets_cc0_p > 0 means the insn does nothing but set cc0.  */
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && ! side_effects_p (PATTERN (prev)))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
						  cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  /* NEXT is captured before delete_prior_computation may re-link the
     note onto another insn's note list.  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || !REG_P (XEXP (note, 0)))
	continue;

      delete_prior_computation (note, insn);
    }

  delete_related_insns (insn);
}
1357 | \f | |
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this instruction is deprecated.  Use delete_insn instead and
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      /* delete_insn above already decremented the label's use count;
	 a count of zero here means this jump was the last user.  */
      if (LABEL_NUSES (lab) == 0)
	{
	  /* This can delete NEXT or PREV,
	     either directly if NEXT is JUMP_LABEL (INSN),
	     or indirectly through more levels of jumps.  */
	  delete_related_insns (lab);

	  /* I feel a little doubtful about this loop,
	     but I see no clean and sure alternative way
	     to find the first insn after INSN that is not now deleted.
	     I hope this works.  */
	  while (next && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  return next;
	}
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Delete every table entry's label that is no longer used.  */
      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (NONJUMP_INSN_P (insn) || CALL_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  /* Skip back over deleted insns and notes to the live predecessor.  */
  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  return next;
}
15a63be1 RK |
1488 | \f |
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.

   NOTEs in the range are kept in the chain; only real insns are
   marked deleted and unlinked.  */

void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
	{
	  INSN_DELETED_P (insn) = 1;

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    NEXT_INSN (prev) = next;

	  if (next)
	    PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
1528 | \f | |
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group
   (via validate_change with in_group == 1); nothing is applied here.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  rtx n;
	  /* A null NLABEL converts the jump into a return.  */
	  if (nlabel)
	    n = gen_rtx_LABEL_REF (Pmode, nlabel);
	  else
	    n = gen_rtx_RETURN (VOIDmode);

	  validate_change (insn, loc, n, 1);
	  return;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      /* Converting a return into a jump (or rebuilding the return).  */
      if (nlabel)
	x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
	x = gen_rtx_RETURN (VOIDmode);
      /* At the pattern's top level a bare target must be wrapped in
	 a SET of the PC.  */
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* A whole (set (pc) (label_ref OLABEL)) collapses to RETURN when
     NLABEL is null.  */
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  /* Recurse over all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
15a63be1 | 1587 | |
2ea64f10 RH |
1588 | /* Make JUMP go to NLABEL instead of where it jumps now. Accrue |
1589 | the modifications into the change group. Return false if we did | |
1590 | not see how to do that. */ | |
1591 | ||
1592 | int | |
0c20a65f | 1593 | redirect_jump_1 (rtx jump, rtx nlabel) |
2ea64f10 RH |
1594 | { |
1595 | int ochanges = num_validated_changes (); | |
742dff15 JH |
1596 | rtx *loc; |
1597 | ||
1598 | if (GET_CODE (PATTERN (jump)) == PARALLEL) | |
1599 | loc = &XVECEXP (PATTERN (jump), 0, 0); | |
1600 | else | |
1601 | loc = &PATTERN (jump); | |
1602 | ||
1603 | redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump); | |
2ea64f10 RH |
1604 | return num_validated_changes () > ochanges; |
1605 | } | |
1606 | ||
1607 | /* Make JUMP go to NLABEL instead of where it jumps now. If the old | |
1608 | jump target label is unused as a result, it and the code following | |
1609 | it may be deleted. | |
15a63be1 RK |
1610 | |
1611 | If NLABEL is zero, we are to turn the jump into a (possibly conditional) | |
1612 | RETURN insn. | |
1613 | ||
2ea64f10 RH |
1614 | The return value will be 1 if the change was made, 0 if it wasn't |
1615 | (this can only occur for NLABEL == 0). */ | |
15a63be1 RK |
1616 | |
1617 | int | |
0c20a65f | 1618 | redirect_jump (rtx jump, rtx nlabel, int delete_unused) |
15a63be1 | 1619 | { |
b3694847 | 1620 | rtx olabel = JUMP_LABEL (jump); |
15a63be1 RK |
1621 | |
1622 | if (nlabel == olabel) | |
1623 | return 1; | |
1624 | ||
0a634832 | 1625 | if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ()) |
15a63be1 RK |
1626 | return 0; |
1627 | ||
0a634832 R |
1628 | redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0); |
1629 | return 1; | |
1630 | } | |
1631 | ||
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.  If DELETE_UNUSED is non-negative, copy a
   NOTE_INSN_FUNCTION_END found after OLABEL to the place after NLABEL.
   If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
   count has dropped to zero.  INVERT is nonzero when the caller inverted
   the jump's condition (affects REG_EQUAL notes and branch
   probabilities).  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      /* If the note can't be kept consistent, drop it entirely.  */
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && NOTE_P (NEXT_INSN (olabel))
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END
      && delete_unused >= 0)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}
1675 | ||
0a634832 R |
1676 | /* Invert the jump condition X contained in jump insn INSN. Accrue the |
1677 | modifications into the change group. Return nonzero for success. */ | |
1678 | static int | |
1679 | invert_exp_1 (rtx x, rtx insn) | |
2ea64f10 | 1680 | { |
0a634832 | 1681 | RTX_CODE code = GET_CODE (x); |
2ea64f10 RH |
1682 | |
1683 | if (code == IF_THEN_ELSE) | |
1684 | { | |
b3694847 SS |
1685 | rtx comp = XEXP (x, 0); |
1686 | rtx tem; | |
261efdef | 1687 | enum rtx_code reversed_code; |
2ea64f10 RH |
1688 | |
1689 | /* We can do this in two ways: The preferable way, which can only | |
1690 | be done if this is not an integer comparison, is to reverse | |
1691 | the comparison code. Otherwise, swap the THEN-part and ELSE-part | |
1692 | of the IF_THEN_ELSE. If we can't do either, fail. */ | |
1693 | ||
261efdef JH |
1694 | reversed_code = reversed_comparison_code (comp, insn); |
1695 | ||
1696 | if (reversed_code != UNKNOWN) | |
2ea64f10 RH |
1697 | { |
1698 | validate_change (insn, &XEXP (x, 0), | |
261efdef | 1699 | gen_rtx_fmt_ee (reversed_code, |
2ea64f10 RH |
1700 | GET_MODE (comp), XEXP (comp, 0), |
1701 | XEXP (comp, 1)), | |
1702 | 1); | |
0a634832 | 1703 | return 1; |
2ea64f10 | 1704 | } |
48b881a3 | 1705 | |
2ea64f10 RH |
1706 | tem = XEXP (x, 1); |
1707 | validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1); | |
1708 | validate_change (insn, &XEXP (x, 2), tem, 1); | |
0a634832 | 1709 | return 1; |
2ea64f10 | 1710 | } |
742dff15 | 1711 | else |
2ea64f10 | 1712 | return 0; |
2ea64f10 RH |
1713 | } |
1714 | ||
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  /* JUMP must contain a PC set and its condition must be invertible;
     either failure indicates a caller bug.  */
  gcc_assert (x);
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}
1739 | ||
1740 | /* Invert the condition of the jump JUMP, and make it jump to label | |
1741 | NLABEL instead of where it jumps now. Return true if successful. */ | |
1742 | ||
1743 | int | |
0c20a65f | 1744 | invert_jump (rtx jump, rtx nlabel, int delete_unused) |
2ea64f10 | 1745 | { |
0a634832 | 1746 | rtx olabel = JUMP_LABEL (jump); |
2ea64f10 | 1747 | |
0a634832 | 1748 | if (invert_jump_1 (jump, nlabel) && apply_change_group ()) |
2ea64f10 | 1749 | { |
0a634832 | 1750 | redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1); |
2ea64f10 RH |
1751 | return 1; |
1752 | } | |
0a634832 | 1753 | cancel_changes (0); |
2ea64f10 RH |
1754 | return 0; |
1755 | } | |
1756 | ||
15a63be1 RK |
1757 | \f |
1758 | /* Like rtx_equal_p except that it considers two REGs as equal | |
4fe73cc1 RK |
1759 | if they renumber to the same value and considers two commutative |
1760 | operations to be the same if the order of the operands has been | |
8fc001f9 JL |
1761 | reversed. |
1762 | ||
1763 | ??? Addition is not commutative on the PA due to the weird implicit | |
1764 | space register selection rules for memory addresses. Therefore, we | |
1765 | don't consider a + b == b + a. | |
1766 | ||
1767 | We could/should make this test a little tighter. Possibly only | |
1768 | disabling it on the PA via some backend macro or only disabling this | |
1769 | case when the PLUS is inside a MEM. */ | |
15a63be1 RK |
1770 | |
1771 | int | |
0c20a65f | 1772 | rtx_renumbered_equal_p (rtx x, rtx y) |
15a63be1 | 1773 | { |
b3694847 | 1774 | int i; |
ec8e098d | 1775 | enum rtx_code code = GET_CODE (x); |
b3694847 | 1776 | const char *fmt; |
48b881a3 | 1777 | |
15a63be1 RK |
1778 | if (x == y) |
1779 | return 1; | |
4fe73cc1 | 1780 | |
f8cfc6aa JQ |
1781 | if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x)))) |
1782 | && (REG_P (y) || (GET_CODE (y) == SUBREG | |
1783 | && REG_P (SUBREG_REG (y))))) | |
15a63be1 | 1784 | { |
4fe73cc1 | 1785 | int reg_x = -1, reg_y = -1; |
ddef6bc7 | 1786 | int byte_x = 0, byte_y = 0; |
15a63be1 RK |
1787 | |
1788 | if (GET_MODE (x) != GET_MODE (y)) | |
1789 | return 0; | |
1790 | ||
1791 | /* If we haven't done any renumbering, don't | |
1792 | make any assumptions. */ | |
1793 | if (reg_renumber == 0) | |
1794 | return rtx_equal_p (x, y); | |
1795 | ||
1796 | if (code == SUBREG) | |
1797 | { | |
4fe73cc1 | 1798 | reg_x = REGNO (SUBREG_REG (x)); |
ddef6bc7 | 1799 | byte_x = SUBREG_BYTE (x); |
4fe73cc1 RK |
1800 | |
1801 | if (reg_renumber[reg_x] >= 0) | |
1802 | { | |
ddef6bc7 JJ |
1803 | reg_x = subreg_regno_offset (reg_renumber[reg_x], |
1804 | GET_MODE (SUBREG_REG (x)), | |
1805 | byte_x, | |
1806 | GET_MODE (x)); | |
1807 | byte_x = 0; | |
4fe73cc1 | 1808 | } |
15a63be1 RK |
1809 | } |
1810 | else | |
1811 | { | |
4fe73cc1 RK |
1812 | reg_x = REGNO (x); |
1813 | if (reg_renumber[reg_x] >= 0) | |
1814 | reg_x = reg_renumber[reg_x]; | |
15a63be1 | 1815 | } |
4fe73cc1 | 1816 | |
15a63be1 RK |
1817 | if (GET_CODE (y) == SUBREG) |
1818 | { | |
4fe73cc1 | 1819 | reg_y = REGNO (SUBREG_REG (y)); |
ddef6bc7 | 1820 | byte_y = SUBREG_BYTE (y); |
4fe73cc1 RK |
1821 | |
1822 | if (reg_renumber[reg_y] >= 0) | |
1823 | { | |
ddef6bc7 JJ |
1824 | reg_y = subreg_regno_offset (reg_renumber[reg_y], |
1825 | GET_MODE (SUBREG_REG (y)), | |
1826 | byte_y, | |
1827 | GET_MODE (y)); | |
1828 | byte_y = 0; | |
4fe73cc1 | 1829 | } |
15a63be1 RK |
1830 | } |
1831 | else | |
1832 | { | |
4fe73cc1 RK |
1833 | reg_y = REGNO (y); |
1834 | if (reg_renumber[reg_y] >= 0) | |
1835 | reg_y = reg_renumber[reg_y]; | |
15a63be1 | 1836 | } |
4fe73cc1 | 1837 | |
ddef6bc7 | 1838 | return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y; |
15a63be1 | 1839 | } |
4fe73cc1 | 1840 | |
48b881a3 | 1841 | /* Now we have disposed of all the cases |
15a63be1 RK |
1842 | in which different rtx codes can match. */ |
1843 | if (code != GET_CODE (y)) | |
1844 | return 0; | |
4fe73cc1 | 1845 | |
15a63be1 RK |
1846 | switch (code) |
1847 | { | |
1848 | case PC: | |
1849 | case CC0: | |
1850 | case ADDR_VEC: | |
1851 | case ADDR_DIFF_VEC: | |
15a63be1 | 1852 | case CONST_INT: |
47c7b4d2 | 1853 | return 0; |
15a63be1 RK |
1854 | |
1855 | case LABEL_REF: | |
705f26cf RS |
1856 | /* We can't assume nonlocal labels have their following insns yet. */ |
1857 | if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y)) | |
1858 | return XEXP (x, 0) == XEXP (y, 0); | |
4fe73cc1 | 1859 | |
15a63be1 RK |
1860 | /* Two label-refs are equivalent if they point at labels |
1861 | in the same position in the instruction stream. */ | |
1862 | return (next_real_insn (XEXP (x, 0)) | |
1863 | == next_real_insn (XEXP (y, 0))); | |
1864 | ||
1865 | case SYMBOL_REF: | |
1866 | return XSTR (x, 0) == XSTR (y, 0); | |
e9a25f70 | 1867 | |
bba596a3 RH |
1868 | case CODE_LABEL: |
1869 | /* If we didn't match EQ equality above, they aren't the same. */ | |
1870 | return 0; | |
1871 | ||
e9a25f70 JL |
1872 | default: |
1873 | break; | |
15a63be1 RK |
1874 | } |
1875 | ||
1876 | /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */ | |
1877 | ||
1878 | if (GET_MODE (x) != GET_MODE (y)) | |
1879 | return 0; | |
1880 | ||
4fe73cc1 | 1881 | /* For commutative operations, the RTX match if the operand match in any |
8fc001f9 JL |
1882 | order. Also handle the simple binary and unary cases without a loop. |
1883 | ||
1884 | ??? Don't consider PLUS a commutative operator; see comments above. */ | |
ec8e098d | 1885 | if (COMMUTATIVE_P (x) && code != PLUS) |
4fe73cc1 RK |
1886 | return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0)) |
1887 | && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1))) | |
1888 | || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1)) | |
1889 | && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0)))); | |
ec8e098d | 1890 | else if (NON_COMMUTATIVE_P (x)) |
4fe73cc1 RK |
1891 | return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0)) |
1892 | && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1))); | |
ec8e098d | 1893 | else if (UNARY_P (x)) |
4fe73cc1 RK |
1894 | return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0)); |
1895 | ||
15a63be1 RK |
1896 | /* Compare the elements. If any pair of corresponding elements |
1897 | fail to match, return 0 for the whole things. */ | |
1898 | ||
1899 | fmt = GET_RTX_FORMAT (code); | |
1900 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1901 | { | |
b3694847 | 1902 | int j; |
15a63be1 RK |
1903 | switch (fmt[i]) |
1904 | { | |
5f4f0e22 CH |
1905 | case 'w': |
1906 | if (XWINT (x, i) != XWINT (y, i)) | |
1907 | return 0; | |
1908 | break; | |
1909 | ||
15a63be1 RK |
1910 | case 'i': |
1911 | if (XINT (x, i) != XINT (y, i)) | |
1912 | return 0; | |
1913 | break; | |
1914 | ||
46fac664 JH |
1915 | case 't': |
1916 | if (XTREE (x, i) != XTREE (y, i)) | |
1917 | return 0; | |
1918 | break; | |
1919 | ||
15a63be1 RK |
1920 | case 's': |
1921 | if (strcmp (XSTR (x, i), XSTR (y, i))) | |
1922 | return 0; | |
1923 | break; | |
1924 | ||
1925 | case 'e': | |
1926 | if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i))) | |
1927 | return 0; | |
1928 | break; | |
1929 | ||
1930 | case 'u': | |
1931 | if (XEXP (x, i) != XEXP (y, i)) | |
1932 | return 0; | |
938d968e | 1933 | /* Fall through. */ |
15a63be1 RK |
1934 | case '0': |
1935 | break; | |
1936 | ||
1937 | case 'E': | |
1938 | if (XVECLEN (x, i) != XVECLEN (y, i)) | |
1939 | return 0; | |
1940 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
1941 | if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j))) | |
1942 | return 0; | |
1943 | break; | |
1944 | ||
1945 | default: | |
41806d92 | 1946 | gcc_unreachable (); |
15a63be1 RK |
1947 | } |
1948 | } | |
1949 | return 1; | |
1950 | } | |
1951 | \f | |
1952 | /* If X is a hard register or equivalent to one or a subregister of one, | |
1953 | return the hard register number. If X is a pseudo register that was not | |
1954 | assigned a hard register, return the pseudo register number. Otherwise, | |
1955 | return -1. Any rtx is valid for X. */ | |
1956 | ||
1957 | int | |
0c20a65f | 1958 | true_regnum (rtx x) |
15a63be1 | 1959 | { |
f8cfc6aa | 1960 | if (REG_P (x)) |
15a63be1 RK |
1961 | { |
1962 | if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0) | |
1963 | return reg_renumber[REGNO (x)]; | |
1964 | return REGNO (x); | |
1965 | } | |
1966 | if (GET_CODE (x) == SUBREG) | |
1967 | { | |
1968 | int base = true_regnum (SUBREG_REG (x)); | |
1969 | if (base >= 0 && base < FIRST_PSEUDO_REGISTER) | |
ddef6bc7 JJ |
1970 | return base + subreg_regno_offset (REGNO (SUBREG_REG (x)), |
1971 | GET_MODE (SUBREG_REG (x)), | |
1972 | SUBREG_BYTE (x), GET_MODE (x)); | |
15a63be1 RK |
1973 | } |
1974 | return -1; | |
1975 | } | |
344b78b8 JH |
1976 | |
1977 | /* Return regno of the register REG and handle subregs too. */ | |
1978 | unsigned int | |
0c20a65f | 1979 | reg_or_subregno (rtx reg) |
344b78b8 | 1980 | { |
344b78b8 | 1981 | if (GET_CODE (reg) == SUBREG) |
41806d92 NS |
1982 | reg = SUBREG_REG (reg); |
1983 | gcc_assert (REG_P (reg)); | |
1984 | return REGNO (reg); | |
344b78b8 | 1985 | } |