1 /* If-conversion support.
2 Copyright (C) 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23
24 #include "rtl.h"
25 #include "regs.h"
26 #include "function.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "expr.h"
33 #include "real.h"
34 #include "output.h"
35 #include "tm_p.h"
36
37
38 #ifndef HAVE_conditional_execution
39 #define HAVE_conditional_execution 0
40 #endif
41 #ifndef HAVE_conditional_move
42 #define HAVE_conditional_move 0
43 #endif
44 #ifndef HAVE_incscc
45 #define HAVE_incscc 0
46 #endif
47 #ifndef HAVE_decscc
48 #define HAVE_decscc 0
49 #endif
50
51 #ifndef MAX_CONDITIONAL_EXECUTE
52 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
53 #endif
54
55 #define NULL_EDGE ((struct edge_def *)NULL)
56 #define NULL_BLOCK ((struct basic_block_def *)NULL)
57
58 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
59 static int num_possible_if_blocks;
60
61 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
62 execution. */
63 static int num_updated_if_blocks;
64
65 /* # of basic blocks that were removed. */
66 static int num_removed_blocks;
67
68 /* The post-dominator relation on the original block numbers. */
69 static sbitmap *post_dominators;
70
71 /* Forward references. */
72 static int count_bb_insns PARAMS ((basic_block));
73 static rtx first_active_insn PARAMS ((basic_block));
74 static int last_active_insn_p PARAMS ((basic_block, rtx));
75 static int seq_contains_jump PARAMS ((rtx));
76
77 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
78 static rtx cond_exec_get_condition PARAMS ((rtx));
79 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
80 basic_block, basic_block));
81
82 static rtx noce_get_condition PARAMS ((rtx, rtx *));
83 static int noce_operand_ok PARAMS ((rtx));
84 static int noce_process_if_block PARAMS ((basic_block, basic_block,
85 basic_block, basic_block));
86
87 static int process_if_block PARAMS ((basic_block, basic_block,
88 basic_block, basic_block));
89 static void merge_if_block PARAMS ((basic_block, basic_block,
90 basic_block, basic_block));
91
92 static int find_if_header PARAMS ((basic_block));
93 static int find_if_block PARAMS ((basic_block, edge, edge));
94 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
95 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
96 static int find_memory PARAMS ((rtx *, void *));
97 static int dead_or_predicable PARAMS ((basic_block, basic_block,
98 basic_block, rtx, int));
99 static void noce_emit_move_insn PARAMS ((rtx, rtx));
100 \f
101 /* Abuse the basic_block AUX field to store the original block index,
102    as well as a flag indicating that the block should be rescanned for
103 life analysis. */
104
105 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
106 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
107 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
108 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
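/* A worked example of the encoding above: SET_ORIG_INDEX (bb, 5) stores
   5 << 1 == 10 in the aux word; SET_UPDATE_LIFE (bb) then ORs in the low
   bit, giving 11; ORIG_INDEX (bb) still recovers 5, and UPDATE_LIFE (bb)
   reads back 1.  Note that SET_ORIG_INDEX clears the flag, so it must be
   done first.  */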
109
110 \f
111 /* Count the number of non-jump active insns in BB. */
112
113 static int
114 count_bb_insns (bb)
115 basic_block bb;
116 {
117 int count = 0;
118 rtx insn = bb->head;
119
120 while (1)
121 {
122 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
123 count++;
124
125 if (insn == bb->end)
126 break;
127 insn = NEXT_INSN (insn);
128 }
129
130 return count;
131 }
132
133 /* Return the first non-jump active insn in the basic block. */
134
135 static rtx
136 first_active_insn (bb)
137 basic_block bb;
138 {
139 rtx insn = bb->head;
140
141 if (GET_CODE (insn) == CODE_LABEL)
142 {
143 if (insn == bb->end)
144 return NULL_RTX;
145 insn = NEXT_INSN (insn);
146 }
147
148 while (GET_CODE (insn) == NOTE)
149 {
150 if (insn == bb->end)
151 return NULL_RTX;
152 insn = NEXT_INSN (insn);
153 }
154
155 if (GET_CODE (insn) == JUMP_INSN)
156 return NULL_RTX;
157
158 return insn;
159 }
160
161 /* Return true if INSN is the last active non-jump insn in BB. */
162
163 static int
164 last_active_insn_p (bb, insn)
165 basic_block bb;
166 rtx insn;
167 {
168 do
169 {
170 if (insn == bb->end)
171 return TRUE;
172 insn = NEXT_INSN (insn);
173 }
174 while (GET_CODE (insn) == NOTE);
175
176 return GET_CODE (insn) == JUMP_INSN;
177 }
178
179 /* The expanders may have emitted jumps, especially when dealing
180    with multi-word arithmetic.  Search
181 through the sequence and return TRUE if a jump exists so that
182 we can abort the conversion. */
183
184 static int
185 seq_contains_jump (insn)
186 rtx insn;
187 {
188 while (insn)
189 {
190 if (GET_CODE (insn) == JUMP_INSN)
191 return 1;
192 insn = NEXT_INSN (insn);
193 }
194 return 0;
195 }
196 \f
197 /* Go through a bunch of insns, converting them to conditional
198 execution format if possible. Return TRUE if all of the non-note
199 insns were processed. */
200
201 static int
202 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
203 rtx start; /* first insn to look at */
204 rtx end; /* last insn to look at */
205 rtx test; /* conditional execution test */
206 rtx prob_val; /* probability of branch taken. */
207     int mod_ok;            /* true if modifications are ok on the last insn.  */
208 {
209 int must_be_last = FALSE;
210 rtx insn;
211 rtx pattern;
212
213 for (insn = start; ; insn = NEXT_INSN (insn))
214 {
215 if (GET_CODE (insn) == NOTE)
216 goto insn_done;
217
218 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
219 abort ();
220
221 /* Remove USE insns that get in the way. */
222 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
223 {
224 /* ??? Ug. Actually unlinking the thing is problematic,
225 given what we'd have to coordinate with our callers. */
226 PUT_CODE (insn, NOTE);
227 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
228 NOTE_SOURCE_FILE (insn) = 0;
229 goto insn_done;
230 }
231
232 /* Last insn wasn't last? */
233 if (must_be_last)
234 return FALSE;
235
236 if (modified_in_p (test, insn))
237 {
238 if (!mod_ok)
239 return FALSE;
240 must_be_last = TRUE;
241 }
242
243 /* Now build the conditional form of the instruction. */
244 pattern = PATTERN (insn);
245
246 /* If the machine needs to modify the insn being conditionally executed,
247 say for example to force a constant integer operand into a temp
248 register, do so here. */
249 #ifdef IFCVT_MODIFY_INSN
250 IFCVT_MODIFY_INSN (pattern, insn);
251 if (! pattern)
252 return FALSE;
253 #endif
254
255 validate_change (insn, &PATTERN (insn),
256 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
257 pattern), 1);
258
259 if (GET_CODE (insn) == CALL_INSN && prob_val)
260 validate_change (insn, &REG_NOTES (insn),
261 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
262 REG_NOTES (insn)), 1);
263
264 insn_done:
265 if (insn == end)
266 break;
267 }
268
269 return TRUE;
270 }
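/* An illustrative sketch of the rewrite performed above: an insn whose
   pattern is (set (reg 100) (reg 101)), guarded by the test
   (ne (reg 99) (const_int 0)), becomes

	(cond_exec (ne (reg 99) (const_int 0))
		   (set (reg 100) (reg 101)))

   which a predicated target can then recognize directly.  The register
   numbers are only for illustration.  */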
271
272 /* Return the condition for a jump. Do not do any special processing. */
273
274 static rtx
275 cond_exec_get_condition (jump)
276 rtx jump;
277 {
278 rtx test_if, cond;
279
280 if (any_condjump_p (jump))
281 test_if = SET_SRC (pc_set (jump));
282 else
283 return NULL_RTX;
284 cond = XEXP (test_if, 0);
285
286 /* If this branches to JUMP_LABEL when the condition is false,
287 reverse the condition. */
288 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
289 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
290 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
291 GET_MODE (cond), XEXP (cond, 0),
292 XEXP (cond, 1));
293
294 return cond;
295 }
296
297 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
298 to conditional execution. Return TRUE if we were successful at
299    converting the block.  */
300
301 static int
302 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
303 basic_block test_bb; /* Basic block test is in */
304 basic_block then_bb; /* Basic block for THEN block */
305 basic_block else_bb; /* Basic block for ELSE block */
306 basic_block join_bb; /* Basic block the join label is in */
307 {
308 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
309 rtx then_start; /* first insn in THEN block */
310 rtx then_end; /* last insn + 1 in THEN block */
311 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
312 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
313 int max; /* max # of insns to convert. */
314 int then_mod_ok; /* whether conditional mods are ok in THEN */
315 rtx true_expr; /* test for else block insns */
316 rtx false_expr; /* test for then block insns */
317 rtx true_prob_val; /* probability of else block */
318 rtx false_prob_val; /* probability of then block */
319 int n_insns;
320
321 /* Find the conditional jump to the ELSE or JOIN part, and isolate
322 the test. */
323 test_expr = cond_exec_get_condition (test_bb->end);
324 if (! test_expr)
325 return FALSE;
326
327 /* If the conditional jump is more than just a conditional jump,
328 then we can not do conditional execution conversion on this block. */
329 if (!onlyjump_p (test_bb->end))
330 return FALSE;
331
332 /* Collect the bounds of where we're to search. */
333
334 then_start = then_bb->head;
335 then_end = then_bb->end;
336
337 /* Skip a label heading THEN block. */
338 if (GET_CODE (then_start) == CODE_LABEL)
339 then_start = NEXT_INSN (then_start);
340
341 /* Skip a (use (const_int 0)) or branch as the final insn. */
342 if (GET_CODE (then_end) == INSN
343 && GET_CODE (PATTERN (then_end)) == USE
344 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
345 then_end = PREV_INSN (then_end);
346 else if (GET_CODE (then_end) == JUMP_INSN)
347 then_end = PREV_INSN (then_end);
348
349 if (else_bb)
350 {
351 /* Skip the ELSE block's label. */
352 else_start = NEXT_INSN (else_bb->head);
353 else_end = else_bb->end;
354
355 /* Skip a (use (const_int 0)) or branch as the final insn. */
356 if (GET_CODE (else_end) == INSN
357 && GET_CODE (PATTERN (else_end)) == USE
358 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
359 else_end = PREV_INSN (else_end);
360 else if (GET_CODE (else_end) == JUMP_INSN)
361 else_end = PREV_INSN (else_end);
362 }
363
364 /* How many instructions should we convert in total? */
365 n_insns = 0;
366 if (else_bb)
367 {
368 max = 2 * MAX_CONDITIONAL_EXECUTE;
369 n_insns = count_bb_insns (else_bb);
370 }
371 else
372 max = MAX_CONDITIONAL_EXECUTE;
373 n_insns += count_bb_insns (then_bb);
374 if (n_insns > max)
375 return FALSE;
376
377 /* Map test_expr/test_jump into the appropriate MD tests to use on
378 the conditionally executed code. */
379
380 true_expr = test_expr;
381 false_expr = gen_rtx_fmt_ee (reverse_condition (GET_CODE (true_expr)),
382 GET_MODE (true_expr), XEXP (true_expr, 0),
383 XEXP (true_expr, 1));
384
385 #ifdef IFCVT_MODIFY_TESTS
386 /* If the machine description needs to modify the tests, such as setting a
387 conditional execution register from a comparison, it can do so here. */
388 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
389 join_bb);
390
391 /* See if the conversion failed */
392 if (!true_expr || !false_expr)
393 goto fail;
394 #endif
395
396 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
397 if (true_prob_val)
398 {
399 true_prob_val = XEXP (true_prob_val, 0);
400 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
401 }
402 else
403 false_prob_val = NULL_RTX;
404
405 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
406      on the THEN block.  */
407 then_mod_ok = (else_bb == NULL_BLOCK);
408
409 /* Go through the THEN and ELSE blocks converting the insns if possible
410 to conditional execution. */
411
412 if (then_end
413 && ! cond_exec_process_insns (then_start, then_end,
414 false_expr, false_prob_val, then_mod_ok))
415 goto fail;
416
417 if (else_bb
418 && ! cond_exec_process_insns (else_start, else_end,
419 true_expr, true_prob_val, TRUE))
420 goto fail;
421
422 if (! apply_change_group ())
423 return FALSE;
424
425 #ifdef IFCVT_MODIFY_FINAL
426 /* Do any machine dependent final modifications */
427 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
428 #endif
429
430 /* Conversion succeeded. */
431 if (rtl_dump_file)
432 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
433 n_insns, (n_insns == 1) ? " was" : "s were");
434
435 /* Merge the blocks! */
436 merge_if_block (test_bb, then_bb, else_bb, join_bb);
437 return TRUE;
438
439 fail:
440 #ifdef IFCVT_MODIFY_CANCEL
441 /* Cancel any machine dependent changes. */
442 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
443 #endif
444
445 cancel_changes (0);
446 return FALSE;
447 }
448 \f
449 /* Used by noce_process_if_block to communicate with its subroutines.
450
451 The subroutines know that A and B may be evaluated freely. They
452 know that X is a register. They should insert new instructions
453 before cond_earliest. */
454
455 struct noce_if_info
456 {
457 basic_block test_bb;
458 rtx insn_a, insn_b;
459 rtx x, a, b;
460 rtx jump, cond, cond_earliest;
461 };
462
463 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
464 rtx, int, int));
465 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
466 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
467 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
468 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
469 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
470 rtx, enum rtx_code, rtx,
471 rtx, rtx, rtx));
472 static int noce_try_cmove PARAMS ((struct noce_if_info *));
473 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
474 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
475 rtx, rtx *));
476 static int noce_try_minmax PARAMS ((struct noce_if_info *));
477 static int noce_try_abs PARAMS ((struct noce_if_info *));
478
479 /* Helper function for noce_try_store_flag*. */
480
481 static rtx
482 noce_emit_store_flag (if_info, x, reversep, normalize)
483 struct noce_if_info *if_info;
484 rtx x;
485 int reversep, normalize;
486 {
487 rtx cond = if_info->cond;
488 int cond_complex;
489 enum rtx_code code;
490
491 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
492 || ! general_operand (XEXP (cond, 1), VOIDmode));
493
494 /* If earliest == jump, or when the condition is complex, try to
495 build the store_flag insn directly. */
496
497 if (cond_complex)
498 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
499
500 if (reversep)
501 code = reversed_comparison_code (cond, if_info->jump);
502 else
503 code = GET_CODE (cond);
504
505 if ((if_info->cond_earliest == if_info->jump || cond_complex)
506 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
507 {
508 rtx tmp;
509
510 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
511 XEXP (cond, 1));
512 tmp = gen_rtx_SET (VOIDmode, x, tmp);
513
514 start_sequence ();
515 tmp = emit_insn (tmp);
516
517 if (recog_memoized (tmp) >= 0)
518 {
519 tmp = get_insns ();
520 end_sequence ();
521 emit_insns (tmp);
522
523 if_info->cond_earliest = if_info->jump;
524
525 return x;
526 }
527
528 end_sequence ();
529 }
530
531 /* Don't even try if the comparison operands are weird. */
532 if (cond_complex)
533 return NULL_RTX;
534
535 return emit_store_flag (x, code, XEXP (cond, 0),
536 XEXP (cond, 1), VOIDmode,
537 (code == LTU || code == LEU
538 || code == GEU || code == GTU), normalize);
539 }
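/* As an illustrative sketch, the direct attempt above emits something
   like (set (reg 100) (eq (reg 101) (const_int 0))) and keeps it only if
   the target recognizes that pattern; otherwise, provided the comparison
   operands are usable, emit_store_flag synthesizes the flag value however
   the target allows.  */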
540
541 /* Emit an insn to move Y into X; X may be a STRICT_LOW_PART.  */
542 static void
543 noce_emit_move_insn (x, y)
544 rtx x, y;
545 {
546 enum machine_mode outmode, inmode;
547 rtx outer, inner;
548 int bitpos;
549
550 if (GET_CODE (x) != STRICT_LOW_PART)
551 {
552 emit_move_insn (x, y);
553 return;
554 }
555
556 outer = XEXP (x, 0);
557 inner = XEXP (outer, 0);
558 outmode = GET_MODE (outer);
559 inmode = GET_MODE (inner);
560 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
561 store_bit_field (inner, GET_MODE_BITSIZE (outmode),
562 bitpos, outmode, y, GET_MODE_BITSIZE (inmode),
563 GET_MODE_BITSIZE (inmode));
564 }
565
566 /* Convert "if (test) x = 1; else x = 0".
567
568 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
569 tried in noce_try_store_flag_constants after noce_try_cmove has had
570 a go at the conversion. */
571
572 static int
573 noce_try_store_flag (if_info)
574 struct noce_if_info *if_info;
575 {
576 int reversep;
577 rtx target, seq;
578
579 if (GET_CODE (if_info->b) == CONST_INT
580 && INTVAL (if_info->b) == STORE_FLAG_VALUE
581 && if_info->a == const0_rtx)
582 reversep = 0;
583 else if (if_info->b == const0_rtx
584 && GET_CODE (if_info->a) == CONST_INT
585 && INTVAL (if_info->a) == STORE_FLAG_VALUE
586 && (reversed_comparison_code (if_info->cond, if_info->jump)
587 != UNKNOWN))
588 reversep = 1;
589 else
590 return FALSE;
591
592 start_sequence ();
593
594 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
595 if (target)
596 {
597 if (target != if_info->x)
598 noce_emit_move_insn (if_info->x, target);
599
600 seq = get_insns ();
601 end_sequence ();
602 emit_insns_before (seq, if_info->cond_earliest);
603
604 return TRUE;
605 }
606 else
607 {
608 end_sequence ();
609 return FALSE;
610 }
611 }
612
613 /* Convert "if (test) x = a; else x = b", for A and B constant. */
614
615 static int
616 noce_try_store_flag_constants (if_info)
617 struct noce_if_info *if_info;
618 {
619 rtx target, seq;
620 int reversep;
621 HOST_WIDE_INT itrue, ifalse, diff, tmp;
622 int normalize, can_reverse;
623
624 if (! no_new_pseudos
625 && GET_CODE (if_info->a) == CONST_INT
626 && GET_CODE (if_info->b) == CONST_INT)
627 {
628 ifalse = INTVAL (if_info->a);
629 itrue = INTVAL (if_info->b);
630 diff = itrue - ifalse;
631
632 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
633 != UNKNOWN);
634
635 reversep = 0;
636 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
637 normalize = 0;
638 else if (ifalse == 0 && exact_log2 (itrue) >= 0
639 && (STORE_FLAG_VALUE == 1
640 || BRANCH_COST >= 2))
641 normalize = 1;
642 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
643 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
644 normalize = 1, reversep = 1;
645 else if (itrue == -1
646 && (STORE_FLAG_VALUE == -1
647 || BRANCH_COST >= 2))
648 normalize = -1;
649 else if (ifalse == -1 && can_reverse
650 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
651 normalize = -1, reversep = 1;
652 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
653 || BRANCH_COST >= 3)
654 normalize = -1;
655 else
656 return FALSE;
657
658 if (reversep)
659 {
660 tmp = itrue; itrue = ifalse; ifalse = tmp;
661 diff = -diff;
662 }
663
664 start_sequence ();
665 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
666 if (! target)
667 {
668 end_sequence ();
669 return FALSE;
670 }
671
672 /* if (test) x = 3; else x = 4;
673 => x = 3 + (test == 0); */
674 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
675 {
676 target = expand_binop (GET_MODE (if_info->x),
677 (diff == STORE_FLAG_VALUE
678 ? add_optab : sub_optab),
679 GEN_INT (ifalse), target, if_info->x, 0,
680 OPTAB_WIDEN);
681 }
682
683 /* if (test) x = 8; else x = 0;
684 => x = (test != 0) << 3; */
685 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
686 {
687 target = expand_binop (GET_MODE (if_info->x), ashl_optab,
688 target, GEN_INT (tmp), if_info->x, 0,
689 OPTAB_WIDEN);
690 }
691
692 /* if (test) x = -1; else x = b;
693 => x = -(test != 0) | b; */
694 else if (itrue == -1)
695 {
696 target = expand_binop (GET_MODE (if_info->x), ior_optab,
697 target, GEN_INT (ifalse), if_info->x, 0,
698 OPTAB_WIDEN);
699 }
700
701 /* if (test) x = a; else x = b;
702 => x = (-(test != 0) & (b - a)) + a; */
703 else
704 {
705 target = expand_binop (GET_MODE (if_info->x), and_optab,
706 target, GEN_INT (diff), if_info->x, 0,
707 OPTAB_WIDEN);
708 if (target)
709 target = expand_binop (GET_MODE (if_info->x), add_optab,
710 target, GEN_INT (ifalse), if_info->x, 0,
711 OPTAB_WIDEN);
712 }
713
714 if (! target)
715 {
716 end_sequence ();
717 return FALSE;
718 }
719
720 if (target != if_info->x)
721 noce_emit_move_insn (if_info->x, target);
722
723 seq = get_insns ();
724 end_sequence ();
725
726 if (seq_contains_jump (seq))
727 return FALSE;
728
729 emit_insns_before (seq, if_info->cond_earliest);
730
731 return TRUE;
732 }
733
734 return FALSE;
735 }
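/* A worked instance of the final case above, with illustrative values:
   for ifalse == 5 and itrue == 7 we have diff == 2, so the emitted code
   computes (flag & 2) + 5, where flag is -1 when the store-flag condition
   holds and 0 otherwise, yielding 7 or 5 respectively.  */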
736
737 /* Convert "if (test) foo++" into "foo += (test != 0)", and
738 similarly for "foo--". */
739
740 static int
741 noce_try_store_flag_inc (if_info)
742 struct noce_if_info *if_info;
743 {
744 rtx target, seq;
745 int subtract, normalize;
746
747 if (! no_new_pseudos
748 && (BRANCH_COST >= 2
749 || HAVE_incscc
750 || HAVE_decscc)
751 /* Should be no `else' case to worry about. */
752 && if_info->b == if_info->x
753 && GET_CODE (if_info->a) == PLUS
754 && (XEXP (if_info->a, 1) == const1_rtx
755 || XEXP (if_info->a, 1) == constm1_rtx)
756 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
757 && (reversed_comparison_code (if_info->cond, if_info->jump)
758 != UNKNOWN))
759 {
760 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
761 subtract = 0, normalize = 0;
762 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
763 subtract = 1, normalize = 0;
764 else
765 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
766
767 start_sequence ();
768
769 target = noce_emit_store_flag (if_info,
770 gen_reg_rtx (GET_MODE (if_info->x)),
771 1, normalize);
772
773 if (target)
774 target = expand_binop (GET_MODE (if_info->x),
775 subtract ? sub_optab : add_optab,
776 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
777 if (target)
778 {
779 if (target != if_info->x)
780 noce_emit_move_insn (if_info->x, target);
781
782 seq = get_insns ();
783 end_sequence ();
784
785 if (seq_contains_jump (seq))
786 return FALSE;
787
788 emit_insns_before (seq, if_info->cond_earliest);
789
790 return TRUE;
791 }
792
793 end_sequence ();
794 }
795
796 return FALSE;
797 }
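/* A worked example of the conversion above, assuming STORE_FLAG_VALUE
   is 1: for "if (test) foo++" the store flag is 1 exactly when the
   increment should happen, so "foo += flag" adds either 1 or 0.  On a
   STORE_FLAG_VALUE == -1 target the flag is -1 instead, and the code
   subtracts it rather than adding.  */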
798
799 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
800
801 static int
802 noce_try_store_flag_mask (if_info)
803 struct noce_if_info *if_info;
804 {
805 rtx target, seq;
806 int reversep;
807
808 reversep = 0;
809 if (! no_new_pseudos
810 && (BRANCH_COST >= 2
811 || STORE_FLAG_VALUE == -1)
812 && ((if_info->a == const0_rtx
813 && rtx_equal_p (if_info->b, if_info->x))
814 || ((reversep = (reversed_comparison_code (if_info->cond,
815 if_info->jump)
816 != UNKNOWN))
817 && if_info->b == const0_rtx
818 && rtx_equal_p (if_info->a, if_info->x))))
819 {
820 start_sequence ();
821 target = noce_emit_store_flag (if_info,
822 gen_reg_rtx (GET_MODE (if_info->x)),
823 reversep, -1);
824 if (target)
825 target = expand_binop (GET_MODE (if_info->x), and_optab,
826 if_info->x, target, if_info->x, 0,
827 OPTAB_WIDEN);
828
829 if (target)
830 {
831 if (target != if_info->x)
832 noce_emit_move_insn (if_info->x, target);
833
834 seq = get_insns ();
835 end_sequence ();
836
837 if (seq_contains_jump (seq))
838 return FALSE;
839
840 emit_insns_before (seq, if_info->cond_earliest);
841
842 return TRUE;
843 }
844
845 end_sequence ();
846 }
847
848 return FALSE;
849 }
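/* A small worked example of the transformation above: for
   "if (test) x = 0;" the store flag is 0 when the test holds and -1 (all
   bits set) otherwise, so "x &= flag" clears X exactly when the test
   holds and leaves it untouched otherwise.  */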
850
851 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
852
853 static rtx
854 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
855 struct noce_if_info *if_info;
856 rtx x, cmp_a, cmp_b, vfalse, vtrue;
857 enum rtx_code code;
858 {
859 /* If earliest == jump, try to build the cmove insn directly.
860 This is helpful when combine has created some complex condition
861 (like for alpha's cmovlbs) that we can't hope to regenerate
862 through the normal interface. */
863
864 if (if_info->cond_earliest == if_info->jump)
865 {
866 rtx tmp;
867
868 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
869 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
870 tmp = gen_rtx_SET (VOIDmode, x, tmp);
871
872 start_sequence ();
873 tmp = emit_insn (tmp);
874
875 if (recog_memoized (tmp) >= 0)
876 {
877 tmp = get_insns ();
878 end_sequence ();
879 emit_insns (tmp);
880
881 return x;
882 }
883
884 end_sequence ();
885 }
886
887 /* Don't even try if the comparison operands are weird. */
888 if (! general_operand (cmp_a, GET_MODE (cmp_a))
889 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
890 return NULL_RTX;
891
892 #if HAVE_conditional_move
893 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
894 vtrue, vfalse, GET_MODE (x),
895 (code == LTU || code == GEU
896 || code == LEU || code == GTU));
897 #else
898 /* We'll never get here, as noce_process_if_block doesn't call the
899 functions involved. Ifdef code, however, should be discouraged
900      because it leads to typos in the code not selected.  Unfortunately,
901      emit_conditional_move won't exist either. */
902 return NULL_RTX;
903 #endif
904 }
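/* For illustration, the direct attempt above builds an insn of the shape

	(set (reg 100) (if_then_else (lt (reg 101) (reg 102))
				     (reg 103) (reg 104)))

   and keeps it only if the target's conditional move pattern matches;
   otherwise emit_conditional_move goes through the normal expander
   interface (when the target provides conditional moves at all).  The
   register numbers are only for illustration.  */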
905
906 /* Try only simple constants and registers here. More complex cases
907    are handled in noce_try_cmove_arith after the noce_try_store_flag*
908    routines have had a go at it.  */
909
910 static int
911 noce_try_cmove (if_info)
912 struct noce_if_info *if_info;
913 {
914 enum rtx_code code;
915 rtx target, seq;
916
917 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
918 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
919 {
920 start_sequence ();
921
922 code = GET_CODE (if_info->cond);
923 target = noce_emit_cmove (if_info, if_info->x, code,
924 XEXP (if_info->cond, 0),
925 XEXP (if_info->cond, 1),
926 if_info->a, if_info->b);
927
928 if (target)
929 {
930 if (target != if_info->x)
931 noce_emit_move_insn (if_info->x, target);
932
933 seq = get_insns ();
934 end_sequence ();
935 emit_insns_before (seq, if_info->cond_earliest);
936 return TRUE;
937 }
938 else
939 {
940 end_sequence ();
941 return FALSE;
942 }
943 }
944
945 return FALSE;
946 }
947
948 /* Try more complex cases involving conditional_move. */
949
950 static int
951 noce_try_cmove_arith (if_info)
952 struct noce_if_info *if_info;
953 {
954 rtx a = if_info->a;
955 rtx b = if_info->b;
956 rtx x = if_info->x;
957 rtx insn_a, insn_b;
958 rtx tmp, target;
959 int is_mem = 0;
960 enum rtx_code code;
961
962   /* A conditional move from two memory sources is equivalent to a
963      conditional move on their addresses followed by a load.  Don't do this
964 early because it'll screw alias analysis. Note that we've
965 already checked for no side effects. */
966 if (! no_new_pseudos && cse_not_expected
967 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
968 && BRANCH_COST >= 5)
969 {
970 a = XEXP (a, 0);
971 b = XEXP (b, 0);
972 x = gen_reg_rtx (Pmode);
973 is_mem = 1;
974 }
975
976 /* ??? We could handle this if we knew that a load from A or B could
977 not fault. This is also true if we've already loaded
978 from the address along the path from ENTRY. */
979 else if (may_trap_p (a) || may_trap_p (b))
980 return FALSE;
981
982 /* if (test) x = a + b; else x = c - d;
983 => y = a + b;
984 x = c - d;
985 if (test)
986 x = y;
987 */
988
989 code = GET_CODE (if_info->cond);
990 insn_a = if_info->insn_a;
991 insn_b = if_info->insn_b;
992
993   /* Possibly rearrange operands to make things come out more naturally.  */
994 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
995 {
996 int reversep = 0;
997 if (rtx_equal_p (b, x))
998 reversep = 1;
999 else if (general_operand (b, GET_MODE (b)))
1000 reversep = 1;
1001
1002 if (reversep)
1003 {
1004 code = reversed_comparison_code (if_info->cond, if_info->jump);
1005 tmp = a, a = b, b = tmp;
1006 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1007 }
1008 }
1009
1010 start_sequence ();
1011
1012 /* If either operand is complex, load it into a register first.
1013 The best way to do this is to copy the original insn. In this
1014 way we preserve any clobbers etc that the insn may have had.
1015 This is of course not possible in the IS_MEM case. */
1016 if (! general_operand (a, GET_MODE (a)))
1017 {
1018 rtx set;
1019
1020 if (no_new_pseudos)
1021 goto end_seq_and_fail;
1022
1023 if (is_mem)
1024 {
1025 tmp = gen_reg_rtx (GET_MODE (a));
1026 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1027 }
1028 else if (! insn_a)
1029 goto end_seq_and_fail;
1030 else
1031 {
1032 a = gen_reg_rtx (GET_MODE (a));
1033 tmp = copy_rtx (insn_a);
1034 set = single_set (tmp);
1035 SET_DEST (set) = a;
1036 tmp = emit_insn (PATTERN (tmp));
1037 }
1038 if (recog_memoized (tmp) < 0)
1039 goto end_seq_and_fail;
1040 }
1041 if (! general_operand (b, GET_MODE (b)))
1042 {
1043 rtx set;
1044
1045 if (no_new_pseudos)
1046 goto end_seq_and_fail;
1047
1048 if (is_mem)
1049 {
1050 tmp = gen_reg_rtx (GET_MODE (b));
1051 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1052 }
1053 else if (! insn_b)
1054 goto end_seq_and_fail;
1055 else
1056 {
1057 b = gen_reg_rtx (GET_MODE (b));
1058 tmp = copy_rtx (insn_b);
1059 set = single_set (tmp);
1060 SET_DEST (set) = b;
1061 tmp = emit_insn (PATTERN (tmp));
1062 }
1063 if (recog_memoized (tmp) < 0)
1064 goto end_seq_and_fail;
1065 }
1066
1067 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1068 XEXP (if_info->cond, 1), a, b);
1069
1070 if (! target)
1071 goto end_seq_and_fail;
1072
1073   /* If we're handling the memory case from above, emit the load now.  */
1074 if (is_mem)
1075 {
1076 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1077
1078 /* Copy over flags as appropriate. */
1079 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1080 MEM_VOLATILE_P (tmp) = 1;
1081 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1082 MEM_IN_STRUCT_P (tmp) = 1;
1083 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1084 MEM_SCALAR_P (tmp) = 1;
1085 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1086 MEM_ALIAS_SET (tmp) = MEM_ALIAS_SET (if_info->a);
1087
1088 noce_emit_move_insn (if_info->x, tmp);
1089 }
1090 else if (target != x)
1091 noce_emit_move_insn (x, target);
1092
1093 tmp = get_insns ();
1094 end_sequence ();
1095 emit_insns_before (tmp, if_info->cond_earliest);
1096 return TRUE;
1097
1098 end_seq_and_fail:
1099 end_sequence ();
1100 return FALSE;
1101 }
1102
1103 /* For most cases, the simplified condition we found is the best
1104 choice, but this is not the case for the min/max/abs transforms.
1105    For these we want the condition to mention A or B itself.  */
1106
1107 static rtx
1108 noce_get_alt_condition (if_info, target, earliest)
1109 struct noce_if_info *if_info;
1110 rtx target;
1111 rtx *earliest;
1112 {
1113 rtx cond, set, insn;
1114 int reverse;
1115
1116 /* If target is already mentioned in the known condition, return it. */
1117 if (reg_mentioned_p (target, if_info->cond))
1118 {
1119 *earliest = if_info->cond_earliest;
1120 return if_info->cond;
1121 }
1122
1123 set = pc_set (if_info->jump);
1124 cond = XEXP (SET_SRC (set), 0);
1125 reverse
1126 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1127 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1128
1129 cond = canonicalize_condition (if_info->jump, cond, reverse,
1130 earliest, target);
1131 if (! cond || ! reg_mentioned_p (target, cond))
1132 return NULL;
1133
1134 /* We almost certainly searched back to a different place.
1135 Need to re-verify correct lifetimes. */
1136
1137 /* X may not be mentioned in the range (cond_earliest, jump]. */
1138 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1139 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1140 return NULL;
1141
1142 /* A and B may not be modified in the range [cond_earliest, jump). */
1143 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1144 if (INSN_P (insn)
1145 && (modified_in_p (if_info->a, insn)
1146 || modified_in_p (if_info->b, insn)))
1147 return NULL;
1148
1149 return cond;
1150 }
1151
1152 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1153
1154 static int
1155 noce_try_minmax (if_info)
1156 struct noce_if_info *if_info;
1157 {
1158 rtx cond, earliest, target, seq;
1159 enum rtx_code code;
1160 int unsignedp;
1161 optab op;
1162
1163 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1164 if (no_new_pseudos)
1165 return FALSE;
1166
1167 /* ??? Reject FP modes since we don't know how 0 vs -0 or NaNs
1168 will be resolved with an SMIN/SMAX. It wouldn't be too hard
1169 to get the target to tell us... */
1170 if (FLOAT_MODE_P (GET_MODE (if_info->x))
1171 && TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1172 && ! flag_unsafe_math_optimizations)
1173 return FALSE;
1174
1175 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1176 if (!cond)
1177 return FALSE;
1178
1179 /* Verify the condition is of the form we expect, and canonicalize
1180 the comparison code. */
1181 code = GET_CODE (cond);
1182 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1183 {
1184 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1185 return FALSE;
1186 }
1187 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1188 {
1189 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1190 return FALSE;
1191 code = swap_condition (code);
1192 }
1193 else
1194 return FALSE;
1195
1196 /* Determine what sort of operation this is. Note that the code is for
1197 a taken branch, so the code->operation mapping appears backwards. */
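  /* Concretely: if the branch is taken when A < B, the fall-through
     assignment X = A happens only when A >= B, so the construct computes
     the maximum and LT maps to smax_optab below.  */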
1198 switch (code)
1199 {
1200 case LT:
1201 case LE:
1202 case UNLT:
1203 case UNLE:
1204 op = smax_optab;
1205 unsignedp = 0;
1206 break;
1207 case GT:
1208 case GE:
1209 case UNGT:
1210 case UNGE:
1211 op = smin_optab;
1212 unsignedp = 0;
1213 break;
1214 case LTU:
1215 case LEU:
1216 op = umax_optab;
1217 unsignedp = 1;
1218 break;
1219 case GTU:
1220 case GEU:
1221 op = umin_optab;
1222 unsignedp = 1;
1223 break;
1224 default:
1225 return FALSE;
1226 }
1227
1228 start_sequence ();
1229
1230 target = expand_binop (GET_MODE (if_info->x), op, if_info->a, if_info->b,
1231 if_info->x, unsignedp, OPTAB_WIDEN);
1232 if (! target)
1233 {
1234 end_sequence ();
1235 return FALSE;
1236 }
1237 if (target != if_info->x)
1238 noce_emit_move_insn (if_info->x, target);
1239
1240 seq = get_insns ();
1241 end_sequence ();
1242
1243 if (seq_contains_jump (seq))
1244 return FALSE;
1245
1246 emit_insns_before (seq, earliest);
1247 if_info->cond = cond;
1248 if_info->cond_earliest = earliest;
1249
1250 return TRUE;
1251 }
1252
1253 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1254
1255 static int
1256 noce_try_abs (if_info)
1257 struct noce_if_info *if_info;
1258 {
1259 rtx cond, earliest, target, seq, a, b, c;
1260 int negate;
1261
1262 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1263 if (no_new_pseudos)
1264 return FALSE;
1265
1266 /* Recognize A and B as constituting an ABS or NABS. */
1267 a = if_info->a;
1268 b = if_info->b;
1269 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1270 negate = 0;
1271 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1272 {
1273 c = a; a = b; b = c;
1274 negate = 1;
1275 }
1276 else
1277 return FALSE;
1278
1279 cond = noce_get_alt_condition (if_info, b, &earliest);
1280 if (!cond)
1281 return FALSE;
1282
1283 /* Verify the condition is of the form we expect. */
1284 if (rtx_equal_p (XEXP (cond, 0), b))
1285 c = XEXP (cond, 1);
1286 else if (rtx_equal_p (XEXP (cond, 1), b))
1287 c = XEXP (cond, 0);
1288 else
1289 return FALSE;
1290
1291 /* Verify that C is zero. Search backward through the block for
1292 a REG_EQUAL note if necessary. */
1293 if (REG_P (c))
1294 {
1295 rtx insn, note = NULL;
1296 for (insn = earliest;
1297 insn != if_info->test_bb->head;
1298 insn = PREV_INSN (insn))
1299 if (INSN_P (insn)
1300 && ((note = find_reg_note (insn, REG_EQUAL, c))
1301 || (note = find_reg_note (insn, REG_EQUIV, c))))
1302 break;
1303 if (! note)
1304 return FALSE;
1305 c = XEXP (note, 0);
1306 }
1307 if (GET_CODE (c) == MEM
1308 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1309 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1310 c = get_pool_constant (XEXP (c, 0));
1311
1312 /* Work around funny ideas get_condition has wrt canonicalization.
1313 Note that these rtx constants are known to be CONST_INT, and
1314 therefore imply integer comparisons. */
1315 if (c == constm1_rtx && GET_CODE (cond) == GT)
1316 ;
1317 else if (c == const1_rtx && GET_CODE (cond) == LT)
1318 ;
1319 else if (c != CONST0_RTX (GET_MODE (b)))
1320 return FALSE;
1321
1322 /* Determine what sort of operation this is. */
1323 switch (GET_CODE (cond))
1324 {
1325 case LT:
1326 case LE:
1327 case UNLT:
1328 case UNLE:
1329 negate = !negate;
1330 break;
1331 case GT:
1332 case GE:
1333 case UNGT:
1334 case UNGE:
1335 break;
1336 default:
1337 return FALSE;
1338 }
1339
1340 start_sequence ();
1341
1342 target = expand_unop (GET_MODE (if_info->x), abs_optab, b, if_info->x, 0);
1343
1344   /* ??? It's a quandary whether cmove would be better here, especially
1345 for integers. Perhaps combine will clean things up. */
1346 if (target && negate)
1347 target = expand_unop (GET_MODE (target), neg_optab, target, if_info->x, 0);
1348
1349 if (! target)
1350 {
1351 end_sequence ();
1352 return FALSE;
1353 }
1354
1355 if (target != if_info->x)
1356 noce_emit_move_insn (if_info->x, target);
1357
1358 seq = get_insns ();
1359 end_sequence ();
1360
1361 if (seq_contains_jump (seq))
1362 return FALSE;
1363
1364 emit_insns_before (seq, earliest);
1365 if_info->cond = cond;
1366 if_info->cond_earliest = earliest;
1367
1368 return TRUE;
1369 }
1370
1371 /* Look for the condition for the jump first. We'd prefer to avoid
1372 get_condition if we can -- it tries to look back for the contents
1373 of an original compare. On targets that use normal integers for
1374 comparisons, e.g. alpha, this is wasteful. */
1375
1376 static rtx
1377 noce_get_condition (jump, earliest)
1378 rtx jump;
1379 rtx *earliest;
1380 {
1381 rtx cond;
1382 rtx set;
1383
1384 /* If the condition variable is a register and is MODE_INT, accept it.
1385 Otherwise, fall back on get_condition. */
1386
1387 if (! any_condjump_p (jump))
1388 return NULL_RTX;
1389
1390 set = pc_set (jump);
1391
1392 cond = XEXP (SET_SRC (set), 0);
1393 if (GET_CODE (XEXP (cond, 0)) == REG
1394 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1395 {
1396 *earliest = jump;
1397
1398 /* If this branches to JUMP_LABEL when the condition is false,
1399 reverse the condition. */
1400 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1401 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1402 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1403 GET_MODE (cond), XEXP (cond, 0),
1404 XEXP (cond, 1));
1405 }
1406 else
1407 cond = get_condition (jump, earliest);
1408
1409 return cond;
1410 }
1411
1412 /* Return true if OP is ok for if-then-else processing. */
1413
1414 static int
1415 noce_operand_ok (op)
1416 rtx op;
1417 {
1418 /* We special-case memories, so handle any of them with
1419 no address side effects. */
1420 if (GET_CODE (op) == MEM)
1421 return ! side_effects_p (XEXP (op, 0));
1422
1423 if (side_effects_p (op))
1424 return FALSE;
1425
1426   /* ??? Unfortunately may_trap_p can't look at flag_trapping_math, due to
1427 being linked into the genfoo programs. This is probably a mistake.
1428 With finite operands, most fp operations don't trap. */
1429 if (!flag_trapping_math && FLOAT_MODE_P (GET_MODE (op)))
1430 switch (GET_CODE (op))
1431 {
1432 case DIV:
1433 case MOD:
1434 case UDIV:
1435 case UMOD:
1436 /* ??? This is kinda lame -- almost every target will have forced
1437 the constant into a register first. But given the expense of
1438 division, this is probably for the best. */
1439 return (CONSTANT_P (XEXP (op, 1))
1440 && XEXP (op, 1) != CONST0_RTX (GET_MODE (op))
1441 && ! may_trap_p (XEXP (op, 0)));
1442
1443 default:
1444 switch (GET_RTX_CLASS (GET_CODE (op)))
1445 {
1446 case '1':
1447 return ! may_trap_p (XEXP (op, 0));
1448 case 'c':
1449 case '2':
1450 return ! may_trap_p (XEXP (op, 0)) && ! may_trap_p (XEXP (op, 1));
1451 }
1452 break;
1453 }
1454
1455 return ! may_trap_p (op);
1456 }
1457
1458 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1459 without using conditional execution. Return TRUE if we were
1460    successful at converting the block.  */
1461
1462 static int
1463 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1464 basic_block test_bb; /* Basic block test is in */
1465 basic_block then_bb; /* Basic block for THEN block */
1466 basic_block else_bb; /* Basic block for ELSE block */
1467 basic_block join_bb; /* Basic block the join label is in */
1468 {
1469 /* We're looking for patterns of the form
1470
1471 (1) if (...) x = a; else x = b;
1472 (2) x = b; if (...) x = a;
1473 (3) if (...) x = a; // as if with an initial x = x.
1474
1475      The latter patterns require jumps to be more expensive.
1476
1477 ??? For future expansion, look for multiple X in such patterns. */
1478
1479 struct noce_if_info if_info;
1480 rtx insn_a, insn_b;
1481 rtx set_a, set_b;
1482 rtx orig_x, x, a, b;
1483 rtx jump, cond, insn;
1484
1485 /* If this is not a standard conditional jump, we can't parse it. */
1486 jump = test_bb->end;
1487 cond = noce_get_condition (jump, &if_info.cond_earliest);
1488 if (! cond)
1489 return FALSE;
1490
1491 /* If the conditional jump is more than just a conditional jump,
1492 then we can not do if-conversion on this block. */
1493 if (! onlyjump_p (jump))
1494 return FALSE;
1495
1496 /* We must be comparing objects whose modes imply the size. */
1497 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1498 return FALSE;
1499
1500 /* Look for one of the potential sets. */
1501 insn_a = first_active_insn (then_bb);
1502 if (! insn_a
1503 || ! last_active_insn_p (then_bb, insn_a)
1504 || (set_a = single_set (insn_a)) == NULL_RTX)
1505 return FALSE;
1506
1507 x = SET_DEST (set_a);
1508 a = SET_SRC (set_a);
1509
1510 /* Look for the other potential set. Make sure we've got equivalent
1511 destinations. */
1512 /* ??? This is overconservative. Storing to two different mems is
1513 as easy as conditionally computing the address. Storing to a
1514 single mem merely requires a scratch memory to use as one of the
1515 destination addresses; often the memory immediately below the
1516 stack pointer is available for this. */
1517 set_b = NULL_RTX;
1518 if (else_bb)
1519 {
1520 insn_b = first_active_insn (else_bb);
1521 if (! insn_b
1522 || ! last_active_insn_p (else_bb, insn_b)
1523 || (set_b = single_set (insn_b)) == NULL_RTX
1524 || ! rtx_equal_p (x, SET_DEST (set_b)))
1525 return FALSE;
1526 }
1527 else
1528 {
1529 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1530 if (! insn_b
1531 || GET_CODE (insn_b) != INSN
1532 || (set_b = single_set (insn_b)) == NULL_RTX
1533 || ! rtx_equal_p (x, SET_DEST (set_b))
1534 || reg_mentioned_p (x, cond)
1535 || reg_mentioned_p (x, a)
1536 || reg_mentioned_p (x, SET_SRC (set_b)))
1537 insn_b = set_b = NULL_RTX;
1538 }
1539 b = (set_b ? SET_SRC (set_b) : x);
1540
1541 /* X may not be mentioned in the range (cond_earliest, jump]. */
1542 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1543 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1544 return FALSE;
1545
1546 /* A and B may not be modified in the range [cond_earliest, jump). */
1547 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1548 if (INSN_P (insn)
1549 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1550 return FALSE;
1551
1552 /* Only operate on register destinations, and even then avoid extending
1553 the lifetime of hard registers on small register class machines. */
1554 orig_x = x;
1555 if (GET_CODE (x) != REG
1556 || (SMALL_REGISTER_CLASSES
1557 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1558 {
1559 if (no_new_pseudos)
1560 return FALSE;
1561 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1562 ? XEXP (x, 0) : x));
1563 }
1564
1565 /* Don't operate on sources that may trap or are volatile. */
1566 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1567 return FALSE;
1568
1569 /* Set up the info block for our subroutines. */
1570 if_info.test_bb = test_bb;
1571 if_info.cond = cond;
1572 if_info.jump = jump;
1573 if_info.insn_a = insn_a;
1574 if_info.insn_b = insn_b;
1575 if_info.x = x;
1576 if_info.a = a;
1577 if_info.b = b;
1578
1579 /* Try optimizations in some approximation of a useful order. */
1580 /* ??? Should first look to see if X is live incoming at all. If it
1581 isn't, we don't need anything but an unconditional set. */
1582
1583 /* Look and see if A and B are really the same. Avoid creating silly
1584 cmove constructs that no one will fix up later. */
1585 if (rtx_equal_p (a, b))
1586 {
1587 /* If we have an INSN_B, we don't have to create any new rtl. Just
1588 move the instruction that we already have. If we don't have an
1589 INSN_B, that means that A == X, and we've got a noop move. In
1590 that case don't do anything and let the code below delete INSN_A. */
1591 if (insn_b && else_bb)
1592 {
1593 rtx note;
1594
1595 if (else_bb && insn_b == else_bb->end)
1596 else_bb->end = PREV_INSN (insn_b);
1597 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1598
1599 /* If there was a REG_EQUAL note, delete it since it may have been
1600 true due to this insn being after a jump. */
1601 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1602 remove_note (insn_b, note);
1603
1604 insn_b = NULL_RTX;
1605 }
1606 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1607 x must be executed twice. */
1608 else if (insn_b && side_effects_p (orig_x))
1609 return FALSE;
1610
1611 x = orig_x;
1612 goto success;
1613 }
1614
1615 if (noce_try_store_flag (&if_info))
1616 goto success;
1617 if (noce_try_minmax (&if_info))
1618 goto success;
1619 if (noce_try_abs (&if_info))
1620 goto success;
1621 if (HAVE_conditional_move
1622 && noce_try_cmove (&if_info))
1623 goto success;
1624 if (! HAVE_conditional_execution)
1625 {
1626 if (noce_try_store_flag_constants (&if_info))
1627 goto success;
1628 if (noce_try_store_flag_inc (&if_info))
1629 goto success;
1630 if (noce_try_store_flag_mask (&if_info))
1631 goto success;
1632 if (HAVE_conditional_move
1633 && noce_try_cmove_arith (&if_info))
1634 goto success;
1635 }
1636
1637 return FALSE;
1638
1639 success:
1640 /* The original sets may now be killed. */
1641 if (insn_a == then_bb->end)
1642 then_bb->end = PREV_INSN (insn_a);
1643 flow_delete_insn (insn_a);
1644
1645 /* Several special cases here: First, we may have reused insn_b above,
1646 in which case insn_b is now NULL. Second, we want to delete insn_b
1647      if it came from the ELSE block, because it follows the now correct
1648 write that appears in the TEST block. However, if we got insn_b from
1649 the TEST block, it may in fact be loading data needed for the comparison.
1650 We'll let life_analysis remove the insn if it's really dead. */
1651 if (insn_b && else_bb)
1652 {
1653 if (insn_b == else_bb->end)
1654 else_bb->end = PREV_INSN (insn_b);
1655 flow_delete_insn (insn_b);
1656 }
1657
1658 /* The new insns will have been inserted before cond_earliest. We should
1659 be able to remove the jump with impunity, but the condition itself may
1660 have been modified by gcse to be shared across basic blocks. */
1661 test_bb->end = PREV_INSN (jump);
1662 flow_delete_insn (jump);
1663
1664 /* If we used a temporary, fix it up now. */
1665 if (orig_x != x)
1666 {
1667 start_sequence ();
1668 noce_emit_move_insn (orig_x, x);
1669 insn_b = gen_sequence ();
1670 end_sequence ();
1671
1672 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1673 }
1674
1675 /* Merge the blocks! */
1676 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1677
1678 return TRUE;
1679 }
1680 \f
1681 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1682 straight line code. Return true if successful. */
1683
1684 static int
1685 process_if_block (test_bb, then_bb, else_bb, join_bb)
1686 basic_block test_bb; /* Basic block test is in */
1687 basic_block then_bb; /* Basic block for THEN block */
1688 basic_block else_bb; /* Basic block for ELSE block */
1689 basic_block join_bb; /* Basic block the join label is in */
1690 {
1691 if (! reload_completed
1692 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1693 return TRUE;
1694
1695 if (HAVE_conditional_execution
1696 && reload_completed
1697 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1698 return TRUE;
1699
1700 return FALSE;
1701 }
1702
1703 /* Merge the blocks and mark for local life update. */
1704
1705 static void
1706 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1707 basic_block test_bb; /* Basic block test is in */
1708 basic_block then_bb; /* Basic block for THEN block */
1709 basic_block else_bb; /* Basic block for ELSE block */
1710 basic_block join_bb; /* Basic block the join label is in */
1711 {
1712 basic_block combo_bb;
1713
1714 /* All block merging is done into the lower block numbers. */
1715
1716 combo_bb = test_bb;
1717
1718 /* First merge TEST block into THEN block. This is a no-brainer since
1719 the THEN block did not have a code label to begin with. */
1720
1721 if (combo_bb->global_live_at_end)
1722 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1723 merge_blocks_nomove (combo_bb, then_bb);
1724 num_removed_blocks++;
1725
1726 /* The ELSE block, if it existed, had a label. That label count
1727 will almost always be zero, but odd things can happen when labels
1728 get their addresses taken. */
1729 if (else_bb)
1730 {
1731 merge_blocks_nomove (combo_bb, else_bb);
1732 num_removed_blocks++;
1733 }
1734
1735 /* If there was no join block reported, that means it was not adjacent
1736 to the others, and so we cannot merge them. */
1737
1738 if (! join_bb)
1739 {
1740 /* The outgoing edge for the current COMBO block should already
1741 be correct. Verify this. */
1742 if (combo_bb->succ == NULL_EDGE)
1743 abort ();
1744
1745      /* There should still be a branch at the end of the THEN or ELSE
1746 blocks taking us to our final destination. */
1747 if (! simplejump_p (combo_bb->end)
1748 && ! returnjump_p (combo_bb->end))
1749 abort ();
1750 }
1751
1752 /* The JOIN block may have had quite a number of other predecessors too.
1753 Since we've already merged the TEST, THEN and ELSE blocks, we should
1754 have only one remaining edge from our if-then-else diamond. If there
1755 is more than one remaining edge, it must come from elsewhere. There
1756 may be zero incoming edges if the THEN block didn't actually join
1757 back up (as with a call to abort). */
1758 else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)
1759 {
1760 /* We can merge the JOIN. */
1761 if (combo_bb->global_live_at_end)
1762 COPY_REG_SET (combo_bb->global_live_at_end,
1763 join_bb->global_live_at_end);
1764 merge_blocks_nomove (combo_bb, join_bb);
1765 num_removed_blocks++;
1766 }
1767 else
1768 {
1769 /* We cannot merge the JOIN. */
1770
1771 /* The outgoing edge for the current COMBO block should already
1772 be correct. Verify this. */
1773 if (combo_bb->succ->succ_next != NULL_EDGE
1774 || combo_bb->succ->dest != join_bb)
1775 abort ();
1776
1777 /* Remove the jump and cruft from the end of the COMBO block. */
1778 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1779 }
1780
1781 /* Make sure we update life info properly. */
1782 SET_UPDATE_LIFE (combo_bb);
1783
1784 num_updated_if_blocks++;
1785 }
1786 \f
1787 /* Find a block ending in a simple IF condition. Return TRUE if
1788 we were able to transform it in some way. */
1789
1790 static int
1791 find_if_header (test_bb)
1792 basic_block test_bb;
1793 {
1794 edge then_edge;
1795 edge else_edge;
1796
1797 /* The kind of block we're looking for has exactly two successors. */
1798 if ((then_edge = test_bb->succ) == NULL_EDGE
1799 || (else_edge = then_edge->succ_next) == NULL_EDGE
1800 || else_edge->succ_next != NULL_EDGE)
1801 return FALSE;
1802
1803 /* Neither edge should be abnormal. */
1804 if ((then_edge->flags & EDGE_COMPLEX)
1805 || (else_edge->flags & EDGE_COMPLEX))
1806 return FALSE;
1807
1808 /* The THEN edge is canonically the one that falls through. */
1809 if (then_edge->flags & EDGE_FALLTHRU)
1810 ;
1811 else if (else_edge->flags & EDGE_FALLTHRU)
1812 {
1813 edge e = else_edge;
1814 else_edge = then_edge;
1815 then_edge = e;
1816 }
1817 else
1818 /* Otherwise this must be a multiway branch of some sort. */
1819 return FALSE;
1820
1821 if (find_if_block (test_bb, then_edge, else_edge))
1822 goto success;
1823 if (post_dominators
1824 && (! HAVE_conditional_execution || reload_completed))
1825 {
1826 if (find_if_case_1 (test_bb, then_edge, else_edge))
1827 goto success;
1828 if (find_if_case_2 (test_bb, then_edge, else_edge))
1829 goto success;
1830 }
1831
1832 return FALSE;
1833
1834 success:
1835 if (rtl_dump_file)
1836 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1837 return TRUE;
1838 }
1839
1840 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1841 block. If so, we'll try to convert the insns to not require the branch.
1842    Return TRUE if we were successful at converting the block.  */
1843
1844 static int
1845 find_if_block (test_bb, then_edge, else_edge)
1846 basic_block test_bb;
1847 edge then_edge, else_edge;
1848 {
1849 basic_block then_bb = then_edge->dest;
1850 basic_block else_bb = else_edge->dest;
1851 basic_block join_bb = NULL_BLOCK;
1852 edge then_succ = then_bb->succ;
1853 edge else_succ = else_bb->succ;
1854 int next_index;
1855
1856 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1857 if (then_bb->pred->pred_next != NULL_EDGE)
1858 return FALSE;
1859
1860 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1861 if (then_succ != NULL_EDGE
1862 && (then_succ->succ_next != NULL_EDGE
1863 || (then_succ->flags & EDGE_COMPLEX)))
1864 return FALSE;
1865
1866 /* If the THEN block has no successors, conditional execution can still
1867 make a conditional call. Don't do this unless the ELSE block has
1868 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1869 Check for the last insn of the THEN block being an indirect jump, which
1870 is listed as not having any successors, but confuses the rest of the CE
1871 code processing. XXX we should fix this in the future. */
1872 if (then_succ == NULL)
1873 {
1874 if (else_bb->pred->pred_next == NULL_EDGE)
1875 {
1876 rtx last_insn = then_bb->end;
1877
1878 while (last_insn
1879 && GET_CODE (last_insn) == NOTE
1880 && last_insn != then_bb->head)
1881 last_insn = PREV_INSN (last_insn);
1882
1883 if (last_insn
1884 && GET_CODE (last_insn) == JUMP_INSN
1885 && ! simplejump_p (last_insn))
1886 return FALSE;
1887
1888 join_bb = else_bb;
1889 else_bb = NULL_BLOCK;
1890 }
1891 else
1892 return FALSE;
1893 }
1894
1895 /* If the THEN block's successor is the other edge out of the TEST block,
1896 then we have an IF-THEN combo without an ELSE. */
1897 else if (then_succ->dest == else_bb)
1898 {
1899 join_bb = else_bb;
1900 else_bb = NULL_BLOCK;
1901 }
1902
1903 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
1904 has exactly one predecessor and one successor, and the outgoing edge
1905 is not complex, then we have an IF-THEN-ELSE combo. */
1906 else if (else_succ != NULL_EDGE
1907 && then_succ->dest == else_succ->dest
1908 && else_bb->pred->pred_next == NULL_EDGE
1909 && else_succ->succ_next == NULL_EDGE
1910 && ! (else_succ->flags & EDGE_COMPLEX))
1911 join_bb = else_succ->dest;
1912
1913 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
1914 else
1915 return FALSE;
1916
1917 num_possible_if_blocks++;
1918
1919 if (rtl_dump_file)
1920 {
1921 if (else_bb)
1922 fprintf (rtl_dump_file,
1923 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
1924 test_bb->index, then_bb->index, else_bb->index,
1925 join_bb->index);
1926 else
1927 fprintf (rtl_dump_file,
1928 "\nIF-THEN block found, start %d, then %d, join %d\n",
1929 test_bb->index, then_bb->index, join_bb->index);
1930 }
1931
1932   /* Make sure IF, THEN, and ELSE blocks are adjacent.  Actually, we
1933 get the first condition for free, since we've already asserted that
1934 there's a fallthru edge from IF to THEN. */
1935 /* ??? As an enhancement, move the ELSE block. Have to deal with
1936 BLOCK notes, if by no other means than aborting the merge if they
1937 exist. Sticky enough I don't want to think about it now. */
1938 next_index = then_bb->index;
1939 if (else_bb && ++next_index != else_bb->index)
1940 return FALSE;
1941 if (++next_index != join_bb->index)
1942 {
1943 if (else_bb)
1944 join_bb = NULL;
1945 else
1946 return FALSE;
1947 }
1948
1949 /* Do the real work. */
1950 return process_if_block (test_bb, then_bb, else_bb, join_bb);
1951 }
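
/* Illustration (hypothetical user code, not part of the original file):
   the IF-THEN shape accepted above corresponds to source like

       if (cond)
         then_stmt;
       join_stmt;

   and the IF-THEN-ELSE shape to

       if (cond)
         then_stmt;
       else
         else_stmt;
       join_stmt;

   with TEST falling through to THEN, THEN and ELSE each having a single
   predecessor, and both meeting at the JOIN block.  */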
1952
1953 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
1954 transformable, but not necessarily the other. There need be no
1955 JOIN block.
1956
1957    Return TRUE if we were successful at converting the block.
1958
1959 Cases we'd like to look at:
1960
1961 (1)
1962 if (test) goto over; // x not live
1963 x = a;
1964 goto label;
1965 over:
1966
1967 becomes
1968
1969 x = a;
1970 if (! test) goto label;
1971
1972 (2)
1973 if (test) goto E; // x not live
1974 x = big();
1975 goto L;
1976 E:
1977 x = b;
1978 goto M;
1979
1980 becomes
1981
1982 x = b;
1983 if (test) goto M;
1984 x = big();
1985 goto L;
1986
1987 (3) // This one's really only interesting for targets that can do
1988 // multiway branching, e.g. IA-64 BBB bundles. For other targets
1989 // it results in multiple branches on a cache line, which often
1990 // does not sit well with predictors.
1991
1992 if (test1) goto E; // predicted not taken
1993 x = a;
1994 if (test2) goto F;
1995 ...
1996 E:
1997 x = b;
1998 J:
1999
2000 becomes
2001
2002 x = a;
2003 if (test1) goto E;
2004 if (test2) goto F;
2005
2006 Notes:
2007
2008 (A) Don't do (2) if the branch is predicted against the block we're
2009 eliminating. Do it anyway if we can eliminate a branch; this requires
2010 that the sole successor of the eliminated block postdominate the other
2011 side of the if.
2012
2013 (B) With CE, on (3) we can steal from both sides of the if, creating
2014
2015 if (test1) x = a;
2016 if (!test1) x = b;
2017 if (test1) goto J;
2018 if (test2) goto F;
2019 ...
2020 J:
2021
2022 Again, this is most useful if J postdominates.
2023
2024 (C) CE substitutes for helpful life information.
2025
2026 (D) These heuristics need a lot of work. */
2027
2028 /* Tests for case 1 above. */
2029
2030 static int
2031 find_if_case_1 (test_bb, then_edge, else_edge)
2032 basic_block test_bb;
2033 edge then_edge, else_edge;
2034 {
2035 basic_block then_bb = then_edge->dest;
2036 basic_block else_bb = else_edge->dest;
2037 edge then_succ = then_bb->succ;
2038 rtx new_lab;
2039
2040 /* THEN has one successor. */
2041 if (!then_succ || then_succ->succ_next != NULL)
2042 return FALSE;
2043
2044 /* THEN does not fall through, but is not strange either. */
2045 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2046 return FALSE;
2047
2048 /* THEN has one predecessor. */
2049 if (then_bb->pred->pred_next != NULL)
2050 return FALSE;
2051
2052 /* ELSE follows THEN. (??? could be moved) */
2053 if (else_bb->index != then_bb->index + 1)
2054 return FALSE;
2055
2056 num_possible_if_blocks++;
2057 if (rtl_dump_file)
2058 fprintf (rtl_dump_file,
2059 "\nIF-CASE-1 found, start %d, then %d\n",
2060 test_bb->index, then_bb->index);
2061
2062 /* THEN is small. */
2063 if (count_bb_insns (then_bb) > BRANCH_COST)
2064 return FALSE;
2065
2066 /* Find the label for THEN's destination. */
2067 if (then_succ->dest == EXIT_BLOCK_PTR)
2068 new_lab = NULL_RTX;
2069 else
2070 {
2071 new_lab = JUMP_LABEL (then_bb->end);
2072 if (! new_lab)
2073 abort ();
2074 }
2075
2076 /* Registers set are dead, or are predicable. */
2077 if (! dead_or_predicable (test_bb, then_bb, else_bb, new_lab, 1))
2078 return FALSE;
2079
2080 /* Conversion went ok, including moving the insns and fixing up the
2081 jump. Adjust the CFG to match. */
2082
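  /* After the change, TEST reaches ELSE by falling through and reaches
     THEN's old destination via the new edge below, so its live-at-end
     set must cover what ELSE needs on entry as well as everything that
     was live at the end of THEN.  */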
2083 SET_UPDATE_LIFE (test_bb);
2084 bitmap_operation (test_bb->global_live_at_end,
2085 else_bb->global_live_at_start,
2086 then_bb->global_live_at_end, BITMAP_IOR);
2087
2088 make_edge (NULL, test_bb, then_succ->dest, 0);
2089 flow_delete_block (then_bb);
2090 tidy_fallthru_edge (else_edge, test_bb, else_bb);
2091
2092 num_removed_blocks++;
2093 num_updated_if_blocks++;
2094
2095 return TRUE;
2096 }
2097
2098 /* Test for case 2 above. */
2099
2100 static int
2101 find_if_case_2 (test_bb, then_edge, else_edge)
2102 basic_block test_bb;
2103 edge then_edge, else_edge;
2104 {
2105 basic_block then_bb = then_edge->dest;
2106 basic_block else_bb = else_edge->dest;
2107 edge else_succ = else_bb->succ;
2108 rtx new_lab, note;
2109
2110 /* ELSE has one successor. */
2111 if (!else_succ || else_succ->succ_next != NULL)
2112 return FALSE;
2113
2114 /* ELSE outgoing edge is not complex. */
2115 if (else_succ->flags & EDGE_COMPLEX)
2116 return FALSE;
2117
2118 /* ELSE has one predecessor. */
2119 if (else_bb->pred->pred_next != NULL)
2120 return FALSE;
2121
2122 /* THEN is not EXIT. */
2123 if (then_bb->index < 0)
2124 return FALSE;
2125
2126 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
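  /* The REG_BR_PROB note on the conditional jump holds the probability,
     scaled by REG_BR_PROB_BASE, that the branch is taken; since THEN is
     the fall-through block, a value of at least half means ELSE is the
     predicted path.  */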
2127 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2128 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2129 ;
2130 else if (else_succ->dest->index < 0
2131 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
2132 ORIG_INDEX (else_succ->dest)))
2133 ;
2134 else
2135 return FALSE;
2136
2137 num_possible_if_blocks++;
2138 if (rtl_dump_file)
2139 fprintf (rtl_dump_file,
2140 "\nIF-CASE-2 found, start %d, else %d\n",
2141 test_bb->index, else_bb->index);
2142
2143 /* ELSE is small. */
2144   if (count_bb_insns (else_bb) > BRANCH_COST)
2145 return FALSE;
2146
2147 /* Find the label for ELSE's destination. */
2148 if (else_succ->dest == EXIT_BLOCK_PTR)
2149 new_lab = NULL_RTX;
2150 else
2151 {
2152 if (else_succ->flags & EDGE_FALLTHRU)
2153 {
2154 new_lab = else_succ->dest->head;
2155 if (GET_CODE (new_lab) != CODE_LABEL)
2156 abort ();
2157 }
2158 else
2159 {
2160 new_lab = JUMP_LABEL (else_bb->end);
2161 if (! new_lab)
2162 abort ();
2163 }
2164 }
2165
2166 /* Registers set are dead, or are predicable. */
2167 if (! dead_or_predicable (test_bb, else_bb, then_bb, new_lab, 0))
2168 return FALSE;
2169
2170 /* Conversion went ok, including moving the insns and fixing up the
2171 jump. Adjust the CFG to match. */
2172
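  /* As in case 1: TEST now reaches THEN directly and ELSE's old
     successor via the new edge below, so its live-at-end must cover what
     THEN needs on entry plus everything that was live at the end of
     ELSE.  */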
2173 SET_UPDATE_LIFE (test_bb);
2174 bitmap_operation (test_bb->global_live_at_end,
2175 then_bb->global_live_at_start,
2176 else_bb->global_live_at_end, BITMAP_IOR);
2177
2178 remove_edge (else_edge);
2179 make_edge (NULL, test_bb, else_succ->dest, 0);
2180 flow_delete_block (else_bb);
2181
2182 num_removed_blocks++;
2183 num_updated_if_blocks++;
2184
2185 /* ??? We may now fallthru from one of THEN's successors into a join
2186 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2187
2188 return TRUE;
2189 }
2190
2191 /* A subroutine of dead_or_predicable called through for_each_rtx.
2192    Return 1 if a memory reference is found.  */
2193
2194 static int
2195 find_memory (px, data)
2196 rtx *px;
2197 void *data ATTRIBUTE_UNUSED;
2198 {
2199 return GET_CODE (*px) == MEM;
2200 }
2201
2202 /* Used by the code above to perform the actual rtl transformations.
2203 Return TRUE if successful.
2204
2205 TEST_BB is the block containing the conditional branch. MERGE_BB
2206 is the block containing the code to manipulate. NEW_DEST is the
2207 label TEST_BB should be branching to after the conversion.
2208 REVERSEP is true if the sense of the branch should be reversed. */
2209
2210 static int
2211 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2212 basic_block test_bb, merge_bb, other_bb;
2213 rtx new_dest;
2214 int reversep;
2215 {
2216 rtx head, end, jump, earliest, old_dest;
2217
2218 jump = test_bb->end;
2219
2220 /* Find the extent of the real code in the merge block. */
2221 head = merge_bb->head;
2222 end = merge_bb->end;
2223
2224 if (GET_CODE (head) == CODE_LABEL)
2225 head = NEXT_INSN (head);
2226 if (GET_CODE (head) == NOTE)
2227 {
2228 if (head == end)
2229 {
2230 head = end = NULL_RTX;
2231 goto no_body;
2232 }
2233 head = NEXT_INSN (head);
2234 }
2235
2236 if (GET_CODE (end) == JUMP_INSN)
2237 {
2238 if (head == end)
2239 {
2240 head = end = NULL_RTX;
2241 goto no_body;
2242 }
2243 end = PREV_INSN (end);
2244 }
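
  /* HEAD and END now delimit just the real body of MERGE_BB, with any
     leading label or note and any trailing jump excluded.  */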
2245
2246 /* Disable handling dead code by conditional execution if the machine needs
2247 to do anything funny with the tests, etc. */
2248 #ifndef IFCVT_MODIFY_TESTS
2249 if (HAVE_conditional_execution)
2250 {
2251 /* In the conditional execution case, we have things easy. We know
2252          the condition is reversible.  We don't have to check life info,
2253          because we're going to conditionally execute the code anyway.
2254 All that's left is making sure the insns involved can actually
2255 be predicated. */
2256
2257 rtx cond, prob_val;
2258
2259 cond = cond_exec_get_condition (jump);
2260
2261 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2262 if (prob_val)
2263 prob_val = XEXP (prob_val, 0);
2264
2265 if (reversep)
2266 {
2267 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2268 GET_MODE (cond), XEXP (cond, 0),
2269 XEXP (cond, 1));
2270 if (prob_val)
2271 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2272 }
2273
2274 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
2275 goto cancel;
2276
2277 earliest = jump;
2278 }
2279 else
2280 #endif
2281 {
2282 /* In the non-conditional execution case, we have to verify that there
2283 are no trapping operations, no calls, no references to memory, and
2284 that any registers modified are dead at the branch site. */
2285
2286 rtx insn, cond, prev;
2287 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2288 regset merge_set, tmp, test_live, test_set;
2289 struct propagate_block_info *pbi;
2290 int i, fail = 0;
2291
2292 /* Check for no calls or trapping operations. */
2293 for (insn = head; ; insn = NEXT_INSN (insn))
2294 {
2295 if (GET_CODE (insn) == CALL_INSN)
2296 return FALSE;
2297 if (INSN_P (insn))
2298 {
2299 if (may_trap_p (PATTERN (insn)))
2300 return FALSE;
2301
2302 /* ??? Even non-trapping memories such as stack frame
2303 references must be avoided. For stores, we collect
2304 no lifetime info; for reads, we'd have to assert
2305             true_dependence false against every store in the
2306 TEST range. */
2307 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2308 return FALSE;
2309 }
2310 if (insn == end)
2311 break;
2312 }
2313
2314 if (! any_condjump_p (jump))
2315 return FALSE;
2316
2317 /* Find the extent of the conditional. */
2318 cond = noce_get_condition (jump, &earliest);
2319 if (! cond)
2320 return FALSE;
2321
2322 /* Collect:
2323 MERGE_SET = set of registers set in MERGE_BB
2324 TEST_LIVE = set of registers live at EARLIEST
2325 TEST_SET = set of registers set between EARLIEST and the
2326 end of the block. */
2327
2328 tmp = INITIALIZE_REG_SET (tmp_head);
2329 merge_set = INITIALIZE_REG_SET (merge_set_head);
2330 test_live = INITIALIZE_REG_SET (test_live_head);
2331 test_set = INITIALIZE_REG_SET (test_set_head);
2332
2333 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2334 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2335 since we've already asserted that MERGE_BB is small. */
2336 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2337
2338 /* For small register class machines, don't lengthen lifetimes of
2339 hard registers before reload. */
2340 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2341 {
2342 EXECUTE_IF_SET_IN_BITMAP
2343 (merge_set, 0, i,
2344 {
2345 if (i < FIRST_PSEUDO_REGISTER
2346 && ! fixed_regs[i]
2347 && ! global_regs[i])
2348 fail = 1;
2349 });
2350 }
2351
2352 /* For TEST, we're interested in a range of insns, not a whole block.
2353 Moreover, we're interested in the insns live from OTHER_BB. */
2354
2355 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2356 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2357 0);
2358
2359 for (insn = jump; ; insn = prev)
2360 {
2361 prev = propagate_one_insn (pbi, insn);
2362 if (insn == earliest)
2363 break;
2364 }
2365
2366 free_propagate_block_info (pbi);
2367
2368 /* We can perform the transformation if
2369 MERGE_SET & (TEST_SET | TEST_LIVE)
2370 and
2371 TEST_SET & merge_bb->global_live_at_start
2372 are empty. */
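      /* In other words, moving MERGE_BB's insns above the branch must not
         overwrite a register that the TEST range itself sets or that is
         still live on the path through OTHER_BB, and the TEST range must
         not clobber a register that the moved insns need on entry.  */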
2373
2374 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2375 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2376 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2377
2378 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2379 BITMAP_AND);
2380 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2381
2382 FREE_REG_SET (tmp);
2383 FREE_REG_SET (merge_set);
2384 FREE_REG_SET (test_live);
2385 FREE_REG_SET (test_set);
2386
2387 if (fail)
2388 return FALSE;
2389 }
2390
2391 no_body:
2392 /* We don't want to use normal invert_jump or redirect_jump because
2393 we don't want to delete_insn called. Also, we want to do our own
2394 change group management. */
2395
2396 old_dest = JUMP_LABEL (jump);
2397 if (reversep
2398 ? ! invert_jump_1 (jump, new_dest)
2399 : ! redirect_jump_1 (jump, new_dest))
2400 goto cancel;
2401
2402 if (! apply_change_group ())
2403 return FALSE;
2404
2405 if (old_dest)
2406 LABEL_NUSES (old_dest) -= 1;
2407 if (new_dest)
2408 LABEL_NUSES (new_dest) += 1;
2409 JUMP_LABEL (jump) = new_dest;
2410
2411 if (reversep)
2412 {
2413 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2414 if (note)
2415 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
2416 }
2417
2418 /* Move the insns out of MERGE_BB to before the branch. */
2419 if (head != NULL)
2420 {
2421 if (end == merge_bb->end)
2422 merge_bb->end = PREV_INSN (head);
2423
2424 head = squeeze_notes (head, end);
2425 if (GET_CODE (end) == NOTE
2426 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
2427 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
2428 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
2429 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
2430 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
2431 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
2432 {
2433 if (head == end)
2434 return TRUE;
2435 end = PREV_INSN (end);
2436 }
2437
2438 reorder_insns (head, end, PREV_INSN (earliest));
2439 }
2440 return TRUE;
2441
2442 cancel:
2443 cancel_changes (0);
2444 return FALSE;
2445 }
2446 \f
2447 /* Main entry point for all if-conversion. */
2448
2449 void
2450 if_convert (life_data_ok)
2451 int life_data_ok;
2452 {
2453 int block_num;
2454
2455 num_possible_if_blocks = 0;
2456 num_updated_if_blocks = 0;
2457 num_removed_blocks = 0;
2458
2459 /* Free up basic_block_for_insn so that we don't have to keep it
2460 up to date, either here or in merge_blocks_nomove. */
2461 free_basic_block_vars (1);
2462
2463 /* Compute postdominators if we think we'll use them. */
2464 post_dominators = NULL;
2465 if (HAVE_conditional_execution || life_data_ok)
2466 {
2467 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2468 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2469 }
2470
2471 /* Record initial block numbers. */
2472 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2473 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2474
2475 /* Go through each of the basic blocks looking for things to convert. */
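  /* When a conversion succeeds, blocks may have been merged and later
     blocks renumbered, so resume from the converted block's (possibly
     new) index instead of simply advancing.  */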
2476 for (block_num = 0; block_num < n_basic_blocks; )
2477 {
2478 basic_block bb = BASIC_BLOCK (block_num);
2479 if (find_if_header (bb))
2480 block_num = bb->index;
2481 else
2482 block_num++;
2483 }
2484
2485 if (post_dominators)
2486 sbitmap_vector_free (post_dominators);
2487
2488 if (rtl_dump_file)
2489 fflush (rtl_dump_file);
2490
2491 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2492 compute_bb_for_insn (get_max_uid ());
2493
2494 /* Rebuild life info for basic blocks that require it. */
2495 if (num_removed_blocks && life_data_ok)
2496 {
2497 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2498 sbitmap_zero (update_life_blocks);
2499
2500 /* If we allocated new pseudos, we must resize the array for sched1. */
2501 if (max_regno < max_reg_num ())
2502 {
2503 max_regno = max_reg_num ();
2504 allocate_reg_info (max_regno, FALSE, FALSE);
2505 }
2506
2507 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2508 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2509 SET_BIT (update_life_blocks, block_num);
2510
2511 count_or_remove_death_notes (update_life_blocks, 1);
2512 /* ??? See about adding a mode that verifies that the initial
2513       set of blocks doesn't let registers come live.  */
2514 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2515 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2516 | PROP_KILL_DEAD_CODE);
2517
2518 sbitmap_free (update_life_blocks);
2519 }
2520
2521 /* Write the final stats. */
2522 if (rtl_dump_file && num_possible_if_blocks > 0)
2523 {
2524 fprintf (rtl_dump_file,
2525 "\n%d possible IF blocks searched.\n",
2526 num_possible_if_blocks);
2527 fprintf (rtl_dump_file,
2528 "%d IF blocks converted.\n",
2529 num_updated_if_blocks);
2530 fprintf (rtl_dump_file,
2531 "%d basic blocks deleted.\n\n\n",
2532 num_removed_blocks);
2533 }
2534
2535 #ifdef ENABLE_CHECKING
2536 verify_flow_info ();
2537 #endif
2538 }
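
/* Usage sketch (assumed caller, not part of this file): a pass driver
   would invoke the entry point once per function, for example

       if_convert (1);

   passing nonzero for LIFE_DATA_OK when global life information is
   current, and zero when it is not yet available.  */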