1 /* If-conversion support.
2 Copyright (C) 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23
24 #include "rtl.h"
25 #include "regs.h"
26 #include "function.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "expr.h"
33 #include "output.h"
34 #include "tm_p.h"
35
36
37 #ifndef HAVE_conditional_execution
38 #define HAVE_conditional_execution 0
39 #endif
40 #ifndef HAVE_conditional_move
41 #define HAVE_conditional_move 0
42 #endif
43 #ifndef HAVE_incscc
44 #define HAVE_incscc 0
45 #endif
46 #ifndef HAVE_decscc
47 #define HAVE_decscc 0
48 #endif
49
50 #ifndef MAX_CONDITIONAL_EXECUTE
51 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
52 #endif
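/* For example, with the default above and a BRANCH_COST of 2,
   cond_exec_process_if_block will predicate at most 3 insns in an
   IF-THEN block, or 6 insns total in an IF-THEN-ELSE block, since the
   limit is doubled when an ELSE block is present.  */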
53
54 #define NULL_EDGE ((struct edge_def *)NULL)
55 #define NULL_BLOCK ((struct basic_block_def *)NULL)
56
57 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
58 static int num_possible_if_blocks;
59
60 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
61 execution. */
62 static int num_updated_if_blocks;
63
64 /* # of basic blocks that were removed. */
65 static int num_removed_blocks;
66
67 /* The post-dominator relation on the original block numbers. */
68 static sbitmap *post_dominators;
69
70 /* Forward references. */
71 static int count_bb_insns PARAMS ((basic_block));
72 static rtx first_active_insn PARAMS ((basic_block));
73 static int last_active_insn_p PARAMS ((basic_block, rtx));
74 static int seq_contains_jump PARAMS ((rtx));
75
76 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
77 static rtx cond_exec_get_condition PARAMS ((rtx));
78 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
79 basic_block, basic_block));
80
81 static rtx noce_get_condition PARAMS ((rtx, rtx *));
82 static int noce_process_if_block PARAMS ((basic_block, basic_block,
83 basic_block, basic_block));
84
85 static int process_if_block PARAMS ((basic_block, basic_block,
86 basic_block, basic_block));
87 static void merge_if_block PARAMS ((basic_block, basic_block,
88 basic_block, basic_block));
89
90 static int find_if_header PARAMS ((basic_block));
91 static int find_if_block PARAMS ((basic_block, edge, edge));
92 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
93 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
94 static int find_memory PARAMS ((rtx *, void *));
95 static int dead_or_predicable PARAMS ((basic_block, basic_block,
96 basic_block, rtx, int));
97 \f
98 /* Abuse the basic_block AUX field to store the original block index,
99 as well as a flag indicating that the block should be rescanned for
100 life analysis. */
101
102 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
103 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
104 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
105 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
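/* A sketch of the encoding used above: for a block whose original index
   was 7, SET_ORIG_INDEX stores (7 << 1) == 14 in AUX; a subsequent
   SET_UPDATE_LIFE turns this into 15.  ORIG_INDEX then recovers 7 and
   UPDATE_LIFE yields a nonzero value.  */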
106
107 \f
108 /* Count the number of non-jump active insns in BB. */
109
110 static int
111 count_bb_insns (bb)
112 basic_block bb;
113 {
114 int count = 0;
115 rtx insn = bb->head;
116
117 while (1)
118 {
119 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
120 count++;
121
122 if (insn == bb->end)
123 break;
124 insn = NEXT_INSN (insn);
125 }
126
127 return count;
128 }
129
130 /* Return the first non-jump active insn in the basic block. */
131
132 static rtx
133 first_active_insn (bb)
134 basic_block bb;
135 {
136 rtx insn = bb->head;
137
138 if (GET_CODE (insn) == CODE_LABEL)
139 {
140 if (insn == bb->end)
141 return NULL_RTX;
142 insn = NEXT_INSN (insn);
143 }
144
145 while (GET_CODE (insn) == NOTE)
146 {
147 if (insn == bb->end)
148 return NULL_RTX;
149 insn = NEXT_INSN (insn);
150 }
151
152 if (GET_CODE (insn) == JUMP_INSN)
153 return NULL_RTX;
154
155 return insn;
156 }
157
158 /* Return true if INSN is the last active non-jump insn in BB. */
159
160 static int
161 last_active_insn_p (bb, insn)
162 basic_block bb;
163 rtx insn;
164 {
165 do
166 {
167 if (insn == bb->end)
168 return TRUE;
169 insn = NEXT_INSN (insn);
170 }
171 while (GET_CODE (insn) == NOTE);
172
173 return GET_CODE (insn) == JUMP_INSN;
174 }
175
176 /* It is possible, especially when expanding multi-word
177 arithmetic, for the expanders to have emitted jumps. Search
178 through the sequence and return TRUE if a jump exists so that
179 we can abort the conversion. */
180
181 static int
182 seq_contains_jump (insn)
183 rtx insn;
184 {
185 while (insn)
186 {
187 if (GET_CODE (insn) == JUMP_INSN)
188 return 1;
189 insn = NEXT_INSN (insn);
190 }
191 return 0;
192 }
193 \f
194 /* Go through a bunch of insns, converting them to conditional
195 execution format if possible. Return TRUE if all of the non-note
196 insns were processed. */
197
198 static int
199 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
200 rtx start; /* first insn to look at */
201 rtx end; /* last insn to look at */
202 rtx test; /* conditional execution test */
203 rtx prob_val; /* probability of branch taken. */
204 int mod_ok; /* true if modifications are ok on the last insn. */
205 {
206 int must_be_last = FALSE;
207 rtx insn;
208 rtx pattern;
209
210 for (insn = start; ; insn = NEXT_INSN (insn))
211 {
212 if (GET_CODE (insn) == NOTE)
213 goto insn_done;
214
215 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
216 abort ();
217
218 /* Remove USE insns that get in the way. */
219 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
220 {
221 /* ??? Ug. Actually unlinking the thing is problematic,
222 given what we'd have to coordinate with our callers. */
223 PUT_CODE (insn, NOTE);
224 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
225 NOTE_SOURCE_FILE (insn) = 0;
226 goto insn_done;
227 }
228
229 /* Last insn wasn't last? */
230 if (must_be_last)
231 return FALSE;
232
233 if (modified_in_p (test, insn))
234 {
235 if (!mod_ok)
236 return FALSE;
237 must_be_last = TRUE;
238 }
239
240 /* Now build the conditional form of the instruction. */
241 pattern = PATTERN (insn);
242
243 /* If the machine needs to modify the insn being conditionally executed,
244 say for example to force a constant integer operand into a temp
245 register, do so here. */
246 #ifdef IFCVT_MODIFY_INSN
247 IFCVT_MODIFY_INSN (pattern, insn);
248 if (! pattern)
249 return FALSE;
250 #endif
251
252 validate_change (insn, &PATTERN (insn),
253 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
254 pattern), 1);
255
256 if (GET_CODE (insn) == CALL_INSN && prob_val)
257 validate_change (insn, &REG_NOTES (insn),
258 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
259 REG_NOTES (insn)), 1);
260
261 insn_done:
262 if (insn == end)
263 break;
264 }
265
266 return TRUE;
267 }
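/* A rough illustration of the rewrite performed above, with arbitrary
   register numbers: given TEST (ne (reg:CC 17) (const_int 0)), an insn
   such as (set (reg:SI 100) (reg:SI 101)) is replaced by
   (cond_exec (ne (reg:CC 17) (const_int 0))
              (set (reg:SI 100) (reg:SI 101)))
   so that it takes effect only when the test holds.  */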
268
269 /* Return the condition for a jump. Do not do any special processing. */
270
271 static rtx
272 cond_exec_get_condition (jump)
273 rtx jump;
274 {
275 rtx test_if, cond;
276
277 if (any_condjump_p (jump))
278 test_if = SET_SRC (pc_set (jump));
279 else
280 return NULL_RTX;
281 cond = XEXP (test_if, 0);
282
283 /* If this branches to JUMP_LABEL when the condition is false,
284 reverse the condition. */
285 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
286 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
287 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
288 GET_MODE (cond), XEXP (cond, 0),
289 XEXP (cond, 1));
290
291 return cond;
292 }
293
294 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
295 to conditional execution. Return TRUE if we were successful at
296 converting the block. */
297
298 static int
299 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
300 basic_block test_bb; /* Basic block test is in */
301 basic_block then_bb; /* Basic block for THEN block */
302 basic_block else_bb; /* Basic block for ELSE block */
303 basic_block join_bb; /* Basic block the join label is in */
304 {
305 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
306 rtx then_start; /* first insn in THEN block */
307 rtx then_end; /* last insn + 1 in THEN block */
308 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
309 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
310 int max; /* max # of insns to convert. */
311 int then_mod_ok; /* whether conditional mods are ok in THEN */
312 rtx true_expr; /* test for else block insns */
313 rtx false_expr; /* test for then block insns */
314 rtx true_prob_val; /* probability of else block */
315 rtx false_prob_val; /* probability of then block */
316 int n_insns;
317
318 /* Find the conditional jump to the ELSE or JOIN part, and isolate
319 the test. */
320 test_expr = cond_exec_get_condition (test_bb->end);
321 if (! test_expr)
322 return FALSE;
323
324 /* If the conditional jump is more than just a conditional jump,
325 then we can not do conditional execution conversion on this block. */
326 if (!onlyjump_p (test_bb->end))
327 return FALSE;
328
329 /* Collect the bounds of where we're to search. */
330
331 then_start = then_bb->head;
332 then_end = then_bb->end;
333
334 /* Skip a label heading THEN block. */
335 if (GET_CODE (then_start) == CODE_LABEL)
336 then_start = NEXT_INSN (then_start);
337
338 /* Skip a (use (const_int 0)) or branch as the final insn. */
339 if (GET_CODE (then_end) == INSN
340 && GET_CODE (PATTERN (then_end)) == USE
341 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
342 then_end = PREV_INSN (then_end);
343 else if (GET_CODE (then_end) == JUMP_INSN)
344 then_end = PREV_INSN (then_end);
345
346 if (else_bb)
347 {
348 /* Skip the ELSE block's label. */
349 else_start = NEXT_INSN (else_bb->head);
350 else_end = else_bb->end;
351
352 /* Skip a (use (const_int 0)) or branch as the final insn. */
353 if (GET_CODE (else_end) == INSN
354 && GET_CODE (PATTERN (else_end)) == USE
355 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
356 else_end = PREV_INSN (else_end);
357 else if (GET_CODE (else_end) == JUMP_INSN)
358 else_end = PREV_INSN (else_end);
359 }
360
361 /* How many instructions should we convert in total? */
362 n_insns = 0;
363 if (else_bb)
364 {
365 max = 2 * MAX_CONDITIONAL_EXECUTE;
366 n_insns = count_bb_insns (else_bb);
367 }
368 else
369 max = MAX_CONDITIONAL_EXECUTE;
370 n_insns += count_bb_insns (then_bb);
371 if (n_insns > max)
372 return FALSE;
373
374 /* Map test_expr/test_jump into the appropriate MD tests to use on
375 the conditionally executed code. */
376
377 true_expr = test_expr;
378 false_expr = gen_rtx_fmt_ee (reverse_condition (GET_CODE (true_expr)),
379 GET_MODE (true_expr), XEXP (true_expr, 0),
380 XEXP (true_expr, 1));
381
382 #ifdef IFCVT_MODIFY_TESTS
383 /* If the machine description needs to modify the tests, such as setting a
384 conditional execution register from a comparison, it can do so here. */
385 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
386 join_bb);
387
388 /* See if the conversion failed */
389 if (!true_expr || !false_expr)
390 goto fail;
391 #endif
392
393 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
394 if (true_prob_val)
395 {
396 true_prob_val = XEXP (true_prob_val, 0);
397 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
398 }
399 else
400 false_prob_val = NULL_RTX;
401
402 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
403 in the THEN block. */
404 then_mod_ok = (else_bb == NULL_BLOCK);
405
406 /* Go through the THEN and ELSE blocks converting the insns if possible
407 to conditional execution. */
408
409 if (then_end
410 && ! cond_exec_process_insns (then_start, then_end,
411 false_expr, false_prob_val, then_mod_ok))
412 goto fail;
413
414 if (else_bb
415 && ! cond_exec_process_insns (else_start, else_end,
416 true_expr, true_prob_val, TRUE))
417 goto fail;
418
419 if (! apply_change_group ())
420 return FALSE;
421
422 #ifdef IFCVT_MODIFY_FINAL
423 /* Do any machine dependent final modifications */
424 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
425 #endif
426
427 /* Conversion succeeded. */
428 if (rtl_dump_file)
429 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
430 n_insns, (n_insns == 1) ? " was" : "s were");
431
432 /* Merge the blocks! */
433 merge_if_block (test_bb, then_bb, else_bb, join_bb);
434 return TRUE;
435
436 fail:
437 #ifdef IFCVT_MODIFY_CANCEL
438 /* Cancel any machine dependent changes. */
439 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
440 #endif
441
442 cancel_changes (0);
443 return FALSE;
444 }
445 \f
446 /* Used by noce_process_if_block to communicate with its subroutines.
447
448 The subroutines know that A and B may be evaluated freely. They
449 know that X is a register. They should insert new instructions
450 before cond_earliest. */
451
452 struct noce_if_info
453 {
454 rtx insn_a, insn_b;
455 rtx x, a, b;
456 rtx jump, cond, cond_earliest;
457 };
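/* For a source fragment such as "if (test) x = a; else x = b;",
   noce_process_if_block fills this in roughly as follows: JUMP is the
   conditional jump ending the test block and COND its (possibly
   reversed) condition, with COND_EARLIEST the earliest insn involved in
   computing it; INSN_A and INSN_B are the single sets in the THEN and
   ELSE blocks, X their common destination, and A and B the respective
   sources.  */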
458
459 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
460 rtx, int, int));
461 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
462 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
463 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
464 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
465 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
466 rtx, enum rtx_code, rtx,
467 rtx, rtx, rtx));
468 static int noce_try_cmove PARAMS ((struct noce_if_info *));
469 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
470
471 /* Helper function for noce_try_store_flag*. */
472
473 static rtx
474 noce_emit_store_flag (if_info, x, reversep, normalize)
475 struct noce_if_info *if_info;
476 rtx x;
477 int reversep, normalize;
478 {
479 rtx cond = if_info->cond;
480 int cond_complex;
481 enum rtx_code code;
482
483 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
484 || ! general_operand (XEXP (cond, 1), VOIDmode));
485
486 /* If earliest == jump, or when the condition is complex, try to
487 build the store_flag insn directly. */
488
489 if (cond_complex)
490 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
491
492 if ((if_info->cond_earliest == if_info->jump || cond_complex)
493 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
494 {
495 rtx tmp;
496
497 code = GET_CODE (cond);
498 if (reversep)
499 code = reverse_condition (code);
500
501 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
502 XEXP (cond, 1));
503 tmp = gen_rtx_SET (VOIDmode, x, tmp);
504
505 start_sequence ();
506 tmp = emit_insn (tmp);
507
508 if (recog_memoized (tmp) >= 0)
509 {
510 tmp = get_insns ();
511 end_sequence ();
512 emit_insns (tmp);
513
514 if_info->cond_earliest = if_info->jump;
515
516 return x;
517 }
518
519 end_sequence ();
520 }
521
522 /* Don't even try if the comparison operands are weird. */
523 if (cond_complex)
524 return NULL_RTX;
525
526 code = GET_CODE (cond);
527 if (reversep)
528 code = reverse_condition (code);
529
530 return emit_store_flag (x, code, XEXP (cond, 0),
531 XEXP (cond, 1), VOIDmode,
532 (code == LTU || code == LEU
533 || code == GEU || code == GTU), normalize);
534 }
535
536 /* Convert "if (test) x = 1; else x = 0".
537
538 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
539 tried in noce_try_store_flag_constants after noce_try_cmove has had
540 a go at the conversion. */
541
542 static int
543 noce_try_store_flag (if_info)
544 struct noce_if_info *if_info;
545 {
546 int reversep;
547 rtx target, seq;
548
549 if (GET_CODE (if_info->b) == CONST_INT
550 && INTVAL (if_info->b) == STORE_FLAG_VALUE
551 && if_info->a == const0_rtx)
552 reversep = 0;
553 else if (if_info->b == const0_rtx
554 && GET_CODE (if_info->a) == CONST_INT
555 && INTVAL (if_info->a) == STORE_FLAG_VALUE
556 && can_reverse_comparison_p (if_info->cond, if_info->jump))
557 reversep = 1;
558 else
559 return FALSE;
560
561 start_sequence ();
562
563 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
564 if (target)
565 {
566 if (target != if_info->x)
567 emit_move_insn (if_info->x, target);
568
569 seq = get_insns ();
570 end_sequence ();
571 emit_insns_before (seq, if_info->cond_earliest);
572
573 return TRUE;
574 }
575 else
576 {
577 end_sequence ();
578 return FALSE;
579 }
580 }
581
582 /* Convert "if (test) x = a; else x = b", for A and B constant. */
583
584 static int
585 noce_try_store_flag_constants (if_info)
586 struct noce_if_info *if_info;
587 {
588 rtx target, seq;
589 int reversep;
590 HOST_WIDE_INT itrue, ifalse, diff, tmp;
591 int normalize, can_reverse;
592
593 if (! no_new_pseudos
594 && GET_CODE (if_info->a) == CONST_INT
595 && GET_CODE (if_info->b) == CONST_INT)
596 {
597 ifalse = INTVAL (if_info->a);
598 itrue = INTVAL (if_info->b);
599 diff = itrue - ifalse;
600
601 can_reverse = can_reverse_comparison_p (if_info->cond, if_info->jump);
602
603 reversep = 0;
604 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
605 normalize = 0;
606 else if (ifalse == 0 && exact_log2 (itrue) >= 0
607 && (STORE_FLAG_VALUE == 1
608 || BRANCH_COST >= 2))
609 normalize = 1;
610 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
611 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
612 normalize = 1, reversep = 1;
613 else if (itrue == -1
614 && (STORE_FLAG_VALUE == -1
615 || BRANCH_COST >= 2))
616 normalize = -1;
617 else if (ifalse == -1 && can_reverse
618 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
619 normalize = -1, reversep = 1;
620 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
621 || BRANCH_COST >= 3)
622 normalize = -1;
623 else
624 return FALSE;
625
626 if (reversep)
627 {
628 tmp = itrue; itrue = ifalse; ifalse = tmp;
629 diff = -diff;
630 }
631
632 start_sequence ();
633 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
634 if (! target)
635 {
636 end_sequence ();
637 return FALSE;
638 }
639
640 /* if (test) x = 3; else x = 4;
641 => x = 3 + (test == 0); */
642 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
643 {
644 target = expand_binop (GET_MODE (if_info->x),
645 (diff == STORE_FLAG_VALUE
646 ? add_optab : sub_optab),
647 GEN_INT (ifalse), target, if_info->x, 0,
648 OPTAB_WIDEN);
649 }
650
651 /* if (test) x = 8; else x = 0;
652 => x = (test != 0) << 3; */
653 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
654 {
655 target = expand_binop (GET_MODE (if_info->x), ashl_optab,
656 target, GEN_INT (tmp), if_info->x, 0,
657 OPTAB_WIDEN);
658 }
659
660 /* if (test) x = -1; else x = b;
661 => x = -(test != 0) | b; */
662 else if (itrue == -1)
663 {
664 target = expand_binop (GET_MODE (if_info->x), ior_optab,
665 target, GEN_INT (ifalse), if_info->x, 0,
666 OPTAB_WIDEN);
667 }
668
669 /* if (test) x = a; else x = b;
670 => x = (-(test != 0) & (b - a)) + a; */
671 else
672 {
673 target = expand_binop (GET_MODE (if_info->x), and_optab,
674 target, GEN_INT (diff), if_info->x, 0,
675 OPTAB_WIDEN);
676 if (target)
677 target = expand_binop (GET_MODE (if_info->x), add_optab,
678 target, GEN_INT (ifalse), if_info->x, 0,
679 OPTAB_WIDEN);
680 }
681
682 if (! target)
683 {
684 end_sequence ();
685 return FALSE;
686 }
687
688 if (target != if_info->x)
689 emit_move_insn (if_info->x, target);
690
691 seq = get_insns ();
692 end_sequence ();
693
694 if (seq_contains_jump (seq))
695 return FALSE;
696
697 emit_insns_before (seq, if_info->cond_earliest);
698
699 return TRUE;
700 }
701
702 return FALSE;
703 }
704
705 /* Convert "if (test) foo++" into "foo += (test != 0)", and
706 similarly for "foo--". */
707
708 static int
709 noce_try_store_flag_inc (if_info)
710 struct noce_if_info *if_info;
711 {
712 rtx target, seq;
713 int subtract, normalize;
714
715 if (! no_new_pseudos
716 && (BRANCH_COST >= 2
717 || HAVE_incscc
718 || HAVE_decscc)
719 /* Should be no `else' case to worry about. */
720 && if_info->b == if_info->x
721 && GET_CODE (if_info->a) == PLUS
722 && (XEXP (if_info->a, 1) == const1_rtx
723 || XEXP (if_info->a, 1) == constm1_rtx)
724 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
725 && can_reverse_comparison_p (if_info->cond, if_info->jump))
726 {
727 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
728 subtract = 0, normalize = 0;
729 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
730 subtract = 1, normalize = 0;
731 else
732 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
733
734 start_sequence ();
735
736 target = noce_emit_store_flag (if_info,
737 gen_reg_rtx (GET_MODE (if_info->x)),
738 1, normalize);
739
740 if (target)
741 target = expand_binop (GET_MODE (if_info->x),
742 subtract ? sub_optab : add_optab,
743 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
744 if (target)
745 {
746 if (target != if_info->x)
747 emit_move_insn (if_info->x, target);
748
749 seq = get_insns ();
750 end_sequence ();
751
752 if (seq_contains_jump (seq))
753 return FALSE;
754
755 emit_insns_before (seq, if_info->cond_earliest);
756
757 return TRUE;
758 }
759
760 end_sequence ();
761 }
762
763 return FALSE;
764 }
765
766 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
767
768 static int
769 noce_try_store_flag_mask (if_info)
770 struct noce_if_info *if_info;
771 {
772 rtx target, seq;
773 int reversep;
774
775 reversep = 0;
776 if (! no_new_pseudos
777 && (BRANCH_COST >= 2
778 || STORE_FLAG_VALUE == -1)
779 && ((if_info->a == const0_rtx
780 && rtx_equal_p (if_info->b, if_info->x))
781 || ((reversep = can_reverse_comparison_p (if_info->cond,
782 if_info->jump))
783 && if_info->b == const0_rtx
784 && rtx_equal_p (if_info->a, if_info->x))))
785 {
786 start_sequence ();
787 target = noce_emit_store_flag (if_info,
788 gen_reg_rtx (GET_MODE (if_info->x)),
789 reversep, -1);
790 if (target)
791 target = expand_binop (GET_MODE (if_info->x), and_optab,
792 if_info->x, target, if_info->x, 0,
793 OPTAB_WIDEN);
794
795 if (target)
796 {
797 if (target != if_info->x)
798 emit_move_insn (if_info->x, target);
799
800 seq = get_insns ();
801 end_sequence ();
802
803 if (seq_contains_jump (seq))
804 return FALSE;
805
806 emit_insns_before (seq, if_info->cond_earliest);
807
808 return TRUE;
809 }
810
811 end_sequence ();
812 }
813
814 return FALSE;
815 }
816
817 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
818
819 static rtx
820 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
821 struct noce_if_info *if_info;
822 rtx x, cmp_a, cmp_b, vfalse, vtrue;
823 enum rtx_code code;
824 {
825 /* If earliest == jump, try to build the cmove insn directly.
826 This is helpful when combine has created some complex condition
827 (like for alpha's cmovlbs) that we can't hope to regenerate
828 through the normal interface. */
829
830 if (if_info->cond_earliest == if_info->jump)
831 {
832 rtx tmp;
833
834 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
835 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
836 tmp = gen_rtx_SET (VOIDmode, x, tmp);
837
838 start_sequence ();
839 tmp = emit_insn (tmp);
840
841 if (recog_memoized (tmp) >= 0)
842 {
843 tmp = get_insns ();
844 end_sequence ();
845 emit_insns (tmp);
846
847 return x;
848 }
849
850 end_sequence ();
851 }
852
853 /* Don't even try if the comparison operands are weird. */
854 if (! general_operand (cmp_a, GET_MODE (cmp_a))
855 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
856 return NULL_RTX;
857
858 #if HAVE_conditional_move
859 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
860 vtrue, vfalse, GET_MODE (x),
861 (code == LTU || code == GEU
862 || code == LEU || code == GTU));
863 #else
864 /* We'll never get here, as noce_process_if_block doesn't call the
865 functions involved. Ifdef code, however, should be discouraged
866 because it leads to typos in the code not selected. However,
867 emit_conditional_move won't exist either. */
868 return NULL_RTX;
869 #endif
870 }
871
872 /* Try only simple constants and registers here. More complex cases
873 are handled in noce_try_cmove_arith after the noce_try_store_flag
874 variants have had a go at it. */
875
876 static int
877 noce_try_cmove (if_info)
878 struct noce_if_info *if_info;
879 {
880 enum rtx_code code;
881 rtx target, seq;
882
883 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
884 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
885 {
886 start_sequence ();
887
888 code = GET_CODE (if_info->cond);
889 target = noce_emit_cmove (if_info, if_info->x, code,
890 XEXP (if_info->cond, 0),
891 XEXP (if_info->cond, 1),
892 if_info->a, if_info->b);
893
894 if (target)
895 {
896 if (target != if_info->x)
897 emit_move_insn (if_info->x, target);
898
899 seq = get_insns ();
900 end_sequence ();
901 emit_insns_before (seq, if_info->cond_earliest);
902 return TRUE;
903 }
904 else
905 {
906 end_sequence ();
907 return FALSE;
908 }
909 }
910
911 return FALSE;
912 }
913
914 /* Try more complex cases involving conditional_move. */
915
916 static int
917 noce_try_cmove_arith (if_info)
918 struct noce_if_info *if_info;
919 {
920 rtx a = if_info->a;
921 rtx b = if_info->b;
922 rtx x = if_info->x;
923 rtx insn_a, insn_b;
924 rtx tmp, target;
925 int is_mem = 0;
926 enum rtx_code code;
927
928 /* A conditional move from two memory sources is equivalent to a
929 conditional on their addresses followed by a load. Don't do this
930 early because it'll screw alias analysis. Note that we've
931 already checked for no side effects. */
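/* E.g. for "if (test) x = *p; else x = *q", the addresses p and q are
   selected with a conditional move into a scratch pseudo, and a single
   load through that pseudo replaces the two memory reads.  */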
932 if (! no_new_pseudos && cse_not_expected
933 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
934 && BRANCH_COST >= 5)
935 {
936 a = XEXP (a, 0);
937 b = XEXP (b, 0);
938 x = gen_reg_rtx (Pmode);
939 is_mem = 1;
940 }
941
942 /* ??? We could handle this if we knew that a load from A or B could
943 not fault. This is also true if we've already loaded
944 from the address along the path from ENTRY. */
945 else if (may_trap_p (a) || may_trap_p (b))
946 return FALSE;
947
948 /* if (test) x = a + b; else x = c - d;
949 => y = a + b;
950 x = c - d;
951 if (test)
952 x = y;
953 */
954
955 code = GET_CODE (if_info->cond);
956 insn_a = if_info->insn_a;
957 insn_b = if_info->insn_b;
958
959 /* Possibly rearrange operands to make things come out more natural. */
960 if (can_reverse_comparison_p (if_info->cond, if_info->jump))
961 {
962 int reversep = 0;
963 if (rtx_equal_p (b, x))
964 reversep = 1;
965 else if (general_operand (b, GET_MODE (b)))
966 reversep = 1;
967
968 if (reversep)
969 {
970 code = reverse_condition (code);
971 tmp = a, a = b, b = tmp;
972 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
973 }
974 }
975
976 start_sequence ();
977
978 /* If either operand is complex, load it into a register first.
979 The best way to do this is to copy the original insn. In this
980 way we preserve any clobbers etc that the insn may have had.
981 This is of course not possible in the IS_MEM case. */
982 if (! general_operand (a, GET_MODE (a)))
983 {
984 rtx set;
985
986 if (no_new_pseudos)
987 goto end_seq_and_fail;
988
989 if (is_mem)
990 {
991 tmp = gen_reg_rtx (GET_MODE (a));
992 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
993 }
994 else if (! insn_a)
995 goto end_seq_and_fail;
996 else
997 {
998 a = gen_reg_rtx (GET_MODE (a));
999 tmp = copy_rtx (insn_a);
1000 set = single_set (tmp);
1001 SET_DEST (set) = a;
1002 tmp = emit_insn (PATTERN (tmp));
1003 }
1004 if (recog_memoized (tmp) < 0)
1005 goto end_seq_and_fail;
1006 }
1007 if (! general_operand (b, GET_MODE (b)))
1008 {
1009 rtx set;
1010
1011 if (no_new_pseudos)
1012 goto end_seq_and_fail;
1013
1014 if (is_mem)
1015 {
1016 tmp = gen_reg_rtx (GET_MODE (b));
1017 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1018 }
1019 else if (! insn_b)
1020 goto end_seq_and_fail;
1021 else
1022 {
1023 b = gen_reg_rtx (GET_MODE (b));
1024 tmp = copy_rtx (insn_b);
1025 set = single_set (tmp);
1026 SET_DEST (set) = b;
1027 tmp = emit_insn (PATTERN (tmp));
1028 }
1029 if (recog_memoized (tmp) < 0)
1030 goto end_seq_and_fail;
1031 }
1032
1033 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1034 XEXP (if_info->cond, 1), a, b);
1035
1036 if (! target)
1037 goto end_seq_and_fail;
1038
1039 /* If we're handling the memory case from above, emit the load now. */
1040 if (is_mem)
1041 {
1042 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1043
1044 /* Copy over flags as appropriate. */
1045 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1046 MEM_VOLATILE_P (tmp) = 1;
1047 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1048 MEM_IN_STRUCT_P (tmp) = 1;
1049 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1050 MEM_SCALAR_P (tmp) = 1;
1051 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1052 MEM_ALIAS_SET (tmp) = MEM_ALIAS_SET (if_info->a);
1053
1054 emit_move_insn (if_info->x, tmp);
1055 }
1056 else if (target != x)
1057 emit_move_insn (x, target);
1058
1059 tmp = get_insns ();
1060 end_sequence ();
1061 emit_insns_before (tmp, if_info->cond_earliest);
1062 return TRUE;
1063
1064 end_seq_and_fail:
1065 end_sequence ();
1066 return FALSE;
1067 }
1068
1069 /* Look for the condition for the jump first. We'd prefer to avoid
1070 get_condition if we can -- it tries to look back for the contents
1071 of an original compare. On targets that use normal integers for
1072 comparisons, e.g. alpha, this is wasteful. */
1073
1074 static rtx
1075 noce_get_condition (jump, earliest)
1076 rtx jump;
1077 rtx *earliest;
1078 {
1079 rtx cond;
1080 rtx set;
1081
1082 /* If the condition variable is a register and is MODE_INT, accept it.
1083 Otherwise, fall back on get_condition. */
1084
1085 if (! any_condjump_p (jump))
1086 return NULL_RTX;
1087
1088 set = pc_set (jump);
1089
1090 cond = XEXP (SET_SRC (set), 0);
1091 if (GET_CODE (XEXP (cond, 0)) == REG
1092 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1093 {
1094 *earliest = jump;
1095
1096 /* If this branches to JUMP_LABEL when the condition is false,
1097 reverse the condition. */
1098 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1099 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1100 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1101 GET_MODE (cond), XEXP (cond, 0),
1102 XEXP (cond, 1));
1103 }
1104 else
1105 cond = get_condition (jump, earliest);
1106
1107 return cond;
1108 }
1109
1110 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1111 without using conditional execution. Return TRUE if we were
1112 successful at converting the block. */
1113
1114 static int
1115 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1116 basic_block test_bb; /* Basic block test is in */
1117 basic_block then_bb; /* Basic block for THEN block */
1118 basic_block else_bb; /* Basic block for ELSE block */
1119 basic_block join_bb; /* Basic block the join label is in */
1120 {
1121 /* We're looking for patterns of the form
1122
1123 (1) if (...) x = a; else x = b;
1124 (2) x = b; if (...) x = a;
1125 (3) if (...) x = a; // as if with an initial x = x.
1126
1127 The latter patterns require jumps to be more expensive.
1128
1129 ??? For future expansion, look for multiple X in such patterns. */
1130
1131 struct noce_if_info if_info;
1132 rtx insn_a, insn_b;
1133 rtx set_a, set_b;
1134 rtx orig_x, x, a, b;
1135 rtx jump, cond, insn;
1136
1137 /* If this is not a standard conditional jump, we can't parse it. */
1138 jump = test_bb->end;
1139 cond = noce_get_condition (jump, &if_info.cond_earliest);
1140 if (! cond)
1141 return FALSE;
1142
1143 /* If the conditional jump is more than just a conditional jump,
1144 then we can not do if-conversion on this block. */
1145 if (! onlyjump_p (jump))
1146 return FALSE;
1147
1148 /* We must be comparing objects whose modes imply the size. */
1149 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1150 return FALSE;
1151
1152 /* Look for one of the potential sets. */
1153 insn_a = first_active_insn (then_bb);
1154 if (! insn_a
1155 || ! last_active_insn_p (then_bb, insn_a)
1156 || (set_a = single_set (insn_a)) == NULL_RTX)
1157 return FALSE;
1158
1159 x = SET_DEST (set_a);
1160 a = SET_SRC (set_a);
1161
1162 /* Look for the other potential set. Make sure we've got equivalent
1163 destinations. */
1164 /* ??? This is overconservative. Storing to two different mems is
1165 as easy as conditionally computing the address. Storing to a
1166 single mem merely requires a scratch memory to use as one of the
1167 destination addresses; often the memory immediately below the
1168 stack pointer is available for this. */
1169 set_b = NULL_RTX;
1170 if (else_bb)
1171 {
1172 insn_b = first_active_insn (else_bb);
1173 if (! insn_b
1174 || ! last_active_insn_p (else_bb, insn_b)
1175 || (set_b = single_set (insn_b)) == NULL_RTX
1176 || ! rtx_equal_p (x, SET_DEST (set_b)))
1177 return FALSE;
1178 }
1179 else
1180 {
1181 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1182 if (! insn_b
1183 || GET_CODE (insn_b) != INSN
1184 || (set_b = single_set (insn_b)) == NULL_RTX
1185 || ! rtx_equal_p (x, SET_DEST (set_b))
1186 || reg_mentioned_p (x, cond)
1187 || reg_mentioned_p (x, a)
1188 || reg_mentioned_p (x, SET_SRC (set_b)))
1189 insn_b = set_b = NULL_RTX;
1190 }
1191 b = (set_b ? SET_SRC (set_b) : x);
1192
1193 /* X may not be mentioned in the range (cond_earliest, jump]. */
1194 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1195 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1196 return FALSE;
1197
1198 /* A and B may not be modified in the range [cond_earliest, jump). */
1199 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1200 if (INSN_P (insn)
1201 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1202 return FALSE;
1203
1204 /* Only operate on register destinations, and even then avoid extending
1205 the lifetime of hard registers on small register class machines. */
1206 orig_x = x;
1207 if (GET_CODE (x) != REG
1208 || (SMALL_REGISTER_CLASSES
1209 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1210 {
1211 if (no_new_pseudos)
1212 return FALSE;
1213 x = gen_reg_rtx (GET_MODE (x));
1214 }
1215
1216 /* Don't operate on sources that may trap or are volatile. */
1217 if (side_effects_p (a) || side_effects_p (b)
1218 || (GET_CODE (a) != MEM && may_trap_p (a))
1219 || (GET_CODE (b) != MEM && may_trap_p (b)))
1220 return FALSE;
1221
1222 /* Set up the info block for our subroutines. */
1223 if_info.cond = cond;
1224 if_info.jump = jump;
1225 if_info.insn_a = insn_a;
1226 if_info.insn_b = insn_b;
1227 if_info.x = x;
1228 if_info.a = a;
1229 if_info.b = b;
1230
1231 /* Try optimizations in some approximation of a useful order. */
1232 /* ??? Should first look to see if X is live incoming at all. If it
1233 isn't, we don't need anything but an unconditional set. */
1234
1235 /* Look and see if A and B are really the same. Avoid creating silly
1236 cmove constructs that no one will fix up later. */
1237 if (rtx_equal_p (a, b))
1238 {
1239 /* If we have an INSN_B, we don't have to create any new rtl. Just
1240 move the instruction that we already have. If we don't have an
1241 INSN_B, that means that A == X, and we've got a noop move. In
1242 that case don't do anything and let the code below delete INSN_A. */
1243 if (insn_b && else_bb)
1244 {
1245 if (else_bb && insn_b == else_bb->end)
1246 else_bb->end = PREV_INSN (insn_b);
1247 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1248 insn_b = NULL_RTX;
1249 }
1250 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1251 x must be executed twice. */
1252 else if (insn_b && side_effects_p (orig_x))
1253 return FALSE;
1254
1255 x = orig_x;
1256 goto success;
1257 }
1258
1259 if (noce_try_store_flag (&if_info))
1260 goto success;
1261 if (HAVE_conditional_move
1262 && noce_try_cmove (&if_info))
1263 goto success;
1264 if (! HAVE_conditional_execution)
1265 {
1266 if (noce_try_store_flag_constants (&if_info))
1267 goto success;
1268 if (noce_try_store_flag_inc (&if_info))
1269 goto success;
1270 if (noce_try_store_flag_mask (&if_info))
1271 goto success;
1272 if (HAVE_conditional_move
1273 && noce_try_cmove_arith (&if_info))
1274 goto success;
1275 }
1276
1277 return FALSE;
1278
1279 success:
1280 /* The original sets may now be killed. */
1281 if (insn_a == then_bb->end)
1282 then_bb->end = PREV_INSN (insn_a);
1283 flow_delete_insn (insn_a);
1284
1285 /* Several special cases here: First, we may have reused insn_b above,
1286 in which case insn_b is now NULL. Second, we want to delete insn_b
1287 if it came from the ELSE block, because it follows the now correct
1288 write that appears in the TEST block. However, if we got insn_b from
1289 the TEST block, it may in fact be loading data needed for the comparison.
1290 We'll let life_analysis remove the insn if it's really dead. */
1291 if (insn_b && else_bb)
1292 {
1293 if (insn_b == else_bb->end)
1294 else_bb->end = PREV_INSN (insn_b);
1295 flow_delete_insn (insn_b);
1296 }
1297
1298 /* The new insns will have been inserted before cond_earliest. We should
1299 be able to remove the jump with impunity, but the condition itself may
1300 have been modified by gcse to be shared across basic blocks. */
1301 test_bb->end = PREV_INSN (jump);
1302 flow_delete_insn (jump);
1303
1304 /* If we used a temporary, fix it up now. */
1305 if (orig_x != x)
1306 {
1307 start_sequence ();
1308 emit_move_insn (orig_x, x);
1309 insn_b = gen_sequence ();
1310 end_sequence ();
1311
1312 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1313 }
1314
1315 /* Merge the blocks! */
1316 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1317
1318 return TRUE;
1319 }
1320 \f
1321 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1322 straight line code. Return true if successful. */
1323
1324 static int
1325 process_if_block (test_bb, then_bb, else_bb, join_bb)
1326 basic_block test_bb; /* Basic block test is in */
1327 basic_block then_bb; /* Basic block for THEN block */
1328 basic_block else_bb; /* Basic block for ELSE block */
1329 basic_block join_bb; /* Basic block the join label is in */
1330 {
1331 if (! reload_completed
1332 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1333 return TRUE;
1334
1335 if (HAVE_conditional_execution
1336 && reload_completed
1337 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1338 return TRUE;
1339
1340 return FALSE;
1341 }
1342
1343 /* Merge the blocks and mark for local life update. */
1344
1345 static void
1346 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1347 basic_block test_bb; /* Basic block test is in */
1348 basic_block then_bb; /* Basic block for THEN block */
1349 basic_block else_bb; /* Basic block for ELSE block */
1350 basic_block join_bb; /* Basic block the join label is in */
1351 {
1352 basic_block combo_bb;
1353
1354 /* All block merging is done into the lower block numbers. */
1355
1356 combo_bb = test_bb;
1357
1358 /* First merge TEST block into THEN block. This is a no-brainer since
1359 the THEN block did not have a code label to begin with. */
1360
1361 if (combo_bb->global_live_at_end)
1362 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1363 merge_blocks_nomove (combo_bb, then_bb);
1364 num_removed_blocks++;
1365
1366 /* The ELSE block, if it existed, had a label. That label count
1367 will almost always be zero, but odd things can happen when labels
1368 get their addresses taken. */
1369 if (else_bb)
1370 {
1371 merge_blocks_nomove (combo_bb, else_bb);
1372 num_removed_blocks++;
1373 }
1374
1375 /* If there was no join block reported, that means it was not adjacent
1376 to the others, and so we cannot merge them. */
1377
1378 if (! join_bb)
1379 {
1380 /* The outgoing edge for the current COMBO block should already
1381 be correct. Verify this. */
1382 if (combo_bb->succ == NULL_EDGE)
1383 abort ();
1384
1385 /* There should still be a branch at the end of the THEN or ELSE
1386 blocks taking us to our final destination. */
1387 if (! simplejump_p (combo_bb->end)
1388 && ! returnjump_p (combo_bb->end))
1389 abort ();
1390 }
1391
1392 /* The JOIN block may have had quite a number of other predecessors too.
1393 Since we've already merged the TEST, THEN and ELSE blocks, we should
1394 have only one remaining edge from our if-then-else diamond. If there
1395 is more than one remaining edge, it must come from elsewhere. There
1396 may be zero incoming edges if the THEN block didn't actually join
1397 back up (as with a call to abort). */
1398 else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)
1399 {
1400 /* We can merge the JOIN. */
1401 if (combo_bb->global_live_at_end)
1402 COPY_REG_SET (combo_bb->global_live_at_end,
1403 join_bb->global_live_at_end);
1404 merge_blocks_nomove (combo_bb, join_bb);
1405 num_removed_blocks++;
1406 }
1407 else
1408 {
1409 /* We cannot merge the JOIN. */
1410
1411 /* The outgoing edge for the current COMBO block should already
1412 be correct. Verify this. */
1413 if (combo_bb->succ->succ_next != NULL_EDGE
1414 || combo_bb->succ->dest != join_bb)
1415 abort ();
1416
1417 /* Remove the jump and cruft from the end of the COMBO block. */
1418 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1419 }
1420
1421 /* Make sure we update life info properly. */
1422 SET_UPDATE_LIFE (combo_bb);
1423
1424 num_updated_if_blocks++;
1425 }
1426 \f
1427 /* Find a block ending in a simple IF condition. Return TRUE if
1428 we were able to transform it in some way. */
1429
1430 static int
1431 find_if_header (test_bb)
1432 basic_block test_bb;
1433 {
1434 edge then_edge;
1435 edge else_edge;
1436
1437 /* The kind of block we're looking for has exactly two successors. */
1438 if ((then_edge = test_bb->succ) == NULL_EDGE
1439 || (else_edge = then_edge->succ_next) == NULL_EDGE
1440 || else_edge->succ_next != NULL_EDGE)
1441 return FALSE;
1442
1443 /* Neither edge should be abnormal. */
1444 if ((then_edge->flags & EDGE_COMPLEX)
1445 || (else_edge->flags & EDGE_COMPLEX))
1446 return FALSE;
1447
1448 /* The THEN edge is canonically the one that falls through. */
1449 if (then_edge->flags & EDGE_FALLTHRU)
1450 ;
1451 else if (else_edge->flags & EDGE_FALLTHRU)
1452 {
1453 edge e = else_edge;
1454 else_edge = then_edge;
1455 then_edge = e;
1456 }
1457 else
1458 /* Otherwise this must be a multiway branch of some sort. */
1459 return FALSE;
1460
1461 if (find_if_block (test_bb, then_edge, else_edge))
1462 goto success;
1463 if (post_dominators
1464 && (! HAVE_conditional_execution || reload_completed))
1465 {
1466 if (find_if_case_1 (test_bb, then_edge, else_edge))
1467 goto success;
1468 if (find_if_case_2 (test_bb, then_edge, else_edge))
1469 goto success;
1470 }
1471
1472 return FALSE;
1473
1474 success:
1475 if (rtl_dump_file)
1476 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1477 return TRUE;
1478 }
1479
1480 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1481 block. If so, we'll try to convert the insns to not require the branch.
1482 Return TRUE if we were successful at converting the block. */
1483
1484 static int
1485 find_if_block (test_bb, then_edge, else_edge)
1486 basic_block test_bb;
1487 edge then_edge, else_edge;
1488 {
1489 basic_block then_bb = then_edge->dest;
1490 basic_block else_bb = else_edge->dest;
1491 basic_block join_bb = NULL_BLOCK;
1492 edge then_succ = then_bb->succ;
1493 edge else_succ = else_bb->succ;
1494 int next_index;
1495
1496 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1497 if (then_bb->pred->pred_next != NULL_EDGE)
1498 return FALSE;
1499
1500 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1501 if (then_succ != NULL_EDGE
1502 && (then_succ->succ_next != NULL_EDGE
1503 || (then_succ->flags & EDGE_COMPLEX)))
1504 return FALSE;
1505
1506 /* If the THEN block has no successors, conditional execution can still
1507 make a conditional call. Don't do this unless the ELSE block has
1508 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1509 Check for the last insn of the THEN block being an indirect jump, which
1510 is listed as not having any successors, but confuses the rest of the CE
1511 code processing. XXX we should fix this in the future. */
1512 if (then_succ == NULL)
1513 {
1514 if (else_bb->pred->pred_next == NULL_EDGE)
1515 {
1516 rtx last_insn = then_bb->end;
1517
1518 while (last_insn
1519 && GET_CODE (last_insn) == NOTE
1520 && last_insn != then_bb->head)
1521 last_insn = PREV_INSN (last_insn);
1522
1523 if (last_insn
1524 && GET_CODE (last_insn) == JUMP_INSN
1525 && ! simplejump_p (last_insn))
1526 return FALSE;
1527
1528 join_bb = else_bb;
1529 else_bb = NULL_BLOCK;
1530 }
1531 else
1532 return FALSE;
1533 }
1534
1535 /* If the THEN block's successor is the other edge out of the TEST block,
1536 then we have an IF-THEN combo without an ELSE. */
1537 else if (then_succ->dest == else_bb)
1538 {
1539 join_bb = else_bb;
1540 else_bb = NULL_BLOCK;
1541 }
1542
1543 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
1544 has exactly one predecessor and one successor, and the outgoing edge
1545 is not complex, then we have an IF-THEN-ELSE combo. */
1546 else if (else_succ != NULL_EDGE
1547 && then_succ->dest == else_succ->dest
1548 && else_bb->pred->pred_next == NULL_EDGE
1549 && else_succ->succ_next == NULL_EDGE
1550 && ! (else_succ->flags & EDGE_COMPLEX))
1551 join_bb = else_succ->dest;
1552
1553 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
1554 else
1555 return FALSE;
1556
1557 num_possible_if_blocks++;
1558
1559 if (rtl_dump_file)
1560 {
1561 if (else_bb)
1562 fprintf (rtl_dump_file,
1563 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
1564 test_bb->index, then_bb->index, else_bb->index,
1565 join_bb->index);
1566 else
1567 fprintf (rtl_dump_file,
1568 "\nIF-THEN block found, start %d, then %d, join %d\n",
1569 test_bb->index, then_bb->index, join_bb->index);
1570 }
1571
1572 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
1573 get the first condition for free, since we've already asserted that
1574 there's a fallthru edge from IF to THEN. */
1575 /* ??? As an enhancement, move the ELSE block. Have to deal with EH and
1576 BLOCK notes, if by no other means than aborting the merge if they
1577 exist. Sticky enough I don't want to think about it now. */
1578 next_index = then_bb->index;
1579 if (else_bb && ++next_index != else_bb->index)
1580 return FALSE;
1581 if (++next_index != join_bb->index)
1582 {
1583 if (else_bb)
1584 join_bb = NULL;
1585 else
1586 return FALSE;
1587 }
1588
1589 /* Do the real work. */
1590 return process_if_block (test_bb, then_bb, else_bb, join_bb);
1591 }
1592
1593 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
1594 transformable, but not necessarily the other. There need be no
1595 JOIN block.
1596
1597 Return TRUE if we were successful at converting the block.
1598
1599 Cases we'd like to look at:
1600
1601 (1)
1602 if (test) goto over; // x not live
1603 x = a;
1604 goto label;
1605 over:
1606
1607 becomes
1608
1609 x = a;
1610 if (! test) goto label;
1611
1612 (2)
1613 if (test) goto E; // x not live
1614 x = big();
1615 goto L;
1616 E:
1617 x = b;
1618 goto M;
1619
1620 becomes
1621
1622 x = b;
1623 if (test) goto M;
1624 x = big();
1625 goto L;
1626
1627 (3) // This one's really only interesting for targets that can do
1628 // multiway branching, e.g. IA-64 BBB bundles. For other targets
1629 // it results in multiple branches on a cache line, which often
1630 // does not sit well with predictors.
1631
1632 if (test1) goto E; // predicted not taken
1633 x = a;
1634 if (test2) goto F;
1635 ...
1636 E:
1637 x = b;
1638 J:
1639
1640 becomes
1641
1642 x = a;
1643 if (test1) goto E;
1644 if (test2) goto F;
1645
1646 Notes:
1647
1648 (A) Don't do (2) if the branch is predicted against the block we're
1649 eliminating. Do it anyway if we can eliminate a branch; this requires
1650 that the sole successor of the eliminated block postdominate the other
1651 side of the if.
1652
1653 (B) With CE, on (3) we can steal from both sides of the if, creating
1654
1655 if (test1) x = a;
1656 if (!test1) x = b;
1657 if (test1) goto J;
1658 if (test2) goto F;
1659 ...
1660 J:
1661
1662 Again, this is most useful if J postdominates.
1663
1664 (C) CE substitutes for helpful life information.
1665
1666 (D) These heuristics need a lot of work. */
1667
1668 /* Tests for case 1 above. */
1669
1670 static int
1671 find_if_case_1 (test_bb, then_edge, else_edge)
1672 basic_block test_bb;
1673 edge then_edge, else_edge;
1674 {
1675 basic_block then_bb = then_edge->dest;
1676 basic_block else_bb = else_edge->dest;
1677 edge then_succ = then_bb->succ;
1678 rtx new_lab;
1679
1680 /* THEN has one successor. */
1681 if (!then_succ || then_succ->succ_next != NULL)
1682 return FALSE;
1683
1684 /* THEN does not fall through, but is not strange either. */
1685 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
1686 return FALSE;
1687
1688 /* THEN has one predecessor. */
1689 if (then_bb->pred->pred_next != NULL)
1690 return FALSE;
1691
1692 /* ELSE follows THEN. (??? could be moved) */
1693 if (else_bb->index != then_bb->index + 1)
1694 return FALSE;
1695
1696 num_possible_if_blocks++;
1697 if (rtl_dump_file)
1698 fprintf (rtl_dump_file,
1699 "\nIF-CASE-1 found, start %d, then %d\n",
1700 test_bb->index, then_bb->index);
1701
1702 /* THEN is small. */
1703 if (count_bb_insns (then_bb) > BRANCH_COST)
1704 return FALSE;
1705
1706 /* Find the label for THEN's destination. */
1707 if (then_succ->dest == EXIT_BLOCK_PTR)
1708 new_lab = NULL_RTX;
1709 else
1710 {
1711 new_lab = JUMP_LABEL (then_bb->end);
1712 if (! new_lab)
1713 abort ();
1714 }
1715
1716 /* Registers set are dead, or are predicable. */
1717 if (! dead_or_predicable (test_bb, then_bb, else_bb, new_lab, 1))
1718 return FALSE;
1719
1720 /* Conversion went ok, including moving the insns and fixing up the
1721 jump. Adjust the CFG to match. */
1722
1723 SET_UPDATE_LIFE (test_bb);
1724 bitmap_operation (test_bb->global_live_at_end,
1725 else_bb->global_live_at_start,
1726 then_bb->global_live_at_end, BITMAP_IOR);
1727
1728 make_edge (NULL, test_bb, then_succ->dest, 0);
1729 flow_delete_block (then_bb);
1730 tidy_fallthru_edge (else_edge, test_bb, else_bb);
1731
1732 num_removed_blocks++;
1733 num_updated_if_blocks++;
1734
1735 return TRUE;
1736 }
1737
1738 /* Test for case 2 above. */
1739
1740 static int
1741 find_if_case_2 (test_bb, then_edge, else_edge)
1742 basic_block test_bb;
1743 edge then_edge, else_edge;
1744 {
1745 basic_block then_bb = then_edge->dest;
1746 basic_block else_bb = else_edge->dest;
1747 edge else_succ = else_bb->succ;
1748 rtx new_lab, note;
1749
1750 /* ELSE has one successor. */
1751 if (!else_succ || else_succ->succ_next != NULL)
1752 return FALSE;
1753
1754 /* ELSE outgoing edge is not complex. */
1755 if (else_succ->flags & EDGE_COMPLEX)
1756 return FALSE;
1757
1758 /* ELSE has one predecessor. */
1759 if (else_bb->pred->pred_next != NULL)
1760 return FALSE;
1761
1762 /* THEN is not EXIT. */
1763 if (then_bb->index < 0)
1764 return FALSE;
1765
1766 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
1767 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
1768 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
1769 ;
1770 else if (else_succ->dest->index < 0
1771 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
1772 ORIG_INDEX (else_succ->dest)))
1773 ;
1774 else
1775 return FALSE;
1776
1777 num_possible_if_blocks++;
1778 if (rtl_dump_file)
1779 fprintf (rtl_dump_file,
1780 "\nIF-CASE-2 found, start %d, else %d\n",
1781 test_bb->index, else_bb->index);
1782
1783 /* ELSE is small. */
1784 if (count_bb_insns (else_bb) > BRANCH_COST)
1785 return FALSE;
1786
1787 /* Find the label for ELSE's destination. */
1788 if (else_succ->dest == EXIT_BLOCK_PTR)
1789 new_lab = NULL_RTX;
1790 else
1791 {
1792 if (else_succ->flags & EDGE_FALLTHRU)
1793 {
1794 new_lab = else_succ->dest->head;
1795 if (GET_CODE (new_lab) != CODE_LABEL)
1796 abort ();
1797 }
1798 else
1799 {
1800 new_lab = JUMP_LABEL (else_bb->end);
1801 if (! new_lab)
1802 abort ();
1803 }
1804 }
1805
1806 /* Registers set are dead, or are predicable. */
1807 if (! dead_or_predicable (test_bb, else_bb, then_bb, new_lab, 0))
1808 return FALSE;
1809
1810 /* Conversion went ok, including moving the insns and fixing up the
1811 jump. Adjust the CFG to match. */
1812
1813 SET_UPDATE_LIFE (test_bb);
1814 bitmap_operation (test_bb->global_live_at_end,
1815 then_bb->global_live_at_start,
1816 else_bb->global_live_at_end, BITMAP_IOR);
1817
1818 remove_edge (else_edge);
1819 make_edge (NULL, test_bb, else_succ->dest, 0);
1820 flow_delete_block (else_bb);
1821
1822 num_removed_blocks++;
1823 num_updated_if_blocks++;
1824
1825 /* ??? We may now fallthru from one of THEN's successors into a join
1826 block. Rerun cleanup_cfg? Examine things manually? Wait? */
1827
1828 return TRUE;
1829 }
1830
1831 /* A subroutine of dead_or_predicable called through for_each_rtx.
1832 Return 1 if a memory is found. */
1833
1834 static int
1835 find_memory (px, data)
1836 rtx *px;
1837 void *data ATTRIBUTE_UNUSED;
1838 {
1839 return GET_CODE (*px) == MEM;
1840 }
1841
1842 /* Used by the code above to perform the actual rtl transformations.
1843 Return TRUE if successful.
1844
1845 TEST_BB is the block containing the conditional branch. MERGE_BB
1846 is the block containing the code to manipulate. NEW_DEST is the
1847 label TEST_BB should be branching to after the conversion.
1848 REVERSEP is true if the sense of the branch should be reversed. */
1849
1850 static int
1851 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
1852 basic_block test_bb, merge_bb, other_bb;
1853 rtx new_dest;
1854 int reversep;
1855 {
1856 rtx head, end, jump, earliest, old_dest;
1857
1858 jump = test_bb->end;
1859
1860 /* Find the extent of the real code in the merge block. */
1861 head = merge_bb->head;
1862 end = merge_bb->end;
1863
1864 if (GET_CODE (head) == CODE_LABEL)
1865 head = NEXT_INSN (head);
1866 if (GET_CODE (head) == NOTE)
1867 {
1868 if (head == end)
1869 {
1870 head = end = NULL_RTX;
1871 goto no_body;
1872 }
1873 head = NEXT_INSN (head);
1874 }
1875
1876 if (GET_CODE (end) == JUMP_INSN)
1877 {
1878 if (head == end)
1879 {
1880 head = end = NULL_RTX;
1881 goto no_body;
1882 }
1883 end = PREV_INSN (end);
1884 }
1885
1886 /* Disable handling dead code by conditional execution if the machine needs
1887 to do anything funny with the tests, etc. */
1888 #ifndef IFCVT_MODIFY_TESTS
1889 if (HAVE_conditional_execution)
1890 {
1891 /* In the conditional execution case, we have things easy. We know
1892 the condition is reversible. We don't have to check life info,
1893 because we're going to conditionally execute the code anyway.
1894 All that's left is making sure the insns involved can actually
1895 be predicated. */
1896
1897 rtx cond, prob_val;
1898
1899 cond = cond_exec_get_condition (jump);
1900
1901 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
1902 if (prob_val)
1903 prob_val = XEXP (prob_val, 0);
1904
1905 if (reversep)
1906 {
1907 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1908 GET_MODE (cond), XEXP (cond, 0),
1909 XEXP (cond, 1));
1910 if (prob_val)
1911 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
1912 }
1913
1914 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
1915 goto cancel;
1916
1917 earliest = jump;
1918 }
1919 else
1920 #endif
1921 {
1922 /* In the non-conditional execution case, we have to verify that there
1923 are no trapping operations, no calls, no references to memory, and
1924 that any registers modified are dead at the branch site. */
1925
1926 rtx insn, cond, prev;
1927 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
1928 regset merge_set, tmp, test_live, test_set;
1929 struct propagate_block_info *pbi;
1930 int i, fail = 0;
1931
1932 /* Check for no calls or trapping operations. */
1933 for (insn = head; ; insn = NEXT_INSN (insn))
1934 {
1935 if (GET_CODE (insn) == CALL_INSN)
1936 return FALSE;
1937 if (INSN_P (insn))
1938 {
1939 if (may_trap_p (PATTERN (insn)))
1940 return FALSE;
1941
1942 /* ??? Even non-trapping memories such as stack frame
1943 references must be avoided. For stores, we collect
1944 no lifetime info; for reads, we'd have to assert
1945 true_dependence false against every store in the
1946 TEST range. */
1947 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
1948 return FALSE;
1949 }
1950 if (insn == end)
1951 break;
1952 }
1953
1954 if (! any_condjump_p (jump))
1955 return FALSE;
1956
1957 /* Find the extent of the conditional. */
1958 cond = noce_get_condition (jump, &earliest);
1959 if (! cond)
1960 return FALSE;
1961
1962 /* Collect:
1963 MERGE_SET = set of registers set in MERGE_BB
1964 TEST_LIVE = set of registers live at EARLIEST
1965 TEST_SET = set of registers set between EARLIEST and the
1966 end of the block. */
1967
1968 tmp = INITIALIZE_REG_SET (tmp_head);
1969 merge_set = INITIALIZE_REG_SET (merge_set_head);
1970 test_live = INITIALIZE_REG_SET (test_live_head);
1971 test_set = INITIALIZE_REG_SET (test_set_head);
1972
1973 /* ??? bb->local_set is only valid during calculate_global_regs_live,
1974 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
1975 since we've already asserted that MERGE_BB is small. */
1976 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
1977
1978 /* For small register class machines, don't lengthen lifetimes of
1979 hard registers before reload. */
1980 if (SMALL_REGISTER_CLASSES && ! reload_completed)
1981 {
1982 EXECUTE_IF_SET_IN_BITMAP
1983 (merge_set, 0, i,
1984 {
1985 if (i < FIRST_PSEUDO_REGISTER
1986 && ! fixed_regs[i]
1987 && ! global_regs[i])
1988 fail = 1;
1989 });
1990 }
1991
1992 /* For TEST, we're interested in a range of insns, not a whole block.
1993 Moreover, we're interested in the registers live from OTHER_BB.
1994
1995 COPY_REG_SET (test_live, other_bb->global_live_at_start);
1996 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
1997 0);
1998
1999 for (insn = jump; ; insn = prev)
2000 {
2001 prev = propagate_one_insn (pbi, insn);
2002 if (insn == earliest)
2003 break;
2004 }
2005
2006 free_propagate_block_info (pbi);
2007
2008 /* We can perform the transformation if
2009 MERGE_SET & (TEST_SET | TEST_LIVE)
2010 and
2011 TEST_SET & merge_bb->global_live_at_start
2012 are empty. */
2013
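/* Roughly: the first test ensures that nothing MERGE_BB sets is either
   overwritten by the test sequence or still needed on the path through
   OTHER_BB; the second ensures the test sequence sets nothing that
   MERGE_BB's code reads, since after the move that code runs before
   the test sequence.  */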
2014 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2015 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2016 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, fail = 1);
2017
2018 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2019 BITMAP_AND);
2020 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, fail = 1);
2021
2022 FREE_REG_SET (tmp);
2023 FREE_REG_SET (merge_set);
2024 FREE_REG_SET (test_live);
2025 FREE_REG_SET (test_set);
2026
2027 if (fail)
2028 return FALSE;
2029 }
2030
2031 no_body:
2032 /* We don't want to use normal invert_jump or redirect_jump because
2033 we don't want delete_insn called. Also, we want to do our own
2034 change group management. */
2035
2036 old_dest = JUMP_LABEL (jump);
2037 if (reversep
2038 ? ! invert_jump_1 (jump, new_dest)
2039 : ! redirect_jump_1 (jump, new_dest))
2040 goto cancel;
2041
2042 if (! apply_change_group ())
2043 return FALSE;
2044
2045 if (old_dest)
2046 LABEL_NUSES (old_dest) -= 1;
2047 if (new_dest)
2048 LABEL_NUSES (new_dest) += 1;
2049 JUMP_LABEL (jump) = new_dest;
2050
2051 if (reversep)
2052 {
2053 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2054 if (note)
2055 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
2056 }
2057
2058 /* Move the insns out of MERGE_BB to before the branch. */
2059 if (head != NULL)
2060 {
2061 if (end == merge_bb->end)
2062 merge_bb->end = PREV_INSN (head);
2063
2064 head = squeeze_notes (head, end);
2065 if (GET_CODE (end) == NOTE
2066 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
2067 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
2068 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
2069 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
2070 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
2071 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
2072 {
2073 if (head == end)
2074 return TRUE;
2075 end = PREV_INSN (end);
2076 }
2077
2078 reorder_insns (head, end, PREV_INSN (earliest));
2079 }
2080 return TRUE;
2081
2082 cancel:
2083 cancel_changes (0);
2084 return FALSE;
2085 }
2086 \f
2087 /* Main entry point for all if-conversion. */
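/* In outline: compute postdominators when they may be needed, record
   the original block numbering, scan every block with find_if_header
   looking for convertible IF constructs, then rebuild life information
   for the blocks that changed.  */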
2088
2089 void
2090 if_convert (life_data_ok)
2091 int life_data_ok;
2092 {
2093 int block_num;
2094
2095 num_possible_if_blocks = 0;
2096 num_updated_if_blocks = 0;
2097 num_removed_blocks = 0;
2098
2099 /* Free up basic_block_for_insn so that we don't have to keep it
2100 up to date, either here or in merge_blocks_nomove. */
2101 free_basic_block_vars (1);
2102
2103 /* Compute postdominators if we think we'll use them. */
2104 post_dominators = NULL;
2105 if (HAVE_conditional_execution || life_data_ok)
2106 {
2107 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2108 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2109 }
2110
2111 /* Record initial block numbers. */
2112 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2113 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2114
2115 /* Go through each of the basic blocks looking for things to convert. */
2116 for (block_num = 0; block_num < n_basic_blocks; )
2117 {
2118 basic_block bb = BASIC_BLOCK (block_num);
2119 if (find_if_header (bb))
2120 block_num = bb->index;
2121 else
2122 block_num++;
2123 }
2124
2125 if (post_dominators)
2126 sbitmap_vector_free (post_dominators);
2127
2128 if (rtl_dump_file)
2129 fflush (rtl_dump_file);
2130
2131 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2132 compute_bb_for_insn (get_max_uid ());
2133
2134 /* Rebuild life info for basic blocks that require it. */
2135 if (num_removed_blocks && life_data_ok)
2136 {
2137 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2138 sbitmap_zero (update_life_blocks);
2139
2140 /* If we allocated new pseudos, we must resize the array for sched1. */
2141 if (max_regno < max_reg_num ())
2142 {
2143 max_regno = max_reg_num ();
2144 allocate_reg_info (max_regno, FALSE, FALSE);
2145 }
2146
2147 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2148 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2149 SET_BIT (update_life_blocks, block_num);
2150
2151 count_or_remove_death_notes (update_life_blocks, 1);
2152 /* ??? See about adding a mode that verifies that the initial
2153 set of blocks doesn't let registers come live. */
2154 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2155 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2156 | PROP_KILL_DEAD_CODE);
2157
2158 sbitmap_free (update_life_blocks);
2159 }
2160
2161 /* Write the final stats. */
2162 if (rtl_dump_file && num_possible_if_blocks > 0)
2163 {
2164 fprintf (rtl_dump_file,
2165 "\n%d possible IF blocks searched.\n",
2166 num_possible_if_blocks);
2167 fprintf (rtl_dump_file,
2168 "%d IF blocks converted.\n",
2169 num_updated_if_blocks);
2170 fprintf (rtl_dump_file,
2171 "%d basic blocks deleted.\n\n\n",
2172 num_removed_blocks);
2173 }
2174
2175 #ifdef ENABLE_CHECKING
2176 verify_flow_info ();
2177 #endif
2178 }