/* Control flow optimization code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains the control flow optimizer.  The main entry point is
   cleanup_cfg.  The following optimizations are performed:

   - Unreachable block removal
   - Edge forwarding (an edge to a forwarder block is forwarded to its
     successor.  Simplification of the branch instruction is performed by
     the underlying infrastructure so the branch can be converted to a
     simplejump or eliminated).
   - Cross jumping (tail merging)
   - Conditional jump-around-simplejump simplification
   - Basic block merging.  */

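/* For example (an illustrative CFG fragment, not taken from any particular
   test case), edge forwarding turns

	if (cond) goto L1;		if (cond) goto L2;
	...			=>	...
     L1: goto L2;		     L1: goto L2;

   by redirecting the conditional edge straight to L2; the forwarder block
   at L1 then usually becomes unreachable and is deleted by the
   unreachable-block removal pass.  */
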
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "timevar.h"
#include "output.h"
#include "insn-config.h"
#include "flags.h"
#include "recog.h"
#include "toplev.h"
#include "cselib.h"
#include "params.h"
#include "tm_p.h"
#include "target.h"
#include "cfglayout.h"
#include "emit-rtl.h"

/* cleanup_cfg maintains the following flags for each basic block.  */

enum bb_flags
{
  /* Set if BB is a forwarder block, to avoid too many
     forwarder_block_p calls.  */
  BB_FORWARDER_BLOCK = 1,
  BB_NONTHREADABLE_BLOCK = 2
};

#define BB_FLAGS(BB) (enum bb_flags) (BB)->aux
#define BB_SET_FLAG(BB, FLAG) \
  (BB)->aux = (void *) (long) ((enum bb_flags) (BB)->aux | (FLAG))
#define BB_CLEAR_FLAG(BB, FLAG) \
  (BB)->aux = (void *) (long) ((enum bb_flags) (BB)->aux & ~(FLAG))

#define FORWARDER_BLOCK_P(BB) (BB_FLAGS (BB) & BB_FORWARDER_BLOCK)

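/* For example (illustrative use, not a specific call site):

	if (forwarder_block_p (bb))
	  BB_SET_FLAG (bb, BB_FORWARDER_BLOCK);

   The flags are packed into the otherwise unused BB->aux pointer, so they
   are meaningful only while the cleanup_cfg code owns the aux fields.  */
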
/* Set to true when we are running the first pass of the try_optimize_cfg
   loop.  */
static bool first_pass;
static bool try_crossjump_to_edge (int, edge, edge);
static bool try_crossjump_bb (int, basic_block);
static bool outgoing_edges_match (int, basic_block, basic_block);
static int flow_find_cross_jump (int, basic_block, basic_block, rtx *, rtx *);
static bool insns_match_p (int, rtx, rtx);

static void merge_blocks_move_predecessor_nojumps (basic_block, basic_block);
static void merge_blocks_move_successor_nojumps (basic_block, basic_block);
static bool try_optimize_cfg (int);
static bool try_simplify_condjump (basic_block);
static bool try_forward_edges (int, basic_block);
static edge thread_jump (int, edge, basic_block);
static bool mark_effect (rtx, bitmap);
static void notice_new_block (basic_block);
static void update_forwarder_flag (basic_block);
static int mentions_nonequal_regs (rtx *, void *);
static void merge_memattrs (rtx, rtx);
\f
/* Set flags for a newly created block.  */

static void
notice_new_block (basic_block bb)
{
  if (!bb)
    return;

  if (forwarder_block_p (bb))
    BB_SET_FLAG (bb, BB_FORWARDER_BLOCK);
}

/* Recompute the forwarder flag after a block has been modified.  */

static void
update_forwarder_flag (basic_block bb)
{
  if (forwarder_block_p (bb))
    BB_SET_FLAG (bb, BB_FORWARDER_BLOCK);
  else
    BB_CLEAR_FLAG (bb, BB_FORWARDER_BLOCK);
}
\f
/* Simplify a conditional jump around an unconditional jump.
   Return true if something changed.  */

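/* An illustrative example of the transformation (hypothetical insns):

	if (cond) goto L1;		if (!cond) goto L2;
	goto L2;		=>
     L1:			     L1:

   The unconditional jump lives alone in a forwarder block; inverting the
   conditional branch makes that block dead and lets control fall through
   to L1.  */
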
static bool
try_simplify_condjump (basic_block cbranch_block)
{
  basic_block jump_block, jump_dest_block, cbranch_dest_block;
  edge cbranch_jump_edge, cbranch_fallthru_edge;
  rtx cbranch_insn;

  /* Verify that there are exactly two successors.  */
  if (EDGE_COUNT (cbranch_block->succs) != 2)
    return false;

  /* Verify that we've got a normal conditional branch at the end
     of the block.  */
  cbranch_insn = BB_END (cbranch_block);
  if (!any_condjump_p (cbranch_insn))
    return false;

  cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
  cbranch_jump_edge = BRANCH_EDGE (cbranch_block);

  /* The next block must not have multiple predecessors, must not
     be the last block in the function, and must contain just the
     unconditional jump.  */
  jump_block = cbranch_fallthru_edge->dest;
  if (EDGE_COUNT (jump_block->preds) >= 2
      || jump_block->next_bb == EXIT_BLOCK_PTR
      || !FORWARDER_BLOCK_P (jump_block))
    return false;
  jump_dest_block = EDGE_SUCC (jump_block, 0)->dest;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (flag_reorder_blocks_and_partition
      && (BB_PARTITION (jump_block) != BB_PARTITION (jump_dest_block)
	  || (cbranch_jump_edge->flags & EDGE_CROSSING)))
    return false;

  /* The conditional branch must target the block after the
     unconditional branch.  */
  cbranch_dest_block = cbranch_jump_edge->dest;

  if (cbranch_dest_block == EXIT_BLOCK_PTR
      || !can_fallthru (jump_block, cbranch_dest_block))
    return false;

  /* Invert the conditional branch.  */
  if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 0))
    return false;

  if (dump_file)
    fprintf (dump_file, "Simplifying condjump %i around jump %i\n",
	     INSN_UID (cbranch_insn), INSN_UID (BB_END (jump_block)));

  /* Success.  Update the CFG to match.  Note that after this point
     the edge variable names appear backwards; the redirection is done
     this way to preserve edge profile data.  */
  cbranch_jump_edge = redirect_edge_succ_nodup (cbranch_jump_edge,
						cbranch_dest_block);
  cbranch_fallthru_edge = redirect_edge_succ_nodup (cbranch_fallthru_edge,
						    jump_dest_block);
  cbranch_jump_edge->flags |= EDGE_FALLTHRU;
  cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  update_br_prob_note (cbranch_block);

  /* Delete the block with the unconditional jump, and clean up the mess.  */
  delete_basic_block (jump_block);
  tidy_fallthru_edge (cbranch_jump_edge);
  update_forwarder_flag (cbranch_block);

  return true;
}
\f
/* Attempt to prove that the operation is a NOOP using CSElib, or mark the
   effect on a register.  Used by jump threading.  */

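/* For example (hypothetical patterns): (clobber (reg 65)) clears reg 65
   from NONEQUAL, since a clobbered value is dead and need not match;
   (set (reg 65) (reg 66)) marks reg 65 in NONEQUAL unless cselib can
   already prove the source and destination equal.  */
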
static bool
mark_effect (rtx exp, regset nonequal)
{
  int regno;
  rtx dest;
  switch (GET_CODE (exp))
    {
      /* In case we do clobber the register, mark it as equal, as we know the
	 value is dead so it doesn't have to match.  */
    case CLOBBER:
      if (REG_P (XEXP (exp, 0)))
	{
	  dest = XEXP (exp, 0);
	  regno = REGNO (dest);
	  CLEAR_REGNO_REG_SET (nonequal, regno);
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      int n = hard_regno_nregs[regno][GET_MODE (dest)];
	      while (--n > 0)
		CLEAR_REGNO_REG_SET (nonequal, regno + n);
	    }
	}
      return false;

    case SET:
      if (rtx_equal_for_cselib_p (SET_DEST (exp), SET_SRC (exp)))
	return false;
      dest = SET_DEST (exp);
      if (dest == pc_rtx)
	return false;
      if (!REG_P (dest))
	return true;
      regno = REGNO (dest);
      SET_REGNO_REG_SET (nonequal, regno);
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  int n = hard_regno_nregs[regno][GET_MODE (dest)];
	  while (--n > 0)
	    SET_REGNO_REG_SET (nonequal, regno + n);
	}
      return false;

    default:
      return false;
    }
}

/* Return nonzero if X is a register set in regset DATA.
   Called via for_each_rtx.  */
static int
mentions_nonequal_regs (rtx *x, void *data)
{
  regset nonequal = (regset) data;
  if (REG_P (*x))
    {
      int regno;

      regno = REGNO (*x);
      if (REGNO_REG_SET_P (nonequal, regno))
	return 1;
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  int n = hard_regno_nregs[regno][GET_MODE (*x)];
	  while (--n > 0)
	    if (REGNO_REG_SET_P (nonequal, regno + n))
	      return 1;
	}
    }
  return 0;
}
/* Attempt to prove that basic block B will have no side effects and will
   always continue along the same edge when reached via E.  Return that edge
   if it exists, NULL otherwise.  */

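/* For example (hypothetical blocks): if E is taken when "x == 0" holds and
   B ends in "if (x >= 0) goto L", then EQ dominates GE on the same
   operands, so B's branch is known to be taken and E can be threaded
   directly to L, provided cselib proves B computes nothing else of
   consequence.  */
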
static edge
thread_jump (int mode, edge e, basic_block b)
{
  rtx set1, set2, cond1, cond2, insn;
  enum rtx_code code1, code2, reversed_code2;
  bool reverse1 = false;
  unsigned i;
  regset nonequal;
  bool failed = false;
  reg_set_iterator rsi;

  if (BB_FLAGS (b) & BB_NONTHREADABLE_BLOCK)
    return NULL;

  /* At the moment, we handle only conditional jumps, but later we may
     want to extend this code to tablejumps and others.  */
  if (EDGE_COUNT (e->src->succs) != 2)
    return NULL;
  if (EDGE_COUNT (b->succs) != 2)
    {
      BB_SET_FLAG (b, BB_NONTHREADABLE_BLOCK);
      return NULL;
    }

  /* Second branch must end with onlyjump, as we will eliminate the jump.  */
  if (!any_condjump_p (BB_END (e->src)))
    return NULL;

  if (!any_condjump_p (BB_END (b)) || !onlyjump_p (BB_END (b)))
    {
      BB_SET_FLAG (b, BB_NONTHREADABLE_BLOCK);
      return NULL;
    }

  set1 = pc_set (BB_END (e->src));
  set2 = pc_set (BB_END (b));
  if (((e->flags & EDGE_FALLTHRU) != 0)
      != (XEXP (SET_SRC (set1), 1) == pc_rtx))
    reverse1 = true;

  cond1 = XEXP (SET_SRC (set1), 0);
  cond2 = XEXP (SET_SRC (set2), 0);
  if (reverse1)
    code1 = reversed_comparison_code (cond1, BB_END (e->src));
  else
    code1 = GET_CODE (cond1);

  code2 = GET_CODE (cond2);
  reversed_code2 = reversed_comparison_code (cond2, BB_END (b));

  if (!comparison_dominates_p (code1, code2)
      && !comparison_dominates_p (code1, reversed_code2))
    return NULL;

  /* Ensure that the comparison operators are equivalent.
     ??? This is far too pessimistic.  We should allow swapped operands,
     different CCmodes, or for example comparisons for interval, that
     dominate even when operands are not equivalent.  */
  if (!rtx_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
      || !rtx_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
    return NULL;

  /* Short circuit cases where block B contains some side effects, as we can't
     safely bypass it.  */
  for (insn = NEXT_INSN (BB_HEAD (b)); insn != NEXT_INSN (BB_END (b));
       insn = NEXT_INSN (insn))
    if (INSN_P (insn) && side_effects_p (PATTERN (insn)))
      {
	BB_SET_FLAG (b, BB_NONTHREADABLE_BLOCK);
	return NULL;
      }

  cselib_init (false);

  /* First process all values computed in the source basic block.  */
  for (insn = NEXT_INSN (BB_HEAD (e->src));
       insn != NEXT_INSN (BB_END (e->src));
       insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      cselib_process_insn (insn);

  nonequal = BITMAP_XMALLOC ();
  CLEAR_REG_SET (nonequal);

  /* Now assume that we've continued by the edge E to B and continue
     processing as if it were the same basic block.
     Our goal is to prove that the whole block is a NOOP.  */

  for (insn = NEXT_INSN (BB_HEAD (b));
       insn != NEXT_INSN (BB_END (b)) && !failed;
       insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      for (i = 0; i < (unsigned) XVECLEN (pat, 0); i++)
		failed |= mark_effect (XVECEXP (pat, 0, i), nonequal);
	    }
	  else
	    failed |= mark_effect (pat, nonequal);
	}

      cselib_process_insn (insn);
    }

  /* Later we should clear nonequal of dead registers.  So far we don't
     have life information in cfg_cleanup.  */
  if (failed)
    {
      BB_SET_FLAG (b, BB_NONTHREADABLE_BLOCK);
      goto failed_exit;
    }

  /* cond2 must not mention any register that is not equal to the
     former block.  */
  if (for_each_rtx (&cond2, mentions_nonequal_regs, nonequal))
    goto failed_exit;

  /* In case liveness information is available, we need to prove equivalence
     only of the live values.  */
  if (mode & CLEANUP_UPDATE_LIFE)
    AND_REG_SET (nonequal, b->global_live_at_end);

  EXECUTE_IF_SET_IN_REG_SET (nonequal, 0, i, rsi)
    goto failed_exit;

  BITMAP_XFREE (nonequal);
  cselib_finish ();
  if ((comparison_dominates_p (code1, code2) != 0)
      != (XEXP (SET_SRC (set2), 1) == pc_rtx))
    return BRANCH_EDGE (b);
  else
    return FALLTHRU_EDGE (b);

failed_exit:
  BITMAP_XFREE (nonequal);
  cselib_finish ();
  return NULL;
}
\f
/* Attempt to forward edges leaving basic block B.
   Return true if successful.  */

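/* Sketch of the loop below: starting from E->dest we follow chains of
   forwarder blocks (and, with CLEANUP_THREADING, edges proven constant by
   thread_jump),

	e->dest -> B1 -> B2 -> ... -> target

   bounding the walk by n_basic_blocks so a cycle of forwarders cannot
   loop forever, and then redirect E to the final target just once.  */
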
static bool
try_forward_edges (int mode, basic_block b)
{
  bool changed = false;
  edge_iterator ei;
  edge e, *threaded_edges = NULL;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (flag_reorder_blocks_and_partition
      && find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX))
    return false;

  for (ei = ei_start (b->succs); (e = ei_safe_edge (ei)); )
    {
      basic_block target, first;
      int counter;
      bool threaded = false;
      int nthreaded_edges = 0;
      bool may_thread = first_pass | (b->flags & BB_DIRTY);

      /* Skip complex edges because we don't know how to update them.

	 Still handle fallthru edges, as we can succeed in forwarding the
	 fallthru edge to the same place as the branch edge of a conditional
	 branch and turn the conditional branch into an unconditional one.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ei_next (&ei);
	  continue;
	}

      target = first = e->dest;
      counter = 0;

      /* If we are partitioning hot/cold basic_blocks, we don't want to mess
	 up jumps that cross between hot/cold sections.

	 Basic block partitioning may result in some jumps that appear
	 to be optimizable (or blocks that appear to be mergeable), but which
	 really must be left untouched (they are required to make it safely
	 across partition boundaries).  See the comments at the top of
	 bb-reorder.c:partition_hot_cold_basic_blocks for complete
	 details.  */

      if (flag_reorder_blocks_and_partition
	  && first != EXIT_BLOCK_PTR
	  && find_reg_note (BB_END (first), REG_CROSSING_JUMP, NULL_RTX))
	return false;

      while (counter < n_basic_blocks)
	{
	  basic_block new_target = NULL;
	  bool new_target_threaded = false;
	  may_thread |= target->flags & BB_DIRTY;

	  if (FORWARDER_BLOCK_P (target)
	      && !(EDGE_SUCC (target, 0)->flags & EDGE_CROSSING)
	      && EDGE_SUCC (target, 0)->dest != EXIT_BLOCK_PTR)
	    {
	      /* Bypass trivial infinite loops.  */
	      if (target == EDGE_SUCC (target, 0)->dest)
		counter = n_basic_blocks;
	      new_target = EDGE_SUCC (target, 0)->dest;
	    }

	  /* Allow threading only over one edge at a time to simplify
	     updating of probabilities.  */
	  else if ((mode & CLEANUP_THREADING) && may_thread)
	    {
	      edge t = thread_jump (mode, e, target);
	      if (t)
		{
		  if (!threaded_edges)
		    threaded_edges = xmalloc (sizeof (*threaded_edges)
					      * n_basic_blocks);
		  else
		    {
		      int i;

		      /* Detect an infinite loop across blocks not
			 including the start block.  */
		      for (i = 0; i < nthreaded_edges; ++i)
			if (threaded_edges[i] == t)
			  break;
		      if (i < nthreaded_edges)
			{
			  counter = n_basic_blocks;
			  break;
			}
		    }

		  /* Detect an infinite loop across the start block.  */
		  if (t->dest == b)
		    break;

		  gcc_assert (nthreaded_edges < n_basic_blocks);
		  threaded_edges[nthreaded_edges++] = t;

		  new_target = t->dest;
		  new_target_threaded = true;
		}
	    }

	  if (!new_target)
	    break;

	  /* Avoid killing of loop pre-headers, as it is the place the loop
	     optimizer wants to hoist code to.

	     For fallthru forwarders, the LOOP_BEG note must appear between
	     the header of the block and the CODE_LABEL of the loop; for
	     non-forwarders it must appear before the JUMP_INSN.  */
	  if ((mode & CLEANUP_PRE_LOOP) && optimize)
	    {
	      rtx insn = (EDGE_SUCC (target, 0)->flags & EDGE_FALLTHRU
			  ? BB_HEAD (target) : prev_nonnote_insn (BB_END (target)));

	      if (!NOTE_P (insn))
		insn = NEXT_INSN (insn);

	      for (; insn && !LABEL_P (insn) && !INSN_P (insn);
		   insn = NEXT_INSN (insn))
		if (NOTE_P (insn)
		    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
		  break;

	      if (NOTE_P (insn))
		break;

	      /* Do not clean up branches to just past the end of a loop
		 at this time; it can mess up the loop optimizer's
		 recognition of some patterns.  */

	      insn = PREV_INSN (BB_HEAD (target));
	      if (insn && NOTE_P (insn)
		  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
		break;
	    }

	  counter++;
	  target = new_target;
	  threaded |= new_target_threaded;
	}

      if (counter >= n_basic_blocks)
	{
	  if (dump_file)
	    fprintf (dump_file, "Infinite loop in BB %i.\n",
		     target->index);
	}
      else if (target == first)
	; /* We didn't do anything.  */
      else
	{
	  /* Save the values now, as the edge may get removed.  */
	  gcov_type edge_count = e->count;
	  int edge_probability = e->probability;
	  int edge_frequency;
	  int n = 0;

	  /* Don't force if target is exit block.  */
	  if (threaded && target != EXIT_BLOCK_PTR)
	    {
	      notice_new_block (redirect_edge_and_branch_force (e, target));
	      if (dump_file)
		fprintf (dump_file, "Conditionals threaded.\n");
	    }
	  else if (!redirect_edge_and_branch (e, target))
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Forwarding edge %i->%i to %i failed.\n",
			 b->index, e->dest->index, target->index);
	      ei_next (&ei);
	      continue;
	    }

	  /* We successfully forwarded the edge.  Now update profile
	     data: for each edge we traversed in the chain, remove
	     the original edge's execution count.  */
	  edge_frequency = ((edge_probability * b->frequency
			     + REG_BR_PROB_BASE / 2)
			    / REG_BR_PROB_BASE);

	  if (!FORWARDER_BLOCK_P (b) && forwarder_block_p (b))
	    BB_SET_FLAG (b, BB_FORWARDER_BLOCK);

	  do
	    {
	      edge t;

	      if (EDGE_COUNT (first->succs) > 1)
		{
		  gcc_assert (n < nthreaded_edges);
		  t = threaded_edges [n++];
		  gcc_assert (t->src == first);
		  update_bb_profile_for_threading (first, edge_frequency,
						   edge_count, t);
		  update_br_prob_note (first);
		}
	      else
		{
		  first->count -= edge_count;
		  if (first->count < 0)
		    first->count = 0;
		  first->frequency -= edge_frequency;
		  if (first->frequency < 0)
		    first->frequency = 0;
		  /* It is possible that as the result of
		     threading we've removed the edge, as it is
		     threaded to the fallthru edge.  Avoid
		     getting out of sync.  */
		  if (n < nthreaded_edges
		      && first == threaded_edges [n]->src)
		    n++;
		  t = EDGE_SUCC (first, 0);
		}

	      t->count -= edge_count;
	      if (t->count < 0)
		t->count = 0;
	      first = t->dest;
	    }
	  while (first != target);

	  changed = true;
	  continue;
	}
      ei_next (&ei);
    }

  if (threaded_edges)
    free (threaded_edges);
  return changed;
}
\f

/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */

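/* Illustration of the insn-chain surgery (schematic layout): given a chain
   of  ... B ... A barrier ...  where A's only successor is B, the barrier
   after A is deleted and A's insns are spliced immediately before B's
   head, after which the two now-contiguous blocks are merged.  */
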
static void
merge_blocks_move_predecessor_nojumps (basic_block a, basic_block b)
{
  rtx barrier;
  bool only_notes;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (flag_reorder_blocks_and_partition
      && (BB_PARTITION (a) != BB_PARTITION (b)
	  || find_reg_note (BB_END (a), REG_CROSSING_JUMP, NULL_RTX)))
    return;

  barrier = next_nonnote_insn (BB_END (a));
  gcc_assert (BARRIER_P (barrier));
  delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  only_notes = squeeze_notes (&BB_HEAD (a), &BB_END (a));
  gcc_assert (!only_notes);

  /* Scramble the insn chain.  */
  if (BB_END (a) != PREV_INSN (BB_HEAD (b)))
    reorder_insns_nobb (BB_HEAD (a), BB_END (a), PREV_INSN (BB_HEAD (b)));
  a->flags |= BB_DIRTY;

  if (dump_file)
    fprintf (dump_file, "Moved block %d before %d and merged.\n",
	     a->index, b->index);

  /* Swap the records for the two blocks around.  */

  unlink_block (a);
  link_block (a, b->prev_bb);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks (a, b);
}

/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

static void
merge_blocks_move_successor_nojumps (basic_block a, basic_block b)
{
  rtx barrier, real_b_end;
  rtx label, table;
  bool only_notes;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (flag_reorder_blocks_and_partition
      && (find_reg_note (BB_END (a), REG_CROSSING_JUMP, NULL_RTX)
	  || BB_PARTITION (a) != BB_PARTITION (b)))
    return;

  real_b_end = BB_END (b);

  /* If there is a jump table following block B, temporarily add the jump
     table to block B so that it will also be moved to the correct
     location.  */
  if (tablejump_p (BB_END (b), &label, &table)
      && prev_active_insn (label) == BB_END (b))
    {
      BB_END (b) = table;
    }

  /* There had better have been a barrier there.  Delete it.  */
  barrier = NEXT_INSN (BB_END (b));
  if (barrier && BARRIER_P (barrier))
    delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  only_notes = squeeze_notes (&BB_HEAD (b), &BB_END (b));
  gcc_assert (!only_notes);

  /* Scramble the insn chain.  */
  reorder_insns_nobb (BB_HEAD (b), BB_END (b), BB_END (a));

  /* Restore the real end of b.  */
  BB_END (b) = real_b_end;

  if (dump_file)
    fprintf (dump_file, "Moved block %d after %d and merged.\n",
	     b->index, a->index);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks (a, b);
}

/* Attempt to merge basic blocks that are potentially non-adjacent.
   Return NULL iff the attempt failed, otherwise return the basic block
   where cleanup_cfg should continue.  Because the merging commonly
   moves a basic block away or introduces another optimization
   possibility, return the basic block just before B so cleanup_cfg
   doesn't need to iterate.

   It may be a good idea to return the basic block before C in the case
   C has been moved after B and originally appeared earlier in the
   insn sequence, but we have no information available about the
   relative ordering of these two.  Hopefully it is not too common.  */

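/* Sketch of the case analysis below: if B falls through to C, the blocks
   are merged in place.  Otherwise, under CLEANUP_EXPENSIVE, either C is
   moved up after B (when C has no outgoing fallthru edge) or B is moved
   down before C (when B has no incoming fallthru edge, possibly after
   forcing that edge non-fallthru first).  */
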
static basic_block
merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
{
  basic_block next;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (flag_reorder_blocks_and_partition
      && (find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX)
	  || find_reg_note (BB_END (c), REG_CROSSING_JUMP, NULL_RTX)
	  || BB_PARTITION (b) != BB_PARTITION (c)))
    return NULL;

  /* If B has a fallthru edge to C, no need to move anything.  */
  if (e->flags & EDGE_FALLTHRU)
    {
      int b_index = b->index, c_index = c->index;
      merge_blocks (b, c);
      update_forwarder_flag (b);

      if (dump_file)
	fprintf (dump_file, "Merged %d and %d without moving.\n",
		 b_index, c_index);

      return b->prev_bb == ENTRY_BLOCK_PTR ? b : b->prev_bb;
    }

  /* Otherwise we will need to move code around.  Do that only if expensive
     transformations are allowed.  */
  else if (mode & CLEANUP_EXPENSIVE)
    {
      edge tmp_edge, b_fallthru_edge;
      bool c_has_outgoing_fallthru;
      bool b_has_incoming_fallthru;
      edge_iterator ei;

      /* Avoid overactive code motion, as the forwarder blocks should be
	 eliminated by edge redirection instead.  One exception might have
	 been if B is a forwarder block and C has no fallthru edge, but
	 that should be cleaned up by bb-reorder instead.  */
      if (FORWARDER_BLOCK_P (b) || FORWARDER_BLOCK_P (c))
	return NULL;

      /* We must make sure to not munge nesting of lexical blocks,
	 and loop notes.  This is done by squeezing out all the notes
	 and leaving them there to lie.  Not ideal, but functional.  */

      FOR_EACH_EDGE (tmp_edge, ei, c->succs)
	if (tmp_edge->flags & EDGE_FALLTHRU)
	  break;

      c_has_outgoing_fallthru = (tmp_edge != NULL);

      FOR_EACH_EDGE (tmp_edge, ei, b->preds)
	if (tmp_edge->flags & EDGE_FALLTHRU)
	  break;

      b_has_incoming_fallthru = (tmp_edge != NULL);
      b_fallthru_edge = tmp_edge;
      next = b->prev_bb;
      if (next == c)
	next = next->prev_bb;

      /* Otherwise, we're going to try to move C after B.  If C does
	 not have an outgoing fallthru, then it can be moved
	 immediately after B without introducing or modifying jumps.  */
      if (! c_has_outgoing_fallthru)
	{
	  merge_blocks_move_successor_nojumps (b, c);
	  return next == ENTRY_BLOCK_PTR ? next->next_bb : next;
	}

      /* If B does not have an incoming fallthru, then it can be moved
	 immediately before C without introducing or modifying jumps.
	 C cannot be the first block, so we do not have to worry about
	 accessing a non-existent block.  */

      if (b_has_incoming_fallthru)
	{
	  basic_block bb;

	  if (b_fallthru_edge->src == ENTRY_BLOCK_PTR)
	    return NULL;
	  bb = force_nonfallthru (b_fallthru_edge);
	  if (bb)
	    notice_new_block (bb);
	}

      merge_blocks_move_predecessor_nojumps (b, c);
      return next == ENTRY_BLOCK_PTR ? next->next_bb : next;
    }

  return NULL;
}
\f

/* Remove the memory attributes of the MEM expressions X and Y
   wherever they are not equal.  */

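/* For example (hypothetical MEMs): matching (mem/s:SI (reg 65)) against
   (mem:SI (reg 65)) with different MEM_EXPRs zaps the exprs and offsets
   of both, widens MEM_SIZE to the larger of the two and lowers MEM_ALIGN
   to the smaller, so the surviving attributes are conservatively correct
   for either instruction.  */
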
void
merge_memattrs (rtx x, rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return;
  if (x == 0 || y == 0)
    return;

  code = GET_CODE (x);

  if (code != GET_CODE (y))
    return;

  if (GET_MODE (x) != GET_MODE (y))
    return;

  if (code == MEM && MEM_ATTRS (x) != MEM_ATTRS (y))
    {
      if (! MEM_ATTRS (x))
	MEM_ATTRS (y) = 0;
      else if (! MEM_ATTRS (y))
	MEM_ATTRS (x) = 0;
      else
	{
	  rtx mem_size;

	  if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
	    {
	      set_mem_alias_set (x, 0);
	      set_mem_alias_set (y, 0);
	    }

	  if (! mem_expr_equal_p (MEM_EXPR (x), MEM_EXPR (y)))
	    {
	      set_mem_expr (x, 0);
	      set_mem_expr (y, 0);
	      set_mem_offset (x, 0);
	      set_mem_offset (y, 0);
	    }
	  else if (MEM_OFFSET (x) != MEM_OFFSET (y))
	    {
	      set_mem_offset (x, 0);
	      set_mem_offset (y, 0);
	    }

	  if (!MEM_SIZE (x))
	    mem_size = NULL_RTX;
	  else if (!MEM_SIZE (y))
	    mem_size = NULL_RTX;
	  else
	    mem_size = GEN_INT (MAX (INTVAL (MEM_SIZE (x)),
				     INTVAL (MEM_SIZE (y))));
	  set_mem_size (x, mem_size);
	  set_mem_size (y, mem_size);

	  set_mem_align (x, MIN (MEM_ALIGN (x), MEM_ALIGN (y)));
	  set_mem_align (y, MEM_ALIGN (x));
	}
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    merge_memattrs (XVECEXP (x, i, j), XVECEXP (y, i, j));

	  break;

	case 'e':
	  merge_memattrs (XEXP (x, i), XEXP (y, i));
	}
    }
  return;
}


/* Return true if I1 and I2 are equivalent and thus can be crossjumped.  */

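/* For example, two insns whose patterns are both (hypothetically)
   (set (reg 65) (plus:SI (reg 66) (const_int 4)))  compare equal here;
   after reload the comparison is done with rtx_renumbered_equal_p so that
   different pseudos assigned to the same hard register still match.  */
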
static bool
insns_match_p (int mode ATTRIBUTE_UNUSED, rtx i1, rtx i2)
{
  rtx p1, p2;

  /* Verify that I1 and I2 are equivalent.  */
  if (GET_CODE (i1) != GET_CODE (i2))
    return false;

  p1 = PATTERN (i1);
  p2 = PATTERN (i2);

  if (GET_CODE (p1) != GET_CODE (p2))
    return false;

  /* If this is a CALL_INSN, compare register usage information.
     If we don't check this on stack register machines, the two
     CALL_INSNs might be merged leaving reg-stack.c with mismatching
     numbers of stack registers in the same basic block.
     If we don't check this on machines with delay slots, a delay slot may
     be filled that clobbers a parameter expected by the subroutine.

     ??? We take the simple route for now and assume that if they're
     equal, they were constructed identically.  */

  if (CALL_P (i1)
      && (!rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
			CALL_INSN_FUNCTION_USAGE (i2))
	  || SIBLING_CALL_P (i1) != SIBLING_CALL_P (i2)))
    return false;

#ifdef STACK_REGS
  /* If cross_jump_death_matters is not 0, the insn's mode
     indicates whether or not the insn contains any stack-like
     regs.  */

  if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
    {
      /* If register stack conversion has already been done, then
	 death notes must also be compared before it is certain that
	 the two instruction streams match.  */

      rtx note;
      HARD_REG_SET i1_regset, i2_regset;

      CLEAR_HARD_REG_SET (i1_regset);
      CLEAR_HARD_REG_SET (i2_regset);

      for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
	  SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

      for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
	  SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

      GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);

      return false;

    done:
      ;
    }
#endif

  if (reload_completed
      ? rtx_renumbered_equal_p (p1, p2) : rtx_equal_p (p1, p2))
    return true;

  /* Do not do EQUIV substitution after reload.  First, we're undoing the
     work of reload_cse.  Second, we may be undoing the work of the post-
     reload splitting pass.  */
  /* ??? Possibly add a new phase switch variable that can be used by
     targets to disallow the troublesome insns after splitting.  */
  if (!reload_completed)
    {
      /* The following code helps take care of G++ cleanups.  */
      rtx equiv1 = find_reg_equal_equiv_note (i1);
      rtx equiv2 = find_reg_equal_equiv_note (i2);

      if (equiv1 && equiv2
	  /* If the equivalences are not to a constant, they may
	     reference pseudos that no longer exist, so we can't
	     use them.  */
	  && (! reload_completed
	      || (CONSTANT_P (XEXP (equiv1, 0))
		  && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))))
	{
	  rtx s1 = single_set (i1);
	  rtx s2 = single_set (i2);
	  if (s1 != 0 && s2 != 0
	      && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
	    {
	      validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
	      validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
	      if (! rtx_renumbered_equal_p (p1, p2))
		cancel_changes (0);
	      else if (apply_change_group ())
		return true;
	    }
	}
    }

  return false;
}
\f
/* Look through the insns at the end of BB1 and BB2 and find the longest
   sequence of insns that are equivalent.  Store the first insns for that
   sequence in *F1 and *F2 and return the sequence length.

   To simplify callers of this function, if the blocks match exactly,
   store the head of the blocks in *F1 and *F2.  */

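/* Sketch of the scan: starting at BB_END of each block and skipping notes,
   insns are compared pairwise with insns_match_p while walking backwards
   through PREV_INSN; the walk stops at the first mismatch or at either
   block head, and the length of the matched tail is returned.  */
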
static int
flow_find_cross_jump (int mode ATTRIBUTE_UNUSED, basic_block bb1,
		      basic_block bb2, rtx *f1, rtx *f2)
{
  rtx i1, i2, last1, last2, afterlast1, afterlast2;
  int ninsns = 0;

  /* Skip simple jumps at the end of the blocks.  Complex jumps still
     need to be compared for equivalence, which we'll do below.  */

  i1 = BB_END (bb1);
  last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
  if (onlyjump_p (i1)
      || (returnjump_p (i1) && !side_effects_p (PATTERN (i1))))
    {
      last1 = i1;
      i1 = PREV_INSN (i1);
    }

  i2 = BB_END (bb2);
  if (onlyjump_p (i2)
      || (returnjump_p (i2) && !side_effects_p (PATTERN (i2))))
    {
      last2 = i2;
      /* Count everything except for unconditional jump as insn.  */
      if (!simplejump_p (i2) && !returnjump_p (i2) && last1)
	ninsns++;
      i2 = PREV_INSN (i2);
    }

  while (true)
    {
      /* Ignore notes.  */
      while (!INSN_P (i1) && i1 != BB_HEAD (bb1))
	i1 = PREV_INSN (i1);

      while (!INSN_P (i2) && i2 != BB_HEAD (bb2))
	i2 = PREV_INSN (i2);

      if (i1 == BB_HEAD (bb1) || i2 == BB_HEAD (bb2))
	break;

      if (!insns_match_p (mode, i1, i2))
	break;

      merge_memattrs (i1, i2);

      /* Don't begin a cross-jump with a NOTE insn.  */
      if (INSN_P (i1))
	{
	  /* If the merged insns have different REG_EQUAL notes, then
	     remove them.  */
	  rtx equiv1 = find_reg_equal_equiv_note (i1);
	  rtx equiv2 = find_reg_equal_equiv_note (i2);

	  if (equiv1 && !equiv2)
	    remove_note (i1, equiv1);
	  else if (!equiv1 && equiv2)
	    remove_note (i2, equiv2);
	  else if (equiv1 && equiv2
		   && !rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
	    {
	      remove_note (i1, equiv1);
	      remove_note (i2, equiv2);
	    }

	  afterlast1 = last1, afterlast2 = last2;
	  last1 = i1, last2 = i2;
	  ninsns++;
	}

      i1 = PREV_INSN (i1);
      i2 = PREV_INSN (i2);
    }

#ifdef HAVE_cc0
  /* Don't allow the insn after a compare to be shared by
     cross-jumping unless the compare is also shared.  */
  if (ninsns && reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
    last1 = afterlast1, last2 = afterlast2, ninsns--;
#endif

  /* Include preceding notes and labels in the cross-jump.  One,
     this may bring us to the head of the blocks as requested above.
     Two, it keeps line number notes as matched as may be.  */
  if (ninsns)
    {
      while (last1 != BB_HEAD (bb1) && !INSN_P (PREV_INSN (last1)))
	last1 = PREV_INSN (last1);

      if (last1 != BB_HEAD (bb1) && LABEL_P (PREV_INSN (last1)))
	last1 = PREV_INSN (last1);

      while (last2 != BB_HEAD (bb2) && !INSN_P (PREV_INSN (last2)))
	last2 = PREV_INSN (last2);

      if (last2 != BB_HEAD (bb2) && LABEL_P (PREV_INSN (last2)))
	last2 = PREV_INSN (last2);

      *f1 = last1;
      *f2 = last2;
    }

  return ninsns;
}

/* Return true iff the outgoing edges of BB1 and BB2 match, together with
   the branch instruction.  This means that if we commonize the control
   flow before the end of the basic block, the semantics remain unchanged.

   We may assume that there exists one edge with a common destination.  */

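/* For example (hypothetical blocks): two blocks both ending in
   "if (reg 65 == 0) goto L" with matching (possibly swapped) branch and
   fallthru destinations match; a crossed pair is handled by comparing the
   second condition through reversed_comparison_code.  The branch
   probabilities must also not differ by more than REG_BR_PROB_BASE / 2.  */
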
static bool
outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
{
  int nehedges1 = 0, nehedges2 = 0;
  edge fallthru1 = 0, fallthru2 = 0;
  edge e1, e2;
  edge_iterator ei;

  /* If BB1 has only one successor, we may be looking at either an
     unconditional jump, or a fake edge to exit.  */
  if (EDGE_COUNT (bb1->succs) == 1
      && (EDGE_SUCC (bb1, 0)->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0
      && (!JUMP_P (BB_END (bb1)) || simplejump_p (BB_END (bb1))))
    return (EDGE_COUNT (bb2->succs) == 1
	    && (EDGE_SUCC (bb2, 0)->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0
	    && (!JUMP_P (BB_END (bb2)) || simplejump_p (BB_END (bb2))));

  /* Match conditional jumps - this may get tricky when fallthru and branch
     edges are crossed.  */
  if (EDGE_COUNT (bb1->succs) == 2
      && any_condjump_p (BB_END (bb1))
      && onlyjump_p (BB_END (bb1)))
    {
      edge b1, f1, b2, f2;
      bool reverse, match;
      rtx set1, set2, cond1, cond2;
      enum rtx_code code1, code2;

      if (EDGE_COUNT (bb2->succs) != 2
	  || !any_condjump_p (BB_END (bb2))
	  || !onlyjump_p (BB_END (bb2)))
	return false;

      b1 = BRANCH_EDGE (bb1);
      b2 = BRANCH_EDGE (bb2);
      f1 = FALLTHRU_EDGE (bb1);
      f2 = FALLTHRU_EDGE (bb2);

      /* Get around possible forwarders on fallthru edges.  Other cases
	 should be optimized out already.  */
      if (FORWARDER_BLOCK_P (f1->dest))
	f1 = EDGE_SUCC (f1->dest, 0);

      if (FORWARDER_BLOCK_P (f2->dest))
	f2 = EDGE_SUCC (f2->dest, 0);

      /* To simplify use of this function, return false if there are
	 unneeded forwarder blocks.  These will get eliminated later
	 during cleanup_cfg.  */
      if (FORWARDER_BLOCK_P (f1->dest)
	  || FORWARDER_BLOCK_P (f2->dest)
	  || FORWARDER_BLOCK_P (b1->dest)
	  || FORWARDER_BLOCK_P (b2->dest))
	return false;

      if (f1->dest == f2->dest && b1->dest == b2->dest)
	reverse = false;
      else if (f1->dest == b2->dest && b1->dest == f2->dest)
	reverse = true;
      else
	return false;

      set1 = pc_set (BB_END (bb1));
      set2 = pc_set (BB_END (bb2));
      if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
	  != (XEXP (SET_SRC (set2), 1) == pc_rtx))
	reverse = !reverse;

      cond1 = XEXP (SET_SRC (set1), 0);
      cond2 = XEXP (SET_SRC (set2), 0);
      code1 = GET_CODE (cond1);
      if (reverse)
	code2 = reversed_comparison_code (cond2, BB_END (bb2));
      else
	code2 = GET_CODE (cond2);

      if (code2 == UNKNOWN)
	return false;

      /* Verify codes and operands match.  */
      match = ((code1 == code2
		&& rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
		&& rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
	       || (code1 == swap_condition (code2)
		   && rtx_renumbered_equal_p (XEXP (cond1, 1),
					      XEXP (cond2, 0))
		   && rtx_renumbered_equal_p (XEXP (cond1, 0),
					      XEXP (cond2, 1))));

      /* If we return true, we will join the blocks.  Which means that
	 we will only have one branch prediction bit to work with.  Thus
	 we require the existing branches to have probabilities that are
	 roughly similar.  */
      if (match
	  && !optimize_size
	  && maybe_hot_bb_p (bb1)
	  && maybe_hot_bb_p (bb2))
	{
	  int prob2;

	  if (b1->dest == b2->dest)
	    prob2 = b2->probability;
	  else
	    /* Do not use f2 probability as f2 may be forwarded.  */
	    prob2 = REG_BR_PROB_BASE - b2->probability;

	  /* Fail if the difference in probabilities is greater than 50%.
	     This rules out two well-predicted branches with opposite
	     outcomes.  */
	  if (abs (b1->probability - prob2) > REG_BR_PROB_BASE / 2)
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Outcomes of branch in bb %i and %i differ too much (%i %i)\n",
			 bb1->index, bb2->index, b1->probability, prob2);

	      return false;
	    }
	}

      if (dump_file && match)
	fprintf (dump_file, "Conditionals in bb %i and %i match.\n",
		 bb1->index, bb2->index);

      return match;
    }

  /* Generic case - we are seeing a computed jump, table jump or trapping
     instruction.  */

#ifndef CASE_DROPS_THROUGH
  /* Check whether there are tablejumps in the end of BB1 and BB2.
     Return true if they are identical.  */
  {
    rtx label1, label2;
    rtx table1, table2;

    if (tablejump_p (BB_END (bb1), &label1, &table1)
	&& tablejump_p (BB_END (bb2), &label2, &table2)
	&& GET_CODE (PATTERN (table1)) == GET_CODE (PATTERN (table2)))
      {
	/* The labels should never be the same rtx.  If they really are the
	   same the jump tables are the same too.  So disable crossjumping of
	   blocks BB1 and BB2 because when deleting the common insns in the
	   end of BB1 by delete_basic_block () the jump table would be
	   deleted too.  */
	/* If LABEL2 is referenced in BB1->END do not do anything
	   because we would lose information when replacing
	   LABEL1 by LABEL2 and then LABEL2 by LABEL1 in BB1->END.  */
	if (label1 != label2 && !rtx_referenced_p (label2, BB_END (bb1)))
	  {
	    /* Set IDENTICAL to true when the tables are identical.  */
	    bool identical = false;
	    rtx p1, p2;

	    p1 = PATTERN (table1);
	    p2 = PATTERN (table2);
	    if (GET_CODE (p1) == ADDR_VEC && rtx_equal_p (p1, p2))
	      {
		identical = true;
	      }
	    else if (GET_CODE (p1) == ADDR_DIFF_VEC
		     && (XVECLEN (p1, 1) == XVECLEN (p2, 1))
		     && rtx_equal_p (XEXP (p1, 2), XEXP (p2, 2))
		     && rtx_equal_p (XEXP (p1, 3), XEXP (p2, 3)))
	      {
		int i;

		identical = true;
		for (i = XVECLEN (p1, 1) - 1; i >= 0 && identical; i--)
		  if (!rtx_equal_p (XVECEXP (p1, 1, i), XVECEXP (p2, 1, i)))
		    identical = false;
	      }

	    if (identical)
	      {
		replace_label_data rr;
		bool match;

		/* Temporarily replace references to LABEL1 with LABEL2
		   in BB1->END so that we could compare the instructions.  */
		rr.r1 = label1;
		rr.r2 = label2;
		rr.update_label_nuses = false;
		for_each_rtx (&BB_END (bb1), replace_label, &rr);

		match = insns_match_p (mode, BB_END (bb1), BB_END (bb2));
		if (dump_file && match)
		  fprintf (dump_file,
			   "Tablejumps in bb %i and %i match.\n",
			   bb1->index, bb2->index);

		/* Set the original label in BB1->END because when deleting
		   a block whose end is a tablejump, the tablejump referenced
		   from the instruction is deleted too.  */
		rr.r1 = label2;
		rr.r2 = label1;
		for_each_rtx (&BB_END (bb1), replace_label, &rr);

		return match;
	      }
	  }
	return false;
      }
  }
#endif

  /* First ensure that the instructions match.  There may be many outgoing
     edges so this test is generally cheaper.  */
  if (!insns_match_p (mode, BB_END (bb1), BB_END (bb2)))
    return false;

  /* Search the outgoing edges, ensure that the counts do match, find possible
     fallthru and exception handling edges since these need more
     validation.  */
  if (EDGE_COUNT (bb1->succs) != EDGE_COUNT (bb2->succs))
    return false;

  FOR_EACH_EDGE (e1, ei, bb1->succs)
    {
      e2 = EDGE_SUCC (bb2, ei.index);

      if (e1->flags & EDGE_EH)
	nehedges1++;

      if (e2->flags & EDGE_EH)
	nehedges2++;

      if (e1->flags & EDGE_FALLTHRU)
	fallthru1 = e1;
      if (e2->flags & EDGE_FALLTHRU)
	fallthru2 = e2;
    }

  /* If the number of edges of various types does not match, fail.  */
  if (nehedges1 != nehedges2
      || (fallthru1 != 0) != (fallthru2 != 0))
    return false;

  /* fallthru edges must be forwarded to the same destination.  */
  if (fallthru1)
    {
      basic_block d1 = (forwarder_block_p (fallthru1->dest)
			? EDGE_SUCC (fallthru1->dest, 0)->dest: fallthru1->dest);
      basic_block d2 = (forwarder_block_p (fallthru2->dest)
			? EDGE_SUCC (fallthru2->dest, 0)->dest: fallthru2->dest);

      if (d1 != d2)
	return false;
    }

  /* Ensure the same EH region.  */
  {
    rtx n1 = find_reg_note (BB_END (bb1), REG_EH_REGION, 0);
    rtx n2 = find_reg_note (BB_END (bb2), REG_EH_REGION, 0);

    if (!n1 && n2)
      return false;

    if (n1 && (!n2 || XEXP (n1, 0) != XEXP (n2, 0)))
      return false;
  }

  /* We don't need to match the rest of the edges as the above checks should
     be enough to ensure that they are equivalent.  */
  return true;
}

/* E1 and E2 are edges with the same destination block.  Search their
   predecessors for common code.  If found, redirect control flow from
   (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC.  */

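/* Sketch of the procedure: after outgoing_edges_match confirms the control
   flow agrees and flow_find_cross_jump finds NMATCH common trailing insns,
   SRC2 is split before its matched tail (unless the whole block matched)
   and SRC1 is split likewise, after which SRC1's tail block is redirected
   to the shared copy and deleted.  */
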
1497static bool
d329e058 1498try_crossjump_to_edge (int mode, edge e1, edge e2)
402209ff
JH
1499{
1500 int nmatch;
1501 basic_block src1 = e1->src, src2 = e2->src;
39587bb9 1502 basic_block redirect_to, redirect_from, to_remove;
402209ff
JH
1503 rtx newpos1, newpos2;
1504 edge s;
628f6a4e 1505 edge_iterator ei;
402209ff 1506
6de9cd9a
DN
1507 newpos1 = newpos2 = NULL_RTX;
1508
750054a2 1509 /* If we have partitioned hot/cold basic blocks, it is a bad idea
8e8d5162
CT
1510 to try this optimization.
1511
1512 Basic block partitioning may result in some jumps that appear to
1513 be optimizable (or blocks that appear to be mergeable), but which really
1514 must be left untouched (they are required to make it safely across
1515 partition boundaries). See the comments at the top of
1516 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
750054a2
CT
1517
1518 if (flag_reorder_blocks_and_partition && no_new_pseudos)
1519 return false;
1520
402209ff
JH
1521 /* Search backward through forwarder blocks. We don't need to worry
1522 about multiple entry or chained forwarders, as they will be optimized
1523 away. We do this to look past the unconditional jump following a
1524 conditional jump that is required due to the current CFG shape. */
628f6a4e 1525 if (EDGE_COUNT (src1->preds) == 1
635559ab 1526 && FORWARDER_BLOCK_P (src1))
628f6a4e 1527 e1 = EDGE_PRED (src1, 0), src1 = e1->src;
5f0d2358 1528
628f6a4e 1529 if (EDGE_COUNT (src2->preds) == 1
635559ab 1530 && FORWARDER_BLOCK_P (src2))
628f6a4e 1531 e2 = EDGE_PRED (src2, 0), src2 = e2->src;
402209ff
JH
1532
1533 /* Nothing to do if we reach ENTRY, or a common source block. */
1534 if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
1535 return false;
1536 if (src1 == src2)
1537 return false;
1538
1539 /* Seeing more than 1 forwarder blocks would confuse us later... */
635559ab 1540 if (FORWARDER_BLOCK_P (e1->dest)
628f6a4e 1541 && FORWARDER_BLOCK_P (EDGE_SUCC (e1->dest, 0)->dest))
402209ff 1542 return false;
5f0d2358 1543
635559ab 1544 if (FORWARDER_BLOCK_P (e2->dest)
628f6a4e 1545 && FORWARDER_BLOCK_P (EDGE_SUCC (e2->dest, 0)->dest))
402209ff
JH
1546 return false;
1547
1548 /* Likewise with dead code (possibly newly created by the other optimizations
1549 of cfg_cleanup). */
628f6a4e 1550 if (EDGE_COUNT (src1->preds) == 0 || EDGE_COUNT (src2->preds) == 0)
402209ff
JH
1551 return false;
1552
402209ff 1553 /* Look for the common insn sequence, part the first ... */
0dd0e980 1554 if (!outgoing_edges_match (mode, src1, src2))
402209ff
JH
1555 return false;
1556
1557 /* ... and part the second. */
1558 nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
12183e0f
PH
1559
1560 /* Don't proceed with the crossjump unless we found a sufficient number
1561 of matching instructions or the 'from' block was totally matched
1562 (such that its predecessors will hopefully be redirected and the
1563 block removed). */
1564 if ((nmatch < PARAM_VALUE (PARAM_MIN_CROSSJUMP_INSNS))
1565 && (newpos1 != BB_HEAD (src1)))
402209ff
JH
1566 return false;
1567
39811184
JZ
1568#ifndef CASE_DROPS_THROUGH
1569 /* Here we know that the insns in the end of SRC1 which are common with SRC2
1570 will be deleted.
1571 If we have tablejumps in the end of SRC1 and SRC2
1572 they have been already compared for equivalence in outgoing_edges_match ()
1573 so replace the references to TABLE1 by references to TABLE2. */
1574 {
1575 rtx label1, label2;
1576 rtx table1, table2;
1577
a813c111
SB
1578 if (tablejump_p (BB_END (src1), &label1, &table1)
1579 && tablejump_p (BB_END (src2), &label2, &table2)
39811184
JZ
1580 && label1 != label2)
1581 {
4af16369 1582 replace_label_data rr;
39811184
JZ
1583 rtx insn;
1584
1585 /* Replace references to LABEL1 with LABEL2. */
1586 rr.r1 = label1;
1587 rr.r2 = label2;
4af16369 1588 rr.update_label_nuses = true;
39811184
JZ
1589 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1590 {
1591 /* Do not replace the label in SRC1->END because when deleting
1592 a block whose end is a tablejump, the tablejump referenced
1593 from the instruction is deleted too. */
a813c111 1594 if (insn != BB_END (src1))
39811184
JZ
1595 for_each_rtx (&insn, replace_label, &rr);
1596 }
1597 }
1598 }
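  /* Illustrative note: from this point every reference to LABEL1
     outside BB_END (src1) has been rewritten to LABEL2, so deleting
     SRC1's common tail, and with it TABLE1, leaves no dangling label
     references.  */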
1599#endif
10d6c0d0 1600
402209ff 1601 /* Avoid splitting if possible. */
a813c111 1602 if (newpos2 == BB_HEAD (src2))
402209ff
JH
1603 redirect_to = src2;
1604 else
1605 {
c263766c
RH
1606 if (dump_file)
1607 fprintf (dump_file, "Splitting bb %i before %i insns\n",
0b17ab2f 1608 src2->index, nmatch);
402209ff
JH
1609 redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
1610 }
1611
c263766c
RH
1612 if (dump_file)
1613 fprintf (dump_file,
402209ff 1614 "Cross jumping from bb %i to bb %i; %i common insns\n",
0b17ab2f 1615 src1->index, src2->index, nmatch);
402209ff
JH
1616
1617 redirect_to->count += src1->count;
1618 redirect_to->frequency += src1->frequency;
2ca6672b
JH
 1619 /* We may have some registers visible through the block. */
1620 redirect_to->flags |= BB_DIRTY;
402209ff
JH
1621
1622 /* Recompute the frequencies and counts of outgoing edges. */
628f6a4e 1623 FOR_EACH_EDGE (s, ei, redirect_to->succs)
402209ff
JH
1624 {
1625 edge s2;
628f6a4e 1626 edge_iterator ei;
402209ff
JH
1627 basic_block d = s->dest;
1628
635559ab 1629 if (FORWARDER_BLOCK_P (d))
628f6a4e 1630 d = EDGE_SUCC (d, 0)->dest;
5f0d2358 1631
628f6a4e 1632 FOR_EACH_EDGE (s2, ei, src1->succs)
402209ff
JH
1633 {
1634 basic_block d2 = s2->dest;
635559ab 1635 if (FORWARDER_BLOCK_P (d2))
628f6a4e 1636 d2 = EDGE_SUCC (d2, 0)->dest;
402209ff
JH
1637 if (d == d2)
1638 break;
1639 }
5f0d2358 1640
402209ff
JH
1641 s->count += s2->count;
1642
1643 /* Take care to update possible forwarder blocks. We verified
1644 that there is no more than one in the chain, so we can't run
 1645 into an infinite loop. */
635559ab 1646 if (FORWARDER_BLOCK_P (s->dest))
402209ff 1647 {
628f6a4e 1648 EDGE_SUCC (s->dest, 0)->count += s2->count;
402209ff
JH
1649 s->dest->count += s2->count;
1650 s->dest->frequency += EDGE_FREQUENCY (s);
1651 }
5f0d2358 1652
635559ab 1653 if (FORWARDER_BLOCK_P (s2->dest))
402209ff 1654 {
628f6a4e
BE
1655 EDGE_SUCC (s2->dest, 0)->count -= s2->count;
1656 if (EDGE_SUCC (s2->dest, 0)->count < 0)
1657 EDGE_SUCC (s2->dest, 0)->count = 0;
402209ff
JH
1658 s2->dest->count -= s2->count;
1659 s2->dest->frequency -= EDGE_FREQUENCY (s);
b446e5a2
JH
1660 if (s2->dest->frequency < 0)
1661 s2->dest->frequency = 0;
1662 if (s2->dest->count < 0)
1663 s2->dest->count = 0;
402209ff 1664 }
5f0d2358 1665
402209ff
JH
1666 if (!redirect_to->frequency && !src1->frequency)
1667 s->probability = (s->probability + s2->probability) / 2;
1668 else
5f0d2358
RK
1669 s->probability
1670 = ((s->probability * redirect_to->frequency +
1671 s2->probability * src1->frequency)
1672 / (redirect_to->frequency + src1->frequency));
402209ff
JH
1673 }
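      /* Worked example for the weighted average above, with invented
	 numbers: redirect_to->frequency == 300, s->probability == 60%,
	 src1->frequency == 100 and s2->probability == 20% give
	 (60*300 + 20*100) / (300 + 100) == 50%.  */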
1674
b446e5a2 1675 update_br_prob_note (redirect_to);
402209ff
JH
1676
1677 /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
1678
1679 /* Skip possible basic block header. */
4b4bf941 1680 if (LABEL_P (newpos1))
402209ff 1681 newpos1 = NEXT_INSN (newpos1);
5f0d2358 1682
4b4bf941 1683 if (NOTE_P (newpos1))
402209ff 1684 newpos1 = NEXT_INSN (newpos1);
402209ff 1685
39587bb9 1686 redirect_from = split_block (src1, PREV_INSN (newpos1))->src;
628f6a4e 1687 to_remove = EDGE_SUCC (redirect_from, 0)->dest;
402209ff 1688
628f6a4e 1689 redirect_edge_and_branch_force (EDGE_SUCC (redirect_from, 0), redirect_to);
f470c378 1690 delete_basic_block (to_remove);
402209ff 1691
39587bb9 1692 update_forwarder_flag (redirect_from);
635559ab 1693
402209ff
JH
1694 return true;
1695}
1696
1697/* Search the predecessors of BB for common insn sequences. When found,
1698 share code between them by redirecting control flow. Return true if
1699 any changes made. */
1700
1701static bool
d329e058 1702try_crossjump_bb (int mode, basic_block bb)
402209ff 1703{
628f6a4e 1704 edge e, e2, fallthru;
402209ff 1705 bool changed;
628f6a4e
BE
1706 unsigned max, ix, ix2;
1707 basic_block ev, ev2;
1708 edge_iterator ei;
402209ff 1709
f63d1bf7 1710 /* Nothing to do if there are not at least two incoming edges. */
628f6a4e 1711 if (EDGE_COUNT (bb->preds) < 2)
402209ff
JH
1712 return false;
1713
750054a2
CT
1714 /* If we are partitioning hot/cold basic blocks, we don't want to
1715 mess up unconditional or indirect jumps that cross between hot
8e8d5162 1716 and cold sections.
750054a2 1717
8e8d5162
CT
1718 Basic block partitioning may result in some jumps that appear to
1719 be optimizable (or blocks that appear to be mergeable), but which really
1720 must be left untouched (they are required to make it safely across
1721 partition boundaries). See the comments at the top of
1722 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
1723
750054a2 1724 if (flag_reorder_blocks_and_partition
628f6a4e
BE
1725 && (BB_PARTITION (EDGE_PRED (bb, 0)->src) != BB_PARTITION (EDGE_PRED (bb, 1)->src)
1726 || (EDGE_PRED (bb, 0)->flags & EDGE_CROSSING)))
750054a2
CT
1727 return false;
1728
402209ff
JH
1729 /* It is always cheapest to redirect a block that ends in a branch to
1730 a block that falls through into BB, as that adds no branches to the
1731 program. We'll try that combination first. */
5f24e0dc
RH
1732 fallthru = NULL;
1733 max = PARAM_VALUE (PARAM_MAX_CROSSJUMP_EDGES);
628f6a4e
BE
1734
1735 if (EDGE_COUNT (bb->preds) > max)
1736 return false;
1737
1738 FOR_EACH_EDGE (e, ei, bb->preds)
f5eb5fd0 1739 {
5f24e0dc 1740 if (e->flags & EDGE_FALLTHRU)
628f6a4e 1741 fallthru = e;
f5eb5fd0 1742 }
402209ff
JH
1743
1744 changed = false;
628f6a4e 1745 for (ix = 0, ev = bb; ix < EDGE_COUNT (ev->preds); )
402209ff 1746 {
628f6a4e
BE
1747 e = EDGE_PRED (ev, ix);
1748 ix++;
402209ff 1749
402209ff
JH
1750 /* As noted above, first try with the fallthru predecessor. */
1751 if (fallthru)
1752 {
1753 /* Don't combine the fallthru edge into anything else.
1754 If there is a match, we'll do it the other way around. */
1755 if (e == fallthru)
1756 continue;
7cf240d5
JH
1757 /* If nothing changed since the last attempt, there is nothing
1758 we can do. */
1759 if (!first_pass
1760 && (!(e->src->flags & BB_DIRTY)
1761 && !(fallthru->src->flags & BB_DIRTY)))
1762 continue;
402209ff
JH
1763
1764 if (try_crossjump_to_edge (mode, e, fallthru))
1765 {
1766 changed = true;
628f6a4e
BE
1767 ix = 0;
1768 ev = bb;
402209ff
JH
1769 continue;
1770 }
1771 }
1772
1773 /* Non-obvious work limiting check: Recognize that we're going
1774 to call try_crossjump_bb on every basic block. So if we have
1775 two blocks with lots of outgoing edges (a switch) and they
1776 share lots of common destinations, then we would do the
1777 cross-jump check once for each common destination.
1778
1779 Now, if the blocks actually are cross-jump candidates, then
 1780 all of their destinations will be shared, which means that
 1781 we only need to check them for cross-jump candidacy once. We
 1782 can eliminate redundant checks of crossjump(A,B) by arbitrarily
 1783 choosing to do the check only from the block for which the edge
 1784 in question is that block's first successor edge. */
628f6a4e 1785 if (EDGE_SUCC (e->src, 0) != e)
402209ff
JH
1786 continue;
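      /* Illustrative restatement: if blocks A and B both fan out to
	 destinations D1...Dn, the crossjump(A,B) attempt is triggered
	 only from the destination reached by A's first successor edge,
	 so the pair is examined once instead of n times.  */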
1787
628f6a4e 1788 for (ix2 = 0, ev2 = bb; ix2 < EDGE_COUNT (ev2->preds); )
402209ff 1789 {
628f6a4e
BE
1790 e2 = EDGE_PRED (ev2, ix2);
1791 ix2++;
402209ff
JH
1792
1793 if (e2 == e)
1794 continue;
1795
1796 /* We've already checked the fallthru edge above. */
1797 if (e2 == fallthru)
1798 continue;
1799
402209ff
JH
1800 /* The "first successor" check above only prevents multiple
1801 checks of crossjump(A,B). In order to prevent redundant
1802 checks of crossjump(B,A), require that A be the block
1803 with the lowest index. */
0b17ab2f 1804 if (e->src->index > e2->src->index)
402209ff
JH
1805 continue;
1806
7cf240d5
JH
1807 /* If nothing changed since the last attempt, there is nothing
1808 we can do. */
1809 if (!first_pass
1810 && (!(e->src->flags & BB_DIRTY)
1811 && !(e2->src->flags & BB_DIRTY)))
1812 continue;
1813
402209ff
JH
1814 if (try_crossjump_to_edge (mode, e, e2))
1815 {
1816 changed = true;
628f6a4e
BE
1817 ev2 = bb;
1818 ix = 0;
402209ff
JH
1819 break;
1820 }
1821 }
1822 }
1823
1824 return changed;
1825}
1826
 1827/* Do simple CFG optimizations - basic block merging, simplification of jump
 1828 instructions, etc. Return nonzero if changes were made. */
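/* In outline, each iteration of the loop below visits every block and,
   in order: deletes trivially dead blocks, removes unused labels,
   removes empty fallthru blocks, merges suitable block pairs,
   simplifies branch over branch and branch to branch, and, under
   CLEANUP_CROSSJUMP, merges shared tail code; it repeats until a full
   pass makes no change.  */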
1829
1830static bool
d329e058 1831try_optimize_cfg (int mode)
402209ff 1832{
402209ff
JH
1833 bool changed_overall = false;
1834 bool changed;
1835 int iterations = 0;
ec3ae3da 1836 basic_block bb, b, next;
402209ff 1837
ca6c03ca
JH
1838 if (mode & CLEANUP_CROSSJUMP)
1839 add_noreturn_fake_exit_edges ();
1840
e0082a72
ZD
1841 FOR_EACH_BB (bb)
1842 update_forwarder_flag (bb);
635559ab 1843
7cf240d5 1844 if (mode & (CLEANUP_UPDATE_LIFE | CLEANUP_CROSSJUMP | CLEANUP_THREADING))
38c1593d
JH
1845 clear_bb_flags ();
1846
245f1bfa 1847 if (! targetm.cannot_modify_jumps_p ())
402209ff 1848 {
7cf240d5 1849 first_pass = true;
e4ec2cac
AO
1850 /* Attempt to merge blocks as made possible by edge removal. If
1851 a block has only one successor, and the successor has only
1852 one predecessor, they may be combined. */
1853 do
402209ff 1854 {
e4ec2cac
AO
1855 changed = false;
1856 iterations++;
1857
c263766c
RH
1858 if (dump_file)
1859 fprintf (dump_file,
e4ec2cac
AO
1860 "\n\ntry_optimize_cfg iteration %i\n\n",
1861 iterations);
402209ff 1862
e0082a72 1863 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR;)
402209ff 1864 {
e0082a72 1865 basic_block c;
e4ec2cac
AO
1866 edge s;
1867 bool changed_here = false;
5f0d2358 1868
e4ec2cac 1869 /* Delete trivially dead basic blocks. */
628f6a4e 1870 while (EDGE_COUNT (b->preds) == 0)
e4ec2cac 1871 {
f6366fc7 1872 c = b->prev_bb;
c263766c
RH
1873 if (dump_file)
1874 fprintf (dump_file, "Deleting block %i.\n",
0b17ab2f 1875 b->index);
e4ec2cac 1876
f470c378 1877 delete_basic_block (b);
bc35512f
JH
1878 if (!(mode & CLEANUP_CFGLAYOUT))
1879 changed = true;
e4ec2cac
AO
1880 b = c;
1881 }
402209ff 1882
6ce2bcb7 1883 /* Remove code labels no longer used. */
628f6a4e
BE
1884 if (EDGE_COUNT (b->preds) == 1
1885 && (EDGE_PRED (b, 0)->flags & EDGE_FALLTHRU)
1886 && !(EDGE_PRED (b, 0)->flags & EDGE_COMPLEX)
4b4bf941 1887 && LABEL_P (BB_HEAD (b))
e4ec2cac
AO
1888 /* If the previous block ends with a branch to this
1889 block, we can't delete the label. Normally this
1890 is a condjump that is yet to be simplified, but
 1891 if CASE_DROPS_THROUGH, this can be a tablejump with
1892 some element going to the same place as the
1893 default (fallthru). */
628f6a4e
BE
1894 && (EDGE_PRED (b, 0)->src == ENTRY_BLOCK_PTR
1895 || !JUMP_P (BB_END (EDGE_PRED (b, 0)->src))
a813c111 1896 || ! label_is_jump_target_p (BB_HEAD (b),
628f6a4e 1897 BB_END (EDGE_PRED (b, 0)->src))))
e4ec2cac 1898 {
a813c111 1899 rtx label = BB_HEAD (b);
5f0d2358 1900
e4ec2cac 1901 delete_insn_chain (label, label);
bc35512f
JH
 1902 /* In case the label is undeletable, move it after the
 1903 BASIC_BLOCK note. */
a813c111 1904 if (NOTE_LINE_NUMBER (BB_HEAD (b)) == NOTE_INSN_DELETED_LABEL)
bc35512f 1905 {
a813c111 1906 rtx bb_note = NEXT_INSN (BB_HEAD (b));
bc35512f
JH
1907
1908 reorder_insns_nobb (label, label, bb_note);
a813c111 1909 BB_HEAD (b) = bb_note;
bc35512f 1910 }
c263766c
RH
1911 if (dump_file)
1912 fprintf (dump_file, "Deleted label in block %i.\n",
0b17ab2f 1913 b->index);
e4ec2cac 1914 }
402209ff 1915
e4ec2cac 1916 /* If we fall through an empty block, we can remove it. */
bc35512f 1917 if (!(mode & CLEANUP_CFGLAYOUT)
628f6a4e
BE
1918 && EDGE_COUNT (b->preds) == 1
1919 && (EDGE_PRED (b, 0)->flags & EDGE_FALLTHRU)
4b4bf941 1920 && !LABEL_P (BB_HEAD (b))
e4ec2cac
AO
1921 && FORWARDER_BLOCK_P (b)
1922 /* Note that forwarder_block_p true ensures that
1923 there is a successor for this block. */
628f6a4e 1924 && (EDGE_SUCC (b, 0)->flags & EDGE_FALLTHRU)
0b17ab2f 1925 && n_basic_blocks > 1)
e4ec2cac 1926 {
c263766c
RH
1927 if (dump_file)
1928 fprintf (dump_file,
e4ec2cac 1929 "Deleting fallthru block %i.\n",
0b17ab2f 1930 b->index);
e4ec2cac 1931
f6366fc7 1932 c = b->prev_bb == ENTRY_BLOCK_PTR ? b->next_bb : b->prev_bb;
628f6a4e 1933 redirect_edge_succ_nodup (EDGE_PRED (b, 0), EDGE_SUCC (b, 0)->dest);
f470c378 1934 delete_basic_block (b);
e4ec2cac
AO
1935 changed = true;
1936 b = c;
1937 }
5f0d2358 1938
628f6a4e
BE
1939 if (EDGE_COUNT (b->succs) == 1
1940 && (s = EDGE_SUCC (b, 0))
ec3ae3da
JH
1941 && !(s->flags & EDGE_COMPLEX)
1942 && (c = s->dest) != EXIT_BLOCK_PTR
628f6a4e 1943 && EDGE_COUNT (c->preds) == 1
bc35512f
JH
1944 && b != c)
1945 {
 1946 /* When not in cfg_layout mode, use code aware of reordering
 1947 INSNs. This code possibly creates new basic blocks, so it
 1948 does not fit the merge_blocks interface and is kept here in
 1949 the hope that it will become useless once more of the compiler
 1950 is transformed to use cfg_layout mode. */
1951
1952 if ((mode & CLEANUP_CFGLAYOUT)
1953 && can_merge_blocks_p (b, c))
1954 {
1955 merge_blocks (b, c);
1956 update_forwarder_flag (b);
1957 changed_here = true;
1958 }
1959 else if (!(mode & CLEANUP_CFGLAYOUT)
1960 /* If the jump insn has side effects,
1961 we can't kill the edge. */
4b4bf941 1962 && (!JUMP_P (BB_END (b))
e24e7211 1963 || (reload_completed
a813c111 1964 ? simplejump_p (BB_END (b))
e4efa971
JH
1965 : (onlyjump_p (BB_END (b))
1966 && !tablejump_p (BB_END (b),
1967 NULL, NULL))))
bc35512f
JH
1968 && (next = merge_blocks_move (s, b, c, mode)))
1969 {
1970 b = next;
1971 changed_here = true;
1972 }
ec3ae3da 1973 }
e4ec2cac
AO
1974
1975 /* Simplify branch over branch. */
bc35512f
JH
1976 if ((mode & CLEANUP_EXPENSIVE)
1977 && !(mode & CLEANUP_CFGLAYOUT)
1978 && try_simplify_condjump (b))
38c1593d 1979 changed_here = true;
402209ff 1980
e4ec2cac
AO
1981 /* If B has a single outgoing edge, but uses a
1982 non-trivial jump instruction without side-effects, we
1983 can either delete the jump entirely, or replace it
3348b696 1984 with a simple unconditional jump. */
628f6a4e
BE
1985 if (EDGE_COUNT (b->succs) == 1
1986 && EDGE_SUCC (b, 0)->dest != EXIT_BLOCK_PTR
a813c111 1987 && onlyjump_p (BB_END (b))
750054a2 1988 && !find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX)
628f6a4e 1989 && try_redirect_by_replacing_jump (EDGE_SUCC (b, 0), EDGE_SUCC (b, 0)->dest,
20b4e8ae 1990 (mode & CLEANUP_CFGLAYOUT) != 0))
e4ec2cac 1991 {
e4ec2cac
AO
1992 update_forwarder_flag (b);
1993 changed_here = true;
1994 }
402209ff 1995
e4ec2cac
AO
1996 /* Simplify branch to branch. */
1997 if (try_forward_edges (mode, b))
1998 changed_here = true;
402209ff 1999
e4ec2cac
AO
2000 /* Look for shared code between blocks. */
2001 if ((mode & CLEANUP_CROSSJUMP)
2002 && try_crossjump_bb (mode, b))
2003 changed_here = true;
402209ff 2004
e4ec2cac
AO
2005 /* Don't get confused by the index shift caused by
2006 deleting blocks. */
2007 if (!changed_here)
e0082a72 2008 b = b->next_bb;
e4ec2cac
AO
2009 else
2010 changed = true;
2011 }
402209ff 2012
e4ec2cac
AO
2013 if ((mode & CLEANUP_CROSSJUMP)
2014 && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
402209ff 2015 changed = true;
402209ff
JH
2016
2017#ifdef ENABLE_CHECKING
e4ec2cac
AO
2018 if (changed)
2019 verify_flow_info ();
402209ff
JH
2020#endif
2021
e4ec2cac 2022 changed_overall |= changed;
7cf240d5 2023 first_pass = false;
e4ec2cac
AO
2024 }
2025 while (changed);
402209ff 2026 }
ca6c03ca
JH
2027
2028 if (mode & CLEANUP_CROSSJUMP)
6809cbf9 2029 remove_fake_exit_edges ();
ca6c03ca 2030
1540f9eb 2031 clear_aux_for_blocks ();
635559ab 2032
402209ff
JH
2033 return changed_overall;
2034}
2035\f
6d2f8887 2036/* Delete all unreachable basic blocks. */
4262e623 2037
969d70ca 2038bool
d329e058 2039delete_unreachable_blocks (void)
402209ff 2040{
402209ff 2041 bool changed = false;
e0082a72 2042 basic_block b, next_bb;
402209ff
JH
2043
2044 find_unreachable_blocks ();
2045
bf77398c 2046 /* Delete all unreachable basic blocks. */
402209ff 2047
e0082a72 2048 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
402209ff 2049 {
e0082a72 2050 next_bb = b->next_bb;
0b17ab2f 2051
402209ff 2052 if (!(b->flags & BB_REACHABLE))
6a58eee9 2053 {
f470c378 2054 delete_basic_block (b);
6a58eee9
RH
2055 changed = true;
2056 }
402209ff
JH
2057 }
2058
2059 if (changed)
2060 tidy_fallthru_edges ();
2061 return changed;
2062}
6de9cd9a
DN
2063
2064/* Merges sequential blocks if possible. */
2065
2066bool
2067merge_seq_blocks (void)
2068{
2069 basic_block bb;
2070 bool changed = false;
2071
2072 for (bb = ENTRY_BLOCK_PTR->next_bb; bb != EXIT_BLOCK_PTR; )
2073 {
628f6a4e
BE
2074 if (EDGE_COUNT (bb->succs) == 1
2075 && can_merge_blocks_p (bb, EDGE_SUCC (bb, 0)->dest))
6de9cd9a
DN
2076 {
2077 /* Merge the blocks and retry. */
628f6a4e 2078 merge_blocks (bb, EDGE_SUCC (bb, 0)->dest);
6de9cd9a
DN
2079 changed = true;
2080 continue;
2081 }
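	  /* The `continue' above deliberately does not advance BB: the
	     enlarged block may immediately be mergeable with its new
	     single successor.  */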
2082
2083 bb = bb->next_bb;
2084 }
2085
2086 return changed;
2087}
402209ff
JH
2088\f
2089/* Tidy the CFG by deleting unreachable code and whatnot. */
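/* A typical invocation elsewhere in the compiler might look like
   cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_CROSSJUMP); this is
   illustrative only, callers and flag combinations vary. The function
   iterates delete_unreachable_blocks and try_optimize_cfg until the
   CFG stops changing.  */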
2090
2091bool
d329e058 2092cleanup_cfg (int mode)
402209ff 2093{
402209ff
JH
2094 bool changed = false;
2095
2096 timevar_push (TV_CLEANUP_CFG);
3dec4024
JH
2097 if (delete_unreachable_blocks ())
2098 {
2099 changed = true;
 2100 /* We've possibly created trivially dead code. Clean it up right
95bd1dd7 2101 now to introduce more opportunities for try_optimize_cfg. */
6ce2bcb7 2102 if (!(mode & (CLEANUP_NO_INSN_DEL | CLEANUP_UPDATE_LIFE))
3dec4024
JH
2103 && !reload_completed)
2104 delete_trivially_dead_insns (get_insns(), max_reg_num ());
2105 }
bf77398c
ZD
2106
2107 compact_blocks ();
2108
3dec4024
JH
2109 while (try_optimize_cfg (mode))
2110 {
2111 delete_unreachable_blocks (), changed = true;
2112 if (mode & CLEANUP_UPDATE_LIFE)
2113 {
95bd1dd7
KH
 2114 /* Cleaning up the CFG introduces more opportunities for dead code
2115 removal that in turn may introduce more opportunities for
3dec4024 2116 cleaning up the CFG. */
e41f3392 2117 if (!update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3dec4024
JH
2118 PROP_DEATH_NOTES
2119 | PROP_SCAN_DEAD_CODE
2120 | PROP_KILL_DEAD_CODE
23bd7a93
JH
2121 | ((mode & CLEANUP_LOG_LINKS)
2122 ? PROP_LOG_LINKS : 0)))
3dec4024
JH
2123 break;
2124 }
6ce2bcb7 2125 else if (!(mode & CLEANUP_NO_INSN_DEL)
1c4a429a 2126 && (mode & CLEANUP_EXPENSIVE)
95479831 2127 && !reload_completed)
3dec4024
JH
2128 {
2129 if (!delete_trivially_dead_insns (get_insns(), max_reg_num ()))
2130 break;
2131 }
2132 else
2133 break;
2134 delete_dead_jumptables ();
2135 }
402209ff 2136
402209ff
JH
2137 /* Kill the data we won't maintain. */
2138 free_EXPR_LIST_list (&label_value_list);
402209ff
JH
2139 timevar_pop (TV_CLEANUP_CFG);
2140
402209ff
JH
2141 return changed;
2142}