/* Source: gcc.git, file gcc/cfgrtl.c (web-view residue removed; the
   cfgloop.c note belongs in the ChangeLog, not in this file).  */
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
24
25 Available functionality:
26 - CFG-aware instruction chain manipulation
27 delete_insn, delete_insn_chain
28 - Basic block manipulation
29 create_basic_block, flow_delete_block, split_block,
30 merge_blocks_nomove
31 - Infrastructure to determine quickly basic block for insn
32 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
33 - Edge redirection with updating and optimizing of insn chain
34 block_label, redirect_edge_and_branch,
35 redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
36 - Edge splitting and commiting to edges
37 split_edge, insert_insn_on_edge, commit_edge_insertions
38 - Dumping and debugging
39 print_rtl_with_bb, dump_bb, debug_bb, debug_bb_n
40 - Consistency checking
41 verify_flow_info
42 - CFG updating after constant propagation
43 purge_dead_edges, purge_all_dead_edges */
44 \f
45 #include "config.h"
46 #include "system.h"
47 #include "tree.h"
48 #include "rtl.h"
49 #include "hard-reg-set.h"
50 #include "basic-block.h"
51 #include "regs.h"
52 #include "flags.h"
53 #include "output.h"
54 #include "function.h"
55 #include "except.h"
56 #include "toplev.h"
57 #include "tm_p.h"
58 #include "obstack.h"
59 #include "insn-config.h"
60
61 /* Stubs in case we don't have a return insn. */
62 #ifndef HAVE_return
63 #define HAVE_return 0
64 #define gen_return() NULL_RTX
65 #endif
66
67 /* The basic block structure for every insn, indexed by uid. */
68 varray_type basic_block_for_insn;
69
70 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
71 /* ??? Should probably be using LABEL_NUSES instead. It would take a
72 bit of surgery to be able to use or co-opt the routines in jump. */
73 rtx label_value_list;
74 rtx tail_recursion_label_list;
75
76 static int can_delete_note_p PARAMS ((rtx));
77 static int can_delete_label_p PARAMS ((rtx));
78 static void commit_one_edge_insertion PARAMS ((edge, int));
79 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
80 static rtx last_loop_beg_note PARAMS ((rtx));
81 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
82 static basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));
83 \f
84 /* Return true if NOTE is not one of the ones that must be kept paired,
85 so that we may simply delete it. */
86
87 static int
88 can_delete_note_p (note)
89 rtx note;
90 {
91 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
92 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
93 || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
94 }
95
96 /* True if a given label can be deleted. */
97
98 static int
99 can_delete_label_p (label)
100 rtx label;
101 {
102 return (!LABEL_PRESERVE_P (label)
103 /* User declared labels must be preserved. */
104 && LABEL_NAME (label) == 0
105 && !in_expr_list_p (forced_labels, label)
106 && !in_expr_list_p (label_value_list, label));
107 }
108
/* Delete INSN by patching it out.  Return the next insn.

   A CODE_LABEL that is still referenced (constant pool, user label,
   label_value_list...) is not unlinked; it is converted in place into a
   NOTE_INSN_DELETED_LABEL note instead.  Label use counts referenced by
   the deleted insn (its JUMP_LABEL, a REG_LABEL note, or the entries of
   an ADDR_VEC / ADDR_DIFF_VEC) are decremented.  */

rtx
delete_insn (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);
  rtx note;
  bool really_delete = true;

  if (GET_CODE (insn) == CODE_LABEL)
    {
      /* Some labels can't be directly removed from the INSN chain, as they
         might be references via variables, constant pool etc.
         Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);

	  /* Re-code the rtx in place rather than unlinking it, so
	     outstanding references stay valid.  */
	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_SOURCE_FILE (insn) = name;
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      if (INSN_DELETED_P (insn))
	abort ();
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn)
      && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
	   && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
    LABEL_NUSES (XEXP (note, 0))--;

  /* For a jump table, drop the use count of every label it dispatches to.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (GET_CODE (label) != NOTE)
	    LABEL_NUSES (label)--;
	}
    }

  return next;
}
181
182 /* Like delete_insn but also purge dead edges from BB. */
183 rtx
184 delete_insn_and_edges (insn)
185 rtx insn;
186 {
187 rtx x;
188 bool purge = false;
189
190 if (basic_block_for_insn
191 && INSN_P (insn)
192 && (unsigned int)INSN_UID (insn) < basic_block_for_insn->num_elements
193 && BLOCK_FOR_INSN (insn)
194 && BLOCK_FOR_INSN (insn)->end == insn)
195 purge = true;
196 x = delete_insn (insn);
197 if (purge)
198 purge_dead_edges (BLOCK_FOR_INSN (insn));
199 return x;
200 }
201
202 /* Unlink a chain of insns between START and FINISH, leaving notes
203 that must be paired. */
204
205 void
206 delete_insn_chain (start, finish)
207 rtx start, finish;
208 {
209 rtx next;
210
211 /* Unchain the insns one by one. It would be quicker to delete all of these
212 with a single unchaining, rather than one at a time, but we need to keep
213 the NOTE's. */
214 while (1)
215 {
216 next = NEXT_INSN (start);
217 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
218 ;
219 else
220 next = delete_insn (start);
221
222 if (start == finish)
223 break;
224 start = next;
225 }
226 }
227
228 /* Like delete_insn but also purge dead edges from BB. */
229 void
230 delete_insn_chain_and_edges (first, last)
231 rtx first, last;
232 {
233 bool purge = false;
234
235 if (basic_block_for_insn
236 && INSN_P (last)
237 && (unsigned int)INSN_UID (last) < basic_block_for_insn->num_elements
238 && BLOCK_FOR_INSN (last)
239 && BLOCK_FOR_INSN (last)->end == last)
240 purge = true;
241 delete_insn_chain (first, last);
242 if (purge)
243 purge_dead_edges (BLOCK_FOR_INSN (last));
244 }
245 \f
246 /* Create a new basic block consisting of the instructions between HEAD and END
247 inclusive. This function is designed to allow fast BB construction - reuses
248 the note and basic block struct in BB_NOTE, if any and do not grow
249 BASIC_BLOCK chain and should be used directly only by CFG construction code.
250 END can be NULL in to create new empty basic block before HEAD. Both END
251 and HEAD can be NULL to create basic block at the end of INSN chain.
252 AFTER is the basic block we should be put after. */
253
254 basic_block
255 create_basic_block_structure (index, head, end, bb_note, after)
256 int index;
257 rtx head, end, bb_note;
258 basic_block after;
259 {
260 basic_block bb;
261
262 if (bb_note
263 && ! RTX_INTEGRATED_P (bb_note)
264 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
265 && bb->aux == NULL)
266 {
267 /* If we found an existing note, thread it back onto the chain. */
268
269 rtx after;
270
271 if (GET_CODE (head) == CODE_LABEL)
272 after = head;
273 else
274 {
275 after = PREV_INSN (head);
276 head = bb_note;
277 }
278
279 if (after != bb_note && NEXT_INSN (after) != bb_note)
280 reorder_insns (bb_note, bb_note, after);
281 }
282 else
283 {
284 /* Otherwise we must create a note and a basic block structure. */
285
286 bb = alloc_block ();
287
288 if (!head && !end)
289 head = end = bb_note
290 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
291 else if (GET_CODE (head) == CODE_LABEL && end)
292 {
293 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
294 if (head == end)
295 end = bb_note;
296 }
297 else
298 {
299 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
300 head = bb_note;
301 if (!end)
302 end = head;
303 }
304
305 NOTE_BASIC_BLOCK (bb_note) = bb;
306 }
307
308 /* Always include the bb note in the block. */
309 if (NEXT_INSN (end) == bb_note)
310 end = bb_note;
311
312 bb->head = head;
313 bb->end = end;
314 bb->index = index;
315 bb->flags = BB_NEW;
316 link_block (bb, after);
317 BASIC_BLOCK (index) = bb;
318 if (basic_block_for_insn)
319 update_bb_for_insn (bb);
320
321 /* Tag the block so that we know it has been used when considering
322 other basic block notes. */
323 bb->aux = bb;
324
325 return bb;
326 }
327
328 /* Create new basic block consisting of instructions in between HEAD and END
329 and place it to the BB chain after block AFTER. END can be NULL in to
330 create new empty basic block before HEAD. Both END and HEAD can be NULL to
331 create basic block at the end of INSN chain. */
332
333 basic_block
334 create_basic_block (head, end, after)
335 rtx head, end;
336 basic_block after;
337 {
338 basic_block bb;
339 int index = last_basic_block++;
340
341 /* Place the new block just after the end. */
342 VARRAY_GROW (basic_block_info, last_basic_block);
343
344 n_basic_blocks++;
345
346 bb = create_basic_block_structure (index, head, end, NULL, after);
347 bb->aux = NULL;
348 return bb;
349 }
350 \f
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.
   NOTE(review): nothing in this function currently sets
   deleted_handler, so the return value is always 0.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

int
flow_delete_block_noexpunge (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  /* Get rid of all NOTE_INSN_PREDICTIONs hanging before the block.  */

  for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
    {
      /* Only a run of notes can precede the block head; stop at the
	 first real insn.  */
      if (GET_CODE (insn) != NOTE)
	break;
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
	NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
    }

  insn = b->head;

  never_reached_warning (insn, b->end);

  if (GET_CODE (insn) == CODE_LABEL)
    maybe_remove_eh_handler (insn);

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (GET_CODE (end) == JUMP_INSN
      && (tmp = JUMP_LABEL (end)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  b->head = NULL;
  delete_insn_chain (insn, end);

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  while (b->pred != NULL)
    remove_edge (b->pred);
  while (b->succ != NULL)
    remove_edge (b->succ);

  b->pred = NULL;
  b->succ = NULL;

  return deleted_handler;
}
421
422 int
423 flow_delete_block (b)
424 basic_block b;
425 {
426 int deleted_handler = flow_delete_block_noexpunge (b);
427
428 /* Remove the basic block from the array. */
429 expunge_block (b);
430
431 return deleted_handler;
432 }
433 \f
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array; insns whose UID
   is at or above MAX are silently left unmapped.  Any previous mapping
   is discarded first.  */

void
compute_bb_for_insn (max)
     int max;
{
  basic_block bb;

  /* Throw away the old map before building a fresh one.  */
  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);

  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  FOR_EACH_BB (bb)
    {
      rtx end = bb->end;
      rtx insn;

      /* Walk the block's insns inclusively from head to end.  */
      for (insn = bb->head; ; insn = NEXT_INSN (insn))
	{
	  if (INSN_UID (insn) < max)
	    VARRAY_BB (basic_block_for_insn, INSN_UID (insn)) = bb;

	  if (insn == end)
	    break;
	}
    }
}
463
464 /* Release the basic_block_for_insn array. */
465
466 void
467 free_bb_for_insn ()
468 {
469 if (basic_block_for_insn)
470 VARRAY_FREE (basic_block_for_insn);
471
472 basic_block_for_insn = 0;
473 }
474
475 /* Update insns block within BB. */
476
477 void
478 update_bb_for_insn (bb)
479 basic_block bb;
480 {
481 rtx insn;
482
483 if (! basic_block_for_insn)
484 return;
485
486 for (insn = bb->head; ; insn = NEXT_INSN (insn))
487 {
488 set_block_for_insn (insn, bb);
489 if (insn == bb->end)
490 break;
491 }
492 }
493
494 /* Record INSN's block as BB. */
495
496 void
497 set_block_for_insn (insn, bb)
498 rtx insn;
499 basic_block bb;
500 {
501 size_t uid = INSN_UID (insn);
502
503 if (uid >= basic_block_for_insn->num_elements)
504 {
505 /* Add one-eighth the size so we don't keep calling xrealloc. */
506 size_t new_size = uid + (uid + 7) / 8;
507
508 VARRAY_GROW (basic_block_for_insn, new_size);
509 }
510
511 VARRAY_BB (basic_block_for_insn, uid) = bb;
512 }
513 \f
/* Split a block BB after insn INSN creating a new fallthru edge.
   Return the new edge, or 0 when INSN is already the last insn of BB.
   Note that to keep other parts of the compiler happy, this function
   renumbers all the basic blocks so that the new one has a number one
   greater than the block split.  */

edge
split_block (bb, insn)
     basic_block bb;
     rtx insn;
{
  basic_block new_bb;
  edge new_edge;
  edge e;

  /* There is no point splitting the block after its end.  */
  if (bb->end == insn)
    return 0;

  /* Create the new basic block holding everything after INSN.  */
  new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
  new_bb->count = bb->count;
  new_bb->frequency = bb->frequency;
  new_bb->loop_depth = bb->loop_depth;
  bb->end = insn;

  /* Redirect the outgoing edges: the new block inherits all of BB's
     successors.  */
  new_bb->succ = bb->succ;
  bb->succ = NULL;
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
#ifdef HAVE_conditional_execution
      /* In the presence of conditional execution we are not able to update
	 liveness precisely.  */
      if (reload_completed)
	{
	  bb->flags |= BB_DIRTY;
	  new_bb->flags |= BB_DIRTY;
	}
#endif
    }

  return new_edge;
}
575
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous, hence `nomove'.

   B's label, basic block note, the jump ending A, and any barrier in
   between are deleted; B's remaining insns are reassociated with A,
   and B is expunged from the block array.  */

void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  rtx b_head = b->head, b_end = b->end, a_end = a->end;
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;
  edge e;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      /* Walk back over notes preceding the jump; stop at the block
	 note or A's head so we never cross into an earlier block.  */
      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (GET_CODE (prev) != NOTE
	    || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
	    || prev == a->head)
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
    del_first = NEXT_INSN (a_end);

  /* Normally there should only be one successor of A and that is B, but
     partway though the merge of blocks for conditional_execution we'll
     be merging a TEST block with THEN and ELSE successors.  Free the
     whole lot of them and hope the caller knows what they're doing.  */
  while (a->succ)
    remove_edge (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;
  a->flags |= b->flags;

  /* B hasn't quite yet ceased to exist.  Attempt to prevent mishap.  */
  b->pred = b->succ = NULL;
  a->global_live_at_end = b->global_live_at_end;

  expunge_block (b);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  delete_insn_chain (del_first, del_last);

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      if (basic_block_for_insn)
	{
	  rtx x;

	  for (x = a_end; x != b_end; x = NEXT_INSN (x))
	    set_block_for_insn (x, a);

	  set_block_for_insn (b_end, a);
	}

      a_end = b_end;
    }

  a->end = a_end;
}
686 \f
687 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
688 exist. */
689
690 rtx
691 block_label (block)
692 basic_block block;
693 {
694 if (block == EXIT_BLOCK_PTR)
695 return NULL_RTX;
696
697 if (GET_CODE (block->head) != CODE_LABEL)
698 {
699 block->head = emit_label_before (gen_label_rtx (), block->head);
700 if (basic_block_for_insn)
701 set_block_for_insn (block->head, block);
702 }
703
704 return block->head;
705 }
706
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.  */

static bool
try_redirect_by_replacing_jump (e, target)
     edge e;
     basic_block target;
{
  basic_block src = e->src;
  rtx insn = src->end, kill_from;
  edge tmp;
  rtx set, table;
  int fallthru = 0;

  /* Verify that all targets will be TARGET.  */
  for (tmp = src->succ; tmp; tmp = tmp->succ_next)
    if (tmp->dest != target && tmp != e)
      break;

  /* Give up unless the jump is a plain jump with no other edges.  */
  if (tmp || !onlyjump_p (insn))
    return false;
  /* After reload, don't touch a tablejump: its dispatch table can no
     longer be regenerated.  */
  if (reload_completed && JUMP_LABEL (insn)
      && (table = NEXT_INSN (JUMP_LABEL (insn))) != NULL_RTX
      && GET_CODE (table) == JUMP_INSN
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
	  || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
    return false;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (can_fallthru (src, target))
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      delete_insn_chain (kill_from, PREV_INSN (target->head));
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return false;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  /* Redirecting to the exit block can legitimately fail (no
	     label exists there); anything else is a bug.  */
	  if (target == EXIT_BLOCK_PTR)
	    return false;
	  abort ();
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR)
    return false;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, tmp;

      emit_jump_insn_after (gen_jump (target_label), insn);
      JUMP_LABEL (src->end) = target_label;
      LABEL_NUSES (target_label)++;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (src->end));


      delete_insn_chain (kill_from, insn);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  delete_insn_chain (JUMP_LABEL (insn), tmp);
	}

      /* Make sure the new jump is followed by a barrier.  */
      barrier = next_nonnote_insn (src->end);
      if (!barrier || GET_CODE (barrier) != BARRIER)
	emit_barrier_after (src->end);
    }

  /* Keep only one edge out and set proper flags.  */
  while (src->succ->succ_next)
    remove_edge (src->succ);
  e = src->succ;
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  /* We don't want a block to end on a line-number note since that has
     the potential of changing the code between -g and not -g.  */
  while (GET_CODE (e->src->end) == NOTE
	 && NOTE_LINE_NUMBER (e->src->end) >= 0)
    delete_insn (e->src->end);

  if (e->dest != target)
    redirect_edge_succ (e, target);

  return true;
}
837
838 /* Return last loop_beg note appearing after INSN, before start of next
839 basic block. Return INSN if there are no such notes.
840
841 When emitting jump to redirect an fallthru edge, it should always appear
842 after the LOOP_BEG notes, as loop optimizer expect loop to either start by
843 fallthru edge or jump following the LOOP_BEG note jumping to the loop exit
844 test. */
845
846 static rtx
847 last_loop_beg_note (insn)
848 rtx insn;
849 {
850 rtx last = insn;
851
852 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
853 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
854 insn = NEXT_INSN (insn))
855 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
856 last = insn;
857
858 return last;
859 }
860
/* Attempt to change code to redirect edge E to TARGET.  Don't do that on
   expense of adding new instructions or reordering basic blocks.

   Function can be also called with edge destination equivalent to the TARGET.
   Then it should try the simplifications and do nothing if none is possible.

   Return true if transformation succeeded.  We still return false in case E
   already destinated TARGET and we didn't managed to simplify instruction
   stream.  */

bool
redirect_edge_and_branch (e, target)
     edge e;
     basic_block target;
{
  rtx tmp;
  rtx old_label = e->dest->head;
  basic_block src = e->src;
  rtx insn = src->end;

  /* Abnormal call and exception edges cannot be redirected by editing
     the jump.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  if (try_redirect_by_replacing_jump (e, target))
    return true;

  /* Do this fast path late, as we want above code to simplify for cases
     where called on single edge leaving basic block containing nontrivial
     jump insn.  */
  else if (e->dest == target)
    return false;

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return false;
  else if (GET_CODE (insn) != JUMP_INSN)
    return false;

  /* Recognize a tablejump and adjust all matching cases.  */
  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (target);

      if (target == EXIT_BLOCK_PTR)
	return false;
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      /* Rewrite every table entry that pointed at the old label.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      /* If the insn doesn't go where we think, we're confused.  */
      if (JUMP_LABEL (insn) != old_label)
	abort ();

      /* If the substitution doesn't succeed, die.  This can happen
	 if the back end emitted unrecognizable instructions or if
	 target is exit block on some arches.  */
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  if (target == EXIT_BLOCK_PTR)
	    return false;
	  abort ();
	}
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    redirect_edge_succ_nodup (e, target);

  return true;
}
972
/* Like force_nonfallthru below, but additionally performs redirection
   Used by redirect_edge_and_branch_force.

   E must be a fallthru, non-abnormal edge.  A jump (or return, for the
   exit block) to TARGET is emitted, creating a new jump block when E's
   source has other successors.  Returns the newly created block, or
   NULL when none was needed.  */

static basic_block
force_nonfallthru_and_redirect (e, target)
     edge e;
     basic_block target;
{
  basic_block jump_block, new_bb = NULL;
  rtx note;
  edge new_edge;

  if (e->flags & EDGE_ABNORMAL)
    abort ();
  else if (!(e->flags & EDGE_FALLTHRU))
    abort ();
  else if (e->src == ENTRY_BLOCK_PTR)
    {
      /* We can't redirect the entry block.  Create an empty block at the
         start of the function which we use to add the new jump.  */
      edge *pe1;
      basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);

      /* Change the existing edge's source to be the new block, and add
	 a new edge from the entry block to the new block.  */
      e->src = bb;
      /* Unlink E from the entry block's successor list by hand.  */
      for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
	if (*pe1 == e)
	  {
	    *pe1 = e->succ_next;
	    break;
	  }
      e->succ_next = 0;
      bb->succ = e;
      make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
    }

  /* If the source has other successors, the jump must live in a new
     block of its own.  */
  if (e->src->succ->succ_next)
    {
      /* Create the new structures.  */
      note = last_loop_beg_note (e->src->end);
      jump_block
	= create_basic_block (NEXT_INSN (note), NULL, e->src);
      jump_block->count = e->count;
      jump_block->frequency = EDGE_FREQUENCY (e);
      jump_block->loop_depth = target->loop_depth;

      if (target->global_live_at_start)
	{
	  jump_block->global_live_at_start
	    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  jump_block->global_live_at_end
	    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  COPY_REG_SET (jump_block->global_live_at_start,
			target->global_live_at_start);
	  COPY_REG_SET (jump_block->global_live_at_end,
			target->global_live_at_start);
	}

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = e->probability;
      new_edge->count = e->count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR)
    {
      /* Jumping to the exit block means returning.  */
      if (HAVE_return)
	emit_jump_insn_after (gen_return (), jump_block->end);
      else
	abort ();
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after (gen_jump (label), jump_block->end);
      JUMP_LABEL (jump_block->end) = label;
      LABEL_NUSES (label)++;
    }

  emit_barrier_after (jump_block->end);
  redirect_edge_succ_nodup (e, target);

  return new_bb;
}
1067
/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.

   Thin wrapper: redirects E to its own current destination, so only
   the jump-emission side of force_nonfallthru_and_redirect happens.  */

basic_block
force_nonfallthru (e)
     edge e;
{
  return force_nonfallthru_and_redirect (e, e->dest);
}
1078
1079 /* Redirect edge even at the expense of creating new jump insn or
1080 basic block. Return new basic block if created, NULL otherwise.
1081 Abort if conversion is impossible. */
1082
1083 basic_block
1084 redirect_edge_and_branch_force (e, target)
1085 edge e;
1086 basic_block target;
1087 {
1088 if (redirect_edge_and_branch (e, target)
1089 || e->dest == target)
1090 return NULL;
1091
1092 /* In case the edge redirection failed, try to force it to be non-fallthru
1093 and redirect newly created simplejump. */
1094 return force_nonfallthru_and_redirect (e, target);
1095 }
1096
1097 /* The given edge should potentially be a fallthru edge. If that is in
1098 fact true, delete the jump and barriers that are in the way. */
1099
void
tidy_fallthru_edge (e, b, c)
     edge e;
     basic_block b, c;
{
  rtx q;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  /* Any real insn between B's end and C's head means B does not actually
     fall into C; bail out without touching anything.  */
  for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = b->end;
  if (GET_CODE (q) == JUMP_INSN
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || (b->succ == e && e->succ_next == NULL)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      /* Step back over the jump itself so it gets deleted below.  */
      q = PREV_INSN (q);

      /* We don't want a block to end on a line-number note since that has
	 the potential of changing the code between -g and not -g.  */
      while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
	q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  Everything strictly between Q and
     C's head (the jump, barriers, deletable labels/notes) goes away.  */
  if (q != PREV_INSN (c->head))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));

  e->flags |= EDGE_FALLTHRU;
}
1151
1152 /* Fix up edges that now fall through, or rather should now fall through
1153 but previously required a jump around now deleted blocks. Simplify
1154 the search by only examining blocks numerically adjacent, since this
1155 is how find_basic_blocks created them. */
1156
1157 void
1158 tidy_fallthru_edges ()
1159 {
1160 basic_block b, c;
1161
1162 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1163 return;
1164
1165 FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
1166 {
1167 edge s;
1168
1169 c = b->next_bb;
1170
1171 /* We care about simple conditional or unconditional jumps with
1172 a single successor.
1173
1174 If we had a conditional branch to the next instruction when
1175 find_basic_blocks was called, then there will only be one
1176 out edge for the block which ended with the conditional
1177 branch (since we do not create duplicate edges).
1178
1179 Furthermore, the edge will be marked as a fallthru because we
1180 merge the flags for the duplicate edges. So we do not want to
1181 check that the edge is not a FALLTHRU edge. */
1182
1183 if ((s = b->succ) != NULL
1184 && ! (s->flags & EDGE_COMPLEX)
1185 && s->succ_next == NULL
1186 && s->dest == c
1187 /* If the jump insn has side effects, we can't tidy the edge. */
1188 && (GET_CODE (b->end) != JUMP_INSN
1189 || onlyjump_p (b->end)))
1190 tidy_fallthru_edge (s, b, c);
1191 }
1192 }
1193 \f
1194 /* Helper function for split_edge. Return true in case edge BB2 to BB1
1195 is back edge of syntactic loop. */
1196
static bool
back_edge_of_syntactic_loop_p (bb1, bb2)
     basic_block bb1, bb2;
{
  rtx insn;
  int count = 0;
  basic_block bb;

  /* A self-loop is trivially a back edge.  */
  if (bb1 == bb2)
    return true;

  /* ??? Could we guarantee that bb indices are monotone, so that we could
     just compare them?  */
  /* Make sure BB2 actually follows BB1 in the block chain; otherwise the
     insn walk below could never reach BB2's head.  */
  for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
    continue;

  if (!bb)
    return false;

  /* Walk the insns between BB1's end and BB2's head, tracking the loop
     note nesting balance.  A balance that never goes negative indicates
     the span does not leave an enclosing syntactic loop, i.e. the edge
     BB2->BB1 closes such a loop.  */
  for (insn = bb1->end; insn != bb2->head && count >= 0;
       insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  count++;
	else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
	  count--;
      }

  return count >= 0;
}
1228
1229 /* Split a (typically critical) edge. Return the new block.
1230 Abort on abnormal edges.
1231
1232 ??? The code generally expects to be called on critical edges.
1233 The case of a block ending in an unconditional jump to a
1234 block with multiple predecessors is not handled optimally. */
1235
basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block bb;
  edge edge_out;
  rtx before;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;

      for (e = edge_in->dest->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

		        E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */

  if (edge_in->dest != EXIT_BLOCK_PTR
      && PREV_INSN (edge_in->dest->head)
      && GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
      && (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
	  == NOTE_INSN_LOOP_BEG)
      && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
    before = PREV_INSN (edge_in->dest->head);
  else if (edge_in->dest != EXIT_BLOCK_PTR)
    before = edge_in->dest->head;
  else
    before = NULL_RTX;

  /* The new block inherits the profile of the edge it splits.  */
  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
  bb->count = edge_in->count;
  bb->frequency = EDGE_FREQUENCY (edge_in);

  /* ??? This info is likely going to be out of date very soon.  */
  if (edge_in->dest->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (bb->global_live_at_start,
		    edge_in->dest->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end,
		    edge_in->dest->global_live_at_start);
    }

  edge_out = make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      if (!redirect_edge_and_branch (edge_in, bb))
	abort ();
    }
  else
    redirect_edge_succ (edge_in, bb);

  return bb;
}
1321
1322 /* Queue instructions for insertion on an edge between two basic blocks.
1323 The new instructions and basic blocks (if any) will not appear in the
1324 CFG until commit_edge_insertions is called. */
1325
1326 void
1327 insert_insn_on_edge (pattern, e)
1328 rtx pattern;
1329 edge e;
1330 {
1331 /* We cannot insert instructions on an abnormal critical edge.
1332 It will be easier to find the culprit if we die now. */
1333 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
1334 abort ();
1335
1336 if (e->insns == NULL_RTX)
1337 start_sequence ();
1338 else
1339 push_to_sequence (e->insns);
1340
1341 emit_insn (pattern);
1342
1343 e->insns = get_insns ();
1344 end_sequence ();
1345 }
1346
1347 /* Update the CFG for the instructions queued on edge E. */
1348
static void
commit_one_edge_insertion (e, watch_calls)
     edge e;
     int watch_calls;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Special case -- avoid inserting code between call and storing
     its return value.  */
  if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
      && e->src != ENTRY_BLOCK_PTR
      && GET_CODE (e->src->end) == CALL_INSN)
    {
      rtx next = next_nonnote_insn (e->src->end);

      after = e->dest->head;
      /* The first insn after the call may be a stack pop, skip it.  */
      while (next
	     && keep_with_call_p (next))
	{
	  after = next;
	  next = next_nonnote_insn (next);
	}
      bb = e->dest;
    }
  if (!before && !after)
    {
      /* Figure out where to put these things.  If the destination has
	 one predecessor, insert there.  Except for the exit block.  */
      if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
	{
	  bb = e->dest;

	  /* Get the location correct wrt a code label, and "nice" wrt
	     a basic block note, and before everything else.  */
	  tmp = bb->head;
	  if (GET_CODE (tmp) == CODE_LABEL)
	    tmp = NEXT_INSN (tmp);
	  if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  if (tmp == bb->head)
	    before = tmp;
	  else if (tmp)
	    after = PREV_INSN (tmp);
	  else
	    after = get_last_insn ();
	}

      /* If the source has one successor and the edge is not abnormal,
	 insert there.  Except for the entry block.  */
      else if ((e->flags & EDGE_ABNORMAL) == 0
	       && e->src->succ->succ_next == NULL
	       && e->src != ENTRY_BLOCK_PTR)
	{
	  bb = e->src;

	  /* It is possible to have a non-simple jump here.  Consider a target
	     where some forms of unconditional jumps clobber a register.  This
	     happens on the fr30 for example.

	     We know this block has a single successor, so we can just emit
	     the queued insns before the jump.  */
	  if (GET_CODE (bb->end) == JUMP_INSN)
	    /* Also back up over any NOTE_INSN_LOOP_BEG notes immediately
	       preceding the jump, so the insertion lands outside them.  */
	    for (before = bb->end;
		 GET_CODE (PREV_INSN (before)) == NOTE
		 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
		 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
	      ;
	  else
	    {
	      /* We'd better be fallthru, or we've lost track of what's what.  */
	      if ((e->flags & EDGE_FALLTHRU) == 0)
		abort ();

	      after = bb->end;
	    }
	}
      /* Otherwise we must split the edge.  */
      else
	{
	  bb = split_edge (e);
	  after = bb->end;
	}
    }

  /* Now that we've found the spot, do the insertion.  */

  if (before)
    {
      emit_insns_before (insns, before);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insns_after (insns, after);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = bb->succ;
      if (e->dest != EXIT_BLOCK_PTR
	  || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
	abort ();

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else if (GET_CODE (last) == JUMP_INSN)
    abort ();

  /* The inserted insns may have created new blocks; rediscover them.  */
  find_sub_basic_blocks (bb);
}
1472
1473 /* Update the CFG for all queued instructions. */
1474
1475 void
1476 commit_edge_insertions ()
1477 {
1478 basic_block bb;
1479
1480 #ifdef ENABLE_CHECKING
1481 verify_flow_info ();
1482 #endif
1483
1484 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1485 {
1486 edge e, next;
1487
1488 for (e = bb->succ; e; e = next)
1489 {
1490 next = e->succ_next;
1491 if (e->insns)
1492 commit_one_edge_insertion (e, false);
1493 }
1494 }
1495 }
1496 \f
1497 /* Update the CFG for all queued instructions, taking special care of inserting
1498 code on edges between call and storing its return value. */
1499
1500 void
1501 commit_edge_insertions_watch_calls ()
1502 {
1503 basic_block bb;
1504
1505 #ifdef ENABLE_CHECKING
1506 verify_flow_info ();
1507 #endif
1508
1509 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1510 {
1511 edge e, next;
1512
1513 for (e = bb->succ; e; e = next)
1514 {
1515 next = e->succ_next;
1516 if (e->insns)
1517 commit_one_edge_insertion (e, true);
1518 }
1519 }
1520 }
1521 \f
1522 /* Print out one basic block with live information at start and end. */
1523
1524 void
1525 dump_bb (bb, outf)
1526 basic_block bb;
1527 FILE *outf;
1528 {
1529 rtx insn;
1530 rtx last;
1531 edge e;
1532
1533 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
1534 bb->index, bb->loop_depth);
1535 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
1536 putc ('\n', outf);
1537
1538 fputs (";; Predecessors: ", outf);
1539 for (e = bb->pred; e; e = e->pred_next)
1540 dump_edge_info (outf, e, 0);
1541 putc ('\n', outf);
1542
1543 fputs (";; Registers live at start:", outf);
1544 dump_regset (bb->global_live_at_start, outf);
1545 putc ('\n', outf);
1546
1547 for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
1548 insn = NEXT_INSN (insn))
1549 print_rtl_single (outf, insn);
1550
1551 fputs (";; Registers live at end:", outf);
1552 dump_regset (bb->global_live_at_end, outf);
1553 putc ('\n', outf);
1554
1555 fputs (";; Successors: ", outf);
1556 for (e = bb->succ; e; e = e->succ_next)
1557 dump_edge_info (outf, e, 1);
1558 putc ('\n', outf);
1559 }
1560
void
debug_bb (bb)
     basic_block bb;
{
  /* Debugger convenience: dump BB to stderr.  */
  dump_bb (bb, stderr);
}
1567
void
debug_bb_n (n)
     int n;
{
  /* Debugger convenience: dump basic block number N to stderr.  */
  dump_bb (BASIC_BLOCK (n), stderr);
}
1574 \f
1575 /* Like print_rtl, but also print out live information for the start of each
1576 basic block. */
1577
void
print_rtl_with_bb (outf, rtx_first)
     FILE *outf;
     rtx rtx_first;
{
  rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Indexed by INSN_UID: the block an insn starts / ends, if any.  */
      basic_block *start
	= (basic_block *) xcalloc (max_uid, sizeof (basic_block));
      basic_block *end
	= (basic_block *) xcalloc (max_uid, sizeof (basic_block));
      /* Indexed by INSN_UID: whether the insn belongs to zero, one, or
	 several blocks (the last indicates corrupt bb boundaries).  */
      enum bb_state *in_bb_p
	= (enum bb_state *) xcalloc (max_uid, sizeof (enum bb_state));

      basic_block bb;

      /* First pass: record block boundaries and per-insn membership.  */
      FOR_EACH_BB_REVERSE (bb)
	{
	  rtx x;

	  start[INSN_UID (bb->head)] = bb;
	  end[INSN_UID (bb->end)] = bb;
	  for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      enum bb_state state = IN_MULTIPLE_BB;

	      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID (x)] = state;

	      if (x == bb->end)
		break;
	    }
	}

      /* Second pass: print each insn, annotated with block boundaries
	 and any membership anomalies.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;

	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; Start of basic block %d, registers live:",
		       bb->index);
	      dump_regset (bb->global_live_at_start, outf);
	      putc ('\n', outf);
	    }

	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
	      && GET_CODE (tmp_rtx) != NOTE
	      && GET_CODE (tmp_rtx) != BARRIER)
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; End of basic block %d, registers live:\n",
		       bb->index);
	      dump_regset (bb->global_live_at_end, outf);
	      putc ('\n', outf);
	    }

	  if (did_output)
	    putc ('\n', outf);
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
1665 \f
1666 void
1667 update_br_prob_note (bb)
1668 basic_block bb;
1669 {
1670 rtx note;
1671 if (GET_CODE (bb->end) != JUMP_INSN)
1672 return;
1673 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
1674 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1675 return;
1676 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1677 }
1678 \f
1679 /* Verify the CFG consistency. This function check some CFG invariants and
1680 aborts when something is wrong. Hope that this function will help to
1681 convert many optimization passes to preserve CFG consistent.
1682
1683 Currently it does following checks:
1684
1685 - test head/end pointers
1686 - overlapping of basic blocks
1687 - edge list correctness
1688 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1689 - tails of basic blocks (ensure that boundary is necessary)
1690 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1691 and NOTE_INSN_BASIC_BLOCK
1692 - check that all insns are in the basic blocks
1693 (except the switch handling code, barriers and notes)
1694 - check that all returns are followed by barriers
1695
1696 In future it can be extended check a lot of other stuff as well
1697 (reachability of basic blocks, life information, etc. etc.). */
1698
void
verify_flow_info ()
{
  const int max_uid = get_max_uid ();
  const rtx rtx_first = get_insns ();
  rtx last_head = get_last_insn ();
  basic_block *bb_info, *last_visited;
  size_t *edge_checksum;
  rtx x;
  int num_bb_notes, err = 0;
  basic_block bb, last_bb_seen;

  /* Per-insn owning block; also serves to detect overlapping blocks.  */
  bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
  /* The "+ 2" in these arrays makes room for the ENTRY and EXIT blocks,
     which use negative indices.  */
  last_visited = (basic_block *) xcalloc (last_basic_block + 2,
					  sizeof (basic_block));
  edge_checksum = (size_t *) xcalloc (last_basic_block + 2, sizeof (size_t));

  /* Check bb chain & numbers.  */
  last_bb_seen = ENTRY_BLOCK_PTR;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
    {
      if (bb != EXIT_BLOCK_PTR
	  && bb != BASIC_BLOCK (bb->index))
	{
	  error ("bb %d on wrong place", bb->index);
	  err = 1;
	}

      if (bb->prev_bb != last_bb_seen)
	{
	  error ("prev_bb of %d should be %d, not %d",
		 bb->index, last_bb_seen->index, bb->prev_bb->index);
	  err = 1;
	}

      last_bb_seen = bb;
    }

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx head = bb->head;
      rtx end = bb->end;

      /* Verify the end of the basic block is in the INSN chain.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	if (x == end)
	  break;

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block and initialize the BB_INFO array
	     used by other passes.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* Remember where this block started so the next (earlier) block's
	 end is searched only up to here.  */
      last_head = x;
    }

  /* Now check the basic blocks (boundaries etc.) */
  FOR_EACH_BB_REVERSE (bb)
    {
      int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
      edge e;
      rtx note;

      /* A REG_BR_PROB note on a conditional jump must agree with the
	 probability recorded on the branch edge.  */
      if (INSN_P (bb->end)
	  && (note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX))
	  && bb->succ && bb->succ->succ_next
	  && any_condjump_p (bb->end))
	{
	  if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
		     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
	      err = 1;
	    }
	}
      if (bb->count < 0)
	{
	  error ("verify_flow_info: Wrong count of block %i %i",
		 bb->index, (int)bb->count);
	  err = 1;
	}
      if (bb->frequency < 0)
	{
	  error ("verify_flow_info: Wrong frequency of block %i %i",
		 bb->index, bb->frequency);
	  err = 1;
	}
      /* Walk the successor edges, classifying each and validating its
	 profile data, uniqueness, and (for fallthru) insn-chain shape.  */
      for (e = bb->succ; e; e = e->succ_next)
	{
	  if (last_visited [e->dest->index + 2] == bb)
	    {
	      error ("verify_flow_info: Duplicate edge %i->%i",
		     e->src->index, e->dest->index);
	      err = 1;
	    }
	  if (e->probability < 0 || e->probability > REG_BR_PROB_BASE)
	    {
	      error ("verify_flow_info: Wrong probability of edge %i->%i %i",
		     e->src->index, e->dest->index, e->probability);
	      err = 1;
	    }
	  if (e->count < 0)
	    {
	      error ("verify_flow_info: Wrong count of edge %i->%i %i",
		     e->src->index, e->dest->index, (int)e->count);
	      err = 1;
	    }

	  last_visited [e->dest->index + 2] = bb;

	  if (e->flags & EDGE_FALLTHRU)
	    n_fallthru++;

	  /* An edge with no flags (besides possibly DFS_BACK) is a plain
	     branch edge.  */
	  if ((e->flags & ~EDGE_DFS_BACK) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_call++;

	  if (e->flags & EDGE_EH)
	    n_eh++;
	  else if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;

	  if ((e->flags & EDGE_FALLTHRU)
	      && e->src != ENTRY_BLOCK_PTR
	      && e->dest != EXIT_BLOCK_PTR)
	    {
	      rtx insn;

	      if (e->src->next_bb != e->dest)
		{
		  error
		    ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		     e->src->index, e->dest->index);
		  err = 1;
		}
	      else
		for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
		     insn = NEXT_INSN (insn))
		  if (GET_CODE (insn) == BARRIER
#ifndef CASE_DROPS_THROUGH
		      || INSN_P (insn)
#else
		      || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
#endif
		      )
		    {
		      error ("verify_flow_info: Incorrect fallthru %i->%i",
			     e->src->index, e->dest->index);
		      fatal_insn ("wrong insn in the fallthru edge", insn);
		      err = 1;
		    }
	    }

	  if (e->src != bb)
	    {
	      error ("verify_flow_info: Basic block %d succ edge is corrupted",
		     bb->index);
	      fprintf (stderr, "Predecessor: ");
	      dump_edge_info (stderr, e, 0);
	      fprintf (stderr, "\nSuccessor: ");
	      dump_edge_info (stderr, e, 1);
	      fprintf (stderr, "\n");
	      err = 1;
	    }

	  /* Each edge should appear exactly once as a successor and once
	     as a predecessor; the checksum cancels out when it does.  */
	  edge_checksum[e->dest->index + 2] += (size_t) e;
	}

      if (n_eh && GET_CODE (PATTERN (bb->end)) != RESX
	  && !find_reg_note (bb->end, REG_EH_REGION, NULL_RTX))
	{
	  error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (GET_CODE (bb->end) != JUMP_INSN
	      || (n_branch > 1 && (any_uncondjump_p (bb->end)
				   || any_condjump_p (bb->end)))))
	{
	  error ("Too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (bb->end))
	{
	  error ("Fallthru edge after unconditional jump %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (bb->end))
	{
	  error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_condjump_p (bb->end)
	  && JUMP_LABEL (bb->end) != bb->next_bb->head)
	{
	  error ("Wrong amount of branch edges after conditional jump %i", bb->index);
	  err = 1;
	}
      if (n_call && GET_CODE (bb->end) != CALL_INSN)
	{
	  error ("Call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal
	  && (GET_CODE (bb->end) != CALL_INSN && n_call != n_abnormal)
	  && (GET_CODE (bb->end) != JUMP_INSN
	      || any_condjump_p (bb->end)
	      || any_uncondjump_p (bb->end)))
	{
	  error ("Abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}

      if (!n_fallthru)
	{
	  rtx insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = bb->end; !insn || GET_CODE (insn) != BARRIER;
	       insn = NEXT_INSN (insn))
	    if (!insn
		|| (GET_CODE (insn) == NOTE
		    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
	      {
		error ("missing barrier after block %i", bb->index);
		err = 1;
		break;
	      }
	}

      for (e = bb->pred; e; e = e->pred_next)
	{
	  if (e->dest != bb)
	    {
	      error ("basic block %d pred edge is corrupted", bb->index);
	      fputs ("Predecessor: ", stderr);
	      dump_edge_info (stderr, e, 0);
	      fputs ("\nSuccessor: ", stderr);
	      dump_edge_info (stderr, e, 1);
	      fputc ('\n', stderr);
	      err = 1;
	    }
	  edge_checksum[e->dest->index + 2] -= (size_t) e;
	}

      /* Every insn in the block must map back to the block via
	 BLOCK_FOR_INSN (when that mapping is maintained).  */
      for (x = bb->head; x != NEXT_INSN (bb->end); x = NEXT_INSN (x))
	if (basic_block_for_insn && BLOCK_FOR_INSN (x) != bb)
	  {
	    debug_rtx (x);
	    if (! BLOCK_FOR_INSN (x))
	      error
		("insn %d inside basic block %d but block_for_insn is NULL",
		 INSN_UID (x), bb->index);
	    else
	      error
		("insn %d inside basic block %d but block_for_insn is %i",
		 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);

	    err = 1;
	  }

      /* OK pointers are correct.  Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = bb->head;
      if (GET_CODE (x) == CODE_LABEL)
	{
	  if (bb->end == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (bb->end == x)
	/* Do checks for empty blocks here.  */
	;
      else
	/* Scan the body: no stray bb notes, and no flow-control insns
	   (jump, label, barrier) before the block's end.  */
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == bb->end)
	      break;

	    if (GET_CODE (x) == JUMP_INSN
		|| GET_CODE (x) == CODE_LABEL
		|| GET_CODE (x) == BARRIER)
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Complete edge checksumming for ENTRY and EXIT.  */
  {
    edge e;

    for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
      edge_checksum[e->dest->index + 2] += (size_t) e;

    for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
      edge_checksum[e->dest->index + 2] -= (size_t) e;
  }

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    if (edge_checksum[bb->index + 2])
      {
	error ("basic block %i edge lists are corrupted", bb->index);
	err = 1;
      }

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  /* Finally walk the whole insn stream: bb notes must appear in block
     order, every insn must belong to some block (with a few allowed
     exceptions), and returns must be followed by barriers.  */
  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not numbered consecutively");

	  last_bb_seen = bb;
	}

      if (!bb_info[INSN_UID (x)])
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any block.  */
	      if (NEXT_INSN (x)
		  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      if (INSN_P (x)
	  && GET_CODE (x) == JUMP_INSN
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
	    fatal_insn ("return not followed by barrier", x);
    }

  if (num_bb_notes != n_basic_blocks)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

  if (err)
    internal_error ("verify_flow_info failed");

  /* Clean up.  */
  free (bb_info);
  free (last_visited);
  free (edge_checksum);
}
2117 \f
2118 /* Assume that the preceding pass has possibly eliminated jump instructions
2119 or converted the unconditional jumps. Eliminate the edges from CFG.
2120 Return true if any edges are eliminated. */
2121
bool
purge_dead_edges (bb)
     basic_block bb;
{
  edge e, next;
  rtx insn = bb->end, note;
  bool purged = false;

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (GET_CODE (insn) == INSN
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (e->flags & EDGE_EH)
	{
	  /* Keep EH edges only while the ending insn can still throw.  */
	  if (can_throw_internal (bb->end))
	    continue;
	}
      else if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  /* Keep abnormal call edges only for calls that are still
	     potentially non-local (REG_EH_REGION absent or >= 0).  */
	  if (GET_CODE (bb->end) == CALL_INSN
	      && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
		  || INTVAL (XEXP (note, 0)) >= 0))
	    continue;
	}
      else
	continue;

      remove_edge (e);
      bb->flags |= BB_DIRTY;
      purged = true;
    }

  if (GET_CODE (insn) == JUMP_INSN)
    {
      rtx note;
      edge b,f;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;

	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    continue;
	  else if (e->dest != EXIT_BLOCK_PTR
		   && e->dest->head == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    continue;
	  else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    continue;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction.  */
	    continue;

	  /* We do not need this edge.  */
	  bb->flags |= BB_DIRTY;
	  purged = true;
	  remove_edge (e);
	}

      if (!bb->succ || !purged)
	return purged;

      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (!bb->succ->succ_next)
	{
	  /* A single remaining successor gets the whole probability.  */
	  bb->succ->probability = REG_BR_PROB_BASE;
	  bb->succ->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = INTVAL (XEXP (note, 0));
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
       e = e->succ_next)
    ;

  if (!e)
    return purged;

  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (!(e->flags & EDGE_FALLTHRU))
	{
	  bb->flags |= BB_DIRTY;
	  remove_edge (e);
	  purged = true;
	}
    }

  /* Exactly one (fallthru) edge must remain.  */
  if (!bb->succ || bb->succ->succ_next)
    abort ();

  bb->succ->probability = REG_BR_PROB_BASE;
  bb->succ->count = bb->count;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
2288
2289 /* Search all basic blocks for potentially dead edges and purge them. Return
2290 true if some edge has been eliminated. */
2291
2292 bool
2293 purge_all_dead_edges (update_life_p)
2294 int update_life_p;
2295 {
2296 int purged = false;
2297 sbitmap blocks = 0;
2298 basic_block bb;
2299
2300 if (update_life_p)
2301 {
2302 blocks = sbitmap_alloc (last_basic_block);
2303 sbitmap_zero (blocks);
2304 }
2305
2306 FOR_EACH_BB (bb)
2307 {
2308 bool purged_here = purge_dead_edges (bb);
2309
2310 purged |= purged_here;
2311 if (purged_here && update_life_p)
2312 SET_BIT (blocks, bb->index);
2313 }
2314
2315 if (update_life_p && purged)
2316 update_life_info (blocks, UPDATE_LIFE_GLOBAL,
2317 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2318 | PROP_KILL_DEAD_CODE);
2319
2320 if (update_life_p)
2321 sbitmap_free (blocks);
2322 return purged;
2323 }