[gcc.git] / gcc / cfganal.c
1 /* Control flow graph analysis code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file contains various simple utilities to analyze the CFG. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "hard-reg-set.h"
27 #include "basic-block.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "toplev.h"
31 #include "obstack.h"
32 #include "tm_p.h"
33
34 /* Store the data structures necessary for depth-first search. */
35 struct depth_first_search_dsS {
36 /* stack for backtracking during the algorithm */
37 basic_block *stack;
38
39 /* number of basic blocks on the stack.  That is, positions 0, ..., sp-1
40 hold blocks. */
41 unsigned int sp;
42
43 /* record of basic blocks already seen by depth-first search */
44 sbitmap visited_blocks;
45 };
46 typedef struct depth_first_search_dsS *depth_first_search_ds;
47
48 static void flow_dfs_compute_reverse_init
49 PARAMS ((depth_first_search_ds));
50 static void flow_dfs_compute_reverse_add_bb
51 PARAMS ((depth_first_search_ds, basic_block));
52 static basic_block flow_dfs_compute_reverse_execute
53 PARAMS ((depth_first_search_ds));
54 static void flow_dfs_compute_reverse_finish
55 PARAMS ((depth_first_search_ds));
56 static void remove_fake_successors PARAMS ((basic_block));
57 static bool need_fake_edge_p PARAMS ((rtx));
58 \f
59 /* Return true if the block has no effect and only forwards control flow to
60 its single destination. */
61
62 bool
63 forwarder_block_p (bb)
64 basic_block bb;
65 {
66 rtx insn;
67
68 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
69 || !bb->succ || bb->succ->succ_next)
70 return false;
71
72 for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
73 if (INSN_P (insn) && active_insn_p (insn))
74 return false;
75
76 return (!INSN_P (insn)
77 || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
78 || !active_insn_p (insn));
79 }
80
81 /* Return nonzero if we can reach target from src by falling through. */
82
83 bool
84 can_fallthru (src, target)
85 basic_block src, target;
86 {
87 rtx insn = src->end;
88 rtx insn2 = target->head;
89
90 if (src->index + 1 == target->index && !active_insn_p (insn2))
91 insn2 = next_active_insn (insn2);
92
93 /* ??? Later we may add code to move jump tables offline. */
94 return next_active_insn (insn) == insn2;
95 }
96 \f
97 /* Mark the back edges in DFS traversal.
98 Return non-zero if a loop (natural or otherwise) is present.
99 Inspired by Depth_First_Search_PP described in:
100
101 Advanced Compiler Design and Implementation
102 Steven Muchnick
103 Morgan Kaufmann, 1997
104
105 and heavily borrowed from flow_depth_first_order_compute. */
106
107 bool
108 mark_dfs_back_edges ()
109 {
110 edge *stack;
111 int *pre;
112 int *post;
113 int sp;
114 int prenum = 1;
115 int postnum = 1;
116 sbitmap visited;
117 bool found = false;
118
119 /* Allocate the preorder and postorder number arrays. */
120 pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
121 post = (int *) xcalloc (n_basic_blocks, sizeof (int));
122
123 /* Allocate stack for back-tracking up CFG. */
124 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
125 sp = 0;
126
127 /* Allocate bitmap to track nodes that have been visited. */
128 visited = sbitmap_alloc (n_basic_blocks);
129
130 /* None of the nodes in the CFG have been visited yet. */
131 sbitmap_zero (visited);
132
133 /* Push the first edge on to the stack. */
134 stack[sp++] = ENTRY_BLOCK_PTR->succ;
135
136 while (sp)
137 {
138 edge e;
139 basic_block src;
140 basic_block dest;
141
142 /* Look at the edge on the top of the stack. */
143 e = stack[sp - 1];
144 src = e->src;
145 dest = e->dest;
146 e->flags &= ~EDGE_DFS_BACK;
147
148 /* Check if the edge destination has been visited yet. */
149 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
150 {
151 /* Mark that we have visited the destination. */
152 SET_BIT (visited, dest->index);
153
154 pre[dest->index] = prenum++;
155 if (dest->succ)
156 {
157 /* Since the DEST node has been visited for the first
158 time, check its successors. */
159 stack[sp++] = dest->succ;
160 }
161 else
162 post[dest->index] = postnum++;
163 }
164 else
165 {
166 if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
167 && pre[src->index] >= pre[dest->index]
168 && post[dest->index] == 0)
169 e->flags |= EDGE_DFS_BACK, found = true;
170
171 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
172 post[src->index] = postnum++;
173
174 if (e->succ_next)
175 stack[sp - 1] = e->succ_next;
176 else
177 sp--;
178 }
179 }
180
181 free (pre);
182 free (post);
183 free (stack);
184 sbitmap_free (visited);
185
186 return found;
187 }
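
The EDGE_DFS_BACK flag set above is what callers consume. A minimal, hypothetical sketch of such a caller follows; the function name is invented for illustration and is not part of this file:

static void
example_note_back_edges ()
{
  int i;
  edge e;

  /* Mark every back edge; bail out early if the CFG is acyclic.  */
  if (! mark_dfs_back_edges ())
    return;

  for (i = 0; i < n_basic_blocks; i++)
    for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
      if (e->flags & EDGE_DFS_BACK)
        {
          /* E closes a cycle (natural or otherwise); act on it here.  */
        }
}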
188
189 /* Return true if we need to add a fake edge to the exit block.
190 Helper function for flow_call_edges_add. */
191
192 static bool
193 need_fake_edge_p (insn)
194 rtx insn;
195 {
196 if (!INSN_P (insn))
197 return false;
198
199 if ((GET_CODE (insn) == CALL_INSN
200 && !SIBLING_CALL_P (insn)
201 && !find_reg_note (insn, REG_NORETURN, NULL)
202 && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
203 && !CONST_OR_PURE_CALL_P (insn)))
204 return true;
205
206 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
207 && MEM_VOLATILE_P (PATTERN (insn)))
208 || (GET_CODE (PATTERN (insn)) == PARALLEL
209 && asm_noperands (insn) != -1
210 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
211 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
212 }
213
214 /* Add fake edges to the function exit for any non-constant and
215 non-noreturn calls, and for volatile inline assembly, in the bitmap
216 of blocks specified by BLOCKS, or in the whole CFG if BLOCKS is
217 zero. Return the number of blocks that were split.
218
219 The goal is to expose cases in which entering a basic block does not imply
220 that all subsequent instructions must be executed. */
221
222 int
223 flow_call_edges_add (blocks)
224 sbitmap blocks;
225 {
226 int i;
227 int blocks_split = 0;
228 int bb_num = 0;
229 basic_block *bbs;
230 bool check_last_block = false;
231
232 /* Map bb indices into basic block pointers since split_block
233 will renumber the basic blocks. */
234
235 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
236
237 if (! blocks)
238 {
239 for (i = 0; i < n_basic_blocks; i++)
240 bbs[bb_num++] = BASIC_BLOCK (i);
241
242 check_last_block = true;
243 }
244 else
245 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
246 {
247 bbs[bb_num++] = BASIC_BLOCK (i);
248 if (i == n_basic_blocks - 1)
249 check_last_block = true;
250 });
251
252 /* In the last basic block, before epilogue generation, there will be
253 a fallthru edge to EXIT. Special care is required if the last insn
254 of the last basic block is a call because make_edge folds duplicate
255 edges, which would result in the fallthru edge also being marked
256 fake, which would result in the fallthru edge being removed by
257 remove_fake_edges, which would result in an invalid CFG.
258
259 Moreover, we can't elide the outgoing fake edge, since the block
260 profiler needs to take this into account in order to solve the minimal
261 spanning tree in the case that the call doesn't return.
262
263 Handle this by adding a dummy instruction in a new last basic block. */
264 if (check_last_block)
265 {
266 basic_block bb = BASIC_BLOCK (n_basic_blocks - 1);
267 rtx insn = bb->end;
268
269 /* Back up past insns that must be kept in the same block as a call. */
270 while (insn != bb->head
271 && keep_with_call_p (insn))
272 insn = PREV_INSN (insn);
273
274 if (need_fake_edge_p (insn))
275 {
276 edge e;
277
278 for (e = bb->succ; e; e = e->succ_next)
279 if (e->dest == EXIT_BLOCK_PTR)
280 break;
281
282 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
283 commit_edge_insertions ();
284 }
285 }
286
287 /* Now add fake edges to the function exit for any non-constant
288 calls, since there is no way to determine whether they will
289 return or not. */
290
291 for (i = 0; i < bb_num; i++)
292 {
293 basic_block bb = bbs[i];
294 rtx insn;
295 rtx prev_insn;
296
297 for (insn = bb->end; ; insn = prev_insn)
298 {
299 prev_insn = PREV_INSN (insn);
300 if (need_fake_edge_p (insn))
301 {
302 edge e;
303 rtx split_at_insn = insn;
304
305 /* Don't split the block between a call and an insn that should
306 remain in the same block as the call. */
307 if (GET_CODE (insn) == CALL_INSN)
308 while (split_at_insn != bb->end
309 && keep_with_call_p (NEXT_INSN (split_at_insn)))
310 split_at_insn = NEXT_INSN (split_at_insn);
311
312 /* The handling above of the final block before the epilogue
313 should be enough to verify that there is no edge to the exit
314 block in CFG already. Calling make_edge in such case would
315 cause us to mark that edge as fake and remove it later. */
316
317 #ifdef ENABLE_CHECKING
318 if (split_at_insn == bb->end)
319 for (e = bb->succ; e; e = e->succ_next)
320 if (e->dest == EXIT_BLOCK_PTR)
321 abort ();
322 #endif
323
324 /* Note that the following may create a new basic block
325 and renumber the existing basic blocks. */
326 e = split_block (bb, split_at_insn);
327 if (e)
328 blocks_split++;
329
330 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
331 }
332
333 if (insn == bb->head)
334 break;
335 }
336 }
337
338 if (blocks_split)
339 verify_flow_info ();
340
341 free (bbs);
342 return blocks_split;
343 }
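
A hedged usage sketch: a pass that needs an explicit path to EXIT after every potentially non-returning call could wrap its analysis like this. The wrapper name is hypothetical; note that remove_fake_edges (defined later in this file) only deletes the fake edges, so any blocks split above stay split:

static void
example_with_call_exit_edges ()
{
  /* Passing a null bitmap asks for fake edges in the whole CFG.  */
  int blocks_split = flow_call_edges_add (NULL);

  /* ... run an analysis that relies on each call having an edge
     to the exit block ... */

  /* Drop the fake edges again; the block splits remain.  */
  remove_fake_edges ();

  if (blocks_split)
    {
      /* The CFG was modified; callers may want to recompute any
         cached information here.  */
    }
}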
344
345 /* Find unreachable blocks. An unreachable block will have the
346 BB_REACHABLE bit clear in block->flags; a set bit indicates that
347 the block is reachable. */
348
349 void
350 find_unreachable_blocks ()
351 {
352 edge e;
353 int i, n;
354 basic_block *tos, *worklist;
355
356 n = n_basic_blocks;
357 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
358
359 /* Clear all the reachability flags. */
360
361 for (i = 0; i < n; ++i)
362 BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;
363
364 /* Add our starting points to the worklist. Almost always there will
365 be only one. It isn't inconceivable that we might one day directly
366 support Fortran alternate entry points. */
367
368 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
369 {
370 *tos++ = e->dest;
371
372 /* Mark the block reachable. */
373 e->dest->flags |= BB_REACHABLE;
374 }
375
376 /* Iterate: find everything reachable from what we've already seen. */
377
378 while (tos != worklist)
379 {
380 basic_block b = *--tos;
381
382 for (e = b->succ; e; e = e->succ_next)
383 if (!(e->dest->flags & BB_REACHABLE))
384 {
385 *tos++ = e->dest;
386 e->dest->flags |= BB_REACHABLE;
387 }
388 }
389
390 free (worklist);
391 }
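
A brief, hypothetical sketch of a consumer of the BB_REACHABLE flag computed above (the function name is invented for illustration):

static void
example_report_unreachable ()
{
  int i;

  find_unreachable_blocks ();

  for (i = 0; i < n_basic_blocks; i++)
    if (! (BASIC_BLOCK (i)->flags & BB_REACHABLE))
      {
        /* BASIC_BLOCK (i) cannot be reached from the entry block.  */
      }
}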
392 \f
393 /* Functions to access an edge list with a vector representation.
394 Enough data is kept such that given an index number, the
395 pred and succ that edge represents can be determined, or
396 given a pred and a succ, its index number can be returned.
397 This lets algorithms which would otherwise consume a lot of memory
398 representing the normally full matrix of edges (pred, succ) use a
399 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
400 space wasted in the client code on sparse flow graphs. */
401
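As a hedged illustration of the interface described above: the client function below is hypothetical, create_edge_list and free_edge_list are defined later in this section, and EDGE_INDEX, NUM_EDGES and EDGE_INDEX_NO_EDGE are the macros already used throughout this file.

static void
example_edge_list_client ()
{
  struct edge_list *elist = create_edge_list ();
  sbitmap edge_data = sbitmap_alloc (NUM_EDGES (elist));
  int ix;

  sbitmap_zero (edge_data);

  /* Attach one bit of data to the edge from the entry block to
     block 0, if such an edge exists.  */
  ix = EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (0));
  if (ix != EDGE_INDEX_NO_EDGE)
    SET_BIT (edge_data, ix);

  sbitmap_free (edge_data);
  free_edge_list (elist);
}
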
402 /* This function initializes the edge list. Basically the entire
403 flowgraph is processed, all edges are assigned a number, and
404 the data structure is filled in. */
405
406 struct edge_list *
407 create_edge_list ()
408 {
409 struct edge_list *elist;
410 edge e;
411 int num_edges;
412 int x;
413 int block_count;
414
415 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
416
417 num_edges = 0;
418
419 /* Determine the number of edges in the flow graph by counting successor
420 edges on each basic block. */
421 for (x = 0; x < n_basic_blocks; x++)
422 {
423 basic_block bb = BASIC_BLOCK (x);
424
425 for (e = bb->succ; e; e = e->succ_next)
426 num_edges++;
427 }
428
429 /* Don't forget successors of the entry block. */
430 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
431 num_edges++;
432
433 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
434 elist->num_blocks = block_count;
435 elist->num_edges = num_edges;
436 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
437
438 num_edges = 0;
439
440 /* Follow successors of the entry block, and register these edges. */
441 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
442 elist->index_to_edge[num_edges++] = e;
443
444 for (x = 0; x < n_basic_blocks; x++)
445 {
446 basic_block bb = BASIC_BLOCK (x);
447
448 /* Follow all successors of blocks, and register these edges. */
449 for (e = bb->succ; e; e = e->succ_next)
450 elist->index_to_edge[num_edges++] = e;
451 }
452
453 return elist;
454 }
455
456 /* This function frees the memory associated with an edge list. */
457
458 void
459 free_edge_list (elist)
460 struct edge_list *elist;
461 {
462 if (elist)
463 {
464 free (elist->index_to_edge);
465 free (elist);
466 }
467 }
468
469 /* This function provides debug output showing an edge list. */
470
471 void
472 print_edge_list (f, elist)
473 FILE *f;
474 struct edge_list *elist;
475 {
476 int x;
477
478 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
479 elist->num_blocks - 2, elist->num_edges);
480
481 for (x = 0; x < elist->num_edges; x++)
482 {
483 fprintf (f, " %-4d - edge(", x);
484 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
485 fprintf (f, "entry,");
486 else
487 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
488
489 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
490 fprintf (f, "exit)\n");
491 else
492 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
493 }
494 }
495
496 /* This function provides an internal consistency check of an edge list,
497 verifying that all edges are present, and that there are no
498 extra edges. */
499
500 void
501 verify_edge_list (f, elist)
502 FILE *f;
503 struct edge_list *elist;
504 {
505 int x, pred, succ, index;
506 edge e;
507
508 for (x = 0; x < n_basic_blocks; x++)
509 {
510 basic_block bb = BASIC_BLOCK (x);
511
512 for (e = bb->succ; e; e = e->succ_next)
513 {
514 pred = e->src->index;
515 succ = e->dest->index;
516 index = EDGE_INDEX (elist, e->src, e->dest);
517 if (index == EDGE_INDEX_NO_EDGE)
518 {
519 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
520 continue;
521 }
522
523 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
524 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
525 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
526 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
527 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
528 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
529 }
530 }
531
532 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
533 {
534 pred = e->src->index;
535 succ = e->dest->index;
536 index = EDGE_INDEX (elist, e->src, e->dest);
537 if (index == EDGE_INDEX_NO_EDGE)
538 {
539 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
540 continue;
541 }
542
543 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
544 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
545 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
546 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
547 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
548 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
549 }
550
551 /* We've verified that all the edges are in the list; now let's make
552 sure there are no spurious edges in the list. */
553
554 for (pred = 0; pred < n_basic_blocks; pred++)
555 for (succ = 0; succ < n_basic_blocks; succ++)
556 {
557 basic_block p = BASIC_BLOCK (pred);
558 basic_block s = BASIC_BLOCK (succ);
559 int found_edge = 0;
560
561 for (e = p->succ; e; e = e->succ_next)
562 if (e->dest == s)
563 {
564 found_edge = 1;
565 break;
566 }
567
568 for (e = s->pred; e; e = e->pred_next)
569 if (e->src == p)
570 {
571 found_edge = 1;
572 break;
573 }
574
575 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
576 == EDGE_INDEX_NO_EDGE && found_edge != 0)
577 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
578 pred, succ);
579 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
580 != EDGE_INDEX_NO_EDGE && found_edge == 0)
581 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
582 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
583 BASIC_BLOCK (succ)));
584 }
585
586 for (succ = 0; succ < n_basic_blocks; succ++)
587 {
588 basic_block p = ENTRY_BLOCK_PTR;
589 basic_block s = BASIC_BLOCK (succ);
590 int found_edge = 0;
591
592 for (e = p->succ; e; e = e->succ_next)
593 if (e->dest == s)
594 {
595 found_edge = 1;
596 break;
597 }
598
599 for (e = s->pred; e; e = e->pred_next)
600 if (e->src == p)
601 {
602 found_edge = 1;
603 break;
604 }
605
606 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
607 == EDGE_INDEX_NO_EDGE && found_edge != 0)
608 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
609 succ);
610 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
611 != EDGE_INDEX_NO_EDGE && found_edge == 0)
612 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
613 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
614 BASIC_BLOCK (succ)));
615 }
616
617 for (pred = 0; pred < n_basic_blocks; pred++)
618 {
619 basic_block p = BASIC_BLOCK (pred);
620 basic_block s = EXIT_BLOCK_PTR;
621 int found_edge = 0;
622
623 for (e = p->succ; e; e = e->succ_next)
624 if (e->dest == s)
625 {
626 found_edge = 1;
627 break;
628 }
629
630 for (e = s->pred; e; e = e->pred_next)
631 if (e->src == p)
632 {
633 found_edge = 1;
634 break;
635 }
636
637 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
638 == EDGE_INDEX_NO_EDGE && found_edge != 0)
639 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
640 pred);
641 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
642 != EDGE_INDEX_NO_EDGE && found_edge == 0)
643 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
644 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
645 EXIT_BLOCK_PTR));
646 }
647 }
648
649 /* This routine will determine what, if any, edge there is between
650 a specified predecessor and successor. */
651
652 int
653 find_edge_index (edge_list, pred, succ)
654 struct edge_list *edge_list;
655 basic_block pred, succ;
656 {
657 int x;
658
659 for (x = 0; x < NUM_EDGES (edge_list); x++)
660 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
661 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
662 return x;
663
664 return (EDGE_INDEX_NO_EDGE);
665 }
666
667 /* Dump the list of basic blocks in the bitmap NODES. */
668
669 void
670 flow_nodes_print (str, nodes, file)
671 const char *str;
672 const sbitmap nodes;
673 FILE *file;
674 {
675 int node;
676
677 if (! nodes)
678 return;
679
680 fprintf (file, "%s { ", str);
681 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
682 fputs ("}\n", file);
683 }
684
685 /* Dump the list of edges in the array EDGE_LIST. */
686
687 void
688 flow_edge_list_print (str, edge_list, num_edges, file)
689 const char *str;
690 const edge *edge_list;
691 int num_edges;
692 FILE *file;
693 {
694 int i;
695
696 if (! edge_list)
697 return;
698
699 fprintf (file, "%s { ", str);
700 for (i = 0; i < num_edges; i++)
701 fprintf (file, "%d->%d ", edge_list[i]->src->index,
702 edge_list[i]->dest->index);
703
704 fputs ("}\n", file);
705 }
706
707 \f
708 /* This routine will remove any fake successor edges for a basic block.
709 When the edge is removed, it is also removed from whatever predecessor
710 list it is in. */
711
712 static void
713 remove_fake_successors (bb)
714 basic_block bb;
715 {
716 edge e;
717
718 for (e = bb->succ; e;)
719 {
720 edge tmp = e;
721
722 e = e->succ_next;
723 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
724 remove_edge (tmp);
725 }
726 }
727
728 /* This routine will remove all fake edges from the flow graph. If
729 we remove all fake successors, it will automatically remove all
730 fake predecessors. */
731
732 void
733 remove_fake_edges ()
734 {
735 int x;
736
737 for (x = 0; x < n_basic_blocks; x++)
738 remove_fake_successors (BASIC_BLOCK (x));
739
740 /* We've handled all successors except the entry block's. */
741 remove_fake_successors (ENTRY_BLOCK_PTR);
742 }
743
744 /* This function will add a fake edge between any block which has no
745 successors, and the exit block. Some data flow equations require these
746 edges to exist. */
747
748 void
749 add_noreturn_fake_exit_edges ()
750 {
751 int x;
752
753 for (x = 0; x < n_basic_blocks; x++)
754 if (BASIC_BLOCK (x)->succ == NULL)
755 make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
756 }
757
758 /* This function adds a fake edge from any infinite loop to the
759 exit block. Some optimizations require a path from each node to
760 the exit node.
761
762 See also Morgan, Figure 3.10, pp. 82-83.
763
764 The current implementation is ugly, not attempting to minimize the
765 number of inserted fake edges. To reduce the number of fake edges
766 to insert, add fake edges from _innermost_ loops containing only
767 nodes not reachable from the exit block. */
768
769 void
770 connect_infinite_loops_to_exit ()
771 {
772 basic_block unvisited_block;
773 struct depth_first_search_dsS dfs_ds;
774
775 /* Perform depth-first search in the reverse graph to find nodes
776 reachable from the exit block. */
777 flow_dfs_compute_reverse_init (&dfs_ds);
778 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
779
780 /* Repeatedly add fake edges, updating the unreachable nodes. */
781 while (1)
782 {
783 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
784 if (!unvisited_block)
785 break;
786
787 make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
788 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
789 }
790
791 flow_dfs_compute_reverse_finish (&dfs_ds);
792 return;
793 }
794 \f
795 /* Compute the reverse topological sort order. */
796
797 void
798 flow_reverse_top_sort_order_compute (rts_order)
799 int *rts_order;
800 {
801 edge *stack;
802 int sp;
803 int postnum = 0;
804 sbitmap visited;
805
806 /* Allocate stack for back-tracking up CFG. */
807 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
808 sp = 0;
809
810 /* Allocate bitmap to track nodes that have been visited. */
811 visited = sbitmap_alloc (n_basic_blocks);
812
813 /* None of the nodes in the CFG have been visited yet. */
814 sbitmap_zero (visited);
815
816 /* Push the first edge on to the stack. */
817 stack[sp++] = ENTRY_BLOCK_PTR->succ;
818
819 while (sp)
820 {
821 edge e;
822 basic_block src;
823 basic_block dest;
824
825 /* Look at the edge on the top of the stack. */
826 e = stack[sp - 1];
827 src = e->src;
828 dest = e->dest;
829
830 /* Check if the edge destination has been visited yet. */
831 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
832 {
833 /* Mark that we have visited the destination. */
834 SET_BIT (visited, dest->index);
835
836 if (dest->succ)
837 /* Since the DEST node has been visited for the first
838 time, check its successors. */
839 stack[sp++] = dest->succ;
840 else
841 rts_order[postnum++] = dest->index;
842 }
843 else
844 {
845 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
846 rts_order[postnum++] = src->index;
847
848 if (e->succ_next)
849 stack[sp - 1] = e->succ_next;
850 else
851 sp--;
852 }
853 }
854
855 free (stack);
856 sbitmap_free (visited);
857 }
858
859 /* Compute the depth first search order and store it in the array
860 DFS_ORDER if that is non-zero. If RC_ORDER is non-zero, store
861 the reverse completion number of each node in it. Returns the
862 number of nodes visited. A depth first search tries to get as
863 far away from the starting point as quickly as
864 possible. */
865
866 int
867 flow_depth_first_order_compute (dfs_order, rc_order)
868 int *dfs_order;
869 int *rc_order;
870 {
871 edge *stack;
872 int sp;
873 int dfsnum = 0;
874 int rcnum = n_basic_blocks - 1;
875 sbitmap visited;
876
877 /* Allocate stack for back-tracking up CFG. */
878 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
879 sp = 0;
880
881 /* Allocate bitmap to track nodes that have been visited. */
882 visited = sbitmap_alloc (n_basic_blocks);
883
884 /* None of the nodes in the CFG have been visited yet. */
885 sbitmap_zero (visited);
886
887 /* Push the first edge on to the stack. */
888 stack[sp++] = ENTRY_BLOCK_PTR->succ;
889
890 while (sp)
891 {
892 edge e;
893 basic_block src;
894 basic_block dest;
895
896 /* Look at the edge on the top of the stack. */
897 e = stack[sp - 1];
898 src = e->src;
899 dest = e->dest;
900
901 /* Check if the edge destination has been visited yet. */
902 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
903 {
904 /* Mark that we have visited the destination. */
905 SET_BIT (visited, dest->index);
906
907 if (dfs_order)
908 dfs_order[dfsnum] = dest->index;
909
910 dfsnum++;
911
912 if (dest->succ)
913 /* Since the DEST node has been visited for the first
914 time, check its successors. */
915 stack[sp++] = dest->succ;
916 else if (rc_order)
917 /* There are no successors for the DEST node so assign
918 its reverse completion number. */
919 rc_order[rcnum--] = dest->index;
920 }
921 else
922 {
923 if (! e->succ_next && src != ENTRY_BLOCK_PTR
924 && rc_order)
925 /* There are no more successors for the SRC node
926 so assign its reverse completion number. */
927 rc_order[rcnum--] = src->index;
928
929 if (e->succ_next)
930 stack[sp - 1] = e->succ_next;
931 else
932 sp--;
933 }
934 }
935
936 free (stack);
937 sbitmap_free (visited);
938
939 /* The number of nodes visited should not be greater than
940 n_basic_blocks. */
941 if (dfsnum > n_basic_blocks)
942 abort ();
943
944 /* If fewer nodes were visited, some blocks in the CFG are unreachable. */
945 if (dfsnum < n_basic_blocks)
946 abort ();
947
948 return dfsnum;
949 }
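
A hedged sketch of a typical consumer (the function name is hypothetical): many dataflow problems want to visit blocks in reverse completion order, which places a block before its successors except along back edges.

static void
example_walk_in_rc_order ()
{
  int *rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
  int i;

  /* We only need the reverse completion order, so pass a null
     pointer for DFS_ORDER.  */
  flow_depth_first_order_compute (NULL, rc_order);

  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (rc_order[i]);

      /* ... process BB ... */
    }

  free (rc_order);
}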
950
951 struct dfst_node
952 {
953 unsigned nnodes;             /* Number of children in NODE.  */
954 struct dfst_node **node;     /* The child sub-trees in the spanning tree.  */
955 struct dfst_node *up;        /* The parent node in the spanning tree.  */
956 };
957
958 /* Compute a preorder traversal ordering such that a sub-tree which
959 is the source of a cross edge appears before the sub-tree which is
960 the destination of the cross edge. This allows for easy detection
961 of all the entry blocks for a loop.
962
963 The ordering is computed by:
964
965 1) Generating a depth first spanning tree.
966
967 2) Walking the resulting tree from right to left. */
968
969 void
970 flow_preorder_transversal_compute (pot_order)
971 int *pot_order;
972 {
973 edge e;
974 edge *stack;
975 int i;
976 int max_successors;
977 int sp;
978 sbitmap visited;
979 struct dfst_node *node;
980 struct dfst_node *dfst;
981
982 /* Allocate stack for back-tracking up CFG. */
983 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
984 sp = 0;
985
986 /* Allocate the tree. */
987 dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
988 sizeof (struct dfst_node));
989
990 for (i = 0; i < n_basic_blocks; i++)
991 {
992 max_successors = 0;
993 for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
994 max_successors++;
995
996 dfst[i].node
997 = (max_successors
998 ? (struct dfst_node **) xcalloc (max_successors,
999 sizeof (struct dfst_node *))
1000 : NULL);
1001 }
1002
1003 /* Allocate bitmap to track nodes that have been visited. */
1004 visited = sbitmap_alloc (n_basic_blocks);
1005
1006 /* None of the nodes in the CFG have been visited yet. */
1007 sbitmap_zero (visited);
1008
1009 /* Push the first edge on to the stack. */
1010 stack[sp++] = ENTRY_BLOCK_PTR->succ;
1011
1012 while (sp)
1013 {
1014 basic_block src;
1015 basic_block dest;
1016
1017 /* Look at the edge on the top of the stack. */
1018 e = stack[sp - 1];
1019 src = e->src;
1020 dest = e->dest;
1021
1022 /* Check if the edge destination has been visited yet. */
1023 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
1024 {
1025 /* Mark that we have visited the destination. */
1026 SET_BIT (visited, dest->index);
1027
1028 /* Add the destination to the preorder tree. */
1029 if (src != ENTRY_BLOCK_PTR)
1030 {
1031 dfst[src->index].node[dfst[src->index].nnodes++]
1032 = &dfst[dest->index];
1033 dfst[dest->index].up = &dfst[src->index];
1034 }
1035
1036 if (dest->succ)
1037 /* Since the DEST node has been visited for the first
1038 time, check its successors. */
1039 stack[sp++] = dest->succ;
1040 }
1041
1042 else if (e->succ_next)
1043 stack[sp - 1] = e->succ_next;
1044 else
1045 sp--;
1046 }
1047
1048 free (stack);
1049 sbitmap_free (visited);
1050
1051 /* Record the preorder traversal order by
1052 walking the tree from right to left. */
1053
1054 i = 0;
1055 node = &dfst[0];
1056 pot_order[i++] = 0;
1057
1058 while (node)
1059 {
1060 if (node->nnodes)
1061 {
1062 node = node->node[--node->nnodes];
1063 pot_order[i++] = node - dfst;
1064 }
1065 else
1066 node = node->up;
1067 }
1068
1069 /* Free the tree. */
1070
1071 for (i = 0; i < n_basic_blocks; i++)
1072 if (dfst[i].node)
1073 free (dfst[i].node);
1074
1075 free (dfst);
1076 }
1077
1078 /* Perform a depth first search on the _reverse_ graph of the CFG,
1079 marking the basic blocks visited along the way. A block that the
1080 search has not yet reached can be obtained from the execute step.
1081
1082 The computation is split into four pieces:
1083
1084 flow_dfs_compute_reverse_init () creates the necessary data
1085 structures.
1086
1087 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
1088 structures. The block will start the search.
1089
1090 flow_dfs_compute_reverse_execute () continues (or starts) the
1091 search using the block on the top of the stack, stopping when the
1092 stack is empty.
1093
1094 flow_dfs_compute_reverse_finish () destroys the necessary data
1095 structures.
1096
1097 Thus, the user will probably call ..._init(), call ..._add_bb() to
1098 add a beginning basic block to the stack, call ..._execute(),
1099 possibly add another bb to the stack and again call ..._execute(),
1100 ..., and finally call _finish(). */
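
A minimal sketch of that calling sequence, assuming a hypothetical caller that wants every block which cannot reach the exit (connect_infinite_loops_to_exit above is the real in-tree example):

static void
example_reverse_reachability ()
{
  struct depth_first_search_dsS dfs_ds;
  basic_block bb;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Each execute call searches backwards from everything on the
     stack; any block it could not reach is returned so we can seed
     the next round from there.  */
  while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
    {
      /* BB cannot reach the exit block.  */
      flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
}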
1101
1102 /* Initialize the data structures used for depth-first search on the
1103 reverse graph. DATA is the depth-first search context to set up;
1104 its back-tracking stack starts out empty and no basic blocks are
1105 marked as visited. Use flow_dfs_compute_reverse_add_bb to seed
1106 the search. */
1107
1108 static void
1109 flow_dfs_compute_reverse_init (data)
1110 depth_first_search_ds data;
1111 {
1112 /* Allocate stack for back-tracking up CFG. */
1113 data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
1114 * sizeof (basic_block));
1115 data->sp = 0;
1116
1117 /* Allocate bitmap to track nodes that have been visited. */
1118 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
1119
1120 /* None of the nodes in the CFG have been visited yet. */
1121 sbitmap_zero (data->visited_blocks);
1122
1123 return;
1124 }
1125
1126 /* Add the specified basic block to the top of the dfs data
1127 structures. When the search continues, it will start at the
1128 block. */
1129
1130 static void
1131 flow_dfs_compute_reverse_add_bb (data, bb)
1132 depth_first_search_ds data;
1133 basic_block bb;
1134 {
1135 data->stack[data->sp++] = bb;
1136 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
1137 }
1138
1139 /* Continue the depth-first search through the reverse graph starting with the
1140 block at the stack's top and ending when the stack is empty. Visited nodes
1141 are marked. Returns an unvisited basic block, or NULL if there is none
1142 available. */
1143
1144 static basic_block
1145 flow_dfs_compute_reverse_execute (data)
1146 depth_first_search_ds data;
1147 {
1148 basic_block bb;
1149 edge e;
1150 int i;
1151
1152 while (data->sp > 0)
1153 {
1154 bb = data->stack[--data->sp];
1155
1156 /* Perform depth-first search on adjacent vertices. */
1157 for (e = bb->pred; e; e = e->pred_next)
1158 if (!TEST_BIT (data->visited_blocks,
1159 e->src->index - (INVALID_BLOCK + 1)))
1160 flow_dfs_compute_reverse_add_bb (data, e->src);
1161 }
1162
1163 /* Determine if there are unvisited basic blocks. */
1164 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0; )
1165 if (!TEST_BIT (data->visited_blocks, i))
1166 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
1167
1168 return NULL;
1169 }
1170
1171 /* Destroy the data structures needed for depth-first search on the
1172 reverse graph. */
1173
1174 static void
1175 flow_dfs_compute_reverse_finish (data)
1176 depth_first_search_ds data;
1177 {
1178 free (data->stack);
1179 sbitmap_free (data->visited_blocks);
1180 }