Re: patch for mainline to renumber entry and exit basic blocks.


Ian Lance Taylor wrote:
> Kenneth Zadeck <Kenneth.Zadeck@NaturalBridge.com> writes:
>
>> The only point of discussion is the following comment.  This frag is
>> correct; the problem is that I did not make the proper changes to
>> predict_paths_leading_to.  See frags 4 and 5 of the predict.c patch
>> for the proper correction.
>
> OK.
>
> Thanks.
>
> Ian
The patch was bootstrapped and regression tested on suse10 x86-64 when
I replied to Ian.  I have now updated and am rebootstrapping; the
bootstrap is in stage 3, so I believe there has been no patch rot.  I
will run a full regression test this morning.

2005-12-17  Danny Berlin <dberlin@dberlin.org>
       Kenneth Zadeck <zadeck@naturalbridge.com>

* basic-block.h: Changed basic block numbering so that the entry
block is 0 and the exit block is 1. Changed insn iterators so
that they are tolerant of blocks with no insns.
* regrename.c (copyprop_hardreg_forward): Changed basic block
numbering so that the entry block is 0 and the exit block is 1.
* sched-ebb.c (schedule_ebbs): Ditto.
* tracer.c (ignore_bb_p, tracer): Ditto.
* cfgloopmanip.c (fix_loop_structure): Ditto.
* cfghooks.c (verify_flow_info): Ditto.
* cfg.c (compact_blocks): Ditto.
* reorg.c (dbr_schedule): Ditto.
* flow.c (calculate_global_regs_live, libcall_dead_p): Ditto.
* dominance.c (calc_dfs_tree_nonrec, calc_dfs_tree,
calculate_dominance_info): Ditto.
* cfganal.c (create_edge_list, print_edge_list,
flow_depth_first_order_compute, flow_dfs_compute_reverse_init,
flow_dfs_compute_reverse_add_bb, flow_dfs_compute_reverse_execute,
dfs_enumerate_from): Ditto.
* global.c (global_alloc, set_up_bb_rts_numbers): Ditto.
* ifcvt.c (find_if_case_2): Ditto.
* cfgbuild.c (control_flow_insn_p, count_basic_blocks,
find_basic_blocks): Ditto.
* predict.c (predict_loops, tree_bb_level_predictions,
predict_paths_leading_to, propagate_freq): Ditto.
* lcm.c (compute_antinout_edge, compute_laterin,
compute_available): Ditto.
* function.c (thread_prologue_and_epilogue_insns): Ditto.
* gcse.c (gcse_main, bypass_jumps): Ditto.
* profile.c (compute_branch_probabilities,
compute_value_histograms, branch_prob): Ditto.
* tree-flow-inline.h (bsi_start, bsi_after_labels,
bsi_last): Ditto.
* tree-ssa-phiopt.c (tree_ssa_phiopt,
blocks_in_phiopt_order): Ditto.
* bt-load.c (compute_defs_uses_and_gen, compute_kill,
compute_out, link_btr_uses, migrate_btr_defs): Ditto.
* tree-dfa.c (collect_dfa_stats): Ditto.
* cfgcleanup.c (try_forward_edges, try_optimize_cfg): Ditto.
* cfglayout.c (fixup_reorder_chain): Ditto.
* bb-reorder.c (reorder_basic_blocks, duplicate_computed_gotos,
partition_hot_cold_basic_blocks): Ditto.
* var-tracking.c (vt_find_locations): Ditto.
* cfgloop.c (flow_loops_cfg_dump, flow_loops_find, get_loop_body): Ditto.
* sched-rgn.c (compute_trg_info, init_regions, schedule_insns): Ditto.
* tree-cfg.c (init_empty_tree_cfg, build_tree_cfg, make_edges,
label_to_block_fn, print_loop_ir, tree_flow_call_edges_add): Ditto.
* tree-ssa-reassoc.c (init_reassoc): Ditto.
* cfgrtl.c (entry_of_function, rtl_verify_flow_info,
rtl_flow_call_edges_add, rtl_flow_call_edges_add): Ditto.
* df.c (df_analyze_1, hybrid_search, iterative_dataflow): Ditto,
and removed the unused inverse order maps.
* df.h (struct df): Ditto.
* combine.c: Fix comment typo.
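
To make the shape of the change concrete, here is a minimal sketch
(mine, not part of the patch) contrasting the old and new numbering as
defined in basic-block.h; the bias arithmetic in the first half is
exactly what the sbitmap call sites below delete:

  /* Before: the fake blocks had negative indices, so every bitmap
     index had to be biased by -(INVALID_BLOCK + 1) == 2 to stay
     nonnegative.  */
  #define ENTRY_BLOCK (-1)
  #define EXIT_BLOCK (-2)
  #define INVALID_BLOCK (-3)

  visited = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));
  SET_BIT (visited, bb->index - (INVALID_BLOCK + 1));

  /* After: the fake blocks occupy slots 0 and 1 of the block array
     and bb->index is usable directly; walks that want only the real
     blocks start at NUM_FIXED_BLOCKS.  */
  #define ENTRY_BLOCK (0)
  #define EXIT_BLOCK (1)
  #define NUM_FIXED_BLOCKS (2)

  visited = sbitmap_alloc (last_basic_block);
  SET_BIT (visited, bb->index);

  for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
    bb = BASIC_BLOCK (i);	/* real blocks only */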


Index: regrename.c
===================================================================
--- regrename.c	(revision 108712)
+++ regrename.c	(working copy)
@@ -1776,20 +1776,19 @@ copyprop_hardreg_forward (void)
 
   all_vd = xmalloc (sizeof (struct value_data) * last_basic_block);
 
-  visited = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));
+  visited = sbitmap_alloc (last_basic_block);
   sbitmap_zero (visited);
 
   FOR_EACH_BB (bb)
     {
-      SET_BIT (visited, bb->index - (INVALID_BLOCK + 1));
+      SET_BIT (visited, bb->index);
 
       /* If a block has a single predecessor, that we've already
 	 processed, begin with the value data that was live at
 	 the end of the predecessor block.  */
       /* ??? Ought to use more intelligent queuing of blocks.  */
-      if (single_pred_p (bb)
-	  && TEST_BIT (visited,
-		       single_pred (bb)->index - (INVALID_BLOCK + 1))
+      if (single_pred_p (bb) 
+	  && TEST_BIT (visited, single_pred (bb)->index)
 	  && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
 	all_vd[bb->index] = all_vd[single_pred (bb)->index];
       else
Index: sched-ebb.c
===================================================================
--- sched-ebb.c	(revision 108712)
+++ sched-ebb.c	(working copy)
@@ -568,7 +568,7 @@ schedule_ebbs (FILE *dump_file)
 
   /* Taking care of this degenerate case makes the rest of
      this code simpler.  */
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return;
 
   sched_init (dump_file);
Index: tracer.c
===================================================================
--- tracer.c	(revision 108712)
+++ tracer.c	(working copy)
@@ -72,7 +72,7 @@ static int branch_ratio_cutoff;
 static bool
 ignore_bb_p (basic_block bb)
 {
-  if (bb->index < 0)
+  if (bb->index < NUM_FIXED_BLOCKS)
     return true;
   if (!maybe_hot_bb_p (bb))
     return true;
@@ -363,7 +363,7 @@ layout_superblocks (void)
 void
 tracer (unsigned int flags)
 {
-  if (n_basic_blocks <= 1)
+  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
     return;
 
   cfg_layout_initialize (flags);
Index: cfgloopmanip.c
===================================================================
--- cfgloopmanip.c	(revision 108712)
+++ cfgloopmanip.c	(working copy)
@@ -1563,7 +1563,7 @@ fix_loop_structure (struct loops *loops,
     }
 
   /* Remove the dead loops from structures.  */
-  loops->tree_root->num_nodes = n_basic_blocks + 2;
+  loops->tree_root->num_nodes = n_basic_blocks; 
   for (i = 1; i < loops->num; i++)
     {
       loop = loops->parray[i];
Index: cfghooks.c
===================================================================
--- cfghooks.c	(revision 108712)
+++ cfghooks.c	(working copy)
@@ -77,8 +77,8 @@ verify_flow_info (void)
   basic_block *last_visited;
 
   timevar_push (TV_CFG_VERIFY);
-  last_visited = xcalloc (last_basic_block + 2, sizeof (basic_block));
-  edge_checksum = xcalloc (last_basic_block + 2, sizeof (size_t));
+  last_visited = xcalloc (last_basic_block, sizeof (basic_block));
+  edge_checksum = xcalloc (last_basic_block, sizeof (size_t));
 
   /* Check bb chain & numbers.  */
   last_bb_seen = ENTRY_BLOCK_PTR;
@@ -122,7 +122,7 @@ verify_flow_info (void)
 	}
       FOR_EACH_EDGE (e, ei, bb->succs)
 	{
-	  if (last_visited [e->dest->index + 2] == bb)
+	  if (last_visited [e->dest->index] == bb)
 	    {
 	      error ("verify_flow_info: Duplicate edge %i->%i",
 		     e->src->index, e->dest->index);
@@ -141,7 +141,7 @@ verify_flow_info (void)
 	      err = 1;
 	    }
 
-	  last_visited [e->dest->index + 2] = bb;
+	  last_visited [e->dest->index] = bb;
 
 	  if (e->flags & EDGE_FALLTHRU)
 	    n_fallthru++;
@@ -158,7 +158,7 @@ verify_flow_info (void)
 	      err = 1;
 	    }
 
-	  edge_checksum[e->dest->index + 2] += (size_t) e;
+	  edge_checksum[e->dest->index] += (size_t) e;
 	}
       if (n_fallthru > 1)
 	{
@@ -192,7 +192,7 @@ verify_flow_info (void)
 	      err = 1;
 	    }
 
-	  edge_checksum[e->dest->index + 2] -= (size_t) e;
+	  edge_checksum[e->dest->index] -= (size_t) e;
 	}
     }
 
@@ -202,14 +202,14 @@ verify_flow_info (void)
     edge_iterator ei;
 
     FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
-      edge_checksum[e->dest->index + 2] += (size_t) e;
+      edge_checksum[e->dest->index] += (size_t) e;
 
     FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
-      edge_checksum[e->dest->index + 2] -= (size_t) e;
+      edge_checksum[e->dest->index] -= (size_t) e;
   }
 
   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
-    if (edge_checksum[bb->index + 2])
+    if (edge_checksum[bb->index])
       {
 	error ("basic block %i edge lists are corrupted", bb->index);
 	err = 1;
Index: cfg.c
===================================================================
--- cfg.c	(revision 108712)
+++ cfg.c	(working copy)
@@ -163,8 +163,11 @@ compact_blocks (void)
   int i;
   basic_block bb;
 
-  i = 0;
-  FOR_EACH_BB (bb)
+  BASIC_BLOCK (ENTRY_BLOCK) = ENTRY_BLOCK_PTR;
+  BASIC_BLOCK (EXIT_BLOCK) = EXIT_BLOCK_PTR;
+
+  i = NUM_FIXED_BLOCKS;
+  FOR_EACH_BB (bb) 
     {
       BASIC_BLOCK (i) = bb;
       bb->index = i;
Index: reorg.c
===================================================================
--- reorg.c	(revision 108712)
+++ reorg.c	(working copy)
@@ -3539,7 +3539,7 @@ dbr_schedule (rtx first, FILE *file)
 
   /* If the current function has no insns other than the prologue and
      epilogue, then do not try to fill any delay slots.  */
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return;
 
   /* Find the highest INSN_UID and allocate and initialize our map from
Index: flow.c
===================================================================
--- flow.c	(revision 108712)
+++ flow.c	(working copy)
@@ -1054,17 +1054,14 @@ calculate_global_regs_live (sbitmap bloc
       SET_REGNO_REG_SET (invalidated_by_call, i);
 
   /* Allocate space for the sets of local properties.  */
-  local_sets = xcalloc (last_basic_block - (INVALID_BLOCK + 1),
-			sizeof (regset));
-  cond_local_sets = xcalloc (last_basic_block - (INVALID_BLOCK + 1),
-			     sizeof (regset));
-
-  /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
-     because the `head == tail' style test for an empty queue doesn't
-     work with a full queue.  */
-  queue = xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1)) * sizeof (*queue));
+  local_sets = xcalloc (last_basic_block, sizeof (regset));
+  cond_local_sets = xcalloc (last_basic_block, sizeof (regset));
+
+  /* Create a worklist.  Allocate an extra slot for the `head == tail'
+     style test for an empty queue doesn't work with a full queue.  */
+  queue = xmalloc ((n_basic_blocks + 1) * sizeof (*queue));
   qtail = queue;
-  qhead = qend = queue + n_basic_blocks - (INVALID_BLOCK + 1);
+  qhead = qend = queue + n_basic_blocks;
 
   /* Queue the blocks set in the initial mask.  Do this in reverse block
      number order so that we are more likely for the first round to do
@@ -1245,12 +1242,10 @@ calculate_global_regs_live (sbitmap bloc
          basic block.  On subsequent passes, we get to skip out early if
 	 live_at_end wouldn't have changed.  */
 
-      if (local_sets[bb->index - (INVALID_BLOCK + 1)] == NULL)
+      if (local_sets[bb->index] == NULL)
 	{
-	  local_sets[bb->index - (INVALID_BLOCK + 1)]
-	    = ALLOC_REG_SET (&reg_obstack);
-	  cond_local_sets[bb->index - (INVALID_BLOCK + 1)]
-	    = ALLOC_REG_SET (&reg_obstack);
+	  local_sets[bb->index] = ALLOC_REG_SET (&reg_obstack);
+	  cond_local_sets[bb->index] = ALLOC_REG_SET (&reg_obstack);
 	  rescan = 1;
 	}
       else
@@ -1274,7 +1269,7 @@ calculate_global_regs_live (sbitmap bloc
 		  successor block.  We can miss changes in those sets if
 		  we only compare the new live_at_end against the
 		  previous one.  */
-	      cond_local_set = cond_local_sets[bb->index - (INVALID_BLOCK + 1)];
+	      cond_local_set = cond_local_sets[bb->index];
 	      rescan = bitmap_intersect_p (new_live_at_end, cond_local_set);
 	    }
 
@@ -1290,7 +1285,7 @@ calculate_global_regs_live (sbitmap bloc
   
 	      /* If any of the changed bits overlap with local_sets[bb],
  		 we'll have to rescan the block.  */
-	      local_set = local_sets[bb->index - (INVALID_BLOCK + 1)];
+	      local_set = local_sets[bb->index];
 	      rescan = bitmap_intersect_p (tmp, local_set);
 	    }
 	}
@@ -1319,8 +1314,8 @@ calculate_global_regs_live (sbitmap bloc
 	  /* Rescan the block insn by insn to turn (a copy of) live_at_end
 	     into live_at_start.  */
 	  propagate_block (bb, new_live_at_end,
-			   local_sets[bb->index - (INVALID_BLOCK + 1)],
-			   cond_local_sets[bb->index - (INVALID_BLOCK + 1)],
+			   local_sets[bb->index],
+			   cond_local_sets[bb->index],
 			   flags);
 
 	  /* If live_at start didn't change, no need to go farther.  */
@@ -1366,11 +1361,11 @@ calculate_global_regs_live (sbitmap bloc
 			SET_BIT (blocks_out, pbb->index);
 
 		      /* Makes sure to really rescan the block.  */
-		      if (local_sets[pbb->index - (INVALID_BLOCK + 1)])
+		      if (local_sets[pbb->index])
 			{
-			  FREE_REG_SET (local_sets[pbb->index - (INVALID_BLOCK + 1)]);
-			  FREE_REG_SET (cond_local_sets[pbb->index - (INVALID_BLOCK + 1)]);
-			  local_sets[pbb->index - (INVALID_BLOCK + 1)] = 0;
+			  FREE_REG_SET (local_sets[pbb->index]);
+			  FREE_REG_SET (cond_local_sets[pbb->index]);
+			  local_sets[pbb->index] = 0;
 			}
 
 		      /* Add it to the queue.  */
@@ -1416,16 +1411,16 @@ calculate_global_regs_live (sbitmap bloc
       EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i, sbi)
 	{
 	  basic_block bb = BASIC_BLOCK (i);
-	  FREE_REG_SET (local_sets[bb->index - (INVALID_BLOCK + 1)]);
-	  FREE_REG_SET (cond_local_sets[bb->index - (INVALID_BLOCK + 1)]);
+ 	  FREE_REG_SET (local_sets[bb->index]);
+ 	  FREE_REG_SET (cond_local_sets[bb->index]);
 	};
     }
   else
     {
       FOR_EACH_BB (bb)
 	{
-	  FREE_REG_SET (local_sets[bb->index - (INVALID_BLOCK + 1)]);
-	  FREE_REG_SET (cond_local_sets[bb->index - (INVALID_BLOCK + 1)]);
+ 	  FREE_REG_SET (local_sets[bb->index]);
+ 	  FREE_REG_SET (cond_local_sets[bb->index]);
 	}
     }
 
@@ -2472,7 +2467,7 @@ libcall_dead_p (struct propagate_block_i
 int
 regno_clobbered_at_setjmp (int regno)
 {
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return 0;
 
   return ((REG_N_SETS (regno) > 1
Index: dominance.c
===================================================================
--- dominance.c	(revision 108712)
+++ dominance.c	(working copy)
@@ -51,8 +51,7 @@ enum dom_state dom_computed[2];
    'undefined' or 'end of list'.  The name of each node is given by the dfs
    number of the corresponding basic block.  Please note, that we include the
    artificial ENTRY_BLOCK (or EXIT_BLOCK in the post-dom case) in our lists to
-   support multiple entry points.  As it has no real basic block index we use
-   'last_basic_block' for that.  Its dfs number is of course 1.  */
+   support multiple entry points.  Its dfs number is of course 1.  */
 
 /* Type of Basic Block aka. TBB */
 typedef unsigned int TBB;
@@ -149,9 +148,7 @@ static unsigned n_bbs_in_dom_tree[2];
 static void
 init_dom_info (struct dom_info *di, enum cdi_direction dir)
 {
-  /* We need memory for n_basic_blocks nodes and the ENTRY_BLOCK or
-     EXIT_BLOCK.  */
-  unsigned int num = n_basic_blocks + 1 + 1;
+  unsigned int num = n_basic_blocks;
   init_ar (di->dfs_parent, TBB, num, 0);
   init_ar (di->path_min, TBB, num, i);
   init_ar (di->key, TBB, num, i);
@@ -216,7 +213,7 @@ calc_dfs_tree_nonrec (struct dom_info *d
   /* Ending block.  */
   basic_block ex_block;
 
-  stack = xmalloc ((n_basic_blocks + 3) * sizeof (edge_iterator));
+  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
   sp = 0;
 
   /* Initialize our border blocks, and the first edge.  */
@@ -372,8 +369,8 @@ calc_dfs_tree (struct dom_info *di, enum
 
   di->nodes = di->dfsnum - 1;
 
-  /* Make sure there is a path from ENTRY to EXIT at all.  */
-  gcc_assert (di->nodes == (unsigned int) n_basic_blocks + 1);
+  /* This aborts e.g. when there is _no_ path from ENTRY to EXIT at all.  */
+  gcc_assert (di->nodes == (unsigned int) n_basic_blocks - 1);
 }
 
 /* Compress the path from V to the root of its set and update path_min at the
@@ -627,7 +624,7 @@ calculate_dominance_info (enum cdi_direc
 	{
 	  b->dom[dir] = et_new_tree (b);
 	}
-      n_bbs_in_dom_tree[dir] = n_basic_blocks + 2;
+      n_bbs_in_dom_tree[dir] = n_basic_blocks;
 
       init_dom_info (&di, dir);
       calc_dfs_tree (&di, dir);
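
The new assertion value in calc_dfs_tree follows from the counting
change (my derivation, not spelled out in the mail): the DFS visits
every real block plus exactly one fake block, ENTRY_BLOCK (or
EXIT_BLOCK in the post-dominator direction).  Before, n_basic_blocks
counted only the real blocks, so

  di->nodes == n_basic_blocks + 1

Afterwards n_basic_blocks includes both fake blocks, so

  di->nodes == (n_basic_blocks - NUM_FIXED_BLOCKS) + 1
	    == n_basic_blocks - 1
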
Index: cfganal.c
===================================================================
--- cfganal.c	(revision 108712)
+++ cfganal.c	(working copy)
@@ -345,7 +345,7 @@ create_edge_list (void)
   basic_block bb;
   edge_iterator ei;
 
-  block_count = n_basic_blocks + 2;   /* Include the entry and exit blocks.  */
+  block_count = n_basic_blocks; /* Include the entry and exit blocks.  */
 
   num_edges = 0;
 
@@ -391,7 +391,7 @@ print_edge_list (FILE *f, struct edge_li
   int x;
 
   fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
-	   elist->num_blocks - 2, elist->num_edges);
+	   elist->num_blocks, elist->num_edges);
 
   for (x = 0; x < elist->num_edges; x++)
     {
@@ -721,7 +721,7 @@ flow_depth_first_order_compute (int *dfs
   edge_iterator *stack;
   int sp;
   int dfsnum = 0;
-  int rcnum = n_basic_blocks - 1;
+  int rcnum = n_basic_blocks - 1 - NUM_FIXED_BLOCKS;
   sbitmap visited;
 
   /* Allocate stack for back-tracking up CFG.  */
@@ -786,8 +786,9 @@ flow_depth_first_order_compute (int *dfs
   free (stack);
   sbitmap_free (visited);
 
-  /* The number of nodes visited should be the number of blocks.  */
-  gcc_assert (dfsnum == n_basic_blocks);
+  /* The number of nodes visited should be the number of blocks minus
+     the entry and exit blocks which are not visited here.  */
+  gcc_assert (dfsnum == n_basic_blocks - NUM_FIXED_BLOCKS);
 
   return dfsnum;
 }
@@ -826,12 +827,11 @@ static void
 flow_dfs_compute_reverse_init (depth_first_search_ds data)
 {
   /* Allocate stack for back-tracking up CFG.  */
-  data->stack = xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
-			 * sizeof (basic_block));
+  data->stack = xmalloc (n_basic_blocks * sizeof (basic_block));
   data->sp = 0;
 
   /* Allocate bitmap to track nodes that have been visited.  */
-  data->visited_blocks = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));
+  data->visited_blocks = sbitmap_alloc (last_basic_block);
 
   /* None of the nodes in the CFG have been visited yet.  */
   sbitmap_zero (data->visited_blocks);
@@ -847,7 +847,7 @@ static void
 flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
 {
   data->stack[data->sp++] = bb;
-  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
+  SET_BIT (data->visited_blocks, bb->index);
 }
 
 /* Continue the depth-first search through the reverse graph starting with the
@@ -869,14 +869,13 @@ flow_dfs_compute_reverse_execute (depth_
 
       /* Perform depth-first search on adjacent vertices.  */
       FOR_EACH_EDGE (e, ei, bb->preds)
-	if (!TEST_BIT (data->visited_blocks,
-		       e->src->index - (INVALID_BLOCK + 1)))
+	if (!TEST_BIT (data->visited_blocks, e->src->index))
 	  flow_dfs_compute_reverse_add_bb (data, e->src);
     }
 
   /* Determine if there are unvisited basic blocks.  */
   FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
-    if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
+    if (!TEST_BIT (data->visited_blocks, bb->index))
       return bb;
 
   return NULL;
@@ -912,12 +911,12 @@ dfs_enumerate_from (basic_block bb, int 
   static sbitmap visited;
   static unsigned v_size;
 
-#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index + 2))
-#define UNMARK_VISITED(BB) (RESET_BIT (visited, (BB)->index + 2))
-#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index + 2))
+#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index)) 
+#define UNMARK_VISITED(BB) (RESET_BIT (visited, (BB)->index)) 
+#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index)) 
 
   /* Resize the VISITED sbitmap if necessary.  */
-  size = last_basic_block + 2;
+  size = last_basic_block; 
   if (size < 10)
     size = 10;
 
Index: global.c
===================================================================
--- global.c	(revision 108712)
+++ global.c	(working copy)
@@ -621,7 +621,7 @@ global_alloc (FILE *file)
 
 #if 0 /* We need to eliminate regs even if there is no rtl code,
 	 for the sake of debugging information.  */
-  if (n_basic_blocks > 0)
+  if (n_basic_blocks > NUM_FIXED_BLOCKS)
 #endif
     {
       build_insn_chain (get_insns ());
@@ -2281,9 +2281,9 @@ set_up_bb_rts_numbers (void)
   int i;
   int *rts_order;
   
-  rts_order = xmalloc (sizeof (int) * n_basic_blocks);
+  rts_order = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS));
   flow_reverse_top_sort_order_compute (rts_order);
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
     BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
   free (rts_order);
 }
Index: ifcvt.c
===================================================================
--- ifcvt.c	(revision 108712)
+++ ifcvt.c	(working copy)
@@ -3194,14 +3194,14 @@ find_if_case_2 (basic_block test_bb, edg
     return FALSE;
 
   /* THEN is not EXIT.  */
-  if (then_bb->index < 0)
+  if (then_bb->index < NUM_FIXED_BLOCKS)
     return FALSE;
 
   /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
   note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
   if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
     ;
-  else if (else_succ->dest->index < 0
+  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
 	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
 			      else_succ->dest))
     ;
Index: cfgbuild.c
===================================================================
--- cfgbuild.c	(revision 108712)
+++ cfgbuild.c	(working copy)
@@ -138,7 +138,7 @@ control_flow_insn_p (rtx insn)
 static int
 count_basic_blocks (rtx f)
 {
-  int count = 0;
+  int count = NUM_FIXED_BLOCKS;
   bool saw_insn = false;
   rtx insn;
 
@@ -164,10 +164,10 @@ count_basic_blocks (rtx f)
 
   /* The rest of the compiler works a bit smoother when we don't have to
      check for the edge case of do-nothing functions with no basic blocks.  */
-  if (count == 0)
+  if (count == NUM_FIXED_BLOCKS)
     {
       emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
-      count = 1;
+      count = NUM_FIXED_BLOCKS + 1;
     }
 
   return count;
@@ -529,10 +529,11 @@ find_basic_blocks (rtx f)
     }
 
   n_basic_blocks = count_basic_blocks (f);
-  last_basic_block = 0;
+  last_basic_block = NUM_FIXED_BLOCKS;
   ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
   EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
 
+
   /* Size the basic block table.  The actual structures will be allocated
      by find_basic_blocks_1, since we want to keep the structure pointers
      stable across calls to find_basic_blocks.  */
@@ -542,6 +543,8 @@ find_basic_blocks (rtx f)
      actually lay them out.  */
 
   VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
+  BASIC_BLOCK (ENTRY_BLOCK) = ENTRY_BLOCK_PTR;
+  BASIC_BLOCK (EXIT_BLOCK) = EXIT_BLOCK_PTR;
 
   find_basic_blocks_1 (f);
 
Index: predict.c
===================================================================
--- predict.c	(revision 108712)
+++ predict.c	(working copy)
@@ -707,7 +707,7 @@ predict_loops (struct loops *loops_info,
 	     conditional has no loop header successors as not taken.  */
 	  if (!header_found)
 	    FOR_EACH_EDGE (e, ei, bb->succs)
-	      if (e->dest->index < 0
+	      if (e->dest->index < NUM_FIXED_BLOCKS
 		  || !flow_bb_inside_loop_p (loop, e->dest))
 		predict_edge
 		  (e, PRED_LOOP_EXIT,
@@ -1271,7 +1271,7 @@ tree_bb_level_predictions (void)
   int *heads;
 
   heads = xmalloc (sizeof (int) * last_basic_block);
-  memset (heads, -1, sizeof (int) * last_basic_block);
+  memset (heads, ENTRY_BLOCK, sizeof (int) * last_basic_block);
   heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;
 
   apply_return_prediction (heads);
@@ -1500,7 +1500,7 @@ predict_paths_leading_to (basic_block bb
   edge_iterator ei;
   int y;
 
-  if (heads[bb->index] < 0)
+  if (heads[bb->index] == ENTRY_BLOCK)
     {
       /* This is first time we need this field in heads array; so
          find first dominator that we do not post-dominate (we are
@@ -1509,7 +1509,7 @@ predict_paths_leading_to (basic_block bb
       basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
       int head;
 
-      while (heads[next_ai->index] < 0)
+      while (heads[next_ai->index] == ENTRY_BLOCK)
 	{
 	  if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
 	    break;
@@ -1524,10 +1524,7 @@ predict_paths_leading_to (basic_block bb
       while (next_ai != bb)
 	{
 	  next_ai = ai;
-	  if (heads[ai->index] == ENTRY_BLOCK)
-	    ai = ENTRY_BLOCK_PTR;
-	  else
-	    ai = BASIC_BLOCK (heads[ai->index]);
+	  ai = BASIC_BLOCK (heads[ai->index]);
 	  heads[next_ai->index] = head;
 	}
     }
@@ -1538,7 +1535,7 @@ predict_paths_leading_to (basic_block bb
   if (y == last_basic_block)
     return;
   FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
-    if (e->dest->index >= 0
+    if (e->dest->index >= NUM_FIXED_BLOCKS
 	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
       predict_edge_def (e, pred, taken);
 }
@@ -1596,12 +1593,7 @@ propagate_freq (struct loop *loop, bitma
        /* The outermost "loop" includes the exit block, which we can not
 	  look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
 	  directly.  Do the same for the entry block.  */
-     if (i == (unsigned)ENTRY_BLOCK)
-       bb = ENTRY_BLOCK_PTR;
-     else if (i == (unsigned)EXIT_BLOCK)
-       bb = EXIT_BLOCK_PTR;
-     else
-       bb = BASIC_BLOCK (i);
+      bb = BASIC_BLOCK (i);
 
       FOR_EACH_EDGE (e, ei, bb->preds)
 	{
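
A subtlety in the predict.c frags worth spelling out (my reading, not
stated in the mail): the heads array uses its memset value as a "not
computed yet" sentinel.  A sketch of the before and after, assuming
int-sized elements:

  /* Old: -1 is all-ones bytes, so memset works; the tests were "< 0".  */
  memset (heads, -1, sizeof (int) * last_basic_block);
  if (heads[bb->index] < 0)
    ...

  /* New: ENTRY_BLOCK is now 0, all-zero bytes, so memset still works;
     the tests become "== ENTRY_BLOCK".  Only 0 and -1 are byte-wise
     safe initializers here.  */
  memset (heads, ENTRY_BLOCK, sizeof (int) * last_basic_block);
  if (heads[bb->index] == ENTRY_BLOCK)
    ...

The special case mapping heads[ai->index] == ENTRY_BLOCK to
ENTRY_BLOCK_PTR also disappears, since BASIC_BLOCK (ENTRY_BLOCK) is
now a real slot in the block array.
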
Index: lcm.c
===================================================================
--- lcm.c	(revision 108712)
+++ lcm.c	(working copy)
@@ -122,8 +122,8 @@ compute_antinout_edge (sbitmap *antloc, 
     }
 
   qin = worklist;
-  qend = &worklist[n_basic_blocks];
-  qlen = n_basic_blocks;
+  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
+  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;
 
   /* Mark blocks which are predecessors of the exit block so that we
      can easily identify them below.  */
@@ -260,7 +260,7 @@ compute_laterin (struct edge_list *edge_
      list if they were not already on the list.  So the size is
      bounded by the number of basic blocks.  */
   qin = qout = worklist
-    = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
+    = xmalloc (sizeof (basic_block) * n_basic_blocks);
 
   /* Initialize a mapping from each edge to its index.  */
   for (i = 0; i < num_edges; i++)
@@ -294,11 +294,10 @@ compute_laterin (struct edge_list *edge_
     }
 
   /* Note that we do not use the last allocated element for our queue,
-     as EXIT_BLOCK is never inserted into it. In fact the above allocation
-     of n_basic_blocks + 1 elements is not necessary.  */
+     as EXIT_BLOCK is never inserted into it. */
   qin = worklist;
-  qend = &worklist[n_basic_blocks];
-  qlen = n_basic_blocks;
+  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
+  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;
 
   /* Iterate until the worklist is empty.  */
   while (qlen)
@@ -485,7 +484,8 @@ compute_available (sbitmap *avloc, sbitm
   /* Allocate a worklist array/queue.  Entries are only added to the
      list if they were not already on the list.  So the size is
      bounded by the number of basic blocks.  */
-  qin = qout = worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
+  qin = qout = worklist = 
+    xmalloc (sizeof (basic_block) * (n_basic_blocks - NUM_FIXED_BLOCKS));
 
   /* We want a maximal solution.  */
   sbitmap_vector_ones (avout, last_basic_block);
@@ -499,8 +499,8 @@ compute_available (sbitmap *avloc, sbitm
     }
 
   qin = worklist;
-  qend = &worklist[n_basic_blocks];
-  qlen = n_basic_blocks;
+  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
+  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;
 
   /* Mark blocks which are successors of the entry block so that we
      can easily identify them below.  */
Index: function.c
===================================================================
--- function.c	(revision 108712)
+++ function.c	(working copy)
@@ -5256,7 +5256,8 @@ thread_prologue_and_epilogue_insns (rtx 
 	fixup_fallthru_exit_predecessor.  */
       cfg_layout_initialize (0);
       FOR_EACH_BB (cur_bb)
-	if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
+	if (cur_bb->index >= NUM_FIXED_BLOCKS
+	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
 	  cur_bb->aux = cur_bb->next_bb;
       cfg_layout_finalize ();
     }
Index: df.c
===================================================================
--- df.c	(revision 108712)
+++ df.c	(working copy)
@@ -1926,7 +1926,6 @@ df_analyze_1 (struct df *df, bitmap bloc
 {
   int aflags;
   int dflags;
-  int i;
   basic_block bb;
   struct dataflow dflow;
 
@@ -1996,18 +1995,9 @@ df_analyze_1 (struct df *df, bitmap bloc
   df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
   df->rc_order = xmalloc (sizeof (int) * n_basic_blocks);
   df->rts_order = xmalloc (sizeof (int) * n_basic_blocks);
-  df->inverse_dfs_map = xmalloc (sizeof (int) * last_basic_block);
-  df->inverse_rc_map = xmalloc (sizeof (int) * last_basic_block);
-  df->inverse_rts_map = xmalloc (sizeof (int) * last_basic_block);
 
   flow_depth_first_order_compute (df->dfs_order, df->rc_order);
   flow_reverse_top_sort_order_compute (df->rts_order);
-  for (i = 0; i < n_basic_blocks; i++)
-    {
-      df->inverse_dfs_map[df->dfs_order[i]] = i;
-      df->inverse_rc_map[df->rc_order[i]] = i;
-      df->inverse_rts_map[df->rts_order[i]] = i;
-    }
   if (aflags & DF_RD)
     {
       /* Compute the sets of gens and kills for the defs of each bb.  */
@@ -2137,9 +2127,6 @@ df_analyze_1 (struct df *df, bitmap bloc
   free (df->dfs_order);
   free (df->rc_order);
   free (df->rts_order);
-  free (df->inverse_rc_map);
-  free (df->inverse_dfs_map);
-  free (df->inverse_rts_map);
 }
 
 
@@ -3923,8 +3910,7 @@ hybrid_search (basic_block bb, struct da
    DATAFLOW, producing the in and out sets.  Only the part of the cfg
    induced by blocks in DATAFLOW->order is taken into account.
 
-   For forward problems, you probably want to pass in a mapping of
-   block number to rc_order (like df->inverse_rc_map).  */
+   For forward problems, you probably want to pass in rc_order.  */
 
 void
 iterative_dataflow (struct dataflow *dataflow)
@@ -3939,7 +3925,7 @@ iterative_dataflow (struct dataflow *dat
   sbitmap_zero (visited);
   sbitmap_zero (considered);
 
-  for (i = 0; i < dataflow->n_blocks; i++)
+  for (i = 0; i < dataflow->n_blocks - NUM_FIXED_BLOCKS; i++)
     {
       idx = dataflow->order[i];
       SET_BIT (pending, idx);
@@ -3954,7 +3940,7 @@ iterative_dataflow (struct dataflow *dat
 
   while (1)
     {
-      for (i = 0; i < dataflow->n_blocks; i++)
+      for (i = 0; i < dataflow->n_blocks - NUM_FIXED_BLOCKS ; i++)
 	{
 	  idx = dataflow->order[i];
 
Index: df.h
===================================================================
--- df.h	(revision 108712)
+++ df.h	(working copy)
@@ -155,9 +155,6 @@ struct df
   int *dfs_order;		/* DFS order -> block number.  */
   int *rc_order;		/* Reverse completion order -> block number.  */
   int *rts_order;		/* Reverse top sort order -> block number.  */
-  int *inverse_rc_map;		/* Block number -> reverse completion order.  */
-  int *inverse_dfs_map;		/* Block number -> DFS order.  */
-  int *inverse_rts_map;		/* Block number -> reverse top-sort order.  */
 };
 
 
Index: gcse.c
===================================================================
--- gcse.c	(revision 108712)
+++ gcse.c	(working copy)
@@ -691,7 +691,7 @@ gcse_main (rtx f ATTRIBUTE_UNUSED, FILE 
     dump_flow_info (file);
 
   /* Return if there's nothing to do, or it is too expensive.  */
-  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
+  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1 || is_too_expensive (_("GCSE disabled")))
     return 0;
 
   gcc_obstack_init (&gcse_obstack);
@@ -6519,7 +6519,8 @@ bypass_jumps (FILE *file)
     dump_flow_info (file);
 
   /* Return if there's nothing to do, or it is too expensive.  */
-  if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled")))
+  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1 || 
+      is_too_expensive (_ ("jump bypassing disabled")))
     return 0;
 
   gcc_obstack_init (&gcse_obstack);
Index: profile.c
===================================================================
--- profile.c	(revision 108712)
+++ profile.c	(working copy)
@@ -531,7 +531,7 @@ compute_branch_probabilities (void)
 	{
 	  FOR_EACH_EDGE (e, ei, bb->succs)
 	    e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
-	  if (bb->index >= 0
+	  if (bb->index >= NUM_FIXED_BLOCKS
 	      && block_ends_with_condjump_p (bb)
 	      && EDGE_COUNT (bb->succs) >= 2)
 	    {
@@ -609,7 +609,7 @@ compute_branch_probabilities (void)
 	      FOR_EACH_EDGE (e, ei, bb->succs)
 		e->probability = REG_BR_PROB_BASE / total;
 	    }
-	  if (bb->index >= 0
+	  if (bb->index >= NUM_FIXED_BLOCKS
 	      && block_ends_with_condjump_p (bb)
 	      && EDGE_COUNT (bb->succs) >= 2)
 	    num_branches++, num_never_executed++;
@@ -704,7 +704,9 @@ compute_value_histograms (histogram_valu
       free (histogram_counts[t]);
 }
 
-#define BB_TO_GCOV_INDEX(bb)  ((bb)->index + 1)
+/* The entry basic block will be moved around so that it has index=1,
+   there is nothing at index 0 and the exit is at n_basic_block.  */
+#define BB_TO_GCOV_INDEX(bb)  ((bb)->index - 1)
 /* When passed NULL as file_name, initialize.
    When passed something else, output the necessary commands to change
    line to LINE and offset to FILE_NAME.  */
@@ -917,7 +919,7 @@ branch_prob (void)
 	num_instrumented++;
     }
 
-  total_num_blocks += n_basic_blocks + 2;
+  total_num_blocks += n_basic_blocks;
   if (dump_file)
     fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);
 
@@ -938,7 +940,7 @@ branch_prob (void)
       gcov_position_t offset;
 
       offset = gcov_write_tag (GCOV_TAG_BLOCKS);
-      for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
+      for (i = 0; i != (unsigned) (n_basic_blocks); i++)
 	gcov_write_unsigned (0);
       gcov_write_length (offset);
     }
@@ -946,7 +948,7 @@ branch_prob (void)
    /* Keep all basic block indexes nonnegative in the gcov output.
       Index 0 is used for entry block, last index is for exit block.
       */
-  ENTRY_BLOCK_PTR->index = -1;
+  ENTRY_BLOCK_PTR->index = 1;
   EXIT_BLOCK_PTR->index = last_basic_block;
 
   /* Arcs */
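
For the gcov index arithmetic above, a small worked example (mine, not
from the patch): with NUM_FIXED_BLOCKS == 2 and three real blocks, the
array indices are

  entry = 0, exit = 1, real = 2, 3, 4	(last_basic_block = 5)

branch_prob temporarily sets ENTRY_BLOCK_PTR->index = 1 and
EXIT_BLOCK_PTR->index = last_basic_block = 5, so BB_TO_GCOV_INDEX,
which subtracts 1, yields

  entry -> 0, real -> 1, 2, 3, exit -> 4

a dense zero-based range with the entry first and the exit last, which
is the layout the gcov output expects.
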
Index: tree-flow-inline.h
===================================================================
--- tree-flow-inline.h	(revision 108712)
+++ tree-flow-inline.h	(working copy)
@@ -702,7 +702,7 @@ bsi_start (basic_block bb)
     bsi.tsi = tsi_start (bb->stmt_list);
   else
     {
-      gcc_assert (bb->index < 0);
+      gcc_assert (bb->index < NUM_FIXED_BLOCKS);
       bsi.tsi.ptr = NULL;
       bsi.tsi.container = NULL;
     }
@@ -723,7 +723,7 @@ bsi_after_labels (basic_block bb)
 
   if (!bb->stmt_list)
     {
-      gcc_assert (bb->index < 0);
+      gcc_assert (bb->index < NUM_FIXED_BLOCKS);
       bsi.tsi.ptr = NULL;
       bsi.tsi.container = NULL;
       return bsi;
@@ -756,7 +756,7 @@ bsi_last (basic_block bb)
     bsi.tsi = tsi_last (bb->stmt_list);
   else
     {
-      gcc_assert (bb->index < 0);
+      gcc_assert (bb->index < NUM_FIXED_BLOCKS);
       bsi.tsi.ptr = NULL;
       bsi.tsi.container = NULL;
     }
Index: tree-ssa-phiopt.c
===================================================================
--- tree-ssa-phiopt.c	(revision 108712)
+++ tree-ssa-phiopt.c	(working copy)
@@ -148,9 +148,9 @@ tree_ssa_phiopt (void)
      outer ones, and also that we do not try to visit a removed
      block.  */
   bb_order = blocks_in_phiopt_order ();
-  n = n_basic_blocks;
+  n = n_basic_blocks - NUM_FIXED_BLOCKS;
 
-  for (i = 0; i < n; i++)
+  for (i = 0; i < n; i++) 
     {
       tree cond_expr;
       tree phi;
@@ -248,11 +248,12 @@ blocks_in_phiopt_order (void)
 {
   basic_block x, y;
   basic_block *order = xmalloc (sizeof (basic_block) * n_basic_blocks);
-  unsigned n = n_basic_blocks, np, i;
-  sbitmap visited = sbitmap_alloc (last_basic_block + 2);
+  unsigned n = n_basic_blocks - NUM_FIXED_BLOCKS; 
+  unsigned np, i;
+  sbitmap visited = sbitmap_alloc (last_basic_block); 
 
-#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index + 2))
-#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index + 2))
+#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index)) 
+#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index)) 
 
   sbitmap_zero (visited);
 
Index: bt-load.c
===================================================================
--- bt-load.c	(revision 108712)
+++ bt-load.c	(working copy)
@@ -461,7 +461,7 @@ compute_defs_uses_and_gen (fibheap_t all
   defs_uses_info info;
 
   sbitmap_vector_zero (bb_gen, n_basic_blocks);
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
     {
       basic_block bb = BASIC_BLOCK (i);
       int reg;
@@ -622,7 +622,7 @@ compute_kill (sbitmap *bb_kill, sbitmap 
   /* For each basic block, form the set BB_KILL - the set
      of definitions that the block kills.  */
   sbitmap_vector_zero (bb_kill, n_basic_blocks);
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
     {
       for (regno = first_btr; regno <= last_btr; regno++)
 	if (TEST_HARD_REG_BIT (all_btrs, regno)
@@ -645,14 +645,14 @@ compute_out (sbitmap *bb_out, sbitmap *b
   int changed;
   sbitmap bb_in = sbitmap_alloc (max_uid);
 
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
     sbitmap_copy (bb_out[i], bb_gen[i]);
 
   changed = 1;
   while (changed)
     {
       changed = 0;
-      for (i = 0; i < n_basic_blocks; i++)
+      for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
 	{
 	  sbitmap_union_of_preds (bb_in, bb_out, i);
 	  changed |= sbitmap_union_of_diff_cg (bb_out[i], bb_gen[i],
@@ -671,7 +671,7 @@ link_btr_uses (btr_def *def_array, btr_u
 
   /* Link uses to the uses lists of all of their reaching defs.
      Count up the number of reaching defs of each use.  */
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
     {
       basic_block bb = BASIC_BLOCK (i);
       rtx insn;
@@ -1397,7 +1397,7 @@ migrate_btr_defs (enum reg_class btr_cla
     {
       int i;
 
-      for (i = 0; i < n_basic_blocks; i++)
+      for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
 	{
 	  basic_block bb = BASIC_BLOCK (i);
 	  fprintf(dump_file,
Index: tree-dfa.c
===================================================================
--- tree-dfa.c	(revision 108712)
+++ tree-dfa.c	(working copy)
@@ -484,10 +484,11 @@ collect_dfa_stats (struct dfa_stats_d *d
   memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));
 
   /* Walk all the trees in the function counting references.  Start at
-     basic block 0, but don't stop at block boundaries.  */
+     basic block NUM_FIXED_BLOCKS, but don't stop at block boundaries.  */
   pset = pointer_set_create ();
 
-  for (i = bsi_start (BASIC_BLOCK (0)); !bsi_end_p (i); bsi_next (&i))
+  for (i = bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
+       !bsi_end_p (i); bsi_next (&i))
     walk_tree (bsi_stmt_ptr (i), collect_dfa_stats_r, (void *) dfa_stats_p,
 	       pset);
 
Index: cfgcleanup.c
===================================================================
--- cfgcleanup.c	(revision 108712)
+++ cfgcleanup.c	(working copy)
@@ -441,7 +441,7 @@ try_forward_edges (int mode, basic_block
 	}
 
       target = first = e->dest;
-      counter = 0;
+      counter = NUM_FIXED_BLOCKS;
 
       /* If we are partitioning hot/cold basic_blocks, we don't want to mess
 	 up jumps that cross between hot/cold sections.
@@ -503,7 +503,7 @@ try_forward_edges (int mode, basic_block
 		  if (t->dest == b)
 		    break;
 
-		  gcc_assert (nthreaded_edges < n_basic_blocks);
+		  gcc_assert (nthreaded_edges < n_basic_blocks - NUM_FIXED_BLOCKS);
 		  threaded_edges[nthreaded_edges++] = t;
 
 		  new_target = t->dest;
@@ -1676,7 +1676,7 @@ try_optimize_cfg (int mode)
 		  /* Note that forwarder_block_p true ensures that
 		     there is a successor for this block.  */
 		  && (single_succ_edge (b)->flags & EDGE_FALLTHRU)
-		  && n_basic_blocks > 1)
+		  && n_basic_blocks > NUM_FIXED_BLOCKS + 1)
 		{
 		  if (dump_file)
 		    fprintf (dump_file,
Index: cfglayout.c
===================================================================
--- cfglayout.c	(revision 108712)
+++ cfglayout.c	(working copy)
@@ -601,7 +601,7 @@ fixup_reorder_chain (void)
   /* First do the bulk reordering -- rechain the blocks without regard to
      the needed changes to jumps and labels.  */
 
-  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
+  for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
        bb != 0;
        bb = bb->aux, index++)
     {
@@ -818,7 +818,7 @@ fixup_reorder_chain (void)
   if (dump_file)
     {
       fprintf (dump_file, "Reordered sequence:\n");
-      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
+      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
 	   bb;
 	   bb = bb->aux, index++)
 	{
@@ -837,7 +837,7 @@ fixup_reorder_chain (void)
 
   prev_bb = ENTRY_BLOCK_PTR;
   bb = ENTRY_BLOCK_PTR->next_bb;
-  index = 0;
+  index = NUM_FIXED_BLOCKS;
 
   for (; bb; prev_bb = bb, bb = bb->aux, index ++)
     {
Index: combine.c
===================================================================
--- combine.c	(revision 108712)
+++ combine.c	(working copy)
@@ -55,7 +55,7 @@ Software Foundation, 51 Franklin Street,
    - reg_live_length is not updated
    - reg_n_refs is not adjusted in the rare case when a register is
      no longer required in a computation
-   - there are extremely rare cases (see distribute_regnotes) when a
+   - there are extremely rare cases (see distribute_notes) when a
      REG_DEAD note is lost
    - a LOG_LINKS entry that refers to an insn with multiple SETs may be
      removed because there is no way to know which register it was
Index: bb-reorder.c
===================================================================
--- bb-reorder.c	(revision 108712)
+++ bb-reorder.c	(working copy)
@@ -1894,7 +1894,7 @@ reorder_basic_blocks (unsigned int flags
   int i;
   struct trace *traces;
 
-  if (n_basic_blocks <= 1)
+  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
     return;
 
   if (targetm.cannot_modify_jumps_p ())
@@ -1986,7 +1986,7 @@ duplicate_computed_gotos (void)
   bitmap candidates;
   int max_size;
 
-  if (n_basic_blocks <= 1)
+  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
     return;
 
   if (targetm.cannot_modify_jumps_p ())
@@ -2169,7 +2169,7 @@ partition_hot_cold_basic_blocks (void)
   int n_crossing_edges;
   int max_edges = 2 * last_basic_block;
   
-  if (n_basic_blocks <= 1)
+  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
     return;
   
   crossing_edges = xcalloc (max_edges, sizeof (edge));
@@ -2177,8 +2177,8 @@ partition_hot_cold_basic_blocks (void)
   cfg_layout_initialize (0);
   
   FOR_EACH_BB (cur_bb)
-    if (cur_bb->index >= 0
- 	&& cur_bb->next_bb->index >= 0)
+    if (cur_bb->index >= NUM_FIXED_BLOCKS
+ 	&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
       cur_bb->aux = cur_bb->next_bb;
   
   find_rarely_executed_basic_blocks_and_crossing_edges (crossing_edges, 
Index: var-tracking.c
===================================================================
--- var-tracking.c	(revision 108712)
+++ var-tracking.c	(working copy)
@@ -1638,10 +1638,10 @@ vt_find_locations (void)
 
   /* Compute reverse completion order of depth first search of the CFG
      so that the data-flow runs faster.  */
-  rc_order = xmalloc (n_basic_blocks * sizeof (int));
+  rc_order = xmalloc ((n_basic_blocks - NUM_FIXED_BLOCKS) * sizeof (int));
   bb_order = xmalloc (last_basic_block * sizeof (int));
   flow_depth_first_order_compute (NULL, rc_order);
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
     bb_order[rc_order[i]] = i;
   free (rc_order);
 
Index: cfgloop.c
===================================================================
--- cfgloop.c	(revision 108712)
+++ cfgloop.c	(working copy)
@@ -73,7 +73,7 @@ flow_loops_cfg_dump (const struct loops 
   if (loops->cfg.dfs_order)
     {
       fputs (";; DFS order: ", file);
-      for (i = 0; i < n_basic_blocks; i++)
+      for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
 	fprintf (file, "%d ", loops->cfg.dfs_order[i]);
 
       fputs ("\n", file);
@@ -83,7 +83,7 @@ flow_loops_cfg_dump (const struct loops 
   if (loops->cfg.rc_order)
     {
       fputs (";; RC order: ", file);
-      for (i = 0; i < n_basic_blocks; i++)
+      for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
 	fprintf (file, "%d ", loops->cfg.rc_order[i]);
 
       fputs ("\n", file);
@@ -610,7 +610,7 @@ flow_loops_find (struct loops *loops)
 
   /* Taking care of this degenerate case makes the rest of
      this code simpler.  */
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return 0;
 
   dfs_order = NULL;
@@ -676,7 +676,7 @@ flow_loops_find (struct loops *loops)
   loops->parray[0]->outer = NULL;
   loops->parray[0]->depth = 0;
   loops->parray[0]->pred = NULL;
-  loops->parray[0]->num_nodes = n_basic_blocks + 2;
+  loops->parray[0]->num_nodes = n_basic_blocks;
   loops->parray[0]->latch = EXIT_BLOCK_PTR;
   loops->parray[0]->header = ENTRY_BLOCK_PTR;
   ENTRY_BLOCK_PTR->loop_father = loops->parray[0];
@@ -704,7 +704,7 @@ flow_loops_find (struct loops *loops)
 
       num_loops = 1;
 
-      for (b = 0; b < n_basic_blocks; b++)
+      for (b = 0; b < n_basic_blocks - NUM_FIXED_BLOCKS; b++)
 	{
 	  struct loop *loop;
 	  edge_iterator ei;
@@ -804,7 +804,7 @@ get_loop_body (const struct loop *loop)
   if (loop->latch == EXIT_BLOCK_PTR)
     {
       /* There may be blocks unreachable from EXIT_BLOCK.  */
-      gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks + 2);
+      gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks);
       FOR_EACH_BB (bb)
 	tovisit[tv++] = bb;
       tovisit[tv++] = EXIT_BLOCK_PTR;
Index: sched-rgn.c
===================================================================
--- sched-rgn.c	(revision 108712)
+++ sched-rgn.c	(working copy)
@@ -999,7 +999,7 @@ compute_trg_info (int trg)
   sp->is_speculative = 0;
   sp->src_prob = 100;
 
-  visited = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));
+  visited = sbitmap_alloc (last_basic_block);
 
   for (i = trg + 1; i < current_nr_blocks; i++)
     {
@@ -1044,8 +1044,7 @@ compute_trg_info (int trg)
 	      block = el.first_member[j]->src;
 	      FOR_EACH_EDGE (e, ei, block->succs)
 		{
-		  if (!TEST_BIT (visited,
-				 e->dest->index - (INVALID_BLOCK + 1)))
+		  if (!TEST_BIT (visited, e->dest->index))
 		    {
 		      for (k = 0; k < el.nr_members; k++)
 			if (e == el.first_member[k])
@@ -1054,8 +1053,7 @@ compute_trg_info (int trg)
 		      if (k >= el.nr_members)
 			{
 			  bblst_table[bblst_last++] = e->dest;
-			  SET_BIT (visited,
-				   e->dest->index - (INVALID_BLOCK + 1));
+			  SET_BIT (visited, e->dest->index);
 			  update_idx++;
 			}
 		    }
@@ -2469,7 +2467,7 @@ init_regions (void)
 
   /* Compute regions for scheduling.  */
   if (reload_completed
-      || n_basic_blocks == 1
+      || n_basic_blocks == NUM_FIXED_BLOCKS + 1
       || !flag_schedule_interblock
       || is_cfg_nonregular ())
     {
@@ -2526,18 +2524,16 @@ schedule_insns (FILE *dump_file)
 
   /* Taking care of this degenerate case makes the rest of
      this code simpler.  */
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return;
 
   nr_inter = 0;
   nr_spec = 0;
-
   sched_init (dump_file);
 
   init_regions ();
 
   current_sched_info = &region_sched_info;
-
   /* Schedule every region in the subroutine.  */
   for (rgn = 0; rgn < nr_regions; rgn++)
     schedule_region (rgn);
Index: basic-block.h
===================================================================
--- basic-block.h	(revision 108712)
+++ basic-block.h	(working copy)
@@ -430,12 +430,12 @@ extern bool rediscover_loops_after_threa
 /* For iterating over insns in basic block.  */
 #define FOR_BB_INSNS(BB, INSN)			\
   for ((INSN) = BB_HEAD (BB);			\
-       (INSN) != NEXT_INSN (BB_END (BB));	\
+       (INSN) && (INSN) != NEXT_INSN (BB_END (BB));	\
        (INSN) = NEXT_INSN (INSN))
 
 #define FOR_BB_INSNS_REVERSE(BB, INSN)		\
   for ((INSN) = BB_END (BB);			\
-       (INSN) != PREV_INSN (BB_HEAD (BB));	\
+       (INSN) && (INSN) != PREV_INSN (BB_HEAD (BB));	\
        (INSN) = PREV_INSN (INSN))
 
 /* Cycles through _all_ basic blocks, even the fake ones (entry and
@@ -467,11 +467,12 @@ extern bitmap_obstack reg_obstack;
 #define BB_END(B)       (B)->il.rtl->end_
 
 /* Special block numbers [markers] for entry and exit.  */
-#define ENTRY_BLOCK (-1)
-#define EXIT_BLOCK (-2)
+#define ENTRY_BLOCK (0)
+#define EXIT_BLOCK (1)
+
+/* The two blocks that are always in the cfg.  */
+#define NUM_FIXED_BLOCKS (2)
 
-/* Special block number not valid for any block.  */
-#define INVALID_BLOCK (-3)
 
 #define BLOCK_NUM(INSN)	      (BLOCK_FOR_INSN (INSN)->index + 0)
 #define set_block_for_insn(INSN, BB)  (BLOCK_FOR_INSN (INSN) = BB)
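
The iterator change above is what the ChangeLog means by "tolerant of
blocks with no insns": the entry and exit blocks now sit in the
ordinary block array, so index-driven walks can reach them, and their
BB_HEAD/BB_END are NULL.  A minimal sketch of the case the new guard
handles (my construction, not from the patch; count_insn is a
hypothetical stand-in):

  int i;
  basic_block bb;
  rtx insn;

  /* Slots 0 and 1 are now ENTRY and EXIT, so this walk visits the
     fake blocks too.  */
  for (i = 0; i < last_basic_block; i++)
    {
      bb = BASIC_BLOCK (i);

      /* For the fake blocks BB_HEAD (bb) is NULL; the new "(INSN) &&"
	 test short-circuits the condition before NEXT_INSN (BB_END (BB))
	 is evaluated, so the loop runs zero times there instead of
	 dereferencing NULL.  */
      FOR_BB_INSNS (bb, insn)
	count_insn (insn);
    }
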
Index: tree-cfg.c
===================================================================
--- tree-cfg.c	(revision 108712)
+++ tree-cfg.c	(working copy)
@@ -129,14 +129,16 @@ init_empty_tree_cfg (void)
   /* Initialize the basic block array.  */
   init_flow ();
   profile_status = PROFILE_ABSENT;
-  n_basic_blocks = 0;
-  last_basic_block = 0;
+  n_basic_blocks = NUM_FIXED_BLOCKS;
+  last_basic_block = NUM_FIXED_BLOCKS;
   VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
 
   /* Build a mapping of labels to their associated blocks.  */
   VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
 		  "label to block map");
 
+  BASIC_BLOCK (ENTRY_BLOCK) = ENTRY_BLOCK_PTR;
+  BASIC_BLOCK (EXIT_BLOCK) = EXIT_BLOCK_PTR;
   ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
   EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
 }
@@ -170,7 +172,7 @@ build_tree_cfg (tree *tp)
     factor_computed_gotos ();
 
   /* Make sure there is always at least one block, even if it's empty.  */
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     create_empty_bb (ENTRY_BLOCK_PTR);
 
   /* Adjust the size of the array.  */
@@ -430,7 +432,7 @@ make_edges (void)
 
   /* Create an edge from entry to the first block with executable
      statements in it.  */
-  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
+  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
 
   /* Traverse the basic block array placing edges.  */
   FOR_EACH_BB (bb)
@@ -794,7 +796,8 @@ label_to_block_fn (struct function *ifun
      and undefined variable warnings quite right.  */
   if ((errorcount || sorrycount) && uid < 0)
     {
-      block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
+      block_stmt_iterator bsi = 
+	bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
       tree stmt;
 
       stmt = build1 (LABEL_EXPR, void_type_node, dest);
@@ -4656,7 +4659,7 @@ print_loop_ir (FILE *file)
 {
   basic_block bb;
   
-  bb = BASIC_BLOCK (0);
+  bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
   if (bb && bb->loop_father)
     print_loop (file, bb->loop_father, 0);
 }
@@ -4738,7 +4741,7 @@ tree_flow_call_edges_add (sbitmap blocks
   int last_bb = last_basic_block;
   bool check_last_block = false;
 
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return 0;
 
   if (! blocks)
Index: tree-ssa-reassoc.c
===================================================================
--- tree-ssa-reassoc.c	(revision 108712)
+++ tree-ssa-reassoc.c	(working copy)
@@ -234,7 +234,6 @@ operand_entry_eq (const void *p1, const 
   return vr1->op == vr2->op;
 }
 
-
 /* Given an expression E, return the rank of the expression.  */
 
 static unsigned int
@@ -1452,7 +1451,7 @@ init_reassoc (void)
     }
 
   /* Set up rank for each BB  */
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
     bb_rank[bbs[i]] = ++rank  << 16;
 
   free (bbs);
Index: cfgrtl.c
===================================================================
--- cfgrtl.c	(revision 108712)
+++ cfgrtl.c	(working copy)
@@ -440,7 +440,8 @@ struct tree_opt_pass pass_free_cfg =
 rtx
 entry_of_function (void)
 {
-  return (n_basic_blocks ? BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
+  return (n_basic_blocks > NUM_FIXED_BLOCKS ? 
+	  BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
 }
 
 /* Update insns block within BB.  */
@@ -2258,7 +2259,7 @@ rtl_verify_flow_info (void)
 	curr_bb = NULL;
     }
 
-  if (num_bb_notes != n_basic_blocks)
+  if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
     internal_error
       ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
        num_bb_notes, n_basic_blocks);
@@ -2913,7 +2914,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
   int last_bb = last_basic_block;
   bool check_last_block = false;
 
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return 0;
 
   if (! blocks)
@@ -2960,7 +2961,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
      calls since there is no way that we can determine if they will
      return or not...  */
 
-  for (i = 0; i < last_bb; i++)
+  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
     {
       basic_block bb = BASIC_BLOCK (i);
       rtx insn;
