This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

Here is the regions patch for those who wanted to play with it.


The current version uses too much space in the basic block. I will fix this later.


? gcc/regions.c
Index: gcc/Makefile.in
===================================================================
RCS file: /cvs/gcc/gcc/gcc/Makefile.in,v
retrieving revision 1.1543
diff -u -p -r1.1543 Makefile.in
--- gcc/Makefile.in	28 Sep 2005 23:49:58 -0000	1.1543
+++ gcc/Makefile.in	29 Sep 2005 18:42:46 -0000
@@ -960,8 +960,8 @@ OBJS-common = \
  insn-extract.o insn-opinit.o insn-output.o insn-peep.o insn-recog.o	   \
  integrate.o intl.o jump.o  langhooks.o lcm.o lists.o local-alloc.o  	   \
  loop.o mode-switching.o modulo-sched.o optabs.o options.o opts.o	   \
- params.o postreload.o postreload-gcse.o predict.o			   \
- insn-preds.o pointer-set.o 					   	   \
+ params.o postreload.o postreload-gcse.o predict.o regions.o	   \
+ insn-preds.o pointer-set.o						   \
  print-rtl.o print-tree.o profile.o value-prof.o var-tracking.o		   \
  real.o recog.o reg-stack.o regclass.o regmove.o regrename.o		   \
  reload.o reload1.o reorg.o resource.o rtl.o rtlanal.o rtl-error.o	   \
@@ -2339,6 +2339,9 @@ local-alloc.o : local-alloc.c $(CONFIG_H
    $(RTL_H) $(FLAGS_H) $(REGS_H) hard-reg-set.h insn-config.h $(RECOG_H) \
    output.h function.h $(INSN_ATTR_H) toplev.h  except.h reload.h $(TM_P_H) \
    $(GGC_H) $(INTEGRATE_H) timevar.h tree-pass.h
+regions.o : regions.c $(CONFIG_H) $(SYSTEM_H)  coretypes.h $(TM_H) $(RTL_H) \
+   $(CFGLOOP_H) $(BASIC_BLOCK_H) $(REGS_H) $(TM_H) $(RTL_H) \
+   bitmap.h $(DF_H)
 bitmap.o : bitmap.c $(CONFIG_H) $(SYSTEM_H)  coretypes.h $(TM_H) $(RTL_H) \
    $(FLAGS_H) $(GGC_H) gt-bitmap.h bitmap.h $(OBSTACK_H)
 global.o : global.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
Index: gcc/basic-block.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/basic-block.h,v
retrieving revision 1.269
diff -u -p -r1.269 basic-block.h
--- gcc/basic-block.h	11 Jul 2005 13:31:41 -0000	1.269
+++ gcc/basic-block.h	29 Sep 2005 18:42:47 -0000
@@ -185,6 +185,165 @@ struct loops;
 struct edge_prediction;
 struct rtl_bb_info;
 
+/* Control flow analysis information.  */
+
+enum region_type {
+  SINGLE_NODE_REGION,  /* Regular basic block.  */
+  ROOT_REGION,         /* Entry region.  */
+
+  IMPROPER_REGION,     /* Various loops.  */
+  REDUCED_LOOP_REGION, /* Generic reducible loop.  */
+
+  BLOCK_REGION,        /* Sequence of blocks.  */
+
+  IF_EXIT_ELSE_REGION, /* Condition where one branch exits */
+  IF_THEN_EXIT_REGION, /* enclosing loop or root.  */
+  BRANCH2_EXIT_REGION, 
+
+  IF_THEN_ELSE_REGION, /* Standard conditional branch diamonds.  */
+  DIAMOND_REGION,
+  
+  COMPLEX_CASE_REGION, /* Complex cases for switch.  */
+  SWITCH_REGION        /* Switch region.  */
+};
+
+/* IMPROPER_REGION has a set of entry and exit nodes. */
+struct improper_region_s {
+  /* The real entries to the region.  */
+  int entry_count;
+  struct basic_block_def **entries;
+};
+  
+/* For the IF_* regions, one of the arms corresponds to the edge with
+   EDGE_TRUE_VALUE and the other to the edge with EDGE_FALSE_VALUE.
+   BRANCH2 and DIAMOND are used when we do not have that kind of
+   information.  For the next 5 types, the conditional is the entry
+   node of the region.  */
+
+/* The next three regions are for conditional branches where one of
+   the arms exits the current loop or returns and the other arm
+   remains within the loop. */
+struct if_exit_else_region_s {
+  struct basic_block_def *exit_region;
+  struct basic_block_def *else_region;
+  int num_loops_exited;
+};
+
+struct if_then_exit_region_s {
+  struct basic_block_def *then_region;
+  struct basic_block_def *exit_region;
+  int num_loops_exited;
+};
+
+struct branch2_exit_region_s {
+  struct basic_block_def *local_region;
+  struct basic_block_def *exit_region;
+  int num_loops_exited;
+};
+
+/* The next two regions are diamond shaped.  */
+
+struct if_then_else_region_s {
+  struct basic_block_def *then_region;
+  struct basic_block_def *else_region;
+};
+
+/* This is the same as an IF_THEN_ELSE region but is created when the
+   cfg edges are not marked (as in the rtl part of the
+   compiler). There are other cases that will also not have a marking.
+   A common example will be if the condition is not a simple node, but
+   a two exit loop. If the two exits happen to join back together, a
+   diamond region will be formed.
+ */
+
+struct diamond_region_s {
+  struct basic_block_def *first_region;
+  struct basic_block_def *second_region;
+};
+
+/* The COMPLEX_CASE_REGION is generated when the case has been broken
+   into two pieces.  There are three reasons why this might happen:
+
+   1) This case has the previous case drop into it.  When this
+   happens, critical edge splitting will insert a empty block between
+   the conditional and the code for the case.  The empty block will be
+   the only entry in the vector ENTRY_CASES the FIRST_REGION and the
+   original case will be the CASE_REGION.  The PREVIOUS_CASE is set to
+   point to the region that falls into this case.
+
+   2) This case has a group of labels that point to it.  In this case,
+   the critical edge splitting will also insert extra empty blocks
+   between the conditional and the CASE_REGION.  All of those empty
+   blocks are gathered into a ENTRY_CASES vector.  The PREVIOUS_CASE
+   will be NULL if there is no drop in.
+
+   3) Both (1) and (2) happen for this case.  When that happens, the
+   ENTRY_CASES will have more than one element and the PREVIOUS_CASE
+   will be not NULL.  */
+struct complex_case_region_s {
+  struct basic_block_def *previous_region;
+  unsigned int entry_case_count; 
+  struct basic_block_def ** entry_cases; 
+  struct basic_block_def *case_region;
+};
+
+struct switch_region_s {
+  struct basic_block_def *condition;
+  unsigned int case_count;
+
+  /* The regular and return cases are vectors of vectors of arms.
+     Each inner vector contain one list of cases that all drop thru to
+     the next.  The last one being the one with the break.  To support
+     this, there is a vector of ints that says how many arms are in the
+     vec.*/ 
+  unsigned int regular_case_count_count; 
+  unsigned int *regular_case_counts; 
+  struct basic_block_def *** regular_cases; 
+
+  unsigned int return_case_count_count; 
+  unsigned int *return_case_counts; 
+  struct basic_block_def *** return_cases;
+
+  /* For cases that loop exit, there is not enough structure to see if
+     any of them dropped down since all of these are technically
+     outside of the loop being analyzed.  */
+  unsigned int exit_case_count; 
+  struct basic_block_def ** exit_cases; 
+};
+
+typedef union region_details_u_a {
+  struct improper_region_s improper_region;
+
+  struct if_exit_else_region_s if_exit_else_region;
+  struct if_then_exit_region_s if_then_exit_region;
+  struct branch2_exit_region_s branch2_exit_region;
+
+  struct if_then_else_region_s if_then_else_region;
+  struct diamond_region_s diamond_region;
+
+  struct complex_case_region_s complex_case_region;
+  struct switch_region_s switch_region;
+} region_details_u;
+
+/* This structure is allocated for all non regular basic blocks.  */
+struct basic_block_region_def GTY(())
+{
+  enum region_type type;
+  struct basic_block_def *region_head;
+  struct basic_block_def *first_bb_in_region;
+  struct basic_block_def *last_bb_in_region;
+
+  /* The set of basic blocks in this region AND in all of the regions
+     within this region.  */
+  bitmap basic_blocks_in_region;
+  /* The set of regions in this region AND in all of the regions
+     within this region.  */
+  bitmap regions_in_region;
+  union region_details_u_a *GTY ((skip)) region_details;
+};
+
+typedef struct basic_block_region_def *basic_block_region;
+
 /* A basic block is a sequence of instructions with only entry and
    only one exit.  If any one of the instructions are executed, they
    will all be executed, and in sequence from first to last.
@@ -211,7 +370,7 @@ struct rtl_bb_info;
    basic blocks.  */
 
 /* Basic block information indexed by block number.  */
-struct basic_block_def GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb")))
+struct basic_block_def GTY(())
 {
   /* Pointers to the first and last trees of the block.  */
   tree stmt_list;
@@ -233,6 +392,22 @@ struct basic_block_def GTY((chain_next (
   struct basic_block_def *prev_bb;
   struct basic_block_def *next_bb;
 
+  /* Parent, previous and next blocks in the current region chain.  */
+  struct basic_block_def *parent_region;
+  struct basic_block_def *prev_bb_in_region;
+  struct basic_block_def *next_bb_in_region;
+
+  /* Preds and succs in the region graph. */
+  int num_region_preds;
+  int num_region_succs;
+  struct basic_block_def ** GTY ((skip (""))) region_preds;
+  struct basic_block_def ** GTY ((skip (""))) region_succs;
+
+  /* This field is null if this is a regular basic block.  If this
+     basic_block is a region node, then this is a pointer to the
+     region information.  */
+  struct basic_block_region_def *region_info;
+
   union basic_block_il_dependent {
       struct rtl_bb_info * GTY ((tag ("1"))) rtl;
     } GTY ((desc ("((%1.flags & BB_RTL) != 0)"))) il;
@@ -246,7 +421,8 @@ struct basic_block_def GTY((chain_next (
   /* Expected number of executions: calculated in profile.c.  */
   gcov_type count;
 
-  /* The index of this block.  */
+  /* The index of this block.  ENTRY_BLOCK=0, EXIT_BLOCK=1, all other
+     regular blocks >1, all regions nodes <0. */
   int index;
 
   /* The loop depth of this block.  */
@@ -362,9 +538,15 @@ struct control_flow_graph GTY(())
   basic_block x_entry_block_ptr;
   basic_block x_exit_block_ptr;
 
+  /* The entry node of the structure graph if it exists.  */
+  basic_block x_root_block_ptr;
+
   /* Index by basic block number, get basic block struct info.  */
   varray_type x_basic_block_info;
 
+  /* Number of region nodes in the structure graph.  */
+  int x_n_region_blocks;
+
   /* Number of basic blocks in this flow graph.  */
   int x_n_basic_blocks;
 
@@ -388,8 +570,10 @@ struct control_flow_graph GTY(())
 /* Defines for accessing the fields of the CFG structure for function FN.  */
 #define ENTRY_BLOCK_PTR_FOR_FUNCTION(FN)     ((FN)->cfg->x_entry_block_ptr)
 #define EXIT_BLOCK_PTR_FOR_FUNCTION(FN)	     ((FN)->cfg->x_exit_block_ptr)
+#define ROOT_BLOCK_PTR_FOR_FUNCTION(FN)	     ((FN)->cfg->x_root_block_ptr)
 #define basic_block_info_for_function(FN)    ((FN)->cfg->x_basic_block_info)
 #define n_basic_blocks_for_function(FN)	     ((FN)->cfg->x_n_basic_blocks)
+#define n_region_blocks_for_function(FN)     ((FN)->cfg->x_n_region_blocks)
 #define n_edges_for_function(FN)	     ((FN)->cfg->x_n_edges)
 #define last_basic_block_for_function(FN)    ((FN)->cfg->x_last_basic_block)
 #define label_to_block_map_for_function(FN)  ((FN)->cfg->x_label_to_block_map)
@@ -400,8 +584,10 @@ struct control_flow_graph GTY(())
 /* Defines for textual backward source compatibility.  */
 #define ENTRY_BLOCK_PTR		(cfun->cfg->x_entry_block_ptr)
 #define EXIT_BLOCK_PTR		(cfun->cfg->x_exit_block_ptr)
+#define ROOT_BLOCK_PTR		(cfun->cfg->x_root_block_ptr)
 #define basic_block_info	(cfun->cfg->x_basic_block_info)
 #define n_basic_blocks		(cfun->cfg->x_n_basic_blocks)
+#define n_region_blocks		(cfun->cfg->x_n_region_blocks)
 #define n_edges			(cfun->cfg->x_n_edges)
 #define last_basic_block	(cfun->cfg->x_last_basic_block)
 #define label_to_block_map	(cfun->cfg->x_label_to_block_map)
@@ -427,17 +613,36 @@ extern bool rediscover_loops_after_threa
 
 #define FOR_EACH_BB_REVERSE(BB) FOR_EACH_BB_REVERSE_FN(BB, cfun)
 
+#define FOR_EACH_BB_IN_REGION(BB, REGION) \
+  for (BB= (REGION)->region_info->first_bb_in_region; BB; BB = BB->next_bb_in_region)
+
+#define FOR_EACH_BB_REVERSE_IN_REGION(BB, REGION) \
+  for (BB= (REGION)->region_info->last_bb_in_region; BB; BB = BB->prev_bb_in_region)
+
 /* For iterating over insns in basic block.  */
 #define FOR_BB_INSNS(BB, INSN)			\
   for ((INSN) = BB_HEAD (BB);			\
-       (INSN) != NEXT_INSN (BB_END (BB));	\
+       (INSN) && (INSN) != NEXT_INSN (BB_END (BB));	\
        (INSN) = NEXT_INSN (INSN))
 
+/* For iterating over insns in basic block when we might remove the
+   current insn.  */
+#define FOR_BB_INSNS_SAFE(BB, INSN, CURR)			\
+  for ((INSN) = BB_HEAD (BB), (CURR)=(INSN) ? NEXT_INSN ((INSN)): NULL;	\
+       (INSN) && (INSN) != NEXT_INSN (BB_END (BB));	\
+       (INSN) = (CURR), (CURR) = (INSN) ? NEXT_INSN ((INSN)) : NULL)
+       
+
 #define FOR_BB_INSNS_REVERSE(BB, INSN)		\
   for ((INSN) = BB_END (BB);			\
-       (INSN) != PREV_INSN (BB_HEAD (BB));	\
+       (INSN) && (INSN) != PREV_INSN (BB_HEAD (BB));	\
        (INSN) = PREV_INSN (INSN))
 
+#define FOR_BB_INSNS_REVERSE_SAFE(BB, INSN, CURR)	\
+  for ((INSN) = BB_END (BB),(CURR)=(INSN) ? PREV_INSN ((INSN)) : NULL;	\
+       (INSN) && (INSN) != PREV_INSN (BB_HEAD (BB));	\
+       (INSN) = (CURR), (CURR) = (INSN) ? PREV_INSN ((INSN)) : NULL)
+
 /* Cycles through _all_ basic blocks, even the fake ones (entry and
    exit block).  */
 
@@ -467,11 +672,12 @@ extern bitmap_obstack reg_obstack;
 #define BB_END(B)       (B)->il.rtl->end_
 
 /* Special block numbers [markers] for entry and exit.  */
-#define ENTRY_BLOCK (-1)
-#define EXIT_BLOCK (-2)
+#define ENTRY_BLOCK (0)
+#define EXIT_BLOCK (1)
+
+/* The two blocks that are always in the cfg.  */
+#define NUM_FIXED_BLOCKS (2)
 
-/* Special block number not valid for any block.  */
-#define INVALID_BLOCK (-3)
 
 #define BLOCK_NUM(INSN)	      (BLOCK_FOR_INSN (INSN)->index + 0)
 #define set_block_for_insn(INSN, BB)  (BLOCK_FOR_INSN (INSN) = BB)
@@ -989,6 +1195,24 @@ extern basic_block get_bb_original (basi
 extern void set_bb_copy (basic_block, basic_block);
 extern basic_block get_bb_copy (basic_block);
 
+
+/* In regions.c.  */
+extern void create_regions (bool);
+extern void delete_regions (void);
+extern int split_blocks_for_regions (void);
+extern enum region_type get_region_type (basic_block); 
+extern const char * get_region_type_name (enum region_type); 
+extern void dump_structure_graph (void);
+extern void debug_region_bb (basic_block); 
+extern bool single_block_region_p (basic_block);
+extern bool loop_region_p (basic_block);
+extern bool single_entry_region_p (basic_block);
+extern bool single_exit_region_p (basic_block);
+extern bool get_real_entry_blocks (basic_block, bitmap, bitmap);
+extern bool get_real_exit_blocks (basic_block, bitmap, bitmap);
+extern void get_real_blocks (basic_block, bitmap);
+
+
 #include "cfghooks.h"
 
 #endif /* GCC_BASIC_BLOCK_H */
Index: gcc/bb-reorder.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/bb-reorder.c,v
retrieving revision 1.113
diff -u -p -r1.113 bb-reorder.c
--- gcc/bb-reorder.c	20 Sep 2005 21:48:34 -0000	1.113
+++ gcc/bb-reorder.c	29 Sep 2005 18:42:47 -0000
@@ -1893,7 +1893,7 @@ reorder_basic_blocks (unsigned int flags
   int i;
   struct trace *traces;
 
-  if (n_basic_blocks <= 1)
+  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
     return;
 
   if (targetm.cannot_modify_jumps_p ())
@@ -1985,7 +1985,7 @@ duplicate_computed_gotos (void)
   bitmap candidates;
   int max_size;
 
-  if (n_basic_blocks <= 1)
+  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
     return;
 
   if (targetm.cannot_modify_jumps_p ())
@@ -2168,7 +2168,7 @@ partition_hot_cold_basic_blocks (void)
   int n_crossing_edges;
   int max_edges = 2 * last_basic_block;
   
-  if (n_basic_blocks <= 1)
+  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
     return;
   
   crossing_edges = xcalloc (max_edges, sizeof (edge));
@@ -2176,8 +2176,8 @@ partition_hot_cold_basic_blocks (void)
   cfg_layout_initialize (0);
   
   FOR_EACH_BB (cur_bb)
-    if (cur_bb->index >= 0
- 	&& cur_bb->next_bb->index >= 0)
+    if (cur_bb->index >= NUM_FIXED_BLOCKS
+ 	&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
       cur_bb->aux = cur_bb->next_bb;
   
   find_rarely_executed_basic_blocks_and_crossing_edges (crossing_edges, 
Index: gcc/bitmap.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/bitmap.c,v
retrieving revision 1.72
diff -u -p -r1.72 bitmap.c
--- gcc/bitmap.c	16 Aug 2005 00:13:16 -0000	1.72
+++ gcc/bitmap.c	29 Sep 2005 18:42:47 -0000
@@ -40,7 +40,7 @@ static void bitmap_element_free (bitmap,
 static bitmap_element *bitmap_element_allocate (bitmap);
 static int bitmap_element_zerop (bitmap_element *);
 static void bitmap_element_link (bitmap, bitmap_element *);
-static bitmap_element *bitmap_elt_insert_after (bitmap, bitmap_element *);
+static bitmap_element *bitmap_elt_insert_after (bitmap, bitmap_element *, unsigned int);
 static void bitmap_elt_clear_from (bitmap, bitmap_element *);
 static bitmap_element *bitmap_find_bit (bitmap, unsigned int);
 
@@ -89,6 +89,8 @@ bitmap_element_free (bitmap head, bitmap
       head->current = next != 0 ? next : prev;
       if (head->current)
 	head->indx = head->current->indx;
+      else 
+	head->indx = 0;
     }
   bitmap_elem_to_freelist (head, elt);
 }
@@ -351,14 +353,18 @@ bitmap_element_link (bitmap head, bitmap
    new element.  */
 
 static bitmap_element *
-bitmap_elt_insert_after (bitmap head, bitmap_element *elt)
+bitmap_elt_insert_after (bitmap head, bitmap_element *elt, unsigned int indx)
 {
   bitmap_element *node = bitmap_element_allocate (head);
+  node->indx = indx;
 
   if (!elt)
     {
       if (!head->current)
-	head->current = node;
+	{
+	  head->current = node;
+	  head->indx = indx;
+	}
       node->next = head->first;
       if (node->next)
 	node->next->prev = node;
@@ -521,6 +527,62 @@ bitmap_bit_p (bitmap head, int bit)
   return (ptr->bits[word_num] >> bit_num) & 1;
 }
 
+#if GCC_VERSION < 3400
+/* Table of number of set bits in a character, indexed by value of char.  */
+static unsigned char popcount_table[] = 
+{
+    0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,
+    1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,
+    1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,
+    2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,
+    1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,
+    2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,
+    2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,
+    3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,4,5,5,6,5,6,6,7,5,6,6,7,6,7,7,8,
+};
+
+static unsigned long
+bitmap_popcount (BITMAP_WORD a)
+{
+  unsigned long ret = 0;
+  unsigned i;
+
+  /* Just do this the table way for now  */
+  for (i = 0; i < BITMAP_WORD_BITS; i+= 8)
+      ret += popcount_table[(a >> i) & 0xff];
+  return ret;
+}
+#endif
+/* Count the number of bits set in the bitmap, and return it.  */
+
+unsigned long
+bitmap_count_bits (bitmap a)
+{
+  unsigned long count = 0;
+  bitmap_element *elt;
+  unsigned ix;
+
+  if (!a->first) 
+    return 0;
+
+  for (elt = a->first; elt; elt = elt->next)
+    {
+      for (ix = 0; ix != BITMAP_ELEMENT_WORDS; ix++)
+	{
+#if GCC_VERSION >= 3400
+ 	  /* Note that popcountl matches BITMAP_WORD in type, so the actual size
+	 of BITMAP_WORD is not material.  */
+	  count += __builtin_popcountl (elt->bits[ix]);
+#else
+	  count += bitmap_popcount (elt->bits[ix]);	  
+#endif
+	}
+    }
+  return count;
+}
+      
+
+
 /* Return the bit number of the first set bit in the bitmap.  The
    bitmap must be non-empty.  */
 
@@ -604,9 +666,9 @@ bitmap_and (bitmap dst, bitmap a, bitmap
 	  BITMAP_WORD ior = 0;
 
 	  if (!dst_elt)
-	    dst_elt = bitmap_elt_insert_after (dst, dst_prev);
-	  
-	  dst_elt->indx = a_elt->indx;
+	    dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
+	  else 
+	    dst_elt->indx = a_elt->indx;
 	  for (ix = BITMAP_ELEMENT_WORDS; ix--;)
 	    {
 	      BITMAP_WORD r = a_elt->bits[ix] & b_elt->bits[ix];
@@ -700,9 +762,9 @@ bitmap_and_compl (bitmap dst, bitmap a, 
 	{
 	  /* Copy a_elt.  */
 	  if (!dst_elt)
-	    dst_elt = bitmap_elt_insert_after (dst, dst_prev);
-	  
-	  dst_elt->indx = a_elt->indx;
+	    dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
+	  else
+	    dst_elt->indx = a_elt->indx;
 	  memcpy (dst_elt->bits, a_elt->bits, sizeof (dst_elt->bits));
 	  dst_prev = dst_elt;
 	  dst_elt = dst_elt->next;
@@ -717,9 +779,9 @@ bitmap_and_compl (bitmap dst, bitmap a, 
 	  BITMAP_WORD ior = 0;
 
 	  if (!dst_elt)
-	    dst_elt = bitmap_elt_insert_after (dst, dst_prev);
-	  
-	  dst_elt->indx = a_elt->indx;
+	    dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
+	  else 
+	    dst_elt->indx = a_elt->indx;
 	  for (ix = BITMAP_ELEMENT_WORDS; ix--;)
 	    {
 	      BITMAP_WORD r = a_elt->bits[ix] & ~b_elt->bits[ix];
@@ -796,6 +858,75 @@ bitmap_and_compl_into (bitmap a, bitmap 
   return changed != 0;
 }
 
+/* A = ~A & B. */
+
+void
+bitmap_compl_and_into (bitmap a, bitmap b)
+{
+  bitmap_element *a_elt = a->first;
+  bitmap_element *b_elt = b->first;
+  bitmap_element *a_prev = NULL;
+  bitmap_element *next;
+
+  gcc_assert (a != b);
+
+  if (bitmap_empty_p (a))
+    {
+      bitmap_copy (a, b);
+      return;
+    }
+  if (bitmap_empty_p (b))
+    {
+      bitmap_clear (a);
+      return;
+    }
+
+  while (a_elt || b_elt)
+    {
+      if (!b_elt || (a_elt && a_elt->indx < b_elt->indx))
+	{
+	  /* A is before B.  Remove A */
+	  next = a_elt->next;
+	  a_prev = a_elt->prev;
+	  bitmap_element_free (a, a_elt);
+	  a_elt = next;
+	}
+      else if (!a_elt || b_elt->indx < a_elt->indx)
+	{
+	  /* B is before A.  Copy B. */
+	  next = bitmap_elt_insert_after (a, a_prev, b_elt->indx);
+	  memcpy (next->bits, b_elt->bits, sizeof (next->bits));
+	  a_prev = next;
+	  b_elt = b_elt->next;
+	}
+      else
+	{
+	  /* Matching elts, generate A = ~A & B.  */
+	  unsigned ix;
+	  BITMAP_WORD ior = 0;
+
+	  for (ix = BITMAP_ELEMENT_WORDS; ix--;)
+	    {
+	      BITMAP_WORD cleared = a_elt->bits[ix] & b_elt->bits[ix];
+	      BITMAP_WORD r = b_elt->bits[ix] ^ cleared;
+
+	      a_elt->bits[ix] = r;
+	      ior |= r;
+	    }
+	  next = a_elt->next;
+	  if (!ior)
+	    bitmap_element_free (a, a_elt);
+	  else
+	    a_prev = a_elt;
+	  a_elt = next;
+	  b_elt = b_elt->next;
+	}
+    }
+  gcc_assert (!a->current == !a->first);
+  gcc_assert (!a->current || a->indx == a->current->indx);
+  return;
+}
+
 /* DST = A | B.  Return true if DST changes.  */
 
 bool
@@ -833,8 +964,9 @@ bitmap_ior (bitmap dst, bitmap a, bitmap
 	    {
 	      changed = true;
 	      if (!dst_elt)
-		dst_elt = bitmap_elt_insert_after (dst, dst_prev);
-	      dst_elt->indx = a_elt->indx;
+		dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
+	      else 
+		dst_elt->indx = a_elt->indx;
 	      for (ix = BITMAP_ELEMENT_WORDS; ix--;)
 		{
 		  BITMAP_WORD r = a_elt->bits[ix] | b_elt->bits[ix];
@@ -878,8 +1010,9 @@ bitmap_ior (bitmap dst, bitmap a, bitmap
 	    {
 	      changed = true;
 	      if (!dst_elt)
-		dst_elt = bitmap_elt_insert_after (dst, dst_prev);
-	      dst_elt->indx = src->indx;
+		dst_elt = bitmap_elt_insert_after (dst, dst_prev, src->indx);
+	      else 
+		dst_elt->indx = src->indx;
 	      memcpy (dst_elt->bits, src->bits, sizeof (dst_elt->bits));
 	    }
 	  
@@ -917,9 +1050,7 @@ bitmap_ior_into (bitmap a, bitmap b)
       if (!a_elt || b_elt->indx < a_elt->indx)
 	{
 	  /* Copy b_elt.  */
-	  bitmap_element *dst = bitmap_elt_insert_after (a, a_prev);
-	  
-	  dst->indx = b_elt->indx;
+	  bitmap_element *dst = bitmap_elt_insert_after (a, a_prev, b_elt->indx);
 	  memcpy (dst->bits, b_elt->bits, sizeof (dst->bits));
 	  a_prev = dst;
 	  b_elt = b_elt->next;
@@ -990,9 +1121,9 @@ bitmap_xor (bitmap dst, bitmap a, bitmap
 	  BITMAP_WORD ior = 0;
 
 	  if (!dst_elt)
-	    dst_elt = bitmap_elt_insert_after (dst, dst_prev);
-	  
-	  dst_elt->indx = a_elt->indx;
+	    dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
+	  else
+	    dst_elt->indx = a_elt->indx;
 	  for (ix = BITMAP_ELEMENT_WORDS; ix--;)
 	    {
 	      BITMAP_WORD r = a_elt->bits[ix] ^ b_elt->bits[ix];
@@ -1025,9 +1156,9 @@ bitmap_xor (bitmap dst, bitmap a, bitmap
 	    }
 
 	  if (!dst_elt)
-	    dst_elt = bitmap_elt_insert_after (dst, dst_prev);
-	  
-	  dst_elt->indx = src->indx;
+	    dst_elt = bitmap_elt_insert_after (dst, dst_prev, src->indx);
+	  else 
+	    dst_elt->indx = src->indx;
 	  memcpy (dst_elt->bits, src->bits, sizeof (dst_elt->bits));
 	  dst_prev = dst_elt;
 	  dst_elt = dst_elt->next;
@@ -1059,9 +1190,7 @@ bitmap_xor_into (bitmap a, bitmap b)
       if (!a_elt || b_elt->indx < a_elt->indx)
 	{
 	  /* Copy b_elt.  */
-	  bitmap_element *dst = bitmap_elt_insert_after (a, a_prev);
-	  
-	  dst->indx = b_elt->indx;
+	  bitmap_element *dst = bitmap_elt_insert_after (a, a_prev, b_elt->indx);
 	  memcpy (dst->bits, b_elt->bits, sizeof (dst->bits));
 	  a_prev = dst;
 	  b_elt = b_elt->next;
Index: gcc/bitmap.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/bitmap.h,v
retrieving revision 1.58
diff -u -p -r1.58 bitmap.h
--- gcc/bitmap.h	25 Jun 2005 01:59:10 -0000	1.58
+++ gcc/bitmap.h	29 Sep 2005 18:42:47 -0000
@@ -102,6 +102,9 @@ extern bool bitmap_intersect_compl_p (bi
 /* True if MAP is an empty bitmap.  */
 #define bitmap_empty_p(MAP) (!(MAP)->first)
 
+/* Count the number of bits set in the bitmap.  */
+extern unsigned long bitmap_count_bits (bitmap);
+
 /* Boolean operations on bitmaps.  The _into variants are two operand
    versions that modify the first source operand.  The other variants
    are three operand versions that to not destroy the source bitmaps.
@@ -110,6 +113,8 @@ extern void bitmap_and (bitmap, bitmap, 
 extern void bitmap_and_into (bitmap, bitmap);
 extern void bitmap_and_compl (bitmap, bitmap, bitmap);
 extern bool bitmap_and_compl_into (bitmap, bitmap);
+#define bitmap_compl_and(DST, A, B) bitmap_and_compl (DST, B, A)
+extern void bitmap_compl_and_into (bitmap, bitmap);
 extern bool bitmap_ior (bitmap, bitmap, bitmap);
 extern bool bitmap_ior_into (bitmap, bitmap);
 extern void bitmap_xor (bitmap, bitmap, bitmap);
Index: gcc/bt-load.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/bt-load.c,v
retrieving revision 2.39
diff -u -p -r2.39 bt-load.c
--- gcc/bt-load.c	19 Jul 2005 04:08:27 -0000	2.39
+++ gcc/bt-load.c	29 Sep 2005 18:42:47 -0000
@@ -461,7 +461,7 @@ compute_defs_uses_and_gen (fibheap_t all
   defs_uses_info info;
 
   sbitmap_vector_zero (bb_gen, n_basic_blocks);
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
     {
       basic_block bb = BASIC_BLOCK (i);
       int reg;
@@ -622,7 +622,7 @@ compute_kill (sbitmap *bb_kill, sbitmap 
   /* For each basic block, form the set BB_KILL - the set
      of definitions that the block kills.  */
   sbitmap_vector_zero (bb_kill, n_basic_blocks);
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
     {
       for (regno = first_btr; regno <= last_btr; regno++)
 	if (TEST_HARD_REG_BIT (all_btrs, regno)
@@ -645,14 +645,14 @@ compute_out (sbitmap *bb_out, sbitmap *b
   int changed;
   sbitmap bb_in = sbitmap_alloc (max_uid);
 
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
     sbitmap_copy (bb_out[i], bb_gen[i]);
 
   changed = 1;
   while (changed)
     {
       changed = 0;
-      for (i = 0; i < n_basic_blocks; i++)
+      for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
 	{
 	  sbitmap_union_of_preds (bb_in, bb_out, i);
 	  changed |= sbitmap_union_of_diff_cg (bb_out[i], bb_gen[i],
@@ -671,7 +671,7 @@ link_btr_uses (btr_def *def_array, btr_u
 
   /* Link uses to the uses lists of all of their reaching defs.
      Count up the number of reaching defs of each use.  */
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
     {
       basic_block bb = BASIC_BLOCK (i);
       rtx insn;
@@ -1397,7 +1397,7 @@ migrate_btr_defs (enum reg_class btr_cla
     {
       int i;
 
-      for (i = 0; i < n_basic_blocks; i++)
+      for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
 	{
 	  basic_block bb = BASIC_BLOCK (i);
 	  fprintf(dump_file,
Index: gcc/cfg.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cfg.c,v
retrieving revision 1.103
diff -u -p -r1.103 cfg.c
--- gcc/cfg.c	29 Jul 2005 14:51:57 -0000	1.103
+++ gcc/cfg.c	29 Sep 2005 18:42:47 -0000
@@ -163,8 +163,11 @@ compact_blocks (void)
   int i;
   basic_block bb;
 
-  i = 0;
-  FOR_EACH_BB (bb)
+  BASIC_BLOCK(ENTRY_BLOCK) = ENTRY_BLOCK_PTR;
+  BASIC_BLOCK(EXIT_BLOCK) = EXIT_BLOCK_PTR;
+
+  i = NUM_FIXED_BLOCKS;
+  FOR_EACH_BB (bb) 
     {
       BASIC_BLOCK (i) = bb;
       bb->index = i;
Index: gcc/cfganal.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cfganal.c,v
retrieving revision 1.65
diff -u -p -r1.65 cfganal.c
--- gcc/cfganal.c	19 Jul 2005 04:08:27 -0000	1.65
+++ gcc/cfganal.c	29 Sep 2005 18:42:47 -0000
@@ -345,7 +345,7 @@ create_edge_list (void)
   basic_block bb;
   edge_iterator ei;
 
-  block_count = n_basic_blocks + 2;   /* Include the entry and exit blocks.  */
+  block_count = n_basic_blocks; /* Include the entry and exit blocks.  */
 
   num_edges = 0;
 
@@ -391,7 +391,7 @@ print_edge_list (FILE *f, struct edge_li
   int x;
 
   fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
-	   elist->num_blocks - 2, elist->num_edges);
+	   elist->num_blocks, elist->num_edges);
 
   for (x = 0; x < elist->num_edges; x++)
     {
@@ -721,7 +721,7 @@ flow_depth_first_order_compute (int *dfs
   edge_iterator *stack;
   int sp;
   int dfsnum = 0;
-  int rcnum = n_basic_blocks - 1;
+  int rcnum = n_basic_blocks - 1 - NUM_FIXED_BLOCKS;
   sbitmap visited;
 
   /* Allocate stack for back-tracking up CFG.  */
@@ -786,8 +786,9 @@ flow_depth_first_order_compute (int *dfs
   free (stack);
   sbitmap_free (visited);
 
-  /* The number of nodes visited should be the number of blocks.  */
-  gcc_assert (dfsnum == n_basic_blocks);
+  /* The number of nodes visited should be the number of blocks minus
+     the entry and exit blocks which are not visited here.  */
+  gcc_assert (dfsnum == n_basic_blocks - NUM_FIXED_BLOCKS);
 
   return dfsnum;
 }
@@ -826,12 +827,11 @@ static void
 flow_dfs_compute_reverse_init (depth_first_search_ds data)
 {
   /* Allocate stack for back-tracking up CFG.  */
-  data->stack = xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
-			 * sizeof (basic_block));
+  data->stack = xmalloc (n_basic_blocks * sizeof (basic_block));
   data->sp = 0;
 
   /* Allocate bitmap to track nodes that have been visited.  */
-  data->visited_blocks = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));
+  data->visited_blocks = sbitmap_alloc (last_basic_block);
 
   /* None of the nodes in the CFG have been visited yet.  */
   sbitmap_zero (data->visited_blocks);
@@ -847,7 +847,7 @@ static void
 flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
 {
   data->stack[data->sp++] = bb;
-  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
+  SET_BIT (data->visited_blocks, bb->index);
 }
 
 /* Continue the depth-first search through the reverse graph starting with the
@@ -869,14 +869,13 @@ flow_dfs_compute_reverse_execute (depth_
 
       /* Perform depth-first search on adjacent vertices.  */
       FOR_EACH_EDGE (e, ei, bb->preds)
-	if (!TEST_BIT (data->visited_blocks,
-		       e->src->index - (INVALID_BLOCK + 1)))
+	if (!TEST_BIT (data->visited_blocks, e->src->index))
 	  flow_dfs_compute_reverse_add_bb (data, e->src);
     }
 
   /* Determine if there are unvisited basic blocks.  */
   FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
-    if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
+    if (!TEST_BIT (data->visited_blocks, bb->index))
       return bb;
 
   return NULL;
@@ -912,12 +911,12 @@ dfs_enumerate_from (basic_block bb, int 
   static sbitmap visited;
   static unsigned v_size;
 
-#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index + 2))
-#define UNMARK_VISITED(BB) (RESET_BIT (visited, (BB)->index + 2))
-#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index + 2))
+#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index)) 
+#define UNMARK_VISITED(BB) (RESET_BIT (visited, (BB)->index)) 
+#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index)) 
 
   /* Resize the VISITED sbitmap if necessary.  */
-  size = last_basic_block + 2;
+  size = last_basic_block; 
   if (size < 10)
     size = 10;
 
Index: gcc/cfgbuild.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cfgbuild.c,v
retrieving revision 1.68
diff -u -p -r1.68 cfgbuild.c
--- gcc/cfgbuild.c	25 Jun 2005 01:59:28 -0000	1.68
+++ gcc/cfgbuild.c	29 Sep 2005 18:42:47 -0000
@@ -138,7 +138,7 @@ control_flow_insn_p (rtx insn)
 static int
 count_basic_blocks (rtx f)
 {
-  int count = 0;
+  int count = NUM_FIXED_BLOCKS;
   bool saw_insn = false;
   rtx insn;
 
@@ -164,10 +164,10 @@ count_basic_blocks (rtx f)
 
   /* The rest of the compiler works a bit smoother when we don't have to
      check for the edge case of do-nothing functions with no basic blocks.  */
-  if (count == 0)
+  if (count == NUM_FIXED_BLOCKS)
     {
       emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
-      count = 1;
+      count = NUM_FIXED_BLOCKS + 1;
     }
 
   return count;
@@ -529,10 +529,11 @@ find_basic_blocks (rtx f)
     }
 
   n_basic_blocks = count_basic_blocks (f);
-  last_basic_block = 0;
+  last_basic_block = NUM_FIXED_BLOCKS;
   ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
   EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
 
+
   /* Size the basic block table.  The actual structures will be allocated
      by find_basic_blocks_1, since we want to keep the structure pointers
      stable across calls to find_basic_blocks.  */
@@ -542,6 +543,8 @@ find_basic_blocks (rtx f)
      actually lay them out.  */
 
   VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
+  BASIC_BLOCK(ENTRY_BLOCK) = ENTRY_BLOCK_PTR;
+  BASIC_BLOCK(EXIT_BLOCK) = EXIT_BLOCK_PTR;
 
   find_basic_blocks_1 (f);
 
Index: gcc/cfgcleanup.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cfgcleanup.c,v
retrieving revision 1.151
diff -u -p -r1.151 cfgcleanup.c
--- gcc/cfgcleanup.c	29 Jul 2005 00:45:57 -0000	1.151
+++ gcc/cfgcleanup.c	29 Sep 2005 18:42:47 -0000
@@ -444,7 +444,7 @@ try_forward_edges (int mode, basic_block
 	}
 
       target = first = e->dest;
-      counter = 0;
+      counter = NUM_FIXED_BLOCKS;
 
       /* If we are partitioning hot/cold basic_blocks, we don't want to mess
 	 up jumps that cross between hot/cold sections.
@@ -506,7 +506,7 @@ try_forward_edges (int mode, basic_block
 		  if (t->dest == b)
 		    break;
 
-		  gcc_assert (nthreaded_edges < n_basic_blocks);
+		  gcc_assert (nthreaded_edges < n_basic_blocks - NUM_FIXED_BLOCKS);
 		  threaded_edges[nthreaded_edges++] = t;
 
 		  new_target = t->dest;
@@ -1905,7 +1905,7 @@ try_optimize_cfg (int mode)
 		  /* Note that forwarder_block_p true ensures that
 		     there is a successor for this block.  */
 		  && (single_succ_edge (b)->flags & EDGE_FALLTHRU)
-		  && n_basic_blocks > 1)
+		  && n_basic_blocks > NUM_FIXED_BLOCKS + 1)
 		{
 		  if (dump_file)
 		    fprintf (dump_file,
Index: gcc/cfghooks.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cfghooks.c,v
retrieving revision 1.32
diff -u -p -r1.32 cfghooks.c
--- gcc/cfghooks.c	24 Aug 2005 07:56:51 -0000	1.32
+++ gcc/cfghooks.c	29 Sep 2005 18:42:47 -0000
@@ -77,8 +77,8 @@ verify_flow_info (void)
   basic_block *last_visited;
 
   timevar_push (TV_CFG_VERIFY);
-  last_visited = xcalloc (last_basic_block + 2, sizeof (basic_block));
-  edge_checksum = xcalloc (last_basic_block + 2, sizeof (size_t));
+  last_visited = xcalloc (last_basic_block, sizeof (basic_block));
+  edge_checksum = xcalloc (last_basic_block, sizeof (size_t));
 
   /* Check bb chain & numbers.  */
   last_bb_seen = ENTRY_BLOCK_PTR;
@@ -122,7 +122,7 @@ verify_flow_info (void)
 	}
       FOR_EACH_EDGE (e, ei, bb->succs)
 	{
-	  if (last_visited [e->dest->index + 2] == bb)
+	  if (last_visited [e->dest->index] == bb)
 	    {
 	      error ("verify_flow_info: Duplicate edge %i->%i",
 		     e->src->index, e->dest->index);
@@ -141,7 +141,7 @@ verify_flow_info (void)
 	      err = 1;
 	    }
 
-	  last_visited [e->dest->index + 2] = bb;
+	  last_visited [e->dest->index] = bb;
 
 	  if (e->flags & EDGE_FALLTHRU)
 	    n_fallthru++;
@@ -158,7 +158,7 @@ verify_flow_info (void)
 	      err = 1;
 	    }
 
-	  edge_checksum[e->dest->index + 2] += (size_t) e;
+	  edge_checksum[e->dest->index] += (size_t) e;
 	}
       if (n_fallthru > 1)
 	{
@@ -192,7 +192,7 @@ verify_flow_info (void)
 	      err = 1;
 	    }
 
-	  edge_checksum[e->dest->index + 2] -= (size_t) e;
+	  edge_checksum[e->dest->index] -= (size_t) e;
 	}
     }
 
@@ -202,14 +202,14 @@ verify_flow_info (void)
     edge_iterator ei;
 
     FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
-      edge_checksum[e->dest->index + 2] += (size_t) e;
+      edge_checksum[e->dest->index] += (size_t) e;
 
     FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
-      edge_checksum[e->dest->index + 2] -= (size_t) e;
+      edge_checksum[e->dest->index] -= (size_t) e;
   }
 
   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
-    if (edge_checksum[bb->index + 2])
+    if (edge_checksum[bb->index])
       {
 	error ("basic block %i edge lists are corrupted", bb->index);
 	err = 1;
@@ -888,3 +888,22 @@ lv_add_condition_to_bb (basic_block firs
   gcc_assert (cfg_hooks->lv_add_condition_to_bb);
   cfg_hooks->lv_add_condition_to_bb (first, second, new, cond);
 }  
+
+
+/* Return true if BB contains only labels or non-executable
+   instructions.  */
+bool
+block_empty_p (basic_block bb)
+{
+  gcc_assert (cfg_hooks->block_empty_p);
+  return cfg_hooks->block_empty_p (bb);
+}
+
+/* Split a basic block if it ends with a conditional branch and if
+   the other part of the block is not empty.  */
+basic_block
+split_block_before_cond_jump (basic_block bb)
+{
+  gcc_assert (cfg_hooks->split_block_before_cond_jump);
+  return cfg_hooks->split_block_before_cond_jump (bb);
+}
Index: gcc/cfghooks.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cfghooks.h,v
retrieving revision 1.18
diff -u -p -r1.18 cfghooks.h
--- gcc/cfghooks.h	24 Aug 2005 07:56:52 -0000	1.18
+++ gcc/cfghooks.h	29 Sep 2005 18:42:47 -0000
@@ -135,6 +135,13 @@ struct cfg_hooks
   /* Add PHI arguments queued in PENDINT_STMT list on edge E to edge
      E->dest (only in tree-ssa loop versioning.  */
   void (*flush_pending_stmts) (edge);
+  
+  /* True if a block contains no executable instructions.  */
+  bool (*block_empty_p) (basic_block);
+  
+  /* Split a basic block if it ends with a conditional branch and if
+     the other part of the block is not empty.  */
+  basic_block (*split_block_before_cond_jump) (basic_block);
 };
 
 extern void verify_flow_info (void);
@@ -160,6 +167,8 @@ extern bool can_duplicate_block_p (basic
 extern basic_block duplicate_block (basic_block, edge, basic_block);
 extern bool block_ends_with_call_p (basic_block bb);
 extern bool block_ends_with_condjump_p (basic_block bb);
+extern bool block_empty_p (basic_block);
+extern basic_block split_block_before_cond_jump (basic_block);
 extern int flow_call_edges_add (sbitmap);
 extern void execute_on_growing_pred (edge);
 extern void execute_on_shrinking_pred (edge);
Index: gcc/cfglayout.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cfglayout.c,v
retrieving revision 1.95
diff -u -p -r1.95 cfglayout.c
--- gcc/cfglayout.c	24 Aug 2005 07:56:53 -0000	1.95
+++ gcc/cfglayout.c	29 Sep 2005 18:42:47 -0000
@@ -601,7 +601,7 @@ fixup_reorder_chain (void)
   /* First do the bulk reordering -- rechain the blocks without regard to
      the needed changes to jumps and labels.  */
 
-  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
+  for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
        bb != 0;
        bb = bb->aux, index++)
     {
@@ -818,7 +818,7 @@ fixup_reorder_chain (void)
   if (dump_file)
     {
       fprintf (dump_file, "Reordered sequence:\n");
-      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
+      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
 	   bb;
 	   bb = bb->aux, index++)
 	{
@@ -837,7 +837,7 @@ fixup_reorder_chain (void)
 
   prev_bb = ENTRY_BLOCK_PTR;
   bb = ENTRY_BLOCK_PTR->next_bb;
-  index = 0;
+  index = NUM_FIXED_BLOCKS;
 
   for (; bb; prev_bb = bb, bb = bb->aux, index ++)
     {
Index: gcc/cfgloop.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cfgloop.c,v
retrieving revision 1.54
diff -u -p -r1.54 cfgloop.c
--- gcc/cfgloop.c	3 Jul 2005 21:07:47 -0000	1.54
+++ gcc/cfgloop.c	29 Sep 2005 18:42:47 -0000
@@ -73,7 +73,7 @@ flow_loops_cfg_dump (const struct loops 
   if (loops->cfg.dfs_order)
     {
       fputs (";; DFS order: ", file);
-      for (i = 0; i < n_basic_blocks; i++)
+      for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
 	fprintf (file, "%d ", loops->cfg.dfs_order[i]);
 
       fputs ("\n", file);
@@ -83,7 +83,7 @@ flow_loops_cfg_dump (const struct loops 
   if (loops->cfg.rc_order)
     {
       fputs (";; RC order: ", file);
-      for (i = 0; i < n_basic_blocks; i++)
+      for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
 	fprintf (file, "%d ", loops->cfg.rc_order[i]);
 
       fputs ("\n", file);
@@ -610,7 +610,7 @@ flow_loops_find (struct loops *loops)
 
   /* Taking care of this degenerate case makes the rest of
      this code simpler.  */
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return 0;
 
   dfs_order = NULL;
@@ -676,7 +676,7 @@ flow_loops_find (struct loops *loops)
   loops->parray[0]->outer = NULL;
   loops->parray[0]->depth = 0;
   loops->parray[0]->pred = NULL;
-  loops->parray[0]->num_nodes = n_basic_blocks + 2;
+  loops->parray[0]->num_nodes = n_basic_blocks;
   loops->parray[0]->latch = EXIT_BLOCK_PTR;
   loops->parray[0]->header = ENTRY_BLOCK_PTR;
   ENTRY_BLOCK_PTR->loop_father = loops->parray[0];
@@ -704,7 +704,7 @@ flow_loops_find (struct loops *loops)
 
       num_loops = 1;
 
-      for (b = 0; b < n_basic_blocks; b++)
+      for (b = 0; b < n_basic_blocks - NUM_FIXED_BLOCKS; b++)
 	{
 	  struct loop *loop;
 	  edge_iterator ei;
@@ -804,7 +804,7 @@ get_loop_body (const struct loop *loop)
   if (loop->latch == EXIT_BLOCK_PTR)
     {
       /* There may be blocks unreachable from EXIT_BLOCK.  */
-      gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks + 2);
+      gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks);
       FOR_EACH_BB (bb)
 	tovisit[tv++] = bb;
       tovisit[tv++] = EXIT_BLOCK_PTR;
Index: gcc/cfgloopmanip.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cfgloopmanip.c,v
retrieving revision 1.53
diff -u -p -r1.53 cfgloopmanip.c
--- gcc/cfgloopmanip.c	24 Aug 2005 07:56:53 -0000	1.53
+++ gcc/cfgloopmanip.c	29 Sep 2005 18:42:47 -0000
@@ -1563,7 +1563,7 @@ fix_loop_structure (struct loops *loops,
     }
 
   /* Remove the dead loops from structures.  */
-  loops->tree_root->num_nodes = n_basic_blocks + 2;
+  loops->tree_root->num_nodes = n_basic_blocks; 
   for (i = 1; i < loops->num; i++)
     {
       loop = loops->parray[i];
Index: gcc/cfgrtl.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cfgrtl.c,v
retrieving revision 1.181
diff -u -p -r1.181 cfgrtl.c
--- gcc/cfgrtl.c	28 Jul 2005 21:11:30 -0000	1.181
+++ gcc/cfgrtl.c	29 Sep 2005 18:42:48 -0000
@@ -82,6 +82,51 @@ static int rtl_verify_flow_info_1 (void)
 static void mark_killed_regs (rtx, rtx, void *);
 static void rtl_make_forwarder_block (edge);
 
+
+/* Return true if BB contains only labels or non-executable
+   instructions.  */
+
+static bool
+rtl_block_empty_p (basic_block bb)
+{
+  rtx insn;
+  if (BB_HEAD (bb))
+    FOR_BB_INSNS (bb, insn)
+      {
+	if (!NOTE_P (insn) && !any_uncondjump_p (insn))
+	  return false;
+      }
+  return true;
+}
+
+/* Split a basic block if it ends with a conditional branch and if
+   the other part of the block is not empty.  */
+
+static basic_block
+rtl_split_block_before_cond_jump (basic_block bb)
+{
+  rtx insn;
+  rtx split_point = NULL;
+  rtx last = NULL;
+  bool found_code = false;
+
+  if (BB_HEAD (bb))
+    FOR_BB_INSNS (bb, insn)
+      {
+	if (any_condjump_p (insn))
+	  split_point = last;
+	else if (!NOTE_P (insn) && !LABEL_P (insn))
+	  found_code = true;
+	last = insn;
+      }
+
+  /* Only split if we found both real code and a conditional jump to
+     split before; otherwise there is nothing useful to do.  */
+  if (found_code && split_point)
+    return split_block (bb, split_point)->dest;
+  else 
+    return NULL;
+}
+
 /* Return true if NOTE is not one of the ones that must be kept paired,
    so that we may simply delete it.  */
 
@@ -440,7 +485,8 @@ struct tree_opt_pass pass_free_cfg =
 rtx
 entry_of_function (void)
 {
-  return (n_basic_blocks ? BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
+  return (n_basic_blocks - NUM_FIXED_BLOCKS ? 
+	  BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
 }
 
 /* Update insns block within BB.  */
@@ -522,6 +568,29 @@ rtl_split_block (basic_block bb, void *i
 	}
 #endif
     }
+  else if (bb == ENTRY_BLOCK_PTR)
+    {
+      /* If we are splitting the entry block, we may need to create
+	 the bitmaps if the bitmaps are attached to other nodes.
+	 However, the entry and exit blocks do not have bitmaps so we
+	 must look elsewhere.  */ 
+      basic_block b = EXIT_BLOCK_PTR->prev_bb;
+      if (b->il.rtl->global_live_at_start)
+	{
+	  /* We can leave the blocks empty because there is nothing in
+	     the entry block to copy anyway.  */
+	  new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
+	  new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
+	}
+      
+      
+#ifdef HAVE_conditional_execution
+      /* In the presence of conditional execution we are not able to update
+	 liveness precisely.  */
+      if (reload_completed)
+	new_bb->flags |= BB_DIRTY;
+#endif
+    }
 
   return new_bb;
 }
@@ -1065,6 +1134,17 @@ force_nonfallthru_and_redirect (edge e, 
 	  bool found = false;
 	  
 	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
+	  /* We may need to create the bitmaps if the bitmaps are
+	     attached to other nodes.  However, the entry and exit
+	     blocks do not have bitmaps so we must look elsewhere.  */ 
+	  basic_block b = EXIT_BLOCK_PTR->prev_bb;
+	  if (b->il.rtl->global_live_at_start)
+	    {
+	      /* We can leave the blocks empty because there is nothing in
+		 the entry block to copy anyway.  */
+	      bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
+	      bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
+	    }
 	  
 	  /* Change the existing edge's source to be the new block, and add
 	     a new edge from the entry block to the new block.  */
@@ -1116,6 +1196,21 @@ force_nonfallthru_and_redirect (edge e, 
 	  COPY_REG_SET (jump_block->il.rtl->global_live_at_end,
 			target->il.rtl->global_live_at_start);
 	}
+      else if (target == EXIT_BLOCK_PTR)
+	{
+	  /* If we are splitting the edge whose dest is the exit block, we
+	     may need to create the bitmaps if the bitmaps are attached to
+	     other nodes.  However, the entry and exit blocks do not have
+	     bitmaps so we must look elsewhere.  */ 
+	  basic_block b = ENTRY_BLOCK_PTR->next_bb;
+	  if (b->il.rtl->global_live_at_start)
+	    {
+	      /* We can leave the blocks empty because there is nothing in
+		 the entry block to copy anyway.  */
+	      jump_block->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
+	      jump_block->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
+	    }
+	}	  
 
       /* Make sure new block ends up in correct hot/cold section.  */
 
@@ -1384,6 +1479,21 @@ rtl_split_edge (edge edge_in)
       COPY_REG_SET (bb->il.rtl->global_live_at_end,
 		    edge_in->dest->il.rtl->global_live_at_start);
     }
+  else if (edge_in->dest == EXIT_BLOCK_PTR)
+    {
+      /* If we are splitting the edge whose dest is the exit block, we
+	 may need to create the bitmaps if the bitmaps are attached to
+	 other nodes.  However, the entry and exit blocks do not have
+	 bitmaps so we must look elsewhere.  */ 
+      basic_block b = ENTRY_BLOCK_PTR->next_bb;
+      if (b->il.rtl->global_live_at_start)
+	{
+	  /* We can leave the blocks empty because there is nothing in
+	     the entry block to copy anyway.  */
+	  bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
+	  bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
+	}
+    }
 
   make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
 
@@ -1787,9 +1897,12 @@ rtl_dump_bb (basic_block bb, FILE *outf,
   dump_regset (bb->il.rtl->global_live_at_start, outf);
   putc ('\n', outf);
 
-  for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
-       insn = NEXT_INSN (insn))
-    print_rtl_single (outf, insn);
+  insn = BB_HEAD (bb);
+  if (insn) 
+    for (last = NEXT_INSN (BB_END (bb)); 
+	 insn && (insn != last);
+	 insn = NEXT_INSN (insn))
+      print_rtl_single_with_indent (outf, insn, indent);
 
   fprintf (outf, ";;%s Registers live at end: ", s_indent);
   dump_regset (bb->il.rtl->global_live_at_end, outf);
@@ -2264,7 +2377,7 @@ rtl_verify_flow_info (void)
 	curr_bb = NULL;
     }
 
-  if (num_bb_notes != n_basic_blocks)
+  if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
     internal_error
       ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
        num_bb_notes, n_basic_blocks);
@@ -2837,6 +2950,18 @@ cfg_layout_split_edge (edge e)
       COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
 		    e->dest->il.rtl->global_live_at_start);
     }
+  else if (e->dest == EXIT_BLOCK_PTR)
+    {
+      basic_block b = ENTRY_BLOCK_PTR->next_bb;
+      if (b->il.rtl->global_live_at_start)
+	{
+	  /* We can leave the blocks empty because there is nothing in
+	     the entry block to copy anyway.  */
+	  new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
+	  new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
+	}
+    }
+    
 
   make_edge (new_bb, e->dest, EDGE_FALLTHRU);
   redirect_edge_and_branch_force (e, new_bb);
@@ -2913,7 +3038,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
   int last_bb = last_basic_block;
   bool check_last_block = false;
 
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return 0;
 
   if (! blocks)
@@ -2960,7 +3085,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
      calls since there is no way that we can determine if they will
      return or not...  */
 
-  for (i = 0; i < last_bb; i++)
+  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
     {
       basic_block bb = BASIC_BLOCK (i);
       rtx insn;
@@ -3119,7 +3244,9 @@ struct cfg_hooks rtl_cfg_hooks = {
   NULL, /* lv_add_condition_to_bb */
   NULL, /* lv_adjust_loop_header_phi*/
   NULL, /* extract_cond_bb_edges */
-  NULL 		/* flush_pending_stmts */
+  NULL, /* flush_pending_stmts */
+  rtl_block_empty_p, /* block_empty_p */
+  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
 };
 
 /* Implementation of CFG manipulation for cfg layout RTL, where
@@ -3162,6 +3289,8 @@ struct cfg_hooks cfg_layout_rtl_cfg_hook
   rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
   NULL, /* lv_adjust_loop_header_phi*/
   rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
-  NULL 		/* flush_pending_stmts */  
+  NULL, /* flush_pending_stmts */  
+  rtl_block_empty_p, /* block_empty_p */
+  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
 };
 
Index: gcc/cselib.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cselib.c,v
retrieving revision 1.63
diff -u -p -r1.63 cselib.c
--- gcc/cselib.c	29 Jul 2005 05:57:40 -0000	1.63
+++ gcc/cselib.c	29 Sep 2005 18:42:48 -0000
@@ -1089,7 +1089,7 @@ cselib_rtx_varies_p (rtx x ATTRIBUTE_UNU
      invariant.  */
   return 0;
 }
-
+static bool print_me_next_time = false;
 /* Invalidate any locations in the table which are changed because of a
    store to MEM_RTX.  If this is called because of a non-const call
    instruction, MEM_RTX is (mem:BLK const0_rtx).  */
@@ -1103,14 +1103,22 @@ cselib_invalidate_mem (rtx mem_rtx)
 
   mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
   mem_rtx = canon_rtx (mem_rtx);
-
+  if (print_me_next_time)
+    fprintf (stderr, "Calling cselib_invalidate_mem!\n");
   vp = &first_containing_mem;
   for (v = *vp; v != &dummy_val; v = next)
     {
       bool has_mem = false;
       struct elt_loc_list **p = &v->locs;
       int had_locs = v->locs != 0;
-
+#if 0
+      if (num_mems > 400)
+	print_me_next_time = true;
+#endif
+      if (print_me_next_time)
+	{
+	  fprintf (stderr, "RTL for value number %d\n", v->value);
+	}
       while (*p)
 	{
 	  rtx x = (*p)->loc;
@@ -1124,6 +1132,14 @@ cselib_invalidate_mem (rtx mem_rtx)
 	      p = &(*p)->next;
 	      continue;
 	    }
+	  if (print_me_next_time)
+	    {
+	      fprintf (stderr, "loc is ");
+	      print_rtl_single (stderr, x);
+	      fprintf (stderr, " setting insn is ");
+	      print_rtl_single (stderr, (*p)->setting_insn);
+	      fprintf (stderr, "\n");
+	    }
 	  if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
 	      && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx), mem_addr,
 		      			  x, cselib_rtx_varies_p))
@@ -1436,7 +1452,6 @@ cselib_process_insn (rtx insn)
   if (find_reg_note (insn, REG_RETVAL, NULL))
     cselib_current_insn_in_libcall = false;
   cselib_current_insn = 0;
-
   if (n_useless_values > MAX_USELESS_VALUES)
     remove_useless_values ();
 }
Index: gcc/df.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/df.c,v
retrieving revision 1.87
diff -u -p -r1.87 df.c
--- gcc/df.c	28 Jul 2005 01:47:06 -0000	1.87
+++ gcc/df.c	29 Sep 2005 18:42:48 -0000
@@ -3939,7 +3939,7 @@ iterative_dataflow (struct dataflow *dat
   sbitmap_zero (visited);
   sbitmap_zero (considered);
 
-  for (i = 0; i < dataflow->n_blocks; i++)
+  for (i = 0; i < dataflow->n_blocks - NUM_FIXED_BLOCKS; i++)
     {
       idx = dataflow->order[i];
       SET_BIT (pending, idx);
@@ -3954,7 +3954,7 @@ iterative_dataflow (struct dataflow *dat
 
   while (1)
     {
-      for (i = 0; i < dataflow->n_blocks; i++)
+      for (i = 0; i < dataflow->n_blocks - NUM_FIXED_BLOCKS ; i++)
 	{
 	  idx = dataflow->order[i];
 
Index: gcc/dominance.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/dominance.c,v
retrieving revision 1.42
diff -u -p -r1.42 dominance.c
--- gcc/dominance.c	25 Jun 2005 01:59:42 -0000	1.42
+++ gcc/dominance.c	29 Sep 2005 18:42:48 -0000
@@ -51,8 +51,7 @@ enum dom_state dom_computed[2];
    'undefined' or 'end of list'.  The name of each node is given by the dfs
    number of the corresponding basic block.  Please note, that we include the
    artificial ENTRY_BLOCK (or EXIT_BLOCK in the post-dom case) in our lists to
-   support multiple entry points.  As it has no real basic block index we use
-   'last_basic_block' for that.  Its dfs number is of course 1.  */
+   support multiple entry points.  Its dfs number is of course 1.  */
 
 /* Type of Basic Block aka. TBB */
 typedef unsigned int TBB;
@@ -149,9 +148,8 @@ static unsigned n_bbs_in_dom_tree[2];
 static void
 init_dom_info (struct dom_info *di, enum cdi_direction dir)
 {
-  /* We need memory for n_basic_blocks nodes and the ENTRY_BLOCK or
-     EXIT_BLOCK.  */
-  unsigned int num = n_basic_blocks + 1 + 1;
+  /* We need memory for n_basic_blocks nodes.  */
+  unsigned int num = n_basic_blocks;
   init_ar (di->dfs_parent, TBB, num, 0);
   init_ar (di->path_min, TBB, num, i);
   init_ar (di->key, TBB, num, i);
@@ -216,7 +214,7 @@ calc_dfs_tree_nonrec (struct dom_info *d
   /* Ending block.  */
   basic_block ex_block;
 
-  stack = xmalloc ((n_basic_blocks + 3) * sizeof (edge_iterator));
+  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
   sp = 0;
 
   /* Initialize our border blocks, and the first edge.  */
@@ -372,8 +370,8 @@ calc_dfs_tree (struct dom_info *di, enum
 
   di->nodes = di->dfsnum - 1;
 
-  /* Make sure there is a path from ENTRY to EXIT at all.  */
-  gcc_assert (di->nodes == (unsigned int) n_basic_blocks + 1);
+  /* This aborts e.g. when there is _no_ path from ENTRY to EXIT at all.  */
+  gcc_assert (di->nodes == (unsigned int) n_basic_blocks - 1);
 }
 
 /* Compress the path from V to the root of its set and update path_min at the
@@ -627,7 +625,7 @@ calculate_dominance_info (enum cdi_direc
 	{
 	  b->dom[dir] = et_new_tree (b);
 	}
-      n_bbs_in_dom_tree[dir] = n_basic_blocks + 2;
+      n_bbs_in_dom_tree[dir] = n_basic_blocks;
 
       init_dom_info (&di, dir);
       calc_dfs_tree (&di, dir);
Index: gcc/flow.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/flow.c,v
retrieving revision 1.635
diff -u -p -r1.635 flow.c
--- gcc/flow.c	22 Aug 2005 16:58:46 -0000	1.635
+++ gcc/flow.c	29 Sep 2005 18:42:48 -0000
@@ -1056,17 +1056,14 @@ calculate_global_regs_live (sbitmap bloc
       SET_REGNO_REG_SET (invalidated_by_call, i);
 
   /* Allocate space for the sets of local properties.  */
-  local_sets = xcalloc (last_basic_block - (INVALID_BLOCK + 1),
-			sizeof (regset));
-  cond_local_sets = xcalloc (last_basic_block - (INVALID_BLOCK + 1),
-			     sizeof (regset));
-
-  /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
-     because the `head == tail' style test for an empty queue doesn't
-     work with a full queue.  */
-  queue = xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1)) * sizeof (*queue));
+  local_sets = xcalloc (last_basic_block, sizeof (regset));
+  cond_local_sets = xcalloc (last_basic_block, sizeof (regset));
+
+  /* Create a worklist.  Allocate an extra slot because the `head == tail'
+     style test for an empty queue doesn't work with a full queue.  */
+  queue = xmalloc ((n_basic_blocks + 1) * sizeof (*queue));
   qtail = queue;
-  qhead = qend = queue + n_basic_blocks - (INVALID_BLOCK + 1);
+  qhead = qend = queue + n_basic_blocks;
 
   /* Queue the blocks set in the initial mask.  Do this in reverse block
      number order so that we are more likely for the first round to do
@@ -1247,12 +1244,10 @@ calculate_global_regs_live (sbitmap bloc
          basic block.  On subsequent passes, we get to skip out early if
 	 live_at_end wouldn't have changed.  */
 
-      if (local_sets[bb->index - (INVALID_BLOCK + 1)] == NULL)
+      if (local_sets[bb->index] == NULL)
 	{
-	  local_sets[bb->index - (INVALID_BLOCK + 1)]
-	    = ALLOC_REG_SET (&reg_obstack);
-	  cond_local_sets[bb->index - (INVALID_BLOCK + 1)]
-	    = ALLOC_REG_SET (&reg_obstack);
+	  local_sets[bb->index] = ALLOC_REG_SET (&reg_obstack);
+	  cond_local_sets[bb->index] = ALLOC_REG_SET (&reg_obstack);
 	  rescan = 1;
 	}
       else
@@ -1276,7 +1271,7 @@ calculate_global_regs_live (sbitmap bloc
 		  successor block.  We can miss changes in those sets if
 		  we only compare the new live_at_end against the
 		  previous one.  */
-	      cond_local_set = cond_local_sets[bb->index - (INVALID_BLOCK + 1)];
+	      cond_local_set = cond_local_sets[bb->index];
 	      rescan = bitmap_intersect_p (new_live_at_end, cond_local_set);
 	    }
 
@@ -1292,7 +1287,7 @@ calculate_global_regs_live (sbitmap bloc
   
 	      /* If any of the changed bits overlap with local_sets[bb],
  		 we'll have to rescan the block.  */
-	      local_set = local_sets[bb->index - (INVALID_BLOCK + 1)];
+	      local_set = local_sets[bb->index];
 	      rescan = bitmap_intersect_p (tmp, local_set);
 	    }
 	}
@@ -1321,8 +1316,8 @@ calculate_global_regs_live (sbitmap bloc
 	  /* Rescan the block insn by insn to turn (a copy of) live_at_end
 	     into live_at_start.  */
 	  propagate_block (bb, new_live_at_end,
-			   local_sets[bb->index - (INVALID_BLOCK + 1)],
-			   cond_local_sets[bb->index - (INVALID_BLOCK + 1)],
+			   local_sets[bb->index],
+			   cond_local_sets[bb->index],
 			   flags);
 
 	  /* If live_at start didn't change, no need to go farther.  */
@@ -1368,11 +1363,11 @@ calculate_global_regs_live (sbitmap bloc
 			SET_BIT (blocks_out, pbb->index);
 
 		      /* Makes sure to really rescan the block.  */
-		      if (local_sets[pbb->index - (INVALID_BLOCK + 1)])
+		      if (local_sets[pbb->index])
 			{
-			  FREE_REG_SET (local_sets[pbb->index - (INVALID_BLOCK + 1)]);
-			  FREE_REG_SET (cond_local_sets[pbb->index - (INVALID_BLOCK + 1)]);
-			  local_sets[pbb->index - (INVALID_BLOCK + 1)] = 0;
+			  FREE_REG_SET (local_sets[pbb->index]);
+			  FREE_REG_SET (cond_local_sets[pbb->index]);
+			  local_sets[pbb->index] = 0;
 			}
 
 		      /* Add it to the queue.  */
@@ -1418,16 +1413,16 @@ calculate_global_regs_live (sbitmap bloc
       EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i, sbi)
 	{
 	  basic_block bb = BASIC_BLOCK (i);
-	  FREE_REG_SET (local_sets[bb->index - (INVALID_BLOCK + 1)]);
-	  FREE_REG_SET (cond_local_sets[bb->index - (INVALID_BLOCK + 1)]);
+ 	  FREE_REG_SET (local_sets[bb->index]);
+ 	  FREE_REG_SET (cond_local_sets[bb->index]);
 	};
     }
   else
     {
       FOR_EACH_BB (bb)
 	{
-	  FREE_REG_SET (local_sets[bb->index - (INVALID_BLOCK + 1)]);
-	  FREE_REG_SET (cond_local_sets[bb->index - (INVALID_BLOCK + 1)]);
+ 	  FREE_REG_SET (local_sets[bb->index]);
+ 	  FREE_REG_SET (cond_local_sets[bb->index]);
 	}
     }
 
@@ -2474,7 +2469,7 @@ libcall_dead_p (struct propagate_block_i
 int
 regno_clobbered_at_setjmp (int regno)
 {
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return 0;
 
   return ((REG_N_SETS (regno) > 1
Index: gcc/function.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/function.c,v
retrieving revision 1.645
diff -u -p -r1.645 function.c
--- gcc/function.c	17 Sep 2005 18:38:36 -0000	1.645
+++ gcc/function.c	29 Sep 2005 18:42:48 -0000
@@ -5276,7 +5276,8 @@ thread_prologue_and_epilogue_insns (rtx 
 	fixup_fallthru_exit_predecessor.  */
       cfg_layout_initialize (0);
       FOR_EACH_BB (cur_bb)
-	if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
+	if (cur_bb->index >= NUM_FIXED_BLOCKS
+	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
 	  cur_bb->aux = cur_bb->next_bb;
       cfg_layout_finalize ();
     }
Index: gcc/global.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/global.c,v
retrieving revision 1.132
diff -u -p -r1.132 global.c
--- gcc/global.c	1 Sep 2005 05:29:02 -0000	1.132
+++ gcc/global.c	29 Sep 2005 18:42:48 -0000
@@ -621,7 +621,7 @@ global_alloc (FILE *file)
 
 #if 0 /* We need to eliminate regs even if there is no rtl code,
 	 for the sake of debugging information.  */
-  if (n_basic_blocks > 0)
+  if (n_basic_blocks > NUM_FIXED_BLOCKS)
 #endif
     {
       build_insn_chain (get_insns ());
@@ -2281,9 +2281,9 @@ set_up_bb_rts_numbers (void)
   int i;
   int *rts_order;
   
-  rts_order = xmalloc (sizeof (int) * n_basic_blocks);
+  rts_order = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS));
   flow_reverse_top_sort_order_compute (rts_order);
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
     BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
   free (rts_order);
 }
Index: gcc/ifcvt.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/ifcvt.c,v
retrieving revision 1.194
diff -u -p -r1.194 ifcvt.c
--- gcc/ifcvt.c	22 Jul 2005 12:25:20 -0000	1.194
+++ gcc/ifcvt.c	29 Sep 2005 18:42:48 -0000
@@ -3149,14 +3149,14 @@ find_if_case_2 (basic_block test_bb, edg
     return FALSE;
 
   /* THEN is not EXIT.  */
-  if (then_bb->index < 0)
+  if (then_bb->index < NUM_FIXED_BLOCKS)
     return FALSE;
 
   /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
   note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
   if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
     ;
-  else if (else_succ->dest->index < 0
+  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
 	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
 			      else_succ->dest))
     ;
Index: gcc/lcm.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/lcm.c,v
retrieving revision 1.76
diff -u -p -r1.76 lcm.c
--- gcc/lcm.c	25 Jun 2005 02:00:29 -0000	1.76
+++ gcc/lcm.c	29 Sep 2005 18:42:48 -0000
@@ -107,7 +107,8 @@ compute_antinout_edge (sbitmap *antloc, 
   /* Allocate a worklist array/queue.  Entries are only added to the
      list if they were not already on the list.  So the size is
      bounded by the number of basic blocks.  */
-  qin = qout = worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
+  qin = qout = worklist = xmalloc (sizeof (basic_block) * 
+				   (n_basic_blocks - NUM_FIXED_BLOCKS));
 
   /* We want a maximal solution, so make an optimistic initialization of
      ANTIN.  */
@@ -122,8 +123,8 @@ compute_antinout_edge (sbitmap *antloc, 
     }
 
   qin = worklist;
-  qend = &worklist[n_basic_blocks];
-  qlen = n_basic_blocks;
+  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
+  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;
 
   /* Mark blocks which are predecessors of the exit block so that we
      can easily identify them below.  */
@@ -260,7 +261,7 @@ compute_laterin (struct edge_list *edge_
      list if they were not already on the list.  So the size is
      bounded by the number of basic blocks.  */
   qin = qout = worklist
-    = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
+    = xmalloc (sizeof (basic_block) * (n_basic_blocks - NUM_FIXED_BLOCKS));
 
   /* Initialize a mapping from each edge to its index.  */
   for (i = 0; i < num_edges; i++)
@@ -294,11 +295,10 @@ compute_laterin (struct edge_list *edge_
     }
 
   /* Note that we do not use the last allocated element for our queue,
-     as EXIT_BLOCK is never inserted into it. In fact the above allocation
-     of n_basic_blocks + 1 elements is not necessary.  */
+     as EXIT_BLOCK is never inserted into it.  */
   qin = worklist;
-  qend = &worklist[n_basic_blocks];
-  qlen = n_basic_blocks;
+  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
+  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;
 
   /* Iterate until the worklist is empty.  */
   while (qlen)
@@ -485,7 +485,8 @@ compute_available (sbitmap *avloc, sbitm
   /* Allocate a worklist array/queue.  Entries are only added to the
      list if they were not already on the list.  So the size is
      bounded by the number of basic blocks.  */
-  qin = qout = worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
+  qin = qout = worklist = 
+    xmalloc (sizeof (basic_block) * (n_basic_blocks - NUM_FIXED_BLOCKS));
 
   /* We want a maximal solution.  */
   sbitmap_vector_ones (avout, last_basic_block);
@@ -499,8 +500,8 @@ compute_available (sbitmap *avloc, sbitm
     }
 
   qin = worklist;
-  qend = &worklist[n_basic_blocks];
-  qlen = n_basic_blocks;
+  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
+  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;
 
   /* Mark blocks which are successors of the entry block so that we
      can easily identify them below.  */
Index: gcc/modulo-sched.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/modulo-sched.c,v
retrieving revision 1.38
diff -u -p -r1.38 modulo-sched.c
--- gcc/modulo-sched.c	24 Aug 2005 07:56:54 -0000	1.38
+++ gcc/modulo-sched.c	29 Sep 2005 18:42:48 -0000
@@ -2536,7 +2536,6 @@ rest_of_handle_sms (void)
 {
 #ifdef INSN_SCHEDULING
   basic_block bb;
-  sbitmap blocks;
 
   /* We want to be able to create new pseudos.  */
   no_new_pseudos = 0;
@@ -2547,9 +2546,7 @@ rest_of_handle_sms (void)
   /* Update the life information, because we add pseudos.  */
   max_regno = max_reg_num ();
   allocate_reg_info (max_regno, FALSE, FALSE);
-  blocks = sbitmap_alloc (last_basic_block);
-  sbitmap_ones (blocks);
-  update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
+  update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
                     (PROP_DEATH_NOTES
                      | PROP_REG_INFO
                      | PROP_KILL_DEAD_CODE
Index: gcc/predict.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/predict.c,v
retrieving revision 1.155
diff -u -p -r1.155 predict.c
--- gcc/predict.c	1 Aug 2005 03:54:46 -0000	1.155
+++ gcc/predict.c	29 Sep 2005 18:42:48 -0000
@@ -703,7 +703,7 @@ predict_loops (struct loops *loops_info,
 	     conditional has no loop header successors as not taken.  */
 	  if (!header_found)
 	    FOR_EACH_EDGE (e, ei, bb->succs)
-	      if (e->dest->index < 0
+	      if (e->dest->index < NUM_FIXED_BLOCKS
 		  || !flow_bb_inside_loop_p (loop, e->dest))
 		predict_edge
 		  (e, PRED_LOOP_EXIT,
@@ -1267,7 +1267,7 @@ tree_bb_level_predictions (void)
   int *heads;
 
   heads = xmalloc (sizeof (int) * last_basic_block);
-  memset (heads, -1, sizeof (int) * last_basic_block);
+  memset (heads, ENTRY_BLOCK, sizeof (int) * last_basic_block);
   heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;
 
   apply_return_prediction (heads);
@@ -1520,10 +1520,7 @@ predict_paths_leading_to (basic_block bb
       while (next_ai != bb)
 	{
 	  next_ai = ai;
-	  if (heads[ai->index] == ENTRY_BLOCK)
-	    ai = ENTRY_BLOCK_PTR;
-	  else
-	    ai = BASIC_BLOCK (heads[ai->index]);
+	  ai = BASIC_BLOCK (heads[ai->index]);
 	  heads[next_ai->index] = head;
 	}
     }
@@ -1534,7 +1531,7 @@ predict_paths_leading_to (basic_block bb
   if (y == last_basic_block)
     return;
   FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
-    if (e->dest->index >= 0
+    if (e->dest->index >= NUM_FIXED_BLOCKS
 	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
       predict_edge_def (e, pred, taken);
 }
@@ -1592,12 +1589,7 @@ propagate_freq (struct loop *loop, bitma
        /* The outermost "loop" includes the exit block, which we can not
 	  look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
 	  directly.  Do the same for the entry block.  */
-     if (i == (unsigned)ENTRY_BLOCK)
-       bb = ENTRY_BLOCK_PTR;
-     else if (i == (unsigned)EXIT_BLOCK)
-       bb = EXIT_BLOCK_PTR;
-     else
-       bb = BASIC_BLOCK (i);
+      bb = BASIC_BLOCK (i);
 
       FOR_EACH_EDGE (e, ei, bb->preds)
 	{
Index: gcc/print-rtl.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/print-rtl.c,v
retrieving revision 1.125
diff -u -p -r1.125 print-rtl.c
--- gcc/print-rtl.c	16 Aug 2005 00:13:21 -0000	1.125
+++ gcc/print-rtl.c	29 Sep 2005 18:42:48 -0000
@@ -774,16 +774,34 @@ print_rtl (FILE *outf, rtx rtx_first)
 int
 print_rtl_single (FILE *outf, rtx x)
 {
+  return print_rtl_single_with_indent (outf, x, 0);
+}
+
+/* Like print_rtl_single, except prefix each line with IND spaces.  */
+/* Return nonzero if we actually printed anything.  */
+
+int
+print_rtl_single_with_indent (FILE *outf, rtx x, int ind)
+{
+  int old_indent = indent;
+  char *s_indent = (char *) alloca ((size_t) ind + 1);
+  memset ((void *) s_indent, ' ', (size_t) ind);
+  s_indent[ind] = '\0';
+
+  indent = ind;
   outfile = outf;
   sawclose = 0;
   if (! flag_dump_unnumbered
       || !NOTE_P (x) || NOTE_LINE_NUMBER (x) < 0)
     {
+      fputs (s_indent, outfile);
       fputs (print_rtx_head, outfile);
       print_rtx (x);
       putc ('\n', outf);
+      indent = old_indent;
       return 1;
     }
+  indent = old_indent;
   return 0;
 }
 
Index: gcc/profile.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/profile.c,v
retrieving revision 1.164
diff -u -p -r1.164 profile.c
--- gcc/profile.c	3 Aug 2005 22:10:41 -0000	1.164
+++ gcc/profile.c	29 Sep 2005 18:42:48 -0000
@@ -531,7 +531,7 @@ compute_branch_probabilities (void)
 	{
 	  FOR_EACH_EDGE (e, ei, bb->succs)
 	    e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
-	  if (bb->index >= 0
+	  if (bb->index >= NUM_FIXED_BLOCKS
 	      && block_ends_with_condjump_p (bb)
 	      && EDGE_COUNT (bb->succs) >= 2)
 	    {
@@ -609,7 +609,7 @@ compute_branch_probabilities (void)
 	      FOR_EACH_EDGE (e, ei, bb->succs)
 		e->probability = REG_BR_PROB_BASE / total;
 	    }
-	  if (bb->index >= 0
+	  if (bb->index >= NUM_FIXED_BLOCKS
 	      && block_ends_with_condjump_p (bb)
 	      && EDGE_COUNT (bb->succs) >= 2)
 	    num_branches++, num_never_executed;
@@ -704,7 +704,9 @@ compute_value_histograms (histogram_valu
       free (histogram_counts[t]);
 }
 
-#define BB_TO_GCOV_INDEX(bb)  ((bb)->index + 1)
+/* The entry basic block will be moved around so that it has index = 1;
+   there is nothing at index 0 and the exit block is at n_basic_blocks.  */
+#define BB_TO_GCOV_INDEX(bb)  ((bb)->index - 1)
 /* When passed NULL as file_name, initialize.
    When passed something else, output the necessary commands to change
    line to LINE and offset to FILE_NAME.  */
@@ -904,7 +906,7 @@ branch_prob (void)
 	num_instrumented++;
     }
 
-  total_num_blocks += n_basic_blocks + 2;
+  total_num_blocks += n_basic_blocks;
   if (dump_file)
     fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);
 
@@ -925,7 +927,7 @@ branch_prob (void)
       gcov_position_t offset;
 
       offset = gcov_write_tag (GCOV_TAG_BLOCKS);
-      for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
+      for (i = 0; i != (unsigned) (n_basic_blocks); i++)
 	gcov_write_unsigned (0);
       gcov_write_length (offset);
     }
@@ -933,7 +935,7 @@ branch_prob (void)
    /* Keep all basic block indexes nonnegative in the gcov output.
       Index 0 is used for entry block, last index is for exit block.
       */
-  ENTRY_BLOCK_PTR->index = -1;
+  ENTRY_BLOCK_PTR->index = 1;
   EXIT_BLOCK_PTR->index = last_basic_block;
 
   /* Arcs */
Index: gcc/regrename.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/regrename.c,v
retrieving revision 1.102
diff -u -p -r1.102 regrename.c
--- gcc/regrename.c	5 Jul 2005 16:20:13 -0000	1.102
+++ gcc/regrename.c	29 Sep 2005 18:42:48 -0000
@@ -1763,20 +1763,19 @@ copyprop_hardreg_forward (void)
 
   all_vd = xmalloc (sizeof (struct value_data) * last_basic_block);
 
-  visited = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));
+  visited = sbitmap_alloc (last_basic_block);
   sbitmap_zero (visited);
 
   FOR_EACH_BB (bb)
     {
-      SET_BIT (visited, bb->index - (INVALID_BLOCK + 1));
+      SET_BIT (visited, bb->index);
 
       /* If a block has a single predecessor, that we've already
 	 processed, begin with the value data that was live at
 	 the end of the predecessor block.  */
       /* ??? Ought to use more intelligent queuing of blocks.  */
-      if (single_pred_p (bb)
-	  && TEST_BIT (visited,
-		       single_pred (bb)->index - (INVALID_BLOCK + 1))
+      if (single_pred_p (bb) 
+	  && TEST_BIT (visited, single_pred (bb)->index)
 	  && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
 	all_vd[bb->index] = all_vd[single_pred (bb)->index];
       else
Index: gcc/reorg.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/reorg.c,v
retrieving revision 1.111
diff -u -p -r1.111 reorg.c
--- gcc/reorg.c	3 Sep 2005 18:49:37 -0000	1.111
+++ gcc/reorg.c	29 Sep 2005 18:42:48 -0000
@@ -3589,7 +3589,7 @@ dbr_schedule (rtx first, FILE *file)
 
   /* If the current function has no insns other than the prologue and
      epilogue, then do not try to fill any delay slots.  */
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return;
 
   /* Find the highest INSN_UID and allocate and initialize our map from
Index: gcc/rtl.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/rtl.h,v
retrieving revision 1.560
diff -u -p -r1.560 rtl.h
--- gcc/rtl.h	6 Sep 2005 08:52:51 -0000	1.560
+++ gcc/rtl.h	29 Sep 2005 18:42:49 -0000
@@ -1998,6 +1998,7 @@ extern void print_mem_expr (FILE *, tree
 extern void print_rtl (FILE *, rtx);
 extern void print_simple_rtl (FILE *, rtx);
 extern int print_rtl_single (FILE *, rtx);
+extern int print_rtl_single_with_indent (FILE *, rtx, int);
 extern void print_inline_rtx (FILE *, rtx, int);
 
 /* In loop.c */
Index: gcc/sched-ebb.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/sched-ebb.c,v
retrieving revision 1.45
diff -u -p -r1.45 sched-ebb.c
--- gcc/sched-ebb.c	27 Jul 2005 16:28:34 -0000	1.45
+++ gcc/sched-ebb.c	29 Sep 2005 18:42:49 -0000
@@ -568,7 +568,7 @@ schedule_ebbs (FILE *dump_file)
 
   /* Taking care of this degenerate case makes the rest of
      this code simpler.  */
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return;
 
   sched_init (dump_file);
Index: gcc/sched-rgn.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/sched-rgn.c,v
retrieving revision 1.101
diff -u -p -r1.101 sched-rgn.c
--- gcc/sched-rgn.c	24 Aug 2005 20:28:06 -0000	1.101
+++ gcc/sched-rgn.c	29 Sep 2005 18:42:49 -0000
@@ -999,7 +999,7 @@ compute_trg_info (int trg)
   sp->is_speculative = 0;
   sp->src_prob = 100;
 
-  visited = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));
+  visited = sbitmap_alloc (last_basic_block);
 
   for (i = trg + 1; i < current_nr_blocks; i++)
     {
@@ -1044,8 +1044,7 @@ compute_trg_info (int trg)
 	      block = el.first_member[j]->src;
 	      FOR_EACH_EDGE (e, ei, block->succs)
 		{
-		  if (!TEST_BIT (visited,
-				 e->dest->index - (INVALID_BLOCK + 1)))
+		  if (!TEST_BIT (visited, e->dest->index))
 		    {
 		      for (k = 0; k < el.nr_members; k++)
 			if (e == el.first_member[k])
@@ -1054,8 +1053,7 @@ compute_trg_info (int trg)
 		      if (k >= el.nr_members)
 			{
 			  bblst_table[bblst_last++] = e->dest;
-			  SET_BIT (visited,
-				   e->dest->index - (INVALID_BLOCK + 1));
+			  SET_BIT (visited, e->dest->index);
 			  update_idx++;
 			}
 		    }
@@ -2469,7 +2467,7 @@ init_regions (void)
 
   /* Compute regions for scheduling.  */
   if (reload_completed
-      || n_basic_blocks == 1
+      || n_basic_blocks == NUM_FIXED_BLOCKS + 1
       || !flag_schedule_interblock
       || is_cfg_nonregular ())
     {
@@ -2526,7 +2524,7 @@ schedule_insns (FILE *dump_file)
 
   /* Taking care of this degenerate case makes the rest of
      this code simpler.  */
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return;
 
   nr_inter = 0;
Index: gcc/tracer.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tracer.c,v
retrieving revision 1.29
diff -u -p -r1.29 tracer.c
--- gcc/tracer.c	24 Aug 2005 07:56:54 -0000	1.29
+++ gcc/tracer.c	29 Sep 2005 18:42:49 -0000
@@ -72,7 +72,7 @@ static int branch_ratio_cutoff;
 static bool
 ignore_bb_p (basic_block bb)
 {
-  if (bb->index < 0)
+  if (bb->index < NUM_FIXED_BLOCKS)
     return true;
   if (!maybe_hot_bb_p (bb))
     return true;
@@ -363,7 +363,7 @@ layout_superblocks (void)
 void
 tracer (unsigned int flags)
 {
-  if (n_basic_blocks <= 1)
+  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
     return;
 
   cfg_layout_initialize (flags);
Index: gcc/tree-cfg.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-cfg.c,v
retrieving revision 2.221
diff -u -p -r2.221 tree-cfg.c
--- gcc/tree-cfg.c	13 Sep 2005 07:33:49 -0000	2.221
+++ gcc/tree-cfg.c	29 Sep 2005 18:42:49 -0000
@@ -129,14 +129,16 @@ init_empty_tree_cfg (void)
   /* Initialize the basic block array.  */
   init_flow ();
   profile_status = PROFILE_ABSENT;
-  n_basic_blocks = 0;
-  last_basic_block = 0;
+  n_basic_blocks = NUM_FIXED_BLOCKS;
+  last_basic_block = NUM_FIXED_BLOCKS;
   VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
 
   /* Build a mapping of labels to their associated blocks.  */
   VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
 		  "label to block map");
 
+  BASIC_BLOCK(ENTRY_BLOCK) = ENTRY_BLOCK_PTR;
+  BASIC_BLOCK(EXIT_BLOCK) = EXIT_BLOCK_PTR;
   ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
   EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
 }
@@ -170,7 +172,7 @@ build_tree_cfg (tree *tp)
     factor_computed_gotos ();
 
   /* Make sure there is always at least one block, even if it's empty.  */
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     create_empty_bb (ENTRY_BLOCK_PTR);
 
   /* Adjust the size of the array.  */
@@ -430,7 +432,7 @@ make_edges (void)
 
   /* Create an edge from entry to the first block with executable
      statements in it.  */
-  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
+  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
 
   /* Traverse the basic block array placing edges.  */
   FOR_EACH_BB (bb)
@@ -794,7 +796,8 @@ label_to_block_fn (struct function *ifun
      and undefined variable warnings quite right.  */
   if ((errorcount || sorrycount) && uid < 0)
     {
-      block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
+      block_stmt_iterator bsi = 
+	bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
       tree stmt;
 
       stmt = build1 (LABEL_EXPR, void_type_node, dest);
@@ -4138,7 +4141,51 @@ tree_move_block_after (basic_block bb, b
   return true;
 }
 
+/* Return true if BB contains only labels or non-executable
+   instructions.  */
 
+static bool
+tree_block_empty_p (basic_block bb)
+{
+  block_stmt_iterator bsi;
+  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+    {
+      tree stmt = bsi_stmt (bsi);
+      if ((TREE_CODE (stmt) != LABEL_EXPR)
+	  && (TREE_CODE (stmt) != GOTO_EXPR))
+	return false;
+    }
+  return true;
+}
+      
+/* Split a basic block if it ends with a conditional branch and if the
+   other part of the block is not empty.  */
+
+static basic_block
+tree_split_block_before_cond_jump (basic_block bb)
+{
+  tree split_point = NULL;
+  tree last = NULL;
+  bool found_code = false;
+
+  block_stmt_iterator bsi;
+  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+    {
+      tree stmt = bsi_stmt (bsi);
+      if ((TREE_CODE (stmt) == COND_EXPR)
+	  || (TREE_CODE (stmt) == SWITCH_EXPR))
+	split_point = last;
+      else if (TREE_CODE (stmt) != LABEL_EXPR) 
+	found_code = true;
+      last = stmt;
+    }
+
+  if (found_code && split_point)
+    return split_block (bb, split_point)->dest;
+  else 
+    return NULL;
+}
+      
 /* Return true if basic_block can be duplicated.  */
 
 static bool
@@ -4601,7 +4648,7 @@ print_loop_ir (FILE *file)
 {
   basic_block bb;
   
-  bb = BASIC_BLOCK (0);
+  bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
   if (bb && bb->loop_father)
     print_loop (file, bb->loop_father, 0);
 }
@@ -4683,7 +4730,7 @@ tree_flow_call_edges_add (sbitmap blocks
   int last_bb = last_basic_block;
   bool check_last_block = false;
 
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return 0;
 
   if (! blocks)
@@ -4959,7 +5006,9 @@ struct cfg_hooks tree_cfg_hooks = {
   tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
   tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
   extract_true_false_edges_from_block, /* extract_cond_bb_edges */
-  flush_pending_stmts 		/* flush_pending_stmts */  
+  flush_pending_stmts, 		/* flush_pending_stmts */  
+  tree_block_empty_p,           /* block_empty_p */
+  tree_split_block_before_cond_jump, /* split_block_before_cond_jump */
 };
 
 
Index: gcc/tree-dfa.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-dfa.c,v
retrieving revision 2.64
diff -u -p -r2.64 tree-dfa.c
--- gcc/tree-dfa.c	13 Sep 2005 16:05:37 -0000	2.64
+++ gcc/tree-dfa.c	29 Sep 2005 18:42:49 -0000
@@ -478,10 +478,11 @@ collect_dfa_stats (struct dfa_stats_d *d
   memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));
 
   /* Walk all the trees in the function counting references.  Start at
-     basic block 0, but don't stop at block boundaries.  */
+     basic block NUM_FIXED_BLOCKS, but don't stop at block boundaries.  */
   pset = pointer_set_create ();
 
-  for (i = bsi_start (BASIC_BLOCK (0)); !bsi_end_p (i); bsi_next (&i))
+  for (i = bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
+       !bsi_end_p (i); bsi_next (&i))
     walk_tree (bsi_stmt_ptr (i), collect_dfa_stats_r, (void *) dfa_stats_p,
 	       pset);
 
Index: gcc/tree-flow-inline.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-flow-inline.h,v
retrieving revision 2.58
diff -u -p -r2.58 tree-flow-inline.h
--- gcc/tree-flow-inline.h	27 Sep 2005 21:44:46 -0000	2.58
+++ gcc/tree-flow-inline.h	29 Sep 2005 18:42:49 -0000
@@ -719,7 +719,7 @@ bsi_start (basic_block bb)
     bsi.tsi = tsi_start (bb->stmt_list);
   else
     {
-      gcc_assert (bb->index < 0);
+      gcc_assert (bb->index < NUM_FIXED_BLOCKS);
       bsi.tsi.ptr = NULL;
       bsi.tsi.container = NULL;
     }
@@ -740,7 +740,7 @@ bsi_after_labels (basic_block bb)
 
   if (!bb->stmt_list)
     {
-      gcc_assert (bb->index < 0);
+      gcc_assert (bb->index < NUM_FIXED_BLOCKS);
       bsi.tsi.ptr = NULL;
       bsi.tsi.container = NULL;
       return bsi;
@@ -773,7 +773,7 @@ bsi_last (basic_block bb)
     bsi.tsi = tsi_last (bb->stmt_list);
   else
     {
-      gcc_assert (bb->index < 0);
+      gcc_assert (bb->index < NUM_FIXED_BLOCKS);
       bsi.tsi.ptr = NULL;
       bsi.tsi.container = NULL;
     }
Index: gcc/tree-ssa-phiopt.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-ssa-phiopt.c,v
retrieving revision 2.42
diff -u -p -r2.42 tree-ssa-phiopt.c
--- gcc/tree-ssa-phiopt.c	6 Sep 2005 22:06:29 -0000	2.42
+++ gcc/tree-ssa-phiopt.c	29 Sep 2005 18:42:49 -0000
@@ -148,9 +148,9 @@ tree_ssa_phiopt (void)
      outer ones, and also that we do not try to visit a removed
      block.  */
   bb_order = blocks_in_phiopt_order ();
-  n = n_basic_blocks;
+  n = n_basic_blocks - NUM_FIXED_BLOCKS;
 
-  for (i = 0; i < n; i++)
+  for (i = 0; i < n; i++) 
     {
       tree cond_expr;
       tree phi;
@@ -248,11 +248,12 @@ blocks_in_phiopt_order (void)
 {
   basic_block x, y;
   basic_block *order = xmalloc (sizeof (basic_block) * n_basic_blocks);
-  unsigned n = n_basic_blocks, np, i;
-  sbitmap visited = sbitmap_alloc (last_basic_block + 2);
+  unsigned n = n_basic_blocks - NUM_FIXED_BLOCKS; 
+  unsigned np, i;
+  sbitmap visited = sbitmap_alloc (last_basic_block); 
 
-#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index + 2))
-#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index + 2))
+#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index)) 
+#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index)) 
 
   sbitmap_zero (visited);
 
Index: gcc/tree-ssa-reassoc.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-ssa-reassoc.c,v
retrieving revision 2.6
diff -u -p -r2.6 tree-ssa-reassoc.c
--- gcc/tree-ssa-reassoc.c	26 Jul 2005 13:53:51 -0000	2.6
+++ gcc/tree-ssa-reassoc.c	29 Sep 2005 18:42:49 -0000
@@ -239,7 +239,7 @@ init_reassoc (void)
     }
   
   /* Set up rank for each BB  */
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
     bb_rank[bbs[i]] = ++rank  << 16;
 
   free (bbs);
Index: gcc/var-tracking.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/var-tracking.c,v
retrieving revision 2.32
diff -u -p -r2.32 var-tracking.c
--- gcc/var-tracking.c	7 Sep 2005 07:47:15 -0000	2.32
+++ gcc/var-tracking.c	29 Sep 2005 18:42:49 -0000
@@ -1633,10 +1633,10 @@ vt_find_locations (void)
 
   /* Compute reverse completion order of depth first search of the CFG
      so that the data-flow runs faster.  */
-  rc_order = xmalloc (n_basic_blocks * sizeof (int));
+  rc_order = xmalloc ((n_basic_blocks - NUM_FIXED_BLOCKS) * sizeof (int));
   bb_order = xmalloc (last_basic_block * sizeof (int));
   flow_depth_first_order_compute (NULL, rc_order);
-  for (i = 0; i < n_basic_blocks; i++)
+  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
     bb_order[rc_order[i]] = i;
   free (rc_order);
 

Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]