[tree-ssa] Walking blocks in reverse

Andrew MacLeod amacleod@redhat.com
Fri Apr 4 21:36:00 GMT 2003


This is something I need which others might find useful. If you need to process
the instructions in a block in reverse, it can be *very* painful. Sure, you
can write 
  for (bsi = bsi_last(bb); !bsi_end_p (bsi); bsi_prev (&bsi))

but have you looked at the implementation?... 
since CE nodes have no back link, bsi_prev() walks from the start of the block
until it finds the current stmt — that is how it locates the previous stmt. That
means walking a big block in reverse this way can take time quadratic in the
size of the block...

All I've implemented here is a quick stack: we walk the block forward,
pushing stmts onto the stack, and when done we pop them off one at a time,
in order, to process them.  So you end up with a loop that looks
like this instead:

   FOR_EACH_BSI_IN_REVERSE (bsi_stack, bsi)
     {
       stmt = bsi_stmt (bsi);
     }

Should we ever get a more efficient bsi_prev (), we can either replace the
macro with the bsi_prev loop, or simply replace all the macro calls with
the loop. In either case, we can throw this code away then. Until then,
it should be handy.

Is this OK to check in? Or would you rather I keep it local to the live
range code for now?

Andrew


2003-04-04  Andrew MacLeod  <amacleod@redhat.com>

	* tree-cfg.c (push_bsi): New. Push a block_stmt_iterator onto a stack.
	(pop_bsi): New. Pop a block_stmt_iterator off a stack.
	* tree-flow-inline.h (struct bsi_list_d): Block iterator stack struct.
	(new_bsi_list): Start a new bsi stack.
	(empty_bsi_stack): Is stack empty.
	(FOR_EACH_BSI_IN_REVERSE): Macro for processing bsi's in reverse.


Index: tree-cfg.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/Attic/tree-cfg.c,v
retrieving revision 1.1.4.68
diff -c -p -r1.1.4.68 tree-cfg.c
*** tree-cfg.c	1 Apr 2003 23:04:02 -0000	1.1.4.68
--- tree-cfg.c	4 Apr 2003 21:24:26 -0000
*************** bsi_insert_on_edge (e, stmt)
*** 3126,3131 ****
--- 3126,3185 ----
    
  }
  
+ /* These 2 routines are used to process BSI's in reverse within a block.
+    When there is a decent implementation of bsi_prev, we can get rid of 
+    these forever!  */
+ 
+ /* Push another block_stmt_iterator onto the stack.  LIST is the address
+    of the stack head; a new chunk is allocated lazily on the first push
+    and whenever the current chunk fills up.  */
+ void 
+ push_bsi (list, bsi)
+      bsi_list_p *list;
+      block_stmt_iterator bsi;
+ {
+   bsi_list_p tmp;
+   if (*list == NULL)
+     {
+       /* First push ever: allocate the initial chunk.  new_bsi_list
+          leaves curr_index at 0, so slot 0 holds this first element.  */
+       *list = new_bsi_list ();
+       (*list)->bsi[0] = bsi;
+     }
+   else
+     {
+       if ((*list)->curr_index == (BSI_NUM_ELEMENTS - 1))
+         {
+ 	  /* Current chunk is full; prepend a fresh chunk and put the
+ 	     new element in its slot 0.  The old chunk stays behind it
+ 	     with curr_index still at the last slot.  */
+ 	  tmp = new_bsi_list ();
+ 	  tmp->bsi[0] = bsi;
+ 	  tmp->next = *list;
+ 	  *list = tmp;
+ 	}
+       else
+         {
+ 	  /* Room left in the current chunk; store in the next free slot.  */
+ 	  (*list)->bsi[++((*list)->curr_index)] = bsi;
+ 	}
+     }
+ }
+ 
+ /* Pop a block_stmt_iterator off the stack and return it.  Frees the
+    current chunk once its last element has been popped.
+ 
+    NOTE(review): only LIST itself is checked for NULL, not *LIST, so
+    popping an already-empty stack dereferences a null pointer below.
+    Callers (e.g. FOR_EACH_BSI_IN_REVERSE) must guarantee at least one
+    element remains — verify at each call site.  */
+ block_stmt_iterator
+ pop_bsi (list)
+      bsi_list_p *list;
+ {
+   block_stmt_iterator bsi;
+   bsi_list_p tmp;
+   if (!list)
+     abort ();
+     
+   tmp = *list;
+   /* Take the top element; when curr_index drops below 0 this chunk is
+      drained, so unlink and free it, exposing the next (older) chunk.  */
+   bsi = tmp->bsi[(tmp->curr_index)--];
+   if (tmp->curr_index< 0)
+     {
+       tmp = *list;
+       *list = (*list)->next;
+       free (tmp);
+     }
+   return bsi;
+ }
+ 
+ 
  /* Replace the statement pointed by TP1 with the statement pointed by TP2.
     Note that this function will not replace COMPOUND_EXPR nodes, only
     individual statements.
Index: tree-flow-inline.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/Attic/tree-flow-inline.h,v
retrieving revision 1.1.2.32
diff -c -p -r1.1.2.32 tree-flow-inline.h
*** tree-flow-inline.h	1 Apr 2003 23:04:02 -0000	1.1.2.32
--- tree-flow-inline.h	4 Apr 2003 21:24:26 -0000
*************** is_label_stmt (t)
*** 465,468 ****
--- 465,525 ----
    return false;
  }
  
+ /* Routines to allow a block to be walked backwards reasonably efficiently.
+    Once a decent implementation of bsi_prev() is implemented, this can
+    be removed.  */
+ 
+ /* Number of iterator slots per stack chunk.  */
+ #define BSI_NUM_ELEMENTS	50
+ 
+ /* One chunk of the iterator stack: up to BSI_NUM_ELEMENTS iterators
+    plus a link to the next (older) chunk.  CURR_INDEX is the index of
+    the topmost occupied slot in this chunk; the chunk at the head of
+    the list holds the top of the stack.  */
+ typedef struct bsi_list_d {
+   block_stmt_iterator bsi[BSI_NUM_ELEMENTS];
+   int curr_index;
+   struct bsi_list_d *next;
+ } *bsi_list_p;
+ 
+ 
+ static inline bsi_list_p new_bsi_list	PARAMS ((void));
+ static inline int empty_bsi_stack	PARAMS ((bsi_list_p));
+ extern void push_bsi			PARAMS ((bsi_list_p *, 
+ 						 block_stmt_iterator));
+ extern block_stmt_iterator pop_bsi	PARAMS ((bsi_list_p *));
+ 
+ 
+ /* Allocate and initialize a bsi_list chunk.  CURR_INDEX starts at 0,
+    i.e. slot 0 is considered occupied: callers (push_bsi and
+    FOR_EACH_BSI_IN_REVERSE) are expected to fill bsi[0] themselves.
+    NOTE(review): no NULL check — presumably xmalloc aborts on
+    allocation failure, as is GCC convention; confirm.  */
+ static inline bsi_list_p
+ new_bsi_list ()
+ {
+   bsi_list_p b;
+   b = (bsi_list_p) xmalloc (sizeof (struct bsi_list_d));
+   b->curr_index = 0;
+   b->next = NULL;
+ 
+   return b;
+ }
+ 
+ /* Is the iterator stack empty?  Returns nonzero if so.  Empty means
+    either no chunk at all, or a single remaining chunk that has been
+    drained below its first slot.  */
+ static inline int
+ empty_bsi_stack (list)
+      bsi_list_p list;
+ {
+   if (!list || (list->curr_index < 0 && list->next == NULL))
+     return 1;
+   return 0;
+ }
+ 
+ 
+ /* Process an entire block of bsi's in reverse by pushing them on a stack
+    as they are encountered, and then popping them off as they are needed.  
+    There are a couple of odd things. Since the last loop is a for loop, 
+    a dummy entry is pushed on the beginning of the stack, this allows the first
+    item pushed on the stack to be processed in the final for loop, as well
+    as guaranteeing there will be at least one to pop off.  (The dummy
+    arises because new_bsi_list leaves curr_index at 0, so the first
+    push_bsi call lands in slot 1, leaving slot 0 uninitialized; that
+    slot is never assigned to BSI's final value that the loop uses.)
+ 
+    NOTE(review): the macro body refers to a variable `bb' that must be
+    in scope at the point of use, and the remaining stack chunks leak if
+    the controlled loop is exited early (e.g. via break or goto).  */
+ 
+ #define FOR_EACH_BSI_IN_REVERSE(bsi_stack, bsi)			\
+   bsi_stack = new_bsi_list ();					\
+   for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))	\
+     push_bsi (&bsi_stack, bsi);					\
+   bsi = pop_bsi (&bsi_stack);					\
+   for ( ; !empty_bsi_stack (bsi_stack); bsi = pop_bsi (&bsi_stack))
+ 
  #endif /* _TREE_FLOW_INLINE_H  */



More information about the Gcc-patches mailing list