[PATCH]: Teach call clobbering to use ipa static vars analysis results


Patch 6/7
This patch teaches call clobbering about the results of the IPA static
vars analysis, so that statics are no longer marked as call clobbered by
calls that do not actually clobber them.
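
To make the effect concrete, here is a minimal sketch (a made-up example,
not part of this patch or the testsuite) of the kind of code that
benefits; once the IPA pass records that the callee neither reads nor
writes the static, the call no longer needs a V_MAY_DEF (or even a VUSE)
of it:

    /* Hypothetical translation unit.  */
    static int counter;

    static void
    unrelated (void)
    {
      /* Never reads or writes 'counter'.  */
    }

    int
    use_counter (int n)
    {
      int a = counter;
      unrelated ();   /* Previously treated as clobbering 'counter';
                         with the not-read/not-written bitmaps the call
                         adds no virtual operand for it, so 'a' can be
                         reused below.  */
      return a + counter + n;
    }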


Unfortunately, this knows nothing of structure alias vars, since they
aren't created at IPA analysis time, so we'll still call clobber static
structures, which are only known by their SFTs.

I'd like to clean that up in a follow-up patch; it just requires some
bitmap futzing after SFTs are created.
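
For example (again just a sketch, reusing 'unrelated' from the sketch
above), a file-local static structure keeps getting clobbered even by a
call that never touches it, because the not-read/not-written bitmaps are
keyed off the variable itself while the operand scanner only sees its
SFTs:

    static struct { int x, y; } pair;

    int
    use_pair (void)
    {
      int a = pair.x;
      unrelated ();   /* Still gets V_MAY_DEFs of pair's SFTs, even
                         though the callee never touches 'pair'.  */
      return a + pair.x;
    }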

Bootstrapped and regtested on i686-pc-linux-gnu and powerpc-linux-gnu.


2005-06-06  Kenneth Zadeck  <zadeck@naturalbridge.com>
            Danny Berlin  <dberlin@dberlin.org>

        * Makefile.in (tree-ssa-operands.o): Add errors.h and
        $(IPA_REFERENCE_H) dependencies.
        * tree-ssa-operands.c (add_call_clobber_ops): Changed call
        clobbering so that statics are not marked as clobbered if the
        call does not clobber them.




Index: Makefile.in
===================================================================
RCS file: /cvs/gcc/gcc/gcc/Makefile.in,v
retrieving revision 1.1496
diff -u -p -r1.1496 Makefile.in
--- Makefile.in	4 Jun 2005 17:07:50 -0000	1.1496
+++ Makefile.in	6 Jun 2005 21:39:26 -0000
@@ -1786,7 +1790,8 @@
 tree-ssa-operands.o : tree-ssa-operands.c $(TREE_FLOW_H) $(CONFIG_H) \
-   $(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) tree-inline.h \
+   $(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) errors.h tree-inline.h \
    $(FLAGS_H) function.h $(TM_H) $(TIMEVAR_H) tree-pass.h toplev.h \
-   gt-tree-ssa-operands.h coretypes.h langhooks.h tree-ssa-opfinalize.h
+   gt-tree-ssa-operands.h coretypes.h langhooks.h tree-ssa-opfinalize.h \
+   $(IPA_REFERENCE_H)
 tree-eh.o : tree-eh.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
    $(RTL_H) $(TREE_H) $(TM_H) $(FLAGS_H) function.h except.h langhooks.h \
    $(GGC_H) tree-pass.h coretypes.h $(TIMEVAR_H) $(TM_P_H) \
Index: tree-ssa-operands.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-ssa-operands.c,v
retrieving revision 2.86
diff -u -p -r2.86 tree-ssa-operands.c
--- tree-ssa-operands.c	1 Jun 2005 02:51:02 -0000	2.86
+++ tree-ssa-operands.c	6 Jun 2005 21:39:36 -0000
@@ -34,6 +34,7 @@ Boston, MA 02111-1307, USA.  */
 #include "toplev.h"
 
 #include "langhooks.h"
+#include "ipa-reference.h"
 
 /* This file contains the code required to manage the operands cache of the 
    SSA optimizer.  For every stmt, we maintain an operand cache in the stmt 
@@ -157,7 +158,7 @@ static inline void append_def (tree *);
 static inline void append_use (tree *);
 static void append_v_may_def (tree);
 static void append_v_must_def (tree);
-static void add_call_clobber_ops (tree);
+static void add_call_clobber_ops (tree, tree);
 static void add_call_read_ops (tree);
 static void add_stmt_operand (tree *, stmt_ann_t, int);
 static void build_ssa_operands (tree stmt);
@@ -1699,7 +1700,7 @@ get_call_expr_operands (tree stmt, tree 
 	 there is no point in recording that.  */ 
       if (TREE_SIDE_EFFECTS (expr)
 	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
-	add_call_clobber_ops (stmt);
+	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
       else if (!(call_flags & ECF_CONST))
 	add_call_read_ops (stmt);
     }
@@ -1923,7 +1924,7 @@ note_addressable (tree var, stmt_ann_t s
    clobbered variables in the function.  */
 
 static void
-add_call_clobber_ops (tree stmt)
+add_call_clobber_ops (tree stmt, tree callee)
 {
   int i;
   unsigned u;
@@ -1931,6 +1932,7 @@ add_call_clobber_ops (tree stmt)
   bitmap_iterator bi;
   stmt_ann_t s_ann = stmt_ann (stmt);
   struct stmt_ann_d empty_ann;
+  bitmap not_read_b, not_written_b;
 
   /* Functions that are not const, pure or never return may clobber
      call-clobbered variables.  */
@@ -1945,8 +1947,22 @@ add_call_clobber_ops (tree stmt)
       return;
     }
 
+  /* FIXME - if we have better information from the static vars
+     analysis, we need to make the cache call site specific.  This way
+     we can have the performance benefits even if we are doing good
+     optimization.  */
+
+  /* Get info for local and module level statics.  There is a bit
+     set for each static if the call being processed does not read
+     or write that variable.  */
+
+  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL; 
+  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL; 
+
   /* If cache is valid, copy the elements into the build vectors.  */
-  if (ssa_call_clobbered_cache_valid)
+  if (ssa_call_clobbered_cache_valid
+      && (!not_read_b || bitmap_empty_p (not_read_b))
+      && (!not_written_b || bitmap_empty_p (not_written_b)))
     {
       /* Process the caches in reverse order so we are always inserting at
          the head of the list.  */
@@ -1981,43 +1997,62 @@ add_call_clobber_ops (tree stmt)
       if (unmodifiable_var_p (var))
 	add_stmt_operand (&var, &empty_ann, opf_none);
       else
-	add_stmt_operand (&var, &empty_ann, opf_is_def | opf_non_specific);
+	{
+	  bool not_read
+	    = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
+	  bool not_written
+	    = not_written_b ? bitmap_bit_p (not_written_b, u) : false;
+
+	  if ((TREE_READONLY (var)
+	       && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
+	      || not_written)
+	    {
+	      if (!not_read)
+		add_stmt_operand (&var, &empty_ann, opf_none);
+	    }
+	  else
+	    add_stmt_operand (&var, &empty_ann, opf_is_def);
+	}
     }
 
-  clobbered_aliased_loads = empty_ann.makes_aliased_loads;
-  clobbered_aliased_stores = empty_ann.makes_aliased_stores;
-
-  /* Set the flags for a stmt's annotation.  */
-  if (s_ann)
+  if ((!not_read_b || bitmap_empty_p (not_read_b))
+      && (!not_written_b || bitmap_empty_p (not_written_b)))
     {
-      s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
-      s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
-    }
-
-  /* Prepare empty cache vectors.  */
-  VEC_truncate (tree, clobbered_vuses, 0);
-  VEC_truncate (tree, clobbered_v_may_defs, 0);
+      clobbered_aliased_loads = empty_ann.makes_aliased_loads;
+      clobbered_aliased_stores = empty_ann.makes_aliased_stores;
 
-  /* Now fill the clobbered cache with the values that have been found.  */
-  for (i = opbuild_first (&build_vuses);
-       i != OPBUILD_LAST;
-       i = opbuild_next (&build_vuses, i))
-    VEC_safe_push (tree, heap, clobbered_vuses,
-		   opbuild_elem_virtual (&build_vuses, i));
-
-  gcc_assert (opbuild_num_elems (&build_vuses) 
-	      == VEC_length (tree, clobbered_vuses));
+      /* Set the flags for a stmt's annotation.  */
+      if (s_ann)
+	{
+	  s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
+	  s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
+	}
 
-  for (i = opbuild_first (&build_v_may_defs);
-       i != OPBUILD_LAST;
-       i = opbuild_next (&build_v_may_defs, i))
-    VEC_safe_push (tree, heap, clobbered_v_may_defs, 
-		   opbuild_elem_virtual (&build_v_may_defs, i));
+      /* Prepare empty cache vectors.  */
+      VEC_truncate (tree, clobbered_vuses, 0);
+      VEC_truncate (tree, clobbered_v_may_defs, 0);
+
+      /* Now fill the clobbered cache with the values that have been found.  */
+      for (i = opbuild_first (&build_vuses);
+	   i != OPBUILD_LAST;
+	   i = opbuild_next (&build_vuses, i))
+	VEC_safe_push (tree, heap, clobbered_vuses,
+		       opbuild_elem_virtual (&build_vuses, i));
+
+      gcc_assert (opbuild_num_elems (&build_vuses) 
+		  == VEC_length (tree, clobbered_vuses));
+
+      for (i = opbuild_first (&build_v_may_defs);
+	   i != OPBUILD_LAST;
+	   i = opbuild_next (&build_v_may_defs, i))
+	VEC_safe_push (tree, heap, clobbered_v_may_defs, 
+		       opbuild_elem_virtual (&build_v_may_defs, i));
 
-  gcc_assert (opbuild_num_elems (&build_v_may_defs) 
-	      == VEC_length (tree, clobbered_v_may_defs));
+      gcc_assert (opbuild_num_elems (&build_v_may_defs) 
+		  == VEC_length (tree, clobbered_v_may_defs));
 
-  ssa_call_clobbered_cache_valid = true;
+      ssa_call_clobbered_cache_valid = true;
+    }
 }
 
 

