This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

Re: [PATCH]: Interprocedural detection of readonly and non-addressable static variables


Here is one big unified diff rather than the 6 patches.

Diego, note that it was made with -N.

I also fixed one bug that Danny introduced into the 7 patches when he changed "else abort ()" to "gcc_unreachable()" rather than "else gcc_unreachable()".

I made a stage2 compiler with this just to make sure there were no other issues.

Kenny
Index: Makefile.in
===================================================================
RCS file: /cvs/gcc/gcc/gcc/Makefile.in,v
retrieving revision 1.1501
diff -u -p -r1.1501 Makefile.in
--- Makefile.in	7 Jun 2005 19:51:21 -0000	1.1501
+++ Makefile.in	8 Jun 2005 20:16:29 -0000
@@ -736,6 +736,9 @@ SCHED_INT_H = sched-int.h $(INSN_ATTR_H)
 INTEGRATE_H = integrate.h varray.h
 CFGLAYOUT_H = cfglayout.h $(BASIC_BLOCK_H)
 CFGLOOP_H = cfgloop.h $(BASIC_BLOCK_H) $(RTL_H)
+IPA_UTILS_H = ipa-utils.h $(TREE_H) $(CGRAPH_H) 
+IPA_REFERENCE_H = ipa-reference.h bitmap.h $(TREE_H)  
+IPA_TYPE_ESCAPE_H = ipa-type-escape.h $(TREE_H)  
 CGRAPH_H = cgraph.h tree.h 
 DF_H = df.h bitmap.h sbitmap.h $(BASIC_BLOCK_H)
 DDG_H = ddg.h sbitmap.h $(DF_H)
@@ -757,7 +760,7 @@ TREE_DUMP_H = tree-dump.h $(SPLAY_TREE_H
 TREE_GIMPLE_H = tree-gimple.h tree-iterator.h
 TREE_FLOW_H = tree-flow.h tree-flow-inline.h tree-ssa-operands.h \
 		bitmap.h $(BASIC_BLOCK_H) hard-reg-set.h $(TREE_GIMPLE_H) \
-		$(HASHTAB_H) $(CGRAPH_H)
+		$(HASHTAB_H) $(CGRAPH_H) $(IPA_REFERENCE_H)
 TREE_SSA_LIVE_H = tree-ssa-live.h $(PARTITION_H)
 PRETTY_PRINT_H = pretty-print.h input.h $(OBSTACK_H)
 DIAGNOSTIC_H = diagnostic.h diagnostic.def $(PRETTY_PRINT_H)
@@ -952,7 +955,7 @@ OBJS-common = \
  integrate.o intl.o jump.o  langhooks.o lcm.o lists.o local-alloc.o  	   \
  loop.o mode-switching.o modulo-sched.o optabs.o options.o opts.o	   \
  params.o postreload.o postreload-gcse.o predict.o			   \
- insn-preds.o pointer-set.o postreload.o				   \
+ insn-preds.o pointer-set.o postreload.o tree-promote-statics.o		   \
  print-rtl.o print-tree.o profile.o value-prof.o var-tracking.o		   \
  real.o recog.o reg-stack.o regclass.o regmove.o regrename.o		   \
  reload.o reload1.o reorg.o resource.o rtl.o rtlanal.o rtl-error.o	   \
@@ -968,7 +971,8 @@ OBJS-common = \
 
 OBJS-md = $(out_object_file)
 OBJS-archive = $(EXTRA_OBJS) $(host_hook_obj) tree-inline.o		   \
-  cgraph.o cgraphunit.o tree-nomudflap.o ipa.o ipa-inline.o
+  cgraph.o cgraphunit.o tree-nomudflap.o ipa.o ipa-inline.o                \
+  ipa-utils.o ipa-reference.o ipa-pure-const.o ipa-type-escape.o
 
 OBJS = $(OBJS-common) $(out_object_file) $(OBJS-archive)
 
@@ -1784,11 +1788,12 @@ tree-dfa.o : tree-dfa.c $(TREE_FLOW_H) $
    tree-inline.h $(HASHTAB_H) pointer-set.h $(FLAGS_H) function.h \
    $(TIMEVAR_H) convert.h $(TM_H) coretypes.h langhooks.h $(TREE_DUMP_H) \
    tree-pass.h $(PARAMS_H) $(CGRAPH_H) $(BASIC_BLOCK_H) hard-reg-set.h \
-   $(TREE_GIMPLE_H)
+   $(TREE_GIMPLE_H) $(IPA_REFERENCE_H)
 tree-ssa-operands.o : tree-ssa-operands.c $(TREE_FLOW_H) $(CONFIG_H) \
-   $(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) tree-inline.h \
+   $(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) errors.h tree-inline.h \
    $(FLAGS_H) function.h $(TM_H) $(TIMEVAR_H) tree-pass.h toplev.h \
-   gt-tree-ssa-operands.h coretypes.h langhooks.h tree-ssa-opfinalize.h
+   gt-tree-ssa-operands.h coretypes.h langhooks.h tree-ssa-opfinalize.h \
+   $(IPA_REFERENCE_H)
 tree-eh.o : tree-eh.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
    $(RTL_H) $(TREE_H) $(TM_H) $(FLAGS_H) function.h except.h langhooks.h \
    $(GGC_H) tree-pass.h coretypes.h $(TIMEVAR_H) $(TM_P_H) \
@@ -1842,7 +1847,7 @@ tree-ssa-alias.o : tree-ssa-alias.c $(TR
    $(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) tree-inline.h $(FLAGS_H) \
    function.h $(TIMEVAR_H) convert.h $(TM_H) coretypes.h langhooks.h \
    $(TREE_DUMP_H) tree-pass.h $(PARAMS_H) $(BASIC_BLOCK_H) $(DIAGNOSTIC_H) \
-   hard-reg-set.h $(TREE_GIMPLE_H) vec.h
+   hard-reg-set.h $(TREE_GIMPLE_H) vec.h $(IPA_TYPE_ESCAPE_H)
 tree-ssa-reassoc.o : tree-ssa-reassoc.c $(TREE_FLOW_H) $(CONFIG_H) \
    $(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) errors.h $(TIMEVAR_H) \
    $(TM_H) coretypes.h $(TREE_DUMP_H) tree-pass.h $(FLAGS_H) tree-iterator.h\
@@ -2084,6 +2089,22 @@ ipa-inline.o : ipa-inline.c $(CONFIG_H) 
    $(TREE_H) langhooks.h tree-inline.h $(FLAGS_H) $(CGRAPH_H) intl.h \
    $(DIAGNOSTIC_H) $(FIBHEAP_H) $(PARAMS_H) $(TIMEVAR_H) tree-pass.h \
    $(COVERAGE_H)
+ipa-utils.o : ipa-utils.c $(IPA_UTILS_H) $(CONFIG_H) $(SYSTEM_H) \
+   coretypes.h $(TM_H) $(TREE_H) $(TREE_FLOW_H) tree-inline.h langhooks.h \
+   pointer-set.h $(GGC_H) $(C_COMMON_H) $(TREE_GIMPLE_H) \
+   $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h $(DIAGNOSTIC_H) 
+ipa-reference.o : ipa-reference.c $(CONFIG_H) $(SYSTEM_H) \
+   coretypes.h $(TM_H) $(TREE_H) $(TREE_FLOW_H) tree-inline.h langhooks.h \
+   pointer-set.h $(GGC_H) $(IPA_REFERENCE_H) $(IPA_UTILS_H) $(C_COMMON_H) \
+   $(TREE_GIMPLE_H) $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h $(DIAGNOSTIC_H)  
+ipa-pure-const.o : ipa-pure-const.c $(CONFIG_H) $(SYSTEM_H) \
+   coretypes.h $(TM_H) $(TREE_H) $(TREE_FLOW_H) tree-inline.h langhooks.h \
+   pointer-set.h $(GGC_H) $(IPA_UTILS_H) $(C_COMMON_H) \
+   $(TREE_GIMPLE_H) $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h $(DIAGNOSTIC_H)  
+ipa-type-escape.o : ipa-type-escape.c $(CONFIG_H) $(SYSTEM_H) \
+   coretypes.h $(TM_H) $(TREE_H) $(TREE_FLOW_H) tree-inline.h langhooks.h \
+   pointer-set.h $(GGC_H) $(IPA_TYPE_ESCAPE_H) $(IPA_UTILS_H) $(C_COMMON_H) \
+   $(TREE_GIMPLE_H) $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h $(DIAGNOSTIC_H)  
 coverage.o : coverage.c $(GCOV_IO_H) $(CONFIG_H) $(SYSTEM_H) coretypes.h \
    $(TM_H) $(RTL_H) $(TREE_H) $(FLAGS_H) output.h $(REGS_H) $(EXPR_H) \
    function.h toplev.h $(GGC_H) langhooks.h $(COVERAGE_H) gt-coverage.h \
@@ -2137,6 +2158,9 @@ tree-vect-generic.o : tree-vect-generic.
     $(FLAGS_H) $(OPTABS_H) $(RTL_H) $(MACHMODE_H) $(EXPR_H) \
     langhooks.h $(FLAGS_H) $(DIAGNOSTIC_H) gt-tree-vect-generic.h $(GGC_H) \
     coretypes.h insn-codes.h
+tree-promote-statics.o : tree-promote-statics.c $(CONFIG_H) system.h \
+   $(TREE_H) $(TM_H) $(BASIC_BLOCK_H) $(TREE_FLOW_H) $(IPA_UTILS_H) \
+   $(IPA_REFERENCE_H) bitmap.h tree-pass.h $(FLAGS_H) 
 df.o : df.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
    insn-config.h $(RECOG_H) function.h $(REGS_H) alloc-pool.h hard-reg-set.h \
    $(BASIC_BLOCK_H) $(DF_H) bitmap.h sbitmap.h $(TM_P_H)
@@ -2281,7 +2305,7 @@ alias.o : alias.c $(CONFIG_H) $(SYSTEM_H
    $(FLAGS_H) hard-reg-set.h $(BASIC_BLOCK_H) $(REGS_H) toplev.h output.h \
    $(ALIAS_H) $(EMIT_RTL_H) $(GGC_H) function.h cselib.h $(TREE_H) $(TM_P_H) \
    langhooks.h $(TARGET_H) gt-alias.h $(TIMEVAR_H) $(CGRAPH_H) \
-   $(SPLAY_TREE_H) $(VARRAY_H)
+   $(SPLAY_TREE_H) $(VARRAY_H) $(IPA_TYPE_ESCAPE_H)
 regmove.o : regmove.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) insn-config.h \
    $(RECOG_H) output.h $(REGS_H) hard-reg-set.h $(FLAGS_H) function.h \
    $(EXPR_H) $(BASIC_BLOCK_H) toplev.h $(TM_P_H) except.h reload.h
@@ -2614,6 +2638,7 @@ GTFILES = $(srcdir)/input.h $(srcdir)/co
   $(srcdir)/coverage.c $(srcdir)/function.h $(srcdir)/rtl.h \
   $(srcdir)/optabs.h $(srcdir)/tree.h $(srcdir)/libfuncs.h $(SYMTAB_H) \
   $(srcdir)/real.h $(srcdir)/varray.h $(srcdir)/insn-addr.h $(srcdir)/hwint.h \
+  $(srcdir)/ipa-reference.h \
   $(srcdir)/cselib.h $(srcdir)/basic-block.h  $(srcdir)/cgraph.h \
   $(srcdir)/c-common.h $(srcdir)/c-tree.h $(srcdir)/reload.h \
   $(srcdir)/alias.c $(srcdir)/bitmap.c $(srcdir)/cselib.c $(srcdir)/cgraph.c \
@@ -2635,6 +2660,7 @@ GTFILES = $(srcdir)/input.h $(srcdir)/co
   $(srcdir)/tree-chrec.h $(srcdir)/tree-vect-generic.c \
   $(srcdir)/tree-ssa-operands.h $(srcdir)/tree-ssa-operands.c \
   $(srcdir)/tree-profile.c $(srcdir)/rtl-profile.c $(srcdir)/tree-nested.c \
+  $(srcdir)/ipa-reference.c \
   $(out_file) \
   @all_gtfiles@
 
Index: alias.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/alias.c,v
retrieving revision 1.250
diff -u -p -r1.250 alias.c
--- alias.c	21 Apr 2005 15:47:04 -0000	1.250
+++ alias.c	8 Jun 2005 20:16:30 -0000
@@ -44,6 +44,55 @@ Software Foundation, 59 Temple Place - S
 #include "target.h"
 #include "cgraph.h"
 #include "varray.h"
+#include "ipa-type-escape.h"
+
+/* The aliasing API provided here solves related but different problems:
+
+   Say there exists (in c) 
+
+   struct X {
+     struct Y y1;
+     struct Z z2;
+   } x1, *px1,  *px2;
+
+   struct Y y2, *py;
+   struct Z z2, *pz;
+
+
+   py = &px1.y1;
+   px2 = &x1;
+
+   Consider the four questions:
+
+   Can a store to x1 interfere with px2->y2?
+   Can a store to x1 interfere with px2->z2?
+   (*px2).z2
+   Can a store to x1 change the value pointed to by py?
+   Can a store to x1 change the value pointed to by pz?
+
+   The answer to these questions can be yes, yes, yes, and no.
+
+   The first two questions can be answered with a simple examination
+   of the type system.  If structure X contains a field of type Y then
+   a store thru a pointer to an X can overwrite any field that is
+   contained (recursively) in an X (unless we know that px1 != px2).
+
+   The last two of the questions can be solved in the same way as the
+   first two questions but this is too conservative.  The observation
+   is that if the address of a field is not explicitly taken and the
+   type is completely local to the compilation unit, then it is
+   impossible that a pointer to an instance of the field type overlaps
+   with an enclosing structure.
+
+   Historically in GCC, these two problems were combined and a single
+   data structure was used to represent the solution to these
+   problems.  We now have two similar but different data structures.
+   The data structure to solve the last two questions is similar to the
+   first, but does not have the fields in it whose addresses are
+   never taken.  For types that do escape the compilation unit, the
+   data structures will have identical information.
+
+*/
 
 /* The alias sets assigned to MEMs assist the back-end in determining
    which MEMs can alias which other MEMs.  In general, two MEMs in
@@ -115,12 +164,6 @@ static rtx adjust_offset_for_component_r
 static int nonoverlapping_memrefs_p (rtx, rtx);
 static int write_dependence_p (rtx, rtx, int);
 
-static int nonlocal_mentioned_p_1 (rtx *, void *);
-static int nonlocal_mentioned_p (rtx);
-static int nonlocal_referenced_p_1 (rtx *, void *);
-static int nonlocal_referenced_p (rtx);
-static int nonlocal_set_p_1 (rtx *, void *);
-static int nonlocal_set_p (rtx);
 static void memory_modified_1 (rtx, rtx, void *);
 static void record_alias_subset (HOST_WIDE_INT, HOST_WIDE_INT);
 
@@ -1889,6 +1932,7 @@ aliases_everything_p (rtx mem)
   return 0;
 }
 
+/*#define AGRESSIVE_ALIASING 1*/
 /* Return true if we can determine that the fields referenced cannot
    overlap for any pair of objects.  */
 
@@ -1923,9 +1967,12 @@ nonoverlapping_component_refs_p (tree x,
 	  x = TREE_OPERAND (x, 0);
 	}
       while (x && TREE_CODE (x) == COMPONENT_REF);
-
       /* Never found a common type.  */
+#ifdef AGRESSIVE_ALIASING
+      return true;
+#else 
       return false;
+#endif /* AGRESSIVE_ALIASING */
 
     found:
       /* If we're left with accessing different fields of a structure,
@@ -2005,22 +2052,36 @@ nonoverlapping_memrefs_p (rtx x, rtx y)
   /* Unless both have exprs, we can't tell anything.  */
   if (exprx == 0 || expry == 0)
     return 0;
-
+  
   /* If both are field references, we may be able to determine something.  */
   if (TREE_CODE (exprx) == COMPONENT_REF
       && TREE_CODE (expry) == COMPONENT_REF
       && nonoverlapping_component_refs_p (exprx, expry))
     return 1;
 
+  
   /* If the field reference test failed, look at the DECLs involved.  */
   moffsetx = MEM_OFFSET (x);
   if (TREE_CODE (exprx) == COMPONENT_REF)
     {
-      tree t = decl_for_component_ref (exprx);
-      if (! t)
-	return 0;
-      moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
-      exprx = t;
+#ifdef AGRESSIVE_ALIASING
+      if (TREE_CODE (expry) == VAR_DECL
+	  && POINTER_TYPE_P (TREE_TYPE (expry)))
+	{
+	 tree field = TREE_OPERAND (exprx, 1);
+	 tree fieldcontext = DECL_FIELD_CONTEXT (field);
+	 if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
+						       TREE_TYPE (field)))
+	   return 1;	 
+	}
+#endif /* AGRESSIVE_ALIASING */
+      {
+	tree t = decl_for_component_ref (exprx);
+	if (! t)
+	  return 0;
+	moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
+	exprx = t;
+      }
     }
   else if (INDIRECT_REF_P (exprx))
     {
@@ -2033,11 +2094,24 @@ nonoverlapping_memrefs_p (rtx x, rtx y)
   moffsety = MEM_OFFSET (y);
   if (TREE_CODE (expry) == COMPONENT_REF)
     {
-      tree t = decl_for_component_ref (expry);
-      if (! t)
-	return 0;
-      moffsety = adjust_offset_for_component_ref (expry, moffsety);
-      expry = t;
+#ifdef AGRESSIVE_ALIASING
+      if (TREE_CODE (exprx) == VAR_DECL
+	  && POINTER_TYPE_P (TREE_TYPE (exprx)))
+	{
+	 tree field = TREE_OPERAND (expry, 1);
+	 tree fieldcontext = DECL_FIELD_CONTEXT (field);
+	 if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
+						       TREE_TYPE (field)))
+	   return 1;	 
+	}
+#endif /* AGRESSIVE_ALIASING */
+      {
+	tree t = decl_for_component_ref (expry);
+	if (! t)
+	  return 0;
+	moffsety = adjust_offset_for_component_ref (expry, moffsety);
+	expry = t;
+      }
     }
   else if (INDIRECT_REF_P (expry))
     {
@@ -2325,353 +2399,6 @@ output_dependence (rtx mem, rtx x)
   return write_dependence_p (mem, x, /*writep=*/1);
 }
 
-/* A subroutine of nonlocal_mentioned_p, returns 1 if *LOC mentions
-   something which is not local to the function and is not constant.  */
-
-static int
-nonlocal_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
-{
-  rtx x = *loc;
-  rtx base;
-  int regno;
-
-  if (! x)
-    return 0;
-
-  switch (GET_CODE (x))
-    {
-    case SUBREG:
-      if (REG_P (SUBREG_REG (x)))
-	{
-	  /* Global registers are not local.  */
-	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
-	      && global_regs[subreg_regno (x)])
-	    return 1;
-	  return 0;
-	}
-      break;
-
-    case REG:
-      regno = REGNO (x);
-      /* Global registers are not local.  */
-      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
-	return 1;
-      return 0;
-
-    case SCRATCH:
-    case PC:
-    case CC0:
-    case CONST_INT:
-    case CONST_DOUBLE:
-    case CONST_VECTOR:
-    case CONST:
-    case LABEL_REF:
-      return 0;
-
-    case SYMBOL_REF:
-      /* Constants in the function's constants pool are constant.  */
-      if (CONSTANT_POOL_ADDRESS_P (x))
-	return 0;
-      return 1;
-
-    case CALL:
-      /* Non-constant calls and recursion are not local.  */
-      return 1;
-
-    case MEM:
-      /* Be overly conservative and consider any volatile memory
-	 reference as not local.  */
-      if (MEM_VOLATILE_P (x))
-	return 1;
-      base = find_base_term (XEXP (x, 0));
-      if (base)
-	{
-	  /* A Pmode ADDRESS could be a reference via the structure value
-	     address or static chain.  Such memory references are nonlocal.
-
-	     Thus, we have to examine the contents of the ADDRESS to find
-	     out if this is a local reference or not.  */
-	  if (GET_CODE (base) == ADDRESS
-	      && GET_MODE (base) == Pmode
-	      && (XEXP (base, 0) == stack_pointer_rtx
-		  || XEXP (base, 0) == arg_pointer_rtx
-#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
-		  || XEXP (base, 0) == hard_frame_pointer_rtx
-#endif
-		  || XEXP (base, 0) == frame_pointer_rtx))
-	    return 0;
-	  /* Constants in the function's constant pool are constant.  */
-	  if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
-	    return 0;
-	}
-      return 1;
-
-    case UNSPEC_VOLATILE:
-    case ASM_INPUT:
-      return 1;
-
-    case ASM_OPERANDS:
-      if (MEM_VOLATILE_P (x))
-	return 1;
-
-    /* Fall through.  */
-
-    default:
-      break;
-    }
-
-  return 0;
-}
-
-/* Returns nonzero if X might mention something which is not
-   local to the function and is not constant.  */
-
-static int
-nonlocal_mentioned_p (rtx x)
-{
-  if (INSN_P (x))
-    {
-      if (CALL_P (x))
-	{
-	  if (! CONST_OR_PURE_CALL_P (x))
-	    return 1;
-	  x = CALL_INSN_FUNCTION_USAGE (x);
-	  if (x == 0)
-	    return 0;
-	}
-      else
-	x = PATTERN (x);
-    }
-
-  return for_each_rtx (&x, nonlocal_mentioned_p_1, NULL);
-}
-
-/* A subroutine of nonlocal_referenced_p, returns 1 if *LOC references
-   something which is not local to the function and is not constant.  */
-
-static int
-nonlocal_referenced_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
-{
-  rtx x = *loc;
-
-  if (! x)
-    return 0;
-
-  switch (GET_CODE (x))
-    {
-    case MEM:
-    case REG:
-    case SYMBOL_REF:
-    case SUBREG:
-      return nonlocal_mentioned_p (x);
-
-    case CALL:
-      /* Non-constant calls and recursion are not local.  */
-      return 1;
-
-    case SET:
-      if (nonlocal_mentioned_p (SET_SRC (x)))
-	return 1;
-
-      if (MEM_P (SET_DEST (x)))
-	return nonlocal_mentioned_p (XEXP (SET_DEST (x), 0));
-
-      /* If the destination is anything other than a CC0, PC,
-	 MEM, REG, or a SUBREG of a REG that occupies all of
-	 the REG, then X references nonlocal memory if it is
-	 mentioned in the destination.  */
-      if (GET_CODE (SET_DEST (x)) != CC0
-	  && GET_CODE (SET_DEST (x)) != PC
-	  && !REG_P (SET_DEST (x))
-	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
-		&& REG_P (SUBREG_REG (SET_DEST (x)))
-		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
-		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
-		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
-			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
-	return nonlocal_mentioned_p (SET_DEST (x));
-      return 0;
-
-    case CLOBBER:
-      if (MEM_P (XEXP (x, 0)))
-	return nonlocal_mentioned_p (XEXP (XEXP (x, 0), 0));
-      return 0;
-
-    case USE:
-      return nonlocal_mentioned_p (XEXP (x, 0));
-
-    case ASM_INPUT:
-    case UNSPEC_VOLATILE:
-      return 1;
-
-    case ASM_OPERANDS:
-      if (MEM_VOLATILE_P (x))
-	return 1;
-
-    /* Fall through.  */
-
-    default:
-      break;
-    }
-
-  return 0;
-}
-
-/* Returns nonzero if X might reference something which is not
-   local to the function and is not constant.  */
-
-static int
-nonlocal_referenced_p (rtx x)
-{
-  if (INSN_P (x))
-    {
-      if (CALL_P (x))
-	{
-	  if (! CONST_OR_PURE_CALL_P (x))
-	    return 1;
-	  x = CALL_INSN_FUNCTION_USAGE (x);
-	  if (x == 0)
-	    return 0;
-	}
-      else
-	x = PATTERN (x);
-    }
-
-  return for_each_rtx (&x, nonlocal_referenced_p_1, NULL);
-}
-
-/* A subroutine of nonlocal_set_p, returns 1 if *LOC sets
-   something which is not local to the function and is not constant.  */
-
-static int
-nonlocal_set_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
-{
-  rtx x = *loc;
-
-  if (! x)
-    return 0;
-
-  switch (GET_CODE (x))
-    {
-    case CALL:
-      /* Non-constant calls and recursion are not local.  */
-      return 1;
-
-    case PRE_INC:
-    case PRE_DEC:
-    case POST_INC:
-    case POST_DEC:
-    case PRE_MODIFY:
-    case POST_MODIFY:
-      return nonlocal_mentioned_p (XEXP (x, 0));
-
-    case SET:
-      if (nonlocal_mentioned_p (SET_DEST (x)))
-	return 1;
-      return nonlocal_set_p (SET_SRC (x));
-
-    case CLOBBER:
-      return nonlocal_mentioned_p (XEXP (x, 0));
-
-    case USE:
-      return 0;
-
-    case ASM_INPUT:
-    case UNSPEC_VOLATILE:
-      return 1;
-
-    case ASM_OPERANDS:
-      if (MEM_VOLATILE_P (x))
-	return 1;
-
-    /* Fall through.  */
-
-    default:
-      break;
-    }
-
-  return 0;
-}
-
-/* Returns nonzero if X might set something which is not
-   local to the function and is not constant.  */
-
-static int
-nonlocal_set_p (rtx x)
-{
-  if (INSN_P (x))
-    {
-      if (CALL_P (x))
-	{
-	  if (! CONST_OR_PURE_CALL_P (x))
-	    return 1;
-	  x = CALL_INSN_FUNCTION_USAGE (x);
-	  if (x == 0)
-	    return 0;
-	}
-      else
-	x = PATTERN (x);
-    }
-
-  return for_each_rtx (&x, nonlocal_set_p_1, NULL);
-}
-
-/* Mark the function if it is pure or constant.  */
-
-void
-mark_constant_function (void)
-{
-  rtx insn;
-  int nonlocal_memory_referenced;
-
-  if (TREE_READONLY (current_function_decl)
-      || DECL_IS_PURE (current_function_decl)
-      || TREE_THIS_VOLATILE (current_function_decl)
-      || current_function_has_nonlocal_goto
-      || !targetm.binds_local_p (current_function_decl))
-    return;
-
-  /* A loop might not return which counts as a side effect.  */
-  if (mark_dfs_back_edges ())
-    return;
-
-  nonlocal_memory_referenced = 0;
-
-  init_alias_analysis ();
-
-  /* Determine if this is a constant or pure function.  */
-
-  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
-    {
-      if (! INSN_P (insn))
-	continue;
-
-      if (nonlocal_set_p (insn) || global_reg_mentioned_p (insn)
-	  || volatile_refs_p (PATTERN (insn)))
-	break;
-
-      if (! nonlocal_memory_referenced)
-	nonlocal_memory_referenced = nonlocal_referenced_p (insn);
-    }
-
-  end_alias_analysis ();
-
-  /* Mark the function.  */
-
-  if (insn)
-    ;
-  else if (nonlocal_memory_referenced)
-    {
-      cgraph_rtl_info (current_function_decl)->pure_function = 1;
-      DECL_IS_PURE (current_function_decl) = 1;
-    }
-  else
-    {
-      cgraph_rtl_info (current_function_decl)->const_function = 1;
-      TREE_READONLY (current_function_decl) = 1;
-    }
-}
-
 
 void
 init_alias_once (void)
Index: calls.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/calls.c,v
retrieving revision 1.389
diff -u -p -r1.389 calls.c
--- calls.c	9 May 2005 17:52:15 -0000	1.389
+++ calls.c	8 Jun 2005 20:16:31 -0000
@@ -570,17 +570,8 @@ flags_from_decl_or_type (tree exp)
 
   if (DECL_P (exp))
     {
-      struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
       type = TREE_TYPE (exp);
 
-      if (i)
-	{
-	  if (i->pure_function)
-	    flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
-	  if (i->const_function)
-	    flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
-	}
-
       /* The function exp may have the `malloc' attribute.  */
       if (DECL_IS_MALLOC (exp))
 	flags |= ECF_MALLOC;
Index: cgraph.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cgraph.h,v
retrieving revision 1.57
diff -u -p -r1.57 cgraph.h
--- cgraph.h	2 Jun 2005 19:41:31 -0000	1.57
+++ cgraph.h	8 Jun 2005 20:16:31 -0000
@@ -107,8 +107,6 @@ struct cgraph_global_info GTY(())
 struct cgraph_rtl_info GTY(())
 {
    int preferred_incoming_stack_boundary;
-   bool const_function;
-   bool pure_function;
 };
 
 /* The cgraph data structure.
Index: common.opt
===================================================================
RCS file: /cvs/gcc/gcc/gcc/common.opt,v
retrieving revision 1.73
diff -u -p -r1.73 common.opt
--- common.opt	4 Jun 2005 17:07:55 -0000	1.73
+++ common.opt	8 Jun 2005 20:16:31 -0000
@@ -483,6 +483,18 @@ finstrument-functions
 Common Report Var(flag_instrument_function_entry_exit)
 Instrument function entry and exit with profiling calls
 
+fipa-pure-const
+Common Report Var(flag_ipa_pure_const) Init(0)
+Discover pure and const functions.
+
+fipa-reference
+Common Report Var(flag_ipa_reference) Init(0)
+Discover readonly and non addressable static variables.
+
+fipa-type-escape
+Common Report Var(flag_ipa_type_escape) Init(0)
+Type based escape and alias analysis..
+
 fivopts
 Common Report Var(flag_ivopts) Init(1)
 Optimize induction variables on trees
Index: ipa-pure-const.c
===================================================================
RCS file: ipa-pure-const.c
diff -N ipa-pure-const.c
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ipa-pure-const.c	8 Jun 2005 20:16:31 -0000
@@ -0,0 +1,793 @@
+/* Callgraph based analysis of static variables.
+   Copyright (C) 2004, 2005 Free Software Foundation, Inc.
+   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING.  If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA.  */
+
+/* This file marks functions as being either const (TREE_READONLY) or
+   pure (DECL_IS_PURE).
+
+   This must be run after inlining decisions have been made since
+   otherwise, the local sets will not contain information that is
+   consistent with post inlined state.  The global sets are not prone
+   to this problem since they are by definition transitive.  */
+
+/* The code in this module is called by the ipa pass manager. It
+   should be one of the later passes since its information is used by
+   the rest of the compilation. */
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "tree.h"
+#include "tree-flow.h"
+#include "tree-inline.h"
+#include "tree-pass.h"
+#include "langhooks.h"
+#include "pointer-set.h"
+#include "ggc.h"
+#include "ipa-utils.h"
+#include "c-common.h"
+#include "tree-gimple.h"
+#include "cgraph.h"
+#include "output.h"
+#include "flags.h"
+#include "timevar.h"
+#include "diagnostic.h"
+#include "langhooks.h"
+
+static struct pointer_set_t *visited_nodes;
+
+/* Lattice values for const and pure functions.  Everything starts out
+   being const, then may drop to pure and then neither depending on
+   what is found.  */
+enum pure_const_state_e
+{
+  IPA_CONST,
+  IPA_PURE,
+  IPA_NEITHER
+};
+
+/* Holder inserted into the ipa_dfs_info aux field to hold the
+   const_state.  */
+struct funct_state_d 
+{
+  enum pure_const_state_e pure_const_state;
+  bool state_set_in_source;
+};
+
+typedef struct funct_state_d * funct_state;
+
+/* Return the function state from NODE.  */ 
+
+static inline funct_state
+get_function_state (struct cgraph_node *node)
+{
+  struct ipa_dfs_info * info = node->aux;
+  return info->aux;
+}
+
+/* Check whether the variable T is the right kind of static variable to
+   perform compilation unit scope escape analysis.  */
+
+static inline void 
+check_decl (funct_state local, 
+	    tree t, bool checking_write)
+{
+  /* If the variable has the "used" attribute, treat it as if it had
+     been touched by the devil.  */
+  if (lookup_attribute ("used", DECL_ATTRIBUTES (t)))
+    {
+      local->pure_const_state = IPA_NEITHER;
+      return;
+    }
+
+  /* Do not want to do anything with volatile except mark any
+     function that uses one to be not const or pure.  */
+  if (TREE_THIS_VOLATILE (t)) 
+    { 
+      local->pure_const_state = IPA_NEITHER;
+      return;
+    }
+
+  /* Do not care about a local automatic that is not static.  */
+  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
+    return;
+
+  /* Since we have dispatched the locals and params, if we are writing, this
+     cannot be a pure or constant function.  */
+  if (checking_write) 
+    local->pure_const_state = IPA_NEITHER;
+
+  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
+    {
+      /* If the front end set the variable to be READONLY and
+	 constant, we can allow this variable in pure or const
+	 functions but the scope is too large for our analysis to set
+	 these bits ourselves.  */
+      
+      if (TREE_READONLY (t)
+	  && DECL_INITIAL (t)
+	  && is_gimple_min_invariant (DECL_INITIAL (t)))
+	; /* Read of a constant, do not change the function state.  */
+      else 
+	{
+	  /* Just a regular read.  */
+	  if (local->pure_const_state == IPA_CONST)
+	    local->pure_const_state = IPA_PURE;
+	}
+    }
+  
+  /* Compilation level statics can be read if they are readonly
+     variables.  */
+  if (TREE_READONLY (t))
+    return;
+
+  /* Just a regular read.  */
+  if (local->pure_const_state == IPA_CONST)
+    local->pure_const_state = IPA_PURE;
+  
+}
+
+/* If T is a VAR_DECL check to see if it is an allowed reference.  */
+
+static void
+check_operand (funct_state local, 
+	       tree t, bool checking_write)
+{
+  if (!t) return;
+
+  if (TREE_CODE (t) == VAR_DECL)
+    check_decl (local, t, checking_write); 
+}
+
+/* Examine tree T for references.  */
+
+static void
+check_tree (funct_state local, tree t, bool checking_write)
+{
+  if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
+    return;
+
+  while (TREE_CODE (t) == REALPART_EXPR 
+	 || TREE_CODE (t) == IMAGPART_EXPR
+	 || handled_component_p (t))
+    {
+      if (TREE_CODE (t) == ARRAY_REF)
+	check_operand (local, TREE_OPERAND (t, 1), false);
+      t = TREE_OPERAND (t, 0);
+    }
+
+  /* The bottom of an indirect reference can only be read, not
+     written.  */
+  if (INDIRECT_REF_P (t))
+    {
+      check_tree (local, TREE_OPERAND (t, 0), false);
+      
+      /* Any indirect reference that occurs on the lhs
+	 disqualifies the function from being pure or const. Any
+	 indirect reference that occurs on the rhs disqualifies
+	 the function from being const.  */
+      if (checking_write) 
+	local->pure_const_state = IPA_NEITHER;
+      else 
+	if (local->pure_const_state == IPA_CONST)
+	  local->pure_const_state = IPA_PURE;
+    }
+
+  if (SSA_VAR_P (t) || (TREE_CODE (t) == FUNCTION_DECL))
+    check_operand (local, t, checking_write);
+}
+
+/* Given a memory reference T, will return the variable at the bottom
+   of the access.  Unlike get_base_address, this will recurse thru
+   INDIRECT_REFS.  */
+
+static tree
+get_base_var (tree t)
+{
+  if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
+    return t;
+
+  while (!SSA_VAR_P (t) 
+	 && (!CONSTANT_CLASS_P (t))
+	 && TREE_CODE (t) != LABEL_DECL
+	 && TREE_CODE (t) != FUNCTION_DECL
+	 && TREE_CODE (t) != CONST_DECL)
+    {
+      t = TREE_OPERAND (t, 0);
+    }
+  return t;
+} 
+
+/* Scan tree T to see if there are any addresses taken within T.  */
+
+static void 
+look_for_address_of (funct_state local, tree t)
+{
+  if (TREE_CODE (t) == ADDR_EXPR)
+    {
+      tree x = get_base_var (t);
+      if (TREE_CODE (x) == VAR_DECL) 
+	{
+	  check_decl (local, x, false);
+	  
+	  /* Taking the address of something appears to be reasonable
+	     in PURE code.  Not allowed in const.  */
+	  if (local->pure_const_state == IPA_CONST)
+	    local->pure_const_state = IPA_PURE;
+	}
+    }
+}
+
+/* Check to see if T is a read or address of operation on a static var
+   we are interested in analyzing.  LOCAL is passed in to get access to
+   its bit vectors.  */
+
+static void
+check_rhs_var (funct_state local, tree t)
+{
+  look_for_address_of (local, t);
+
+  /* Memcmp and strlen can both trap and they are declared pure.  */
+  if (tree_could_trap_p (t)
+      && local->pure_const_state == IPA_CONST)
+    local->pure_const_state = IPA_PURE;
+
+  check_tree(local, t, false);
+}
+
+/* Check to see if T is an assignment to a static var we are
+   interested in analyzing.  LOCAL is passed in to get access to its bit
+   vectors.
+*/
+
+static void
+check_lhs_var (funct_state local, tree t)
+{
+  /* Memcmp and strlen can both trap and they are declared pure.  */
+  if (tree_could_trap_p (t)
+      && local->pure_const_state == IPA_CONST)
+    local->pure_const_state = IPA_PURE;
+    
+  check_tree(local, t, true);
+}
+
+/* This is a scaled down version of get_asm_expr_operands from
+   tree_ssa_operands.c.  The version there runs much later and assumes
+   that aliasing information is already available. Here we are just
+   trying to find if the set of inputs and outputs contain references
+   or address of operations to local static variables.  LOCAL is the
+   analysis state of the function being analyzed and STMT is the
+   actual asm statement.  */
+
+static void
+get_asm_expr_operands (funct_state local, tree stmt)
+{
+  int noutputs = list_length (ASM_OUTPUTS (stmt));
+  const char **oconstraints
+    = (const char **) alloca ((noutputs) * sizeof (const char *));
+  int i;
+  tree link;
+  const char *constraint;
+  bool allows_mem, allows_reg, is_inout;
+  
+  /* Outputs are treated as stores.  */
+  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
+    {
+      oconstraints[i] = constraint
+	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+      parse_output_constraint (&constraint, i, 0, 0,
+			       &allows_mem, &allows_reg, &is_inout);
+      
+      check_lhs_var (local, TREE_VALUE (link));
+    }
+
+  /* Inputs are treated as reads.  */
+  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
+    {
+      constraint
+	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
+			      oconstraints, &allows_mem, &allows_reg);
+      
+      check_rhs_var (local, TREE_VALUE (link));
+    }
+  
+  /* An asm that clobbers "memory" can touch anything.  */
+  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
+    if (simple_cst_equal(TREE_VALUE (link), memory_identifier_string) == 1) 
+      /* Abandon all hope, ye who enter here. */
+      local->pure_const_state = IPA_NEITHER;
+
+  /* A volatile asm has unknown side effects.  */
+  if (ASM_VOLATILE_P (stmt))
+    local->pure_const_state = IPA_NEITHER;
+}
+
+/* Check the parameters of the function call in CALL_EXPR to see if
+   any of them are static vars.  Also check to see if this is either
+   an indirect call, a call outside the compilation unit, or a call
+   with special attributes that affect the clobbers.  LOCAL is the
+   analysis state of the calling function.  */
+
+static void
+check_call (funct_state local, 
+	      tree call_expr) 
+{
+  int flags = call_expr_flags(call_expr);
+  tree operand_list = TREE_OPERAND (call_expr, 1);
+  tree operand;
+  tree callee_t = get_callee_fndecl (call_expr);
+  struct cgraph_node* callee;
+  enum availability avail = AVAIL_NOT_AVAILABLE;
+
+  /* Every argument is treated as a read (and possible address-of).  */
+  for (operand = operand_list;
+       operand != NULL_TREE;
+       operand = TREE_CHAIN (operand))
+    {
+      tree argument = TREE_VALUE (operand);
+      check_rhs_var (local, argument);
+    }
+  
+  /* The const and pure flags are set by a variety of places in the
+     compiler (including here).  If someone has already set the flags
+     for the callee, (such as for some of the builtins) we will use
+     them, otherwise we will compute our own information.  */
+  
+  /* Const and pure functions have less clobber effects than other
+     functions so we process these first.  Otherwise if it is a call
+     outside the compilation unit or an indirect call we punt.  This
+     leaves local calls which will be processed by following the call
+     graph.  */  
+  if (callee_t)
+    {
+      callee = cgraph_node(callee_t);
+      avail = cgraph_function_body_availability (callee);
+
+      /* When bad things happen to bad functions, they cannot be const
+	 or pure.  */
+      if (setjmp_call_p (callee_t))
+	local->pure_const_state = IPA_NEITHER;
+
+      /* Nonlocal control transfers also disqualify the caller.  */
+      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
+	switch (DECL_FUNCTION_CODE (callee_t))
+	  {
+	  case BUILT_IN_LONGJMP:
+	  case BUILT_IN_NONLOCAL_GOTO:
+	    local->pure_const_state = IPA_NEITHER;
+	    break;
+	  default:
+	    break;
+	  }
+    }
+
+  /* The callee is either unknown (indirect call) or there is just no
+     scannable code for it (external call) .  We look to see if there
+     are any bits available for the callee (such as by declaration or
+     because it is builtin) and process solely on the basis of those
+     bits. */
+  if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
+    {
+      if (flags & ECF_PURE) 
+	{
+	  if (local->pure_const_state == IPA_CONST)
+	    local->pure_const_state = IPA_PURE;
+	}
+      else 
+	local->pure_const_state = IPA_NEITHER;
+    }
+  else
+    {
+      /* We have the code and we will scan it for the effects. */
+      if (flags & ECF_PURE) 
+	{
+	  if (local->pure_const_state == IPA_CONST)
+	    local->pure_const_state = IPA_PURE;
+	}
+    }
+}
+
+/* Walk tree and record all calls.  Called via walk_tree.  TP is the
+   part of the tree currently under the microscope.  WALK_SUBTREES is
+   part of the walk_tree api but is unused here.  DATA is the
+   cgraph_node of the function being walked.  */
+
+static tree
+scan_for_static_refs (tree *tp, 
+		      int *walk_subtrees, 
+		      void *data)
+{
+  struct cgraph_node *fn = data;
+  tree t = *tp;
+  funct_state local = get_function_state (fn);
+
+  switch (TREE_CODE (t))  
+    {
+    case VAR_DECL:
+      /* Scan the initializer (shared nodes are pruned via
+	 visited_nodes), but not the decl itself.  */
+      if (DECL_INITIAL (t))
+	walk_tree (&DECL_INITIAL (t), scan_for_static_refs, fn, visited_nodes);
+      *walk_subtrees = 0;
+      break;
+
+    case MODIFY_EXPR:
+      {
+	/* First look on the lhs and see what variable is stored to */
+	tree lhs = TREE_OPERAND (t, 0);
+	tree rhs = TREE_OPERAND (t, 1);
+
+	check_lhs_var (local, lhs);
+
+	/* For the purposes of figuring out what the cast affects */
+
+	/* Next check the operands on the rhs to see if they are ok. */
+	switch (TREE_CODE_CLASS (TREE_CODE (rhs))) 
+	  {
+	  case tcc_binary:	    
+ 	    {
+ 	      tree op0 = TREE_OPERAND (rhs, 0);
+ 	      tree op1 = TREE_OPERAND (rhs, 1);
+ 
+ 	      check_rhs_var (local, op0);
+ 	      check_rhs_var (local, op1);
+	    }
+	    break;
+	  case tcc_unary:
+ 	    {
+ 	      tree op0 = TREE_OPERAND (rhs, 0);
+ 	      check_rhs_var (local, op0);
+ 	    }
+
+	    break;
+	  case tcc_reference:
+	    check_rhs_var (local, rhs);
+	    break;
+	  case tcc_declaration:
+	    check_rhs_var (local, rhs);
+	    break;
+	  case tcc_expression:
+	    switch (TREE_CODE (rhs)) 
+	      {
+	      case ADDR_EXPR:
+		check_rhs_var (local, rhs);
+		break;
+	      case CALL_EXPR: 
+		check_call (local, rhs);
+		break;
+	      default:
+		break;
+	      }
+	    break;
+	  default:
+	    break;
+	  }
+	*walk_subtrees = 0;
+      }
+      break;
+
+    case ADDR_EXPR:
+      /* This case is here to find addresses on rhs of constructors in
+	 decl_initial of static variables. */
+      check_rhs_var (local, t);
+      *walk_subtrees = 0;
+      break;
+
+    case LABEL_EXPR:
+      if (DECL_NONLOCAL (TREE_OPERAND (t, 0)))
+	/* Target of long jump. */
+	local->pure_const_state = IPA_NEITHER;
+      break;
+
+    case CALL_EXPR: 
+      check_call (local, t);
+      *walk_subtrees = 0;
+      break;
+      
+    case ASM_EXPR:
+      get_asm_expr_operands (local, t);
+      *walk_subtrees = 0;
+      break;
+      
+    default:
+      break;
+    }
+  return NULL;
+}
+
+/* This is the main routine for finding the reference patterns for
+   global variables within a function FN.  The computed local purity
+   state is stored in the aux field of FN's dfs info.  */
+
+static void
+analyze_function (struct cgraph_node *fn)
+{
+  funct_state l = xcalloc (1, sizeof (struct funct_state_d));
+  tree decl = fn->decl;
+  struct ipa_dfs_info * w_info = fn->aux;
+
+  w_info->aux = l;
+
+  l->pure_const_state = IPA_CONST;
+  l->state_set_in_source = false;
+
+  /* If this is a volatile function, do not touch this unless it has
+     been marked as const or pure by the front end.  */
+  if (TREE_THIS_VOLATILE (decl))
+    {
+      l->pure_const_state = IPA_NEITHER;
+      return;
+    }
+
+  /* Trust const/pure markings made by the front end; no scanning is
+     needed for those functions.  */
+  if (TREE_READONLY (decl))
+    {
+      l->pure_const_state = IPA_CONST;
+      l->state_set_in_source = true;
+    }
+  if (DECL_IS_PURE (decl))
+    {
+      l->pure_const_state = IPA_PURE;
+      l->state_set_in_source = true;
+    }
+
+  if (dump_file)
+    {
+      fprintf (dump_file, "\n local analysis of %s with initial value = %d\n ", 
+	       cgraph_node_name (fn),
+	       l->pure_const_state);
+    }
+  
+  if (!l->state_set_in_source)
+    {
+      struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
+      basic_block this_block;
+      
+      FOR_EACH_BB_FN (this_block, this_cfun)
+	{
+	  block_stmt_iterator bsi;
+	  for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
+	    {
+	      walk_tree (bsi_stmt_ptr (bsi), scan_for_static_refs, 
+			 fn, visited_nodes);
+	      /* Once we hit bottom there is no point in scanning on.  */
+	      if (l->pure_const_state == IPA_NEITHER) 
+		return;
+	    }
+	}
+
+      if (l->pure_const_state != IPA_NEITHER)
+	{
+	  tree old_decl = current_function_decl;
+	  /* Const functions cannot have back edges (an
+	     indication of possible infinite loop side
+	     effect.  */
+	    
+	  current_function_decl = fn->decl;
+
+	  /* This is an utter HACK FIXME PLEASE!!!!!!!  The C++
+	     front end, for reasons that are only apparent to its
+	     designers, decides that it was only kidding when it
+	     generated some of the functions.  As a further joke it
+	     then decides, after compilation is underway, to null
+	     out the DECL_STRUCT_FUNCTION (node->decl) field of
+	     these functions so that no one can use them.  What a
+	     funny joke!!!!  */
+	  gcc_assert (DECL_STRUCT_FUNCTION (fn->decl));
+	  
+	  push_cfun (DECL_STRUCT_FUNCTION (fn->decl));
+	  
+	  if (mark_dfs_back_edges ())
+	    l->pure_const_state = IPA_NEITHER;
+	  
+	  current_function_decl = old_decl;
+	  pop_cfun ();
+	}
+    }
+  
+  /* The scans below can only demote the state (via check_rhs_var), so
+     once the function is known to be neither const nor pure there is
+     nothing left to learn.  The original test here was inverted
+     (returning when the state was still const/pure and uselessly
+     scanning otherwise).  */
+  if (l->pure_const_state == IPA_NEITHER)
+    return;
+
+  /* FIXME - When Jan gets the local statics promoted to the global
+     variable list, the next two loops go away.  */
+  /* Walk over any private statics that may take addresses of functions.  */
+  if (TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
+    {
+      tree step;
+      for (step = BLOCK_VARS (DECL_INITIAL (decl));
+	   step;
+	   step = TREE_CHAIN (step))
+	{
+	  if (DECL_INITIAL (step))
+	    walk_tree (&DECL_INITIAL (step), scan_for_static_refs, 
+		       fn, visited_nodes);
+	}
+
+    }
+  
+  /* Also look here for private statics.  */
+  if (DECL_STRUCT_FUNCTION (decl))
+    {
+      tree step;
+      for (step = DECL_STRUCT_FUNCTION (decl)->unexpanded_var_list;
+	   step;
+	   step = TREE_CHAIN (step))
+	{
+	  tree var = TREE_VALUE (step);
+	  if (DECL_INITIAL (var) && TREE_STATIC (var))
+	    walk_tree (&DECL_INITIAL (var), scan_for_static_refs, 
+		       fn, visited_nodes);
+	}
+    }
+}
+
+
+/* Produce the global information by performing a transitive closure
+   on the local information that was produced by analyze_function.  */
+
+static void
+static_execute (void)
+{
+  struct cgraph_node *node;
+  struct cgraph_node *w;
+  struct cgraph_node **order =
+    xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
+  /* (The original had a redundant double assignment here.)  */
+  int order_pos = ipa_utils_reduced_inorder (order, true, false);
+  int i;
+  struct ipa_dfs_info * w_info;
+
+  if (!memory_identifier_string)
+    memory_identifier_string = build_string(7, "memory");
+
+  /* There are some shared nodes, in particular the initializers on
+     static declarations.  We do not need to scan them more than once
+     since all we would be interested in are the addressof
+     operations.  */
+  visited_nodes = pointer_set_create ();
+
+  /* Process all of the functions. 
+
+     We do not want to process any of the clones so we check that this
+     is a master clone.  However, we do NOT process any
+     AVAIL_OVERWRITABLE functions (these are never clones) because we
+     cannot guarantee that what we learn about the one we see will be
+     true for the one that overrides it.
+  */
+  for (node = cgraph_nodes; node; node = node->next)
+    if (node->analyzed && cgraph_is_master_clone (node))
+      analyze_function (node);
+
+  pointer_set_destroy (visited_nodes);
+  visited_nodes = NULL;
+  if (dump_file)
+    {
+      dump_cgraph (dump_file);
+      ipa_utils_print_order(dump_file, "reduced", order, order_pos);
+    }
+
+  /* Propagate the local information through the call graph to produce
+     the global information.  All the nodes within a cycle will have
+     the same info so we collapse cycles first.  Then we can do the
+     propagation in one pass from the leaves to the roots.  */
+  for (i = 0; i < order_pos; i++ )
+    {
+      enum pure_const_state_e pure_const_state = IPA_CONST;
+      node = order[i];
+
+      /* Find the worst state for any node in the cycle.  */
+      w = node;
+      while (w)
+	{
+	  funct_state w_l = get_function_state (w);
+	  if (pure_const_state < w_l->pure_const_state)
+	    pure_const_state = w_l->pure_const_state;
+
+	  /* IPA_NEITHER is the bottom of the lattice; stop early.  */
+	  if (pure_const_state == IPA_NEITHER) 
+	    break;
+
+	  if (!w_l->state_set_in_source)
+	    {
+	      struct cgraph_edge *e;
+	      for (e = w->callees; e; e = e->next_callee) 
+		{
+		  struct cgraph_node *y = e->callee;
+		  /* Only look at the master nodes and skip external nodes.  */
+		  y = cgraph_master_clone (y);
+		  if (y)
+		    {
+		      funct_state y_l = get_function_state (y);
+		      if (pure_const_state < y_l->pure_const_state)
+			pure_const_state = y_l->pure_const_state;
+		      if (pure_const_state == IPA_NEITHER) 
+			break;
+		    }
+		}
+	    }
+	  w_info = w->aux;
+	  w = w_info->next_cycle;
+	}
+
+      /* Copy back the region's pure_const_state which is shared by
+	 all nodes in the region.  */
+      w = node;
+      while (w)
+	{
+	  funct_state w_l = get_function_state (w);
+
+	  /* All nodes within a cycle share the same info.  */
+	  if (!w_l->state_set_in_source)
+	    {
+	      w_l->pure_const_state = pure_const_state;
+	      switch (pure_const_state)
+		{
+		case IPA_CONST:
+		  TREE_READONLY (w->decl) = 1;
+		  if (dump_file)
+		    fprintf (dump_file, "Function found to be const: %s\n",  
+			     lang_hooks.decl_printable_name(w->decl, 2)); 
+		  break;
+		  
+		case IPA_PURE:
+		  DECL_IS_PURE (w->decl) = 1;
+		  if (dump_file)
+		    fprintf (dump_file, "Function found to be pure: %s\n",  
+			     lang_hooks.decl_printable_name(w->decl, 2)); 
+		  break;
+		  
+		default:
+		  break;
+		}
+	    }
+	  w_info = w->aux;
+	  w = w_info->next_cycle;
+	}
+    }
+
+  /* Cleanup. */
+  for (node = cgraph_nodes; node; node = node->next)
+    /* Get rid of the aux information.  */
+    if (node->aux)
+      {
+	free (node->aux);
+	node->aux = NULL;
+      }
+
+  free (order);
+}
+
+/* Gate: run the IPA pure/const pass only in unit-at-a-time mode, when
+   the pass is enabled, and when the program has no errors.  */
+
+static bool
+gate_pure_const (void)
+{
+  if (flag_unit_at_a_time == 0 || !flag_ipa_pure_const)
+    return false;
+
+  /* Don't bother doing anything if the program has errors.  */
+  return !(errorcount || sorrycount);
+}
+
+/* Pass descriptor for the interprocedural pure/const analysis.  */
+struct tree_opt_pass pass_ipa_pure_const =
+{
+  "ipa-pure-const",		        /* name */
+  gate_pure_const,			/* gate */
+  static_execute,			/* execute */
+  NULL,					/* sub */
+  NULL,					/* next */
+  0,					/* static_pass_number */
+  TV_IPA_PURE_CONST,		        /* tv_id */
+  0,	                                /* properties_required */
+  0,					/* properties_provided */
+  0,					/* properties_destroyed */
+  0,					/* todo_flags_start */
+  0,                                    /* todo_flags_finish */
+  0					/* letter */
+};
+
+
Index: ipa-reference.c
===================================================================
RCS file: ipa-reference.c
diff -N ipa-reference.c
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ipa-reference.c	8 Jun 2005 20:16:32 -0000
@@ -0,0 +1,1495 @@
+/* Callgraph based analysis of static variables.
+   Copyright (C) 2004, 2005 Free Software Foundation, Inc.
+   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING.  If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA.  
+*/
+
+/* This file gathers information about how variables whose scope is
+   confined to the compilation unit are used.  
+
+   There are two categories of information produced by this pass:
+
+   1) The addressable (TREE_ADDRESSABLE) bit and readonly
+   (TREE_READONLY) bit associated with these variables is properly set
+   based on scanning all of the code within the compilation unit.
+
+   2) The transitive call site specific clobber effects are computed
+   for the variables whose scope is contained within this compilation
+   unit.
+
+   First each function and static variable initialization is analyzed
+   to determine which local static variables are either read, written,
+   or have their address taken.  Any local static that has its address
+   taken is removed from consideration.  Once the local read and
+   writes are determined, a transitive closure of this information is
+   performed over the call graph to determine the worst case set of
+   side effects of each call.  In later parts of the compiler, these
+   local and global sets are examined to make the call clobbering less
+   traumatic, promote some statics to registers, and improve aliasing
+   information.
+   
+   Currently must be run after inlining decisions have been made since
+   otherwise, the local sets will not contain information that is
+   consistent with post inlined state.  The global sets are not prone
+   to this problem since they are by definition transitive.  
+*/
+
+/* FIXME -- PROFILE-RESTRUCTURE: There are a large number of places in
+   this module that will need to be changed when the final engineering
+   is done to support ssa form in more than a single function at a
+   time.
+
+   The problem is that the ssa parts of the compiler work in terms of
+   the UID field in the var_ann structure.  Local variables are not
+   unique and statics are not stable.  Thus, all analysis within this
+   module is done using the DECL_UID numbering.  The information is
+   converted to a local var_ann numbering for the function being
+   compiled.
+
+   It is not clear what the final solution to this is. There is a plan
+   to make var_ann numbering assign low stable numbers to the static
+   variables and start numbering the locals after the last
+   static. This would allow the conversion code to be dumped but only
+   if this module is not generalized to perform the same analysis on
+   local variables.
+
+   To make it transparent, all variables that matter are suffixed with 
+   either "_by_ann_uid" or "_by_decl_uid".
+*/
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "tree.h"
+#include "tree-flow.h"
+#include "tree-inline.h"
+#include "tree-pass.h"
+#include "langhooks.h"
+#include "pointer-set.h"
+#include "ggc.h"
+#include "ipa-utils.h"
+#include "ipa-reference.h"
+#include "c-common.h"
+#include "tree-gimple.h"
+#include "cgraph.h"
+#include "output.h"
+#include "flags.h"
+#include "timevar.h"
+#include "diagnostic.h"
+#include "langhooks.h"
+
+/* This splay tree contains all of the static variables that are
+   being considered by the compilation level alias analysis.  For
+   module_at_a_time compilation, this is the set of static but not
+   public variables.  Any variables that either have their address
+   taken or participate in otherwise unsavory operations are deleted
+   from this list.  */
+static GTY((param1_is(int), param2_is(tree)))
+     splay_tree reference_vars_to_consider_by_decl_uid;
+
+/* This bitmap is used to knock out the module static variables whose
+   addresses have been taken and passed around.  */
+static bitmap module_statics_escape_by_decl_uid;
+
+/* This bitmap is used to knock out the module static variables that
+   are not readonly.  */
+static bitmap module_statics_written_by_decl_uid;
+
+/* A bit is set for every module static we are considering.  This is
+   ored into the local info when asm code is found that clobbers all
+   memory. */
+static bitmap all_module_statics_by_decl_uid;
+
+/* Set of tree nodes already walked, so shared subtrees (such as
+   static initializers) are scanned only once.  */
+static struct pointer_set_t *visited_nodes;
+
+/* Obstack backing the bitmaps used by this analysis.  */
+static bitmap_obstack ipa_obstack;
+
+/* Processing phases of a node.  NOTE(review): no use of this enum is
+   visible in this chunk -- presumably used during the transitive
+   closure; confirm against the rest of the file.  */
+enum initialization_status_t
+{
+  UNINITIALIZED,
+  RUNNING,
+  FINISHED
+};
+
+/* The identifier "memory", used to recognize asm statements that
+   clobber all of memory.  */
+tree memory_identifier_string;
+
+/* Convert IN_DECL bitmap which is indexed by DECL_UID to IN_ANN, a
+   bitmap indexed by var_ann (VAR_DECL)->uid.  Bits whose decl is not
+   in the splay tree, or whose decl has no var_ann, are dropped.  */
+
+static void 
+convert_UIDs_in_bitmap (bitmap in_ann, bitmap in_decl) 
+{
+  unsigned int index;
+  bitmap_iterator bi;
+
+  EXECUTE_IF_SET_IN_BITMAP(in_decl, 0, index, bi)
+    {
+      splay_tree_node n = 
+	      splay_tree_lookup (reference_vars_to_consider_by_decl_uid, 
+				 index);
+      if (n != NULL) 
+	{
+	  tree t = (tree)n->value;
+	  var_ann_t va = var_ann (t);
+	  if (va) 
+	    bitmap_set_bit (in_ann, va->uid);
+	}
+    }
+}
+
+/* Return the ipa_reference_vars structure hanging off the var_ann of
+   the decl of cgraph NODE.  */
+static inline ipa_reference_vars_info_t
+get_reference_vars_info_from_cgraph (struct cgraph_node * node)
+{
+  var_ann_t ann = get_var_ann (node->decl);
+  return ann->reference_vars_info;
+}
+
+/* The bitmaps used to represent the static global variables are
+   indexed by DECL_UID however, this is not used inside of functions
+   to index the ssa variables.  The denser var_ann (VAR_DECL)->uid is
+   used there.  This function is called from
+   tree_dfa:find_referenced_vars after the denser representation is
+   built.  This function invalidates any cached indexes.  */ 
+
+void
+ipa_reference_reset_maps (void) 
+{
+  struct cgraph_node *node;
+  
+  for (node = cgraph_nodes; node; node = node->next)
+    {
+      ipa_reference_vars_info_t info = 
+	get_reference_vars_info_from_cgraph (node);
+
+      if (info) 
+	{
+	  ipa_reference_local_vars_info_t l = info->local;
+	  ipa_reference_global_vars_info_t g = info->global;
+
+	  /* Clear the cached ann-uid bitmaps; they will be rebuilt
+	     lazily the next time they are requested.  */
+	  if (l->var_anns_valid) 
+	    {
+	      bitmap_clear (l->statics_read_by_ann_uid);
+	      bitmap_clear (l->statics_written_by_ann_uid);
+	      l->var_anns_valid = false;
+	    }
+
+	  /* There may be orphans in the cgraph so must check that
+	     there is global info.  */
+	  if (g && g->var_anns_valid) 
+	    {
+	      bitmap_clear (g->statics_read_by_ann_uid);
+	      bitmap_clear (g->statics_written_by_ann_uid);
+	      bitmap_clear (g->statics_not_read_by_ann_uid);
+	      bitmap_clear (g->statics_not_written_by_ann_uid);
+	      g->var_anns_valid = false;
+	    }
+	}
+    }
+}
+
+/* Get the local reference_vars_info structure for function FN and
+   make sure its var_ann bitmaps are properly converted from the
+   DECL_UID indexing, rebuilding them lazily if they were
+   invalidated.  Returns NULL if no info is available.  */
+ 
+static ipa_reference_local_vars_info_t
+get_local_reference_vars_info (tree fn) 
+{
+  ipa_reference_local_vars_info_t l;
+
+  /* Was not compiled -O2 or higher.  */ 
+  ipa_reference_vars_info_t info = get_var_ann (fn)->reference_vars_info;
+
+  if (!info)
+    return NULL;
+
+  l = info->local;
+  if (!l->var_anns_valid) 
+    {
+      convert_UIDs_in_bitmap (l->statics_read_by_ann_uid, 
+			      l->statics_read_by_decl_uid);
+      convert_UIDs_in_bitmap (l->statics_written_by_ann_uid, 
+			      l->statics_written_by_decl_uid);
+      l->var_anns_valid = true;
+    }
+  return l;
+}
+
+/* Get the global reference_vars_info structure for the function FN and
+   make sure the ann_uid's bitmaps are properly converted, rebuilding
+   them lazily if they were invalidated.  Returns NULL if no info is
+   available (not compiled -O2+ or no global info computed yet).  */
+ 
+static ipa_reference_global_vars_info_t
+get_global_reference_vars_info (tree fn) 
+{
+  ipa_reference_global_vars_info_t g;
+
+  /* Was not compiled -O2 or higher.  */ 
+  ipa_reference_vars_info_t info = get_var_ann (fn)->reference_vars_info;
+
+  if (!info)
+    return NULL;
+
+  g = info->global;
+  if (!g) 
+    return NULL;
+
+  if (!g->var_anns_valid) 
+    {
+      convert_UIDs_in_bitmap (g->statics_read_by_ann_uid, 
+			      g->statics_read_by_decl_uid);
+      convert_UIDs_in_bitmap (g->statics_written_by_ann_uid, 
+			      g->statics_written_by_decl_uid);
+      convert_UIDs_in_bitmap (g->statics_not_read_by_ann_uid, 
+			      g->statics_not_read_by_decl_uid);
+      convert_UIDs_in_bitmap (g->statics_not_written_by_ann_uid, 
+			      g->statics_not_written_by_decl_uid);
+      g->var_anns_valid = true;
+    }
+  return g;
+}
+
+/* Return a bitmap indexed by var_ann (VAR_DECL)->uid for the static
+   variables that may be read locally by the execution of the function
+   FN.  Returns NULL if no data is available, such as when the unit
+   was not compiled with -O2 or higher.  */
+
+bitmap 
+ipa_reference_get_read_local (tree fn)
+{
+  ipa_reference_local_vars_info_t info = get_local_reference_vars_info (fn);
+  return info ? info->statics_read_by_ann_uid : NULL;
+}
+
+/* Return a bitmap indexed by var_ann (VAR_DECL)->uid for the static
+   variables that may be written locally by the execution of the
+   function FN.  Returns NULL if no data is available, such as when
+   the unit was not compiled with -O2 or higher or the function was
+   not available.  */
+
+bitmap 
+ipa_reference_get_written_local (tree fn)
+{
+  ipa_reference_local_vars_info_t info = get_local_reference_vars_info (fn);
+  return info ? info->statics_written_by_ann_uid : NULL;
+}
+
+/* Return a bitmap indexed by var_ann (VAR_DECL)->uid for the static
+   variables that are read during the execution of the function FN.
+   Returns NULL if no data is available, such as when the unit was
+   not compiled with -O2 or higher or the function was not
+   available.  */
+
+bitmap 
+ipa_reference_get_read_global (tree fn) 
+{
+  ipa_reference_global_vars_info_t info = get_global_reference_vars_info (fn);
+  return info ? info->statics_read_by_ann_uid : NULL;
+}
+
+/* Return a bitmap indexed by var_ann (VAR_DECL)->uid for the static
+   variables that are written during the execution of the function
+   FN.  Note that variables written may or may not be read during the
+   function call.  Returns NULL if no data is available, such as when
+   the unit was not compiled with -O2 or higher or the function was
+   not available.  */
+
+bitmap 
+ipa_reference_get_written_global (tree fn) 
+{
+  ipa_reference_global_vars_info_t info = get_global_reference_vars_info (fn);
+  return info ? info->statics_written_by_ann_uid : NULL;
+}
+
+/* Return a bitmap indexed by var_ann (VAR_DECL)->uid for the static
+   variables that are NOT read during the execution of the function
+   FN.  Returns NULL if no data is available, such as when the unit
+   was not compiled with -O2 or higher or the function was not
+   available.  */
+
+bitmap 
+ipa_reference_get_not_read_global (tree fn) 
+{
+  ipa_reference_global_vars_info_t info = get_global_reference_vars_info (fn);
+  return info ? info->statics_not_read_by_ann_uid : NULL;
+}
+
+/* Return a bitmap indexed by var_ann (VAR_DECL)->uid for the static
+   variables that are NOT written during the execution of the
+   function FN.  Note that variables written may or may not be read
+   during the function call.  Returns NULL if no data is available,
+   such as when the unit was not compiled with -O2 or higher or the
+   function was not available.  */
+
+bitmap 
+ipa_reference_get_not_written_global (tree fn) 
+{
+  ipa_reference_global_vars_info_t info = get_global_reference_vars_info (fn);
+  return info ? info->statics_not_written_by_ann_uid : NULL;
+}
+
+
+
+/* Add VAR to all_module_statics_by_decl_uid and to the
+   reference_vars_to_consider_by_decl_uid splay tree (keyed by
+   DECL_UID), if it is not already recorded.  */
+
+static inline void 
+add_static_var (tree var) 
+{
+  int uid = DECL_UID (var);
+  if (!bitmap_bit_p (all_module_statics_by_decl_uid, uid))
+    {
+      splay_tree_insert (reference_vars_to_consider_by_decl_uid,
+			 uid, (splay_tree_value)var);
+      bitmap_set_bit (all_module_statics_by_decl_uid, uid);
+    }
+}
+
+/* Return true if the variable T is the right kind of static variable to
+   perform compilation unit scope escape analysis: a non-volatile,
+   non-public static without the "used" attribute.  As a side effect,
+   a qualifying T is registered via add_static_var.  */
+
+static inline bool 
+has_proper_scope_for_analysis (tree t)
+{
+  /* If the variable has the "used" attribute, treat it as if it had a
+     been touched by the devil.  */
+  if (lookup_attribute ("used", DECL_ATTRIBUTES (t)))
+    return false;
+
+  /* Do not want to do anything with volatile except mark any
+     function that uses one to be not const or pure.  */
+  if (TREE_THIS_VOLATILE (t)) 
+    return false;
+
+  /* Do not care about a local automatic that is not static.  */
+  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
+    return false;
+
+  /* Externally visible variables can be touched outside the unit.  */
+  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
+    return false;
+
+  /* This is a variable we care about.  Check if we have seen it
+     before, and if not add it the set of variables we care about.  */
+  if (!bitmap_bit_p (all_module_statics_by_decl_uid, DECL_UID (t)))
+    add_static_var (t);
+
+  return true;
+}
+
+/* If T is a VAR_DECL for a static that we are interested in, add the
+   uid to the appropriate bitmap in LOCAL (read or write depending on
+   CHECKING_WRITE).  Writes are additionally recorded in the
+   module-wide written bitmap so readonly statics can be detected.
+   LOCAL may be NULL when called from a static initializer.  */
+
+static void
+check_operand (ipa_reference_local_vars_info_t local, 
+	       tree t, bool checking_write)
+{
+  if (!t) return;
+
+  if ((TREE_CODE (t) == VAR_DECL)
+      && (has_proper_scope_for_analysis (t))) 
+    {
+      if (checking_write)
+	{
+	  if (local)
+	    bitmap_set_bit (local->statics_written_by_decl_uid, DECL_UID (t));
+	  /* Mark the write so we can tell which statics are
+	     readonly.  */
+	  bitmap_set_bit (module_statics_written_by_decl_uid, DECL_UID (t));
+	}
+      else if (local)
+	bitmap_set_bit (local->statics_read_by_decl_uid, DECL_UID (t));
+    }
+}
+
+/* Examine tree T for references to static variables. All internal
+   references like array references or indirect references are added
+   to the READ_BM. Direct references are added to either READ_BM or
+   WRITE_BM depending on the value of CHECKING_WRITE.   */
+
+static void
+check_tree (ipa_reference_local_vars_info_t local, tree t, bool checking_write)
+{
+  /* Exception pointer and filter values have no variable at bottom.  */
+  if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
+    return;
+
+  while (TREE_CODE (t) == REALPART_EXPR 
+	 || TREE_CODE (t) == IMAGPART_EXPR
+	 || handled_component_p (t))
+    {
+      /* An array index is itself a read.  */
+      if (TREE_CODE (t) == ARRAY_REF)
+	check_operand (local, TREE_OPERAND (t, 1), false);
+      t = TREE_OPERAND (t, 0);
+    }
+
+  /* The bottom of an indirect reference can only be read, not
+     written.  So just recurse and whatever we find, check it against
+     the read bitmaps.  */
+
+  /*  if (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF) */
+  /* FIXME when we have array_ref's of pointers.  */
+  if (INDIRECT_REF_P (t))
+    check_tree (local, TREE_OPERAND (t, 0), false);
+
+  if (SSA_VAR_P (t) || (TREE_CODE (t) == FUNCTION_DECL))
+    check_operand (local, t, checking_write);
+}
+
+/* Given a memory reference T, return the variable at the bottom
+   of the access.  Unlike get_base_address, this will recurse through
+   INDIRECT_REFs.  */
+
+static tree
+get_base_var (tree t)
+{
+  /* Exception pointer and filter values are not based on a variable.  */
+  if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
+    return t;
+
+  /* Strip component references and indirections until we hit a
+     variable, a constant, or one of the decl codes below.  */
+  while (!SSA_VAR_P (t) 
+	 && (!CONSTANT_CLASS_P (t))
+	 && TREE_CODE (t) != LABEL_DECL
+	 && TREE_CODE (t) != FUNCTION_DECL
+	 && TREE_CODE (t) != CONST_DECL)
+    {
+      t = TREE_OPERAND (t, 0);
+    }
+  return t;
+} 
+
+/* Scan tree T to see if there are any addresses taken within T.  Any
+   static whose address is taken escapes the analysis and is recorded
+   in module_statics_escape_by_decl_uid.  */
+
+static void 
+look_for_address_of (tree t)
+{
+  if (TREE_CODE (t) == ADDR_EXPR)
+    {
+      tree x = get_base_var (t);
+      if (TREE_CODE (x) == VAR_DECL) 
+	if (has_proper_scope_for_analysis (x))
+	  bitmap_set_bit (module_statics_escape_by_decl_uid, DECL_UID (x));
+    }
+}
+
+/* Check to see if T is a read or address of operation on a static var
+   we are interested in analyzing.  LOCAL is passed in to get access
+   to its bit vectors.  Local is NULL if this is called from a static
+   initializer.  */
+
+static void
+check_rhs_var (ipa_reference_local_vars_info_t local, tree t)
+{
+  /* Always record escapes, even when scanning an initializer.  */
+  look_for_address_of (t);
+
+  if (local == NULL) 
+    return;
+
+  check_tree(local, t, false);
+}
+
+/* Check to see if T is an assignment to a static var we are
+   interested in analyzing.  LOCAL is passed in to get access to its bit
+   vectors.  LOCAL is NULL if called from a static initializer, where
+   no stores can occur.
+*/
+
+static void
+check_lhs_var (ipa_reference_local_vars_info_t local, tree t)
+{
+  if (local == NULL) 
+    return;
+   
+  check_tree(local, t, true);
+}
+
+/* This is a scaled down version of get_asm_expr_operands from
+   tree_ssa_operands.c.  The version there runs much later and assumes
+   that aliasing information is already available. Here we are just
+   trying to find if the set of inputs and outputs contain references
+   or address of operations to local static variables.  FN is the
+   function being analyzed and STMT is the actual asm statement.  */
+
+static void
+get_asm_expr_operands (ipa_reference_local_vars_info_t local, tree stmt)
+{
+  int noutputs = list_length (ASM_OUTPUTS (stmt));
+  const char **oconstraints
+    = (const char **) alloca ((noutputs) * sizeof (const char *));
+  int i;
+  tree link;
+  const char *constraint;
+  bool allows_mem, allows_reg, is_inout;
+  
+  /* Asm outputs are treated as stores.  The constraints are parsed
+     only because parse_output_constraint expects to fill the flags.  */
+  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
+    {
+      oconstraints[i] = constraint
+	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+      parse_output_constraint (&constraint, i, 0, 0,
+			       &allows_mem, &allows_reg, &is_inout);
+      
+      check_lhs_var (local, TREE_VALUE (link));
+    }
+
+  /* Asm inputs are treated as reads.  */
+  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
+    {
+      constraint
+	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
+			      oconstraints, &allows_mem, &allows_reg);
+      
+      check_rhs_var (local, TREE_VALUE (link));
+    }
+  
+  /* An asm clobbering "memory" may read or write any static.  */
+  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
+    if (simple_cst_equal(TREE_VALUE (link), memory_identifier_string) == 1) 
+      {
+	/* Abandon all hope, ye who enter here. */
+	local->calls_read_all = true;
+	local->calls_write_all = true;
+      }      
+}
+
+/* Check the parameters of a function call from CALLER to CALL_EXPR to
+   see if any of them are static vars.  Also check to see if this is
+   either an indirect call, a call outside the compilation unit, or
+   has special attributes that affect the clobbers.  The caller
+   parameter is the tree node for the caller and the second operand is
+   the tree node for the entire call expression.  */
+
+static void
+check_call (ipa_reference_local_vars_info_t local, tree call_expr) 
+{
+  int flags = call_expr_flags (call_expr);
+  tree operand_list = TREE_OPERAND (call_expr, 1);
+  tree operand;
+  tree callee_t = get_callee_fndecl (call_expr);
+  enum availability avail = AVAIL_NOT_AVAILABLE;
+
+  /* The arguments may read statics or take their addresses.  */
+  for (operand = operand_list;
+       operand != NULL_TREE;
+       operand = TREE_CHAIN (operand))
+    {
+      tree argument = TREE_VALUE (operand);
+      check_rhs_var (local, argument);
+    }
+
+  /* AVAIL stays AVAIL_NOT_AVAILABLE for indirect calls, where no
+     callee decl can be determined.  */
+  if (callee_t)
+    {
+      struct cgraph_node* callee = cgraph_node(callee_t);
+      avail = cgraph_function_body_availability (callee);
+    }
+
+  /* If the callee's body is not available (or may be overwritten at
+     link time), its effects cannot be computed later, so assume the
+     worst now.  Const functions neither read nor write statics, and
+     pure functions may only read them, so their attributes let us do
+     better than the fully clobbering default.  */
+  if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
+    if (local) 
+      {
+	if (flags & ECF_CONST) 
+	  ;
+	else if (flags & ECF_PURE) 
+	  local->calls_read_all = true;
+	else 
+	  {
+	    local->calls_read_all = true;
+	    local->calls_write_all = true;
+	  }
+      }
+}
+
+/* Walk tree and record all calls.  Called via walk_tree.  The data is
+   the function that is being scanned.  */
+/* TP is the part of the tree currently under the
+   microscope. WALK_SUBTREES is part of the walk_tree api but is
+   unused here.  DATA is cgraph_node of the function being walked.  */
+
+static tree
+scan_for_static_refs (tree *tp, 
+		      int *walk_subtrees, 
+		      void *data)
+{
+  struct cgraph_node *fn = data;
+  tree t = *tp;
+  /* LOCAL stays NULL when walking a static initializer (FN == NULL);
+     the check_* helpers tolerate that and record only escapes.  */
+  ipa_reference_local_vars_info_t local = NULL;
+  if (fn)
+    local = get_reference_vars_info_from_cgraph (fn)->local;
+
+  switch (TREE_CODE (t))  
+    {
+    case VAR_DECL:
+      /* Walk the initializer of a local static separately; the main
+	 walk does not descend into DECL_INITIAL.  */
+      if (DECL_INITIAL (t))
+	walk_tree (&DECL_INITIAL (t), scan_for_static_refs, fn, visited_nodes);
+      *walk_subtrees = 0;
+      break;
+
+    case MODIFY_EXPR:
+      {
+	/* First look on the lhs and see what variable is stored to */
+	tree lhs = TREE_OPERAND (t, 0);
+	tree rhs = TREE_OPERAND (t, 1);
+	check_lhs_var (local, lhs);
+
+	/* For the purposes of figuring out what the cast affects */
+
+	/* Next check the operands on the rhs to see if they are ok. */
+	switch (TREE_CODE_CLASS (TREE_CODE (rhs))) 
+	  {
+	  case tcc_binary:	    
+ 	    {
+ 	      tree op0 = TREE_OPERAND (rhs, 0);
+ 	      tree op1 = TREE_OPERAND (rhs, 1);
+ 	      check_rhs_var (local, op0);
+ 	      check_rhs_var (local, op1);
+	    }
+	    break;
+	  case tcc_unary:
+ 	    {
+ 	      tree op0 = TREE_OPERAND (rhs, 0);
+ 	      check_rhs_var (local, op0);
+ 	    }
+
+	    break;
+	  case tcc_reference:
+	    check_rhs_var (local, rhs);
+	    break;
+	  case tcc_declaration:
+	    check_rhs_var (local, rhs);
+	    break;
+	  case tcc_expression:
+	    switch (TREE_CODE (rhs)) 
+	      {
+	      case ADDR_EXPR:
+		check_rhs_var (local, rhs);
+		break;
+	      case CALL_EXPR: 
+		check_call (local, rhs);
+		break;
+	      default:
+		break;
+	      }
+	    break;
+	  default:
+	    break;
+	  }
+	/* The operands were scanned explicitly above; do not recurse.  */
+	*walk_subtrees = 0;
+      }
+      break;
+
+    case ADDR_EXPR:
+      /* This case is here to find addresses on rhs of constructors in
+	 decl_initial of static variables. */
+      check_rhs_var (local, t);
+      *walk_subtrees = 0;
+      break;
+
+    case LABEL_EXPR:
+      /* NOTE(review): LOCAL is dereferenced without a NULL check here;
+	 presumably nonlocal labels cannot occur inside static
+	 initializers — confirm.  */
+      if (DECL_NONLOCAL (TREE_OPERAND (t, 0)))
+	{
+	  /* Target of long jump. */
+	  local->calls_read_all = true;
+	  local->calls_write_all = true;
+	}
+      break;
+
+    case CALL_EXPR: 
+      check_call (local, t);
+      *walk_subtrees = 0;
+      break;
+      
+    case ASM_EXPR:
+      get_asm_expr_operands (local, t);
+      *walk_subtrees = 0;
+      break;
+      
+    default:
+      break;
+    }
+  return NULL;
+}
+
+
+/* Lookup the tree node for the static variable that has UID.  */
+static tree
+get_static_decl_by_decl_uid (int index)
+{
+  splay_tree_node stn = 
+    splay_tree_lookup (reference_vars_to_consider_by_decl_uid, index);
+  if (stn)
+    return (tree)stn->value;
+  return NULL;
+}
+
+/* Lookup the tree node for the static variable that has UID and
+   convert the name to a string for debugging.  Returns NULL if INDEX
+   is not one of the statics being analyzed.  */
+
+static const char *
+get_static_name_by_decl_uid (int index)
+{
+  splay_tree_node stn = 
+    splay_tree_lookup (reference_vars_to_consider_by_decl_uid, index);
+  if (stn)
+    return lang_hooks.decl_printable_name ((tree)(stn->value), 2);
+  return NULL;
+}
+
+/* Or in all of the bits from every callee into X, the caller's, bit
+   vector.  There are several cases to check to avoid the sparse
+   bitmap oring.  */
+
+static void
+propagate_bits (struct cgraph_node *x)
+{
+  ipa_reference_vars_info_t x_info = get_reference_vars_info_from_cgraph (x);
+  ipa_reference_global_vars_info_t x_global = x_info->global;
+
+  struct cgraph_edge *e;
+  for (e = x->callees; e; e = e->next_callee) 
+    {
+      struct cgraph_node *y = e->callee;
+
+      /* Only look at the master nodes and skip external nodes.  */
+      y = cgraph_master_clone (y);
+      if (y)
+	{
+	  if (get_reference_vars_info_from_cgraph (y))
+	    {
+	      ipa_reference_vars_info_t y_info = get_reference_vars_info_from_cgraph (y);
+	      ipa_reference_global_vars_info_t y_global = y_info->global;
+	      
+	      /* If the callee reads everything, the caller does too;
+		 share the all-statics bitmap rather than copying.  */
+	      if (x_global->statics_read_by_decl_uid
+		  != all_module_statics_by_decl_uid)
+		{
+		  if (y_global->statics_read_by_decl_uid 
+		      == all_module_statics_by_decl_uid)
+		    {
+		      BITMAP_FREE (x_global->statics_read_by_decl_uid);
+		      x_global->statics_read_by_decl_uid 
+			= all_module_statics_by_decl_uid;
+		    }
+		  /* Skip bitmaps that are pointer equal to node's bitmap
+		     (no reason to spin within the cycle).  */
+		  else if (x_global->statics_read_by_decl_uid 
+			   != y_global->statics_read_by_decl_uid)
+		    bitmap_ior_into (x_global->statics_read_by_decl_uid,
+				     y_global->statics_read_by_decl_uid);
+		}
+	      
+	      /* Same logic as above, for the write sets.  */
+	      if (x_global->statics_written_by_decl_uid 
+		  != all_module_statics_by_decl_uid)
+		{
+		  if (y_global->statics_written_by_decl_uid 
+		      == all_module_statics_by_decl_uid)
+		    {
+		      BITMAP_FREE (x_global->statics_written_by_decl_uid);
+		      x_global->statics_written_by_decl_uid 
+			= all_module_statics_by_decl_uid;
+		    }
+		  /* Skip bitmaps that are pointer equal to node's bitmap
+		     (no reason to spin within the cycle).  */
+		  else if (x_global->statics_written_by_decl_uid 
+			   != y_global->statics_written_by_decl_uid)
+		    bitmap_ior_into (x_global->statics_written_by_decl_uid,
+				     y_global->statics_written_by_decl_uid);
+		}
+	    }
+	  else 
+	    {
+	      /* Every analyzed master clone must have had its info
+		 created by analyze_function.  */
+	      gcc_unreachable ();
+	    }
+	}
+    }
+}
+
+/* Look at all of the callees of X to see which ones represent inlined
+   calls.  For each of these callees, merge their local info into
+   TARGET and check their children recursively.  
+
+   This function goes away when Jan changes the inliner and IPA
+   analysis so that this is not run between the time when inlining
+   decisions are made and when the inlining actually occurs.  */
+
+static void 
+merge_callee_local_info (struct cgraph_node *target, 
+			 struct cgraph_node *x)
+{
+  struct cgraph_edge *e;
+  ipa_reference_local_vars_info_t x_l = 
+    get_reference_vars_info_from_cgraph (target)->local;
+
+  /* Make the world safe for tail recursion.  */
+  struct ipa_dfs_info *node_info = x->aux;
+  
+  /* The dfs info's aux field serves as a visited mark that stops the
+     recursion on cycles; it is cleared again on the way out.  */
+  if (node_info->aux) 
+    return;
+
+  node_info->aux = x;
+
+  for (e = x->callees; e; e = e->next_callee) 
+    {
+      struct cgraph_node *y = e->callee;
+      /* Only callees that were inlined contribute their local sets.  */
+      if (y->global.inlined_to) 
+	{
+	  ipa_reference_vars_info_t y_info;
+	  ipa_reference_local_vars_info_t y_l;
+	  struct cgraph_node* orig_y = y;
+	 
+	  y = cgraph_master_clone (y);
+	  if (y)
+	    {
+	      y_info = get_reference_vars_info_from_cgraph (y);
+	      y_l = y_info->local;
+	      if (x_l != y_l)
+		{
+		  bitmap_ior_into (x_l->statics_read_by_decl_uid,
+				   y_l->statics_read_by_decl_uid);
+		  bitmap_ior_into (x_l->statics_written_by_decl_uid,
+				   y_l->statics_written_by_decl_uid);
+		}
+	      x_l->calls_read_all |= y_l->calls_read_all;
+	      x_l->calls_write_all |= y_l->calls_write_all;
+	      merge_callee_local_info (target, y);
+	    }
+	  else 
+	    {
+	      /* An inlined callee with no master clone indicates a
+		 broken call graph.  */
+	      fprintf(stderr, "suspect inlining of ");
+	      dump_cgraph_node (stderr, orig_y);
+	      fprintf(stderr, "\ninto ");
+	      dump_cgraph_node (stderr, target);
+	      dump_cgraph (stderr);
+	      gcc_assert(false);
+	    }
+	}
+    }
+
+  /* Unmark so other roots can traverse this node again.  */
+  node_info->aux = NULL;
+}
+
+/* The init routine for analyzing global static variable usage.  See
+   comments at top for description.  */
+static void 
+ipa_init (void) 
+{
+  /* Identifier compared (via simple_cst_equal) against asm clobber
+     strings to detect "memory" clobbers.  */
+  memory_identifier_string = build_string(7, "memory");
+
+  reference_vars_to_consider_by_decl_uid =
+    splay_tree_new_ggc (splay_tree_compare_ints);
+
+  bitmap_obstack_initialize (&ipa_obstack);
+  module_statics_escape_by_decl_uid = BITMAP_ALLOC (&ipa_obstack);
+  module_statics_written_by_decl_uid = BITMAP_ALLOC (&ipa_obstack);
+  all_module_statics_by_decl_uid = BITMAP_ALLOC (&ipa_obstack);
+
+  /* There are some shared nodes, in particular the initializers on
+     static declarations.  We do not need to scan them more than once
+     since all we would be interested in are the addressof
+     operations.  */
+  visited_nodes = pointer_set_create ();
+}
+
+/* Check out the rhs of a static or global initialization VNODE to see
+   if any of them contain addressof operations.  Note that some of
+   these variables may not even be referenced in the code in this
+   compilation unit but their right hand sides may contain references
+   to variables defined within this unit.  */
+
+static void 
+analyze_variable (struct cgraph_varpool_node *vnode)
+{
+  tree global = vnode->decl;
+  if (TREE_CODE (global) == VAR_DECL)
+    {
+      /* FN is NULL here, so the walk records only escapes, not
+	 read/write sets.  */
+      if (DECL_INITIAL (global)) 
+	walk_tree (&DECL_INITIAL (global), scan_for_static_refs, 
+		   NULL, visited_nodes);
+    } 
+  else gcc_unreachable ();
+}
+
+/* This is the main routine for finding the reference patterns for
+   global variables within a function FN.  */
+
+static void
+analyze_function (struct cgraph_node *fn)
+{
+  ipa_reference_vars_info_t info 
+    = xcalloc (1, sizeof (struct ipa_reference_vars_info_d))
+  ipa_reference_local_vars_info_t l
+    = xcalloc (1, sizeof (struct ipa_reference_local_vars_info_d));
+  tree decl = fn->decl;
+  var_ann_t var_ann = get_var_ann (decl);
+
+
+  /* Add the info to the tree's annotation.  */
+  var_ann->reference_vars_info = info;
+
+  info->local = l;
+  l->statics_read_by_decl_uid = BITMAP_ALLOC (&ipa_obstack);
+  l->statics_written_by_decl_uid = BITMAP_ALLOC (&ipa_obstack);
+  l->statics_read_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
+  l->statics_written_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
+  l->var_anns_valid = false;
+
+  if (dump_file)
+    fprintf (dump_file, "\n local analysis of %s\n", cgraph_node_name (fn));
+  
+  /* Scan every statement of every basic block of the function.  */
+  {
+    struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
+    basic_block this_block;
+
+    FOR_EACH_BB_FN (this_block, this_cfun)
+      {
+	block_stmt_iterator bsi;
+	for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
+	  walk_tree (bsi_stmt_ptr (bsi), scan_for_static_refs, 
+		     fn, visited_nodes);
+      }
+  }
+
+  /* FIXME - When Jan gets the local statics promoted to the global
+     variable list, the next two loops go away.  */
+  /* Walk over any private statics that may take addresses of functions.  */
+  if (TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
+    {
+      tree step;
+      for (step = BLOCK_VARS (DECL_INITIAL (decl));
+	   step;
+	   step = TREE_CHAIN (step))
+	{
+	  if (DECL_INITIAL (step))
+	    walk_tree (&DECL_INITIAL (step), scan_for_static_refs, 
+		       fn, visited_nodes);
+	}
+    }
+  
+  /* Also look here for private statics.  */
+  if (DECL_STRUCT_FUNCTION (decl))
+    {
+      tree step;
+      for (step = DECL_STRUCT_FUNCTION (decl)->unexpanded_var_list;
+	   step;
+	   step = TREE_CHAIN (step))
+	{
+	  tree var = TREE_VALUE (step);
+	  if (DECL_INITIAL (var) && TREE_STATIC (var))
+	    walk_tree (&DECL_INITIAL (var), scan_for_static_refs, 
+		       fn, visited_nodes);
+	}
+    }
+}
+
+/* If FN is avail == AVAIL_OVERWRITABLE, replace the effects bit
+   vectors with worst case bit vectors.  We had to analyze it above to
+   find out if it took the address of any statics. However, now that
+   we know that, we can get rid of all of the other side effects.  */
+
+static void
+clean_function (struct cgraph_node *fn)
+{
+  ipa_reference_vars_info_t info = get_reference_vars_info_from_cgraph (fn);
+  ipa_reference_local_vars_info_t l = info->local;
+  ipa_reference_global_vars_info_t g = info->global;
+  var_ann_t var_ann = get_var_ann (fn->decl);
+  
+  /* Bitmaps that alias the shared all_module_statics bitmap must not
+     be freed; hence the pointer-equality guards below.  */
+  if (l)
+    {
+      if (l->statics_read_by_decl_uid
+	  && l->statics_read_by_decl_uid != 
+	  all_module_statics_by_decl_uid)
+	BITMAP_FREE (l->statics_read_by_decl_uid);
+      if (l->statics_written_by_decl_uid
+	  &&l->statics_written_by_decl_uid != 
+	  all_module_statics_by_decl_uid)
+	BITMAP_FREE (l->statics_written_by_decl_uid);
+      free (l);
+    }
+  
+  if (g)
+    {
+      if (g->statics_read_by_decl_uid
+	  && g->statics_read_by_decl_uid != 
+	  all_module_statics_by_decl_uid)
+	BITMAP_FREE (g->statics_read_by_decl_uid);
+      
+      if (g->statics_written_by_decl_uid
+	  && g->statics_written_by_decl_uid != 
+	  all_module_statics_by_decl_uid)
+	BITMAP_FREE (g->statics_written_by_decl_uid);
+      
+      if (g->statics_not_read_by_decl_uid
+	  && g->statics_not_read_by_decl_uid != 
+	  all_module_statics_by_decl_uid)
+	BITMAP_FREE (g->statics_not_read_by_decl_uid);
+      
+      if (g->statics_not_written_by_decl_uid
+	  && g->statics_not_written_by_decl_uid != 
+	  all_module_statics_by_decl_uid)
+	BITMAP_FREE (g->statics_not_written_by_decl_uid);
+      free (g);
+    }
+  
+  free (var_ann->reference_vars_info);
+  var_ann->reference_vars_info = NULL;
+}
+
+
+/* Produce the global information by performing a transitive closure
+   on the local information that was produced by ipa_analyze_function
+   and ipa_analyze_variable.  */
+
+static void
+static_execute (void)
+{
+  struct cgraph_node *node;
+  struct cgraph_varpool_node *vnode;
+  struct cgraph_node *w;
+  struct cgraph_node **order =
+    xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
+  int order_pos = ipa_utils_reduced_inorder (order, false, true);
+  int i;
+
+  ipa_init ();
+
+  /* Process all of the variables first.  */
+  for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
+    analyze_variable (vnode);
+
+  /* Process all of the functions next. 
+
+     We do not want to process any of the clones so we check that this
+     is a master clone.  However, we do need to process any
+     AVAIL_OVERWRITABLE functions (these are never clones) because
+     they may cause a static variable to escape.  The code that can
+     overwrite such a function cannot access the statics because it
+     would not be in the same compilation unit.  When the analysis is
+     finished, the computed information of these AVAIL_OVERWRITABLE is
+     replaced with worst case info.  
+  */
+  for (node = cgraph_nodes; node; node = node->next)
+    if (node->analyzed 
+	&& (cgraph_is_master_clone (node)
+	    || (cgraph_function_body_availability (node) 
+		== AVAIL_OVERWRITABLE)))
+      analyze_function (node);
+
+  pointer_set_destroy (visited_nodes);
+  visited_nodes = NULL;
+  if (dump_file) 
+    dump_cgraph (dump_file);
+
+  /* Prune out the variables that were found to behave badly
+     (i.e. had their address taken).  */
+  {
+    unsigned int index;
+    bitmap_iterator bi;
+    bitmap module_statics_readonly = BITMAP_ALLOC (&ipa_obstack);
+    /* Currently only informational (drives the "read-only constant"
+       dump below).  */
+    bitmap module_statics_const = BITMAP_ALLOC (&ipa_obstack);
+
+    EXECUTE_IF_SET_IN_BITMAP (module_statics_escape_by_decl_uid, 0, index, bi)
+      {
+	splay_tree_remove (reference_vars_to_consider_by_decl_uid, index);
+      }
+
+    bitmap_and_compl_into (all_module_statics_by_decl_uid, 
+			   module_statics_escape_by_decl_uid);
+
+    /* If the address is not taken, we can unset the addressable bit
+       on this variable.  Escaped statics were removed from
+       all_module_statics_by_decl_uid just above, so everything left
+       qualifies.  */
+    EXECUTE_IF_SET_IN_BITMAP (all_module_statics_by_decl_uid, 0, index, bi)
+      {
+	tree var = get_static_decl_by_decl_uid (index);
+ 	TREE_ADDRESSABLE (var) = 0;
+	if (dump_file) 
+	  fprintf (dump_file, "Not TREE_ADDRESSABLE var %s\n",
+		   get_static_name_by_decl_uid (index));
+      }
+
+    /* If the variable is never written, we can set the TREE_READONLY
+       flag.  Additionally if it has a DECL_INITIAL that is made up of
+       constants we can treat the entire global as a constant.  */
+
+    bitmap_and_compl (module_statics_readonly, all_module_statics_by_decl_uid,
+		      module_statics_written_by_decl_uid);
+    EXECUTE_IF_SET_IN_BITMAP (module_statics_readonly, 0, index, bi)
+      {
+	tree var = get_static_decl_by_decl_uid (index);
+	TREE_READONLY (var) = 1;
+	if (dump_file)
+	  fprintf (dump_file, "read-only var %s\n", 
+		   get_static_name_by_decl_uid (index)); 
+	if (DECL_INITIAL (var)
+	    && is_gimple_min_invariant (DECL_INITIAL (var)))
+	  {
+ 	    bitmap_set_bit (module_statics_const, index);
+	    if (dump_file)
+	      fprintf (dump_file, "read-only constant %s\n",
+		       get_static_name_by_decl_uid (index));
+	  }
+      }
+
+    BITMAP_FREE(module_statics_escape_by_decl_uid);
+    BITMAP_FREE(module_statics_written_by_decl_uid);
+
+    if (dump_file)
+      EXECUTE_IF_SET_IN_BITMAP (all_module_statics_by_decl_uid, 0, index, bi)
+	{
+	  fprintf (dump_file, "\nPromotable global:%s",
+		   get_static_name_by_decl_uid (index));
+	}
+
+    for (i = 0; i < order_pos; i++ )
+      {
+	ipa_reference_local_vars_info_t l;
+	node = order[i];
+	l = get_reference_vars_info_from_cgraph (node)->local;
+
+	/* Any variables that are not in all_module_statics_by_decl_uid are
+	   removed from the local maps.  This will include all of the
+	   variables that were found to escape in the function
+	   scanning.  */
+	bitmap_and_into (l->statics_read_by_decl_uid, 
+		         all_module_statics_by_decl_uid);
+	bitmap_and_into (l->statics_written_by_decl_uid, 
+		         all_module_statics_by_decl_uid);
+      }
+
+    BITMAP_FREE(module_statics_readonly);
+    BITMAP_FREE(module_statics_const);
+  }
+
+  if (dump_file)
+    {
+      for (i = 0; i < order_pos; i++ )
+	{
+	  unsigned int index;
+	  ipa_reference_local_vars_info_t l;
+	  bitmap_iterator bi;
+
+	  node = order[i];
+	  l = get_reference_vars_info_from_cgraph (node)->local;
+	  fprintf (dump_file, 
+		   "\nFunction name:%s/%i:", 
+		   cgraph_node_name (node), node->uid);
+	  fprintf (dump_file, "\n  locals read: ");
+	  EXECUTE_IF_SET_IN_BITMAP (l->statics_read_by_decl_uid,
+				    0, index, bi)
+	    {
+	      fprintf (dump_file, "%s ",
+		       get_static_name_by_decl_uid (index));
+	    }
+	  fprintf (dump_file, "\n  locals written: ");
+	  EXECUTE_IF_SET_IN_BITMAP (l->statics_written_by_decl_uid,
+				    0, index, bi)
+	    {
+	      fprintf(dump_file, "%s ",
+		      get_static_name_by_decl_uid (index));
+	    }
+	}
+    }
+
+  /* Propagate the local information thru the call graph to produce
+     the global information.  All the nodes within a cycle will have
+     the same info so we collapse cycles first.  Then we can do the
+     propagation in one pass from the leaves to the roots.  */
+  order_pos = ipa_utils_reduced_inorder (order, true, true);
+  if (dump_file)
+    ipa_utils_print_order(dump_file, "reduced", order, order_pos);
+
+  for (i = 0; i < order_pos; i++ )
+    {
+      ipa_reference_vars_info_t node_info;
+      ipa_reference_global_vars_info_t node_g = 
+	xcalloc (1, sizeof (struct ipa_reference_global_vars_info_d));
+      ipa_reference_local_vars_info_t node_l;
+      
+      bool read_all;
+      bool write_all;
+      struct ipa_dfs_info * w_info;
+
+      node = order[i];
+      node_info = get_reference_vars_info_from_cgraph (node);
+      if (!node_info) 
+	{
+	  dump_cgraph_node (stderr, node);
+	  dump_cgraph (stderr);
+	  gcc_unreachable ();
+	}
+
+      node_info->global = node_g;
+      node_l = node_info->local;
+
+      read_all = node_l->calls_read_all;
+      write_all = node_l->calls_write_all;
+
+      /* If any node in a cycle is calls_read_all or calls_write_all
+	 they all are. */
+      w_info = node->aux;
+      w = w_info->next_cycle;
+      while (w)
+	{
+	  ipa_reference_local_vars_info_t w_l = 
+	    get_reference_vars_info_from_cgraph (w)->local;
+	  read_all |= w_l->calls_read_all;
+	  write_all |= w_l->calls_write_all;
+
+	  w_info = w->aux;
+	  w = w_info->next_cycle;
+	}
+
+      /* Initialize the bitmaps for the reduced nodes */
+      if (read_all) 
+	node_g->statics_read_by_decl_uid = all_module_statics_by_decl_uid;
+      else 
+	{
+	  node_g->statics_read_by_decl_uid = BITMAP_ALLOC (&ipa_obstack);
+	  bitmap_copy (node_g->statics_read_by_decl_uid, 
+		       node_l->statics_read_by_decl_uid);
+	}
+
+      if (write_all) 
+	node_g->statics_written_by_decl_uid = all_module_statics_by_decl_uid;
+      else
+	{
+	  node_g->statics_written_by_decl_uid = BITMAP_ALLOC (&ipa_obstack);
+	  bitmap_copy (node_g->statics_written_by_decl_uid, 
+		       node_l->statics_written_by_decl_uid);
+	}
+
+      w_info = node->aux;
+      w = w_info->next_cycle;
+      while (w)
+	{
+	  ipa_reference_vars_info_t w_ri = get_reference_vars_info_from_cgraph (w);
+	  ipa_reference_local_vars_info_t w_l = w_ri->local;
+
+	  /* All nodes within a cycle share the same global info bitmaps.  */
+	  w_ri->global = node_g;
+	  
+	  /* These global bitmaps are initialized from the local info
+	     of all of the nodes in the region.  However there is no
+	     need to do any work if the bitmaps were set to
+	     all_module_statics_by_decl_uid.  */
+	  if (!read_all)
+	    bitmap_ior_into (node_g->statics_read_by_decl_uid,
+			     w_l->statics_read_by_decl_uid);
+	  if (!write_all)
+	    bitmap_ior_into (node_g->statics_written_by_decl_uid,
+			     w_l->statics_written_by_decl_uid);
+	  w_info = w->aux;
+	  w = w_info->next_cycle;
+	}
+
+      w = node;
+      while (w)
+	{
+	  propagate_bits (w);
+	  w_info = w->aux;
+	  w = w_info->next_cycle;
+	}
+    }
+
+  /* Need to fix up the local information sets.  The information that
+     has been gathered so far is preinlining.  However, the
+     compilation will progress post inlining so the local sets for the
+     inlined calls need to be merged into the callers.  Note that the
+     local sets are not shared between all of the nodes in a cycle so
+     those nodes in the cycle must be processed explicitly.  */
+  for (i = 0; i < order_pos; i++ )
+    {
+      struct ipa_dfs_info * w_info;
+      node = order[i];
+      merge_callee_local_info (node, node);
+      
+      w_info = node->aux;
+      w = w_info->next_cycle;
+      while (w)
+	{
+	  merge_callee_local_info (w, w);
+	  w_info = w->aux;
+	  w = w_info->next_cycle;
+	}
+    }
+
+  if (dump_file)
+    {
+      for (i = 0; i < order_pos; i++ )
+	{
+	  ipa_reference_vars_info_t node_info;
+	  ipa_reference_global_vars_info_t node_g;
+	  ipa_reference_local_vars_info_t node_l;
+	  unsigned int index;
+	  bitmap_iterator bi;
+	  struct ipa_dfs_info * w_info;
+
+	  node = order[i];
+	  node_info = get_reference_vars_info_from_cgraph (node);
+	  node_g = node_info->global;
+	  node_l = node_info->local;
+	  fprintf (dump_file, 
+		   "\nFunction name:%s/%i:", 
+		   cgraph_node_name (node), node->uid);
+	  fprintf (dump_file, "\n  locals read: ");
+	  EXECUTE_IF_SET_IN_BITMAP (node_l->statics_read_by_decl_uid,
+				    0, index, bi)
+	    {
+	      fprintf (dump_file, "%s ",
+		       get_static_name_by_decl_uid (index));
+	    }
+	  fprintf (dump_file, "\n  locals written: ");
+	  EXECUTE_IF_SET_IN_BITMAP (node_l->statics_written_by_decl_uid,
+				    0, index, bi)
+	    {
+	      fprintf(dump_file, "%s ",
+		      get_static_name_by_decl_uid (index));
+	    }
+
+	  w_info = node->aux;
+	  w = w_info->next_cycle;
+	  while (w) 
+	    {
+	      ipa_reference_vars_info_t w_ri = 
+		get_reference_vars_info_from_cgraph (w);
+	      ipa_reference_local_vars_info_t w_l = w_ri->local;
+	      fprintf (dump_file, "\n  next cycle: %s/%i ",
+		       cgraph_node_name (w), w->uid);
+ 	      fprintf (dump_file, "\n    locals read: ");
+	      EXECUTE_IF_SET_IN_BITMAP (w_l->statics_read_by_decl_uid,
+					0, index, bi)
+		{
+		  fprintf (dump_file, "%s ",
+			   get_static_name_by_decl_uid (index));
+		}
+
+	      fprintf (dump_file, "\n    locals written: ");
+	      EXECUTE_IF_SET_IN_BITMAP (w_l->statics_written_by_decl_uid,
+					0, index, bi)
+		{
+		  fprintf(dump_file, "%s ",
+			  get_static_name_by_decl_uid (index));
+		}
+	      
+
+	      w_info = w->aux;
+	      w = w_info->next_cycle;
+	    }
+	  fprintf (dump_file, "\n  globals read: ");
+	  EXECUTE_IF_SET_IN_BITMAP (node_g->statics_read_by_decl_uid,
+				    0, index, bi)
+	    {
+	      fprintf (dump_file, "%s ",
+		       get_static_name_by_decl_uid (index));
+	    }
+	  fprintf (dump_file, "\n  globals written: ");
+	  EXECUTE_IF_SET_IN_BITMAP (node_g->statics_written_by_decl_uid,
+				    0, index, bi)
+	    {
+	      fprintf (dump_file, "%s ",
+		       get_static_name_by_decl_uid (index));
+	    }
+	}
+    }
+
+  /* Cleanup. */
+  for (i = 0; i < order_pos; i++ )
+    {
+      ipa_reference_vars_info_t node_info;
+      ipa_reference_global_vars_info_t node_g;
+      node = order[i];
+      node_info = get_reference_vars_info_from_cgraph (node);
+      node_g = node_info->global;
+      
+      node_g->var_anns_valid = false;
+
+      /* Create the complementary sets.  These are more useful for
+	 certain APIs.  */
+      node_g->statics_not_read_by_decl_uid = BITMAP_ALLOC (&ipa_obstack);
+      node_g->statics_not_written_by_decl_uid = BITMAP_ALLOC (&ipa_obstack);
+
+      if (node_g->statics_read_by_decl_uid != all_module_statics_by_decl_uid) 
+	{
+	  bitmap_and_compl (node_g->statics_not_read_by_decl_uid, 
+			    all_module_statics_by_decl_uid,
+			    node_g->statics_read_by_decl_uid);
+	}
+
+      if (node_g->statics_written_by_decl_uid 
+	  != all_module_statics_by_decl_uid) 
+	bitmap_and_compl (node_g->statics_not_written_by_decl_uid, 
+			  all_module_statics_by_decl_uid,
+			  node_g->statics_written_by_decl_uid);
+
+      node_g->statics_read_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
+      node_g->statics_written_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
+      node_g->statics_not_read_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
+      node_g->statics_not_written_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
+    }
+
+  free (order);
+
+  for (node = cgraph_nodes; node; node = node->next)
+    {
+      /* Get rid of the aux information.  */
+      
+      if (node->aux)
+	{
+	  free (node->aux);
+	  node->aux = NULL;
+	}
+      
+      if (node->analyzed 
+	  && (cgraph_function_body_availability (node) == AVAIL_OVERWRITABLE))
+	clean_function (node);
+    }
+}
+
+
+static bool
+gate_reference (void)
+{
+  return (flag_unit_at_a_time != 0  && flag_ipa_reference
+	  /* Don't bother doing anything if the program has errors.  */
+	  && !(errorcount || sorrycount));
+}
+
+/* Pass descriptor for the interprocedural static variable reference
+   analysis.  */
+
+struct tree_opt_pass pass_ipa_reference =
+{
+  "static-var",				/* name */
+  gate_reference,			/* gate */
+  static_execute,			/* execute */
+  NULL,					/* sub */
+  NULL,					/* next */
+  0,					/* static_pass_number */
+  TV_IPA_REFERENCE,		        /* tv_id */
+  0,	                                /* properties_required */
+  0,					/* properties_provided */
+  0,					/* properties_destroyed */
+  0,					/* todo_flags_start */
+  0,                                    /* todo_flags_finish */
+  0					/* letter */
+};
+
+#include "gt-ipa-reference.h"
+
Index: ipa-reference.h
===================================================================
RCS file: ipa-reference.h
diff -N ipa-reference.h
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ipa-reference.h	8 Jun 2005 20:16:32 -0000
@@ -0,0 +1,101 @@
+/* IPA handling of references.
+   Copyright (C) 2004-2005 Free Software Foundation, Inc.
+   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING.  If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA.  */
+
+#ifndef GCC_IPA_REFERENCE_H
+#define GCC_IPA_REFERENCE_H
+#include "bitmap.h"
+#include "tree.h"
+
+/* The static variables defined within the compilation unit that are
+   loaded or stored directly by the function that owns this structure.  */
+
+struct ipa_reference_local_vars_info_d 
+{
+  bitmap statics_read_by_decl_uid;
+  bitmap statics_written_by_decl_uid;
+  bitmap statics_read_by_ann_uid;
+  bitmap statics_written_by_ann_uid;
+
+  /* Var_anns_valid is reset at the start of compilation for each
+     function because the indexing that the "_var_anns" is based
+     on is invalidated between function compilations.  This allows for
+     lazy creation of the "_var_ann" variables.  */
+  bool var_anns_valid;
+  /* Set when this function calls another function external to the
+     compilation unit or if the function has an asm clobber of memory.
+     In general, such calls are modeled as reading and writing all
+     variables (both bits on) but sometime there are attributes on the
+     called function so we can do better.  */
+  bool calls_read_all;
+  bool calls_write_all;
+};
+
+struct ipa_reference_global_vars_info_d
+{
+  bitmap statics_read_by_decl_uid;
+  bitmap statics_written_by_decl_uid;
+  bitmap statics_read_by_ann_uid;
+  bitmap statics_written_by_ann_uid;
+  bitmap statics_not_read_by_decl_uid;
+  bitmap statics_not_written_by_decl_uid;
+  bitmap statics_not_read_by_ann_uid;
+  bitmap statics_not_written_by_ann_uid;
+
+  /* Var_anns_valid is reset at the start of compilation for each
+     function because the indexing that the "_var_anns" is based
+     on is invalidated between function compilations.  This allows for
+     lazy creation of the "_var_ann" variables.  */
+  bool var_anns_valid;
+};
+
+/* Statics that are read and written by some set of functions. The
+   local ones are based on the loads and stores local to the function.
+   The global ones are based on the local info as well as the
+   transitive closure of the functions that are called.  The
+   structures are separated to allow the global structures to be
+   shared between several functions since every function within a
+   strongly connected component will have the same information.  This
+   sharing saves both time and space in the computation of the vectors
+   as well as their translation from decl_uid form to ann_uid
+   form.  */ 
+
+typedef struct ipa_reference_local_vars_info_d *ipa_reference_local_vars_info_t;
+typedef struct ipa_reference_global_vars_info_d *ipa_reference_global_vars_info_t;
+
+struct ipa_reference_vars_info_d 
+{
+  ipa_reference_local_vars_info_t local;
+  ipa_reference_global_vars_info_t global;
+};
+
+typedef struct ipa_reference_vars_info_d *ipa_reference_vars_info_t;
+
+/* In ipa-reference.c  */
+void   ipa_reference_reset_maps (void);
+bitmap ipa_reference_get_read_local (tree fn);
+bitmap ipa_reference_get_written_local (tree fn);
+bitmap ipa_reference_get_read_global (tree fn);
+bitmap ipa_reference_get_written_global (tree fn);
+bitmap ipa_reference_get_not_read_global (tree fn);
+bitmap ipa_reference_get_not_written_global (tree fn);
+
+#endif  /* GCC_IPA_REFERENCE_H  */
+
Index: ipa-type-escape.c
===================================================================
RCS file: ipa-type-escape.c
diff -N ipa-type-escape.c
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ipa-type-escape.c	8 Jun 2005 20:16:32 -0000
@@ -0,0 +1,1861 @@
+/* Type based alias analysis.
+   Copyright (C) 2004, 2005 Free Software Foundation, Inc.
+   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING.  If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA.  */
+
+/* This pass determines which types in the program contain only
+   instances that are completely encapsulated by the compilation unit.
+   Those types that are encapsulated must also pass the further
+   requirement that there be no bad operations on any instances of
+   those types.
+
+   A great deal of freedom in compilation is allowed for the instances
+   of those types that pass these conditions.
+*/
+
+/* The code in this module is called by the ipa pass manager. It
+   should be one of the later passes since its information is used by
+   the rest of the compilation. */
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "tree.h"
+#include "tree-flow.h"
+#include "tree-inline.h"
+#include "tree-pass.h"
+#include "langhooks.h"
+#include "pointer-set.h"
+#include "ggc.h"
+#include "ipa-utils.h"
+#include "ipa-type-escape.h"
+#include "c-common.h"
+#include "tree-gimple.h"
+#include "cgraph.h"
+#include "output.h"
+#include "flags.h"
+#include "timevar.h"
+#include "diagnostic.h"
+#include "langhooks.h"
+
+/* Some of the aliasing is called very early, before this phase is
+   called.  To ensure that this is not a problem, we keep track of whether
+   this phase has been run.  */
+static bool initialized = false;
+
+/* This bitmap contains the set of local vars that are the lhs of
+   calls to mallocs.  When these variables are seen on the rhs as
+   part of a cast, the cast is not marked as doing bad things to the type
+   even though they are generally of the form 
+   "foo = (type_of_foo)void_temp". */
+static bitmap results_of_malloc;
+
+/* Scratch bitmap for avoiding work. */
+static bitmap been_there_done_that;
+
+/* There are two levels of escape that types can undergo.
+
+   EXPOSED_PARAMETER - some instance of the variable is
+   passed by value into an externally visible function or some
+   instance of the variable is passed out of an externally visible
+   function as a return value.  In this case any of the fields of the
+   variable that are pointer types end up having their types marked as
+   FULL_ESCAPE.
+
+   FULL_ESCAPE - when bad things happen to good types. One of the
+   following things happens to the type: (a) either an instance of the
+   variable has it's address passed to an externally visible function,
+   (b) the address is taken and some bad cast happens to the address
+   or (c) explicit arithmetic is done to the address.
+*/
+
+enum escape_t
+{
+  EXPOSED_PARAMETER,
+  FULL_ESCAPE
+};
+
+/* The following two bit vectors global_types_* correspond to
+   previous cases above.  During the analysis phase, a bit is set in
+   one of these vectors if an operation of the offending class is
+   discovered to happen on the associated type.  */
+ 
+static bitmap global_types_exposed_parameter;
+static bitmap global_types_full_escape;
+
+/* All of the types seen in this compilation unit. */
+static bitmap global_types_seen;
+static splay_tree uid_to_type;
+
+/* Map the several instances of a type into a single instance.  These
+   can arise in several ways, none of which can be justified except by
+   laziness and stupidity.  */
+static splay_tree uid_to_unique_type;
+static splay_tree all_unique_types;
+
+/* A splay tree of bitmaps.  An element X in the splay tree has a bit
+   set in its bitmap at TYPE_UID (TYPE_MAIN_VARIANT (Y) if there was
+   an operation in the program of the form "&X.Y".  */
+static splay_tree uid_to_addressof_map;
+
+/* Tree to hold the subtype maps used to mark subtypes of escaped
+   types.  */
+static splay_tree uid_to_subtype_map;
+
+/* Records tree nodes seen in cgraph_create_edges.  Simply using
+   walk_tree_without_duplicates doesn't guarantee each node is visited
+   once because it gets a new htab upon each recursive call from
+   scan_for_refs.  */
+static struct pointer_set_t *visited_nodes;
+
+static bitmap_obstack ipa_obstack;
+
+/* All of the "unique_type" code is a hack to get around the sleazy
+   implementation used to compile more than one file.  If the same type is
+   declared in several files, multiple types will appear that are the
+   same.  The code in this unit chooses one "unique" instance of that
+   type as the representative and has all of the others point to
+   it. If ALLOW_MISSING is true, just return UID if it is not found in
+   the table, otherwise abort. */
+
+/* Find the unique representative for a type with UID.  */  
+static int
+unique_type_id_for (int uid, bool allow_missing ATTRIBUTE_UNUSED)
+{
+  splay_tree_node result = 
+    splay_tree_lookup(uid_to_unique_type, (splay_tree_key) uid);
+
+  if (result)
+    return TYPE_UID((tree) result->value);
+  else 
+    {
+      /* ICE when compiling libstdc++.  */
+      if (!allow_missing)
+	abort();
+      return uid;
+    }
+}
+
+/* Return true if the type with UID is the unique representative.  */
+static bool 
+unique_type_id_p (int uid)
+{
+  return uid == unique_type_id_for (uid, false);
+}
+
+/* Return 0 if TYPE is a record or union type.  Return a positive
+   number if TYPE is a pointer to a record or union.  The number is
+   the number of pointer types stripped to get to the record or union
+   type.  Return -1 if TYPE is none of the above.  */
+ 
+int
+ipa_type_escape_star_count_of_interesting_type (tree type) 
+{
+  int count = 0;
+  /* Strip the *'s off.  */
+  type = TYPE_MAIN_VARIANT (type);
+  while (POINTER_TYPE_P (type))
+    {
+      type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
+      count++;
+    }
+
+  /* We are interested in records, and unions only.  */
+  if (TREE_CODE (type) == RECORD_TYPE 
+      || TREE_CODE (type) == QUAL_UNION_TYPE 
+      || TREE_CODE (type) == UNION_TYPE)
+    return count;
+  else 
+    return -1;
+} 
+
+
+/* Return 0 if TYPE is a record or union type.  Return a positive
+   number if TYPE is a pointer to a record or union.  The number is
+   the number of pointer types stripped to get to the record or union
+   type.  Return -1 if TYPE is none of the above.  */
+ 
+int
+ipa_type_escape_star_count_of_interesting_or_array_type (tree type) 
+{
+  int count = 0;
+  /* Strip the *'s off.  */
+  type = TYPE_MAIN_VARIANT (type);
+  while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
+    {
+      type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
+      count++;
+    }
+
+  /* We are interested in records, and unions only.  */
+  if (TREE_CODE (type) == RECORD_TYPE 
+      || TREE_CODE (type) == QUAL_UNION_TYPE 
+      || TREE_CODE (type) == UNION_TYPE)
+    return count;
+  else 
+    return -1;
+} 
+ 
+ 
+/* Return true if the record, or union TYPE passed in escapes this
+   compilation unit. Note that all of the pointer-to's are removed
+   before testing since these may not be correct.  */
+
+bool
+ipa_type_escape_type_contained_p (tree type)
+{
+  int uid;
+
+  if (!initialized)
+    return false;
+
+  type = TYPE_MAIN_VARIANT (type);
+  while (POINTER_TYPE_P (type))
+    type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
+
+  uid = unique_type_id_for (TYPE_UID (type), true);
+  return !bitmap_bit_p (global_types_full_escape, uid);
+}
+
+/* Return true if a modification to a field of type FIELD_TYPE cannot
+   clobber a record of RECORD_TYPE.  */
+
+bool 
+ipa_type_escape_field_does_not_clobber_p (tree record_type, tree field_type)
+{ 
+  splay_tree_node result;
+  int uid;
+  
+  if (!initialized)
+    return false;
+
+  /* Strip off all of the pointer tos on the record type.  Strip the
+     same number of pointer tos from the field type.  If the field
+     type has fewer, it could not have been aliased. */
+  record_type = TYPE_MAIN_VARIANT (record_type);
+  field_type = TYPE_MAIN_VARIANT (field_type);
+  while (POINTER_TYPE_P (record_type))
+    {
+      record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_type));
+      if (POINTER_TYPE_P (field_type)) 
+	field_type = TYPE_MAIN_VARIANT (TREE_TYPE (field_type));
+      else 
+	/* However, if field_type is a union, this quick test is not
+	   correct since one of the variants of the union may be a
+	   pointer to type and we cannot see across that here.  So we
+	   just strip the remaining pointer tos off the record type
+	   and fall thru to the more precise code.  */
+	if (TREE_CODE (field_type) == QUAL_UNION_TYPE 
+	    || TREE_CODE (field_type) == UNION_TYPE)
+	  {
+	    while (POINTER_TYPE_P (record_type))
+	      record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_type));
+	    break;
+	  } 
+	else 
+	  return true;
+    }
+  
+  /* The record type must be contained.  The field type may
+     escape.  */
+  if (!ipa_type_escape_type_contained_p (record_type))
+    return false;
+
+  uid = unique_type_id_for (TYPE_UID (record_type), false);
+  result = splay_tree_lookup (uid_to_addressof_map, (splay_tree_key) uid);
+  
+  if (result) 
+    {
+      bitmap field_type_map = (bitmap) result->value;
+      uid = unique_type_id_for (TYPE_UID (field_type), true);
+      /* If the bit is there, the address was taken. If not, it
+	 wasn't.  */
+      return !bitmap_bit_p (field_type_map, uid);
+    }
+  else
+    /* No bitmap means no addresses were taken.  */
+    return true;
+}
+
+
+/* Mark a TYPE as being seen.  */ 
+
+static bool
+mark_any_type_seen (tree type)
+{
+  int uid;
+  
+  type = TYPE_MAIN_VARIANT (type);
+  uid = TYPE_UID (type);
+  if (bitmap_bit_p (global_types_seen, uid))
+    return false;
+  else
+    {
+      splay_tree_insert (uid_to_type, 
+			 (splay_tree_key) uid,
+			 (splay_tree_value) type);	  
+      bitmap_set_bit (global_types_seen, uid);
+    }
+  return true;
+}
+
+/* Mark the underlying record or union type of TYPE as being seen.
+   Pointer tos and array ofs are stripped from the type and non record
+   or unions are not considered.  */
+
+static bool
+mark_type_seen (tree type)
+{
+  type = TYPE_MAIN_VARIANT (type);
+  while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
+    type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
+
+  /* We are interested in records, and unions only.  */
+  if (TREE_CODE (type) == RECORD_TYPE 
+	  || TREE_CODE (type) == QUAL_UNION_TYPE 
+	  || TREE_CODE (type) == UNION_TYPE)
+    return mark_any_type_seen (type);
+  else 
+    {
+      /* Allow primitive and function types to get into the set of
+	 types seen, but do not return true because there is no
+	 reason to recurse in the calls that check the result.  */
+      mark_any_type_seen (type);
+      return false;
+    }
+}
+
+/* Add TYPE to the suspect type set. Return true if the bit needed to
+   be marked.  */
+
+static bool
+mark_type (tree type, enum escape_t escape_status)
+{
+  bitmap map = NULL;
+  int uid;
+
+  type = TYPE_MAIN_VARIANT (type);
+  while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
+    type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
+  
+  switch (escape_status) 
+    {
+    case EXPOSED_PARAMETER:
+      map = global_types_exposed_parameter;
+      break;
+    case FULL_ESCAPE:
+      map = global_types_full_escape;
+      break;
+    }
+
+  uid = TYPE_UID (type);
+  if (bitmap_bit_p (map, uid))
+    return false;
+  else
+    {
+      bitmap_set_bit (map, uid);
+      mark_any_type_seen (type);
+
+      if (escape_status == FULL_ESCAPE)
+	{
+	  /* Efficiency hack.  When things are bad, do not mess around
+	     with this type anymore.  */
+	  bitmap_set_bit (global_types_exposed_parameter, uid);
+	}      
+    }
+  return true;
+}
+
+/* Add interesting TYPE to the suspect type set. If the set is
+   EXPOSED_PARAMETER and the TYPE is a pointer type, the set is
+   changed to FULL_ESCAPE.  */
+
+static void 
+mark_interesting_type (tree type, enum escape_t escape_status)
+{
+  if (ipa_type_escape_star_count_of_interesting_type (type) >= 0)
+    {
+      if ((escape_status == EXPOSED_PARAMETER)
+	  && POINTER_TYPE_P (type))
+	/* EXPOSED_PARAMETERs are only structs or unions passed by
+	   value.  Anything passed by reference to an external
+	   function fully exposes the type.  */
+	mark_type (type, FULL_ESCAPE);
+      else
+	mark_type (type, escape_status);
+    }
+}
+
+/* Return true if PARENT is supertype of CHILD.  Both types must be
+   known to be structures or unions. */
+ 
+static bool
+parent_type_p (tree parent, tree child)
+{
+  int i;
+  tree binfo, base_binfo;
+  if (TYPE_BINFO (parent)) 
+    for (binfo = TYPE_BINFO (parent), i = 0;
+	 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
+      {
+	tree binfotype = BINFO_TYPE (base_binfo);
+	if (binfotype == child) 
+	  return true;
+	else if (parent_type_p (binfotype, child))
+	  return true;
+      }
+  if (TREE_CODE (parent) == UNION_TYPE
+      || TREE_CODE (parent) == QUAL_UNION_TYPE) 
+    {
+      tree field;
+      /* Search all of the variants in the union to see if one of them
+	 is the child.  */
+      for (field = TYPE_FIELDS (parent);
+	   field;
+	   field = TREE_CHAIN (field))
+	{
+	  tree field_type;
+	  if (TREE_CODE (field) != FIELD_DECL)
+	    continue;
+	  
+	  field_type = TREE_TYPE (field);
+	  if (field_type == child) 
+	    return true;
+	}
+
+      /* If we did not find it, recursively ask the variants if one of
+	 their children is the child type.  */
+      for (field = TYPE_FIELDS (parent);
+	   field;
+	   field = TREE_CHAIN (field))
+	{
+	  tree field_type;
+	  if (TREE_CODE (field) != FIELD_DECL)
+	    continue;
+	  
+	  field_type = TREE_TYPE (field);
+	  if (TREE_CODE (field_type) == RECORD_TYPE 
+	      || TREE_CODE (field_type) == QUAL_UNION_TYPE 
+	      || TREE_CODE (field_type) == UNION_TYPE)
+	    if (parent_type_p (field_type, child)) 
+	      return true;
+	}
+    }
+  
+  if (TREE_CODE (parent) == RECORD_TYPE)
+    {
+      tree field;
+      for (field = TYPE_FIELDS (parent);
+	   field;
+	   field = TREE_CHAIN (field))
+	{
+	  tree field_type;
+	  if (TREE_CODE (field) != FIELD_DECL)
+	    continue;
+	  
+	  field_type = TREE_TYPE (field);
+	  if (field_type == child) 
+	    return true;
+	  /* You can only cast to the first field so if it does not
+	     match, quit.  */
+	  if (TREE_CODE (field_type) == RECORD_TYPE 
+	      || TREE_CODE (field_type) == QUAL_UNION_TYPE 
+	      || TREE_CODE (field_type) == UNION_TYPE)
+	    {
+	      if (parent_type_p (field_type, child)) 
+		return true;
+	      else 
+		break;
+	    }
+	}
+    }
+  return false;
+}
+
+/* Return the number of pointer tos for TYPE and return TYPE with all
+   of these stripped off.  */
+
+static int 
+count_stars (tree* type_ptr)
+{
+  tree type = *type_ptr;
+  int i = 0;
+  type = TYPE_MAIN_VARIANT (type);
+  while (POINTER_TYPE_P (type))
+    {
+      type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
+      i++;
+    }
+
+  *type_ptr = type;
+  return i;
+}
+
+enum cast_type {
+  CT_UP,
+  CT_DOWN,
+  CT_SIDEWAYS,
+  CT_USELESS
+};
+
+/* Check the cast FROM_TYPE to TO_TYPE.  This function requires that
+   the two types have already passed the
+   ipa_type_escape_star_count_of_interesting_type test.  */
+
+static enum cast_type
+check_cast_type (tree to_type, tree from_type)
+{
+  int to_stars = count_stars (&to_type);
+  int from_stars = count_stars (&from_type);
+  if (to_stars != from_stars) 
+    return CT_SIDEWAYS;
+
+  if (to_type == from_type)
+    return CT_USELESS;
+
+  if (parent_type_p (to_type, from_type)) return CT_UP;
+  if (parent_type_p (from_type, to_type)) return CT_DOWN;
+  return CT_SIDEWAYS;
+}     
+
+/* Check a cast FROM this variable, TO_TYPE.  Mark the escaping types
+   if appropriate.  */ 
+static void
+check_cast (tree to_type, tree from) 
+{
+  tree from_type = TYPE_MAIN_VARIANT (TREE_TYPE (from));
+  bool to_interesting_type, from_interesting_type;
+
+  to_type = TYPE_MAIN_VARIANT (to_type);
+  if (from_type == to_type)
+    {
+      mark_type_seen (to_type);
+      return;
+    }
+
+  to_interesting_type = 
+    ipa_type_escape_star_count_of_interesting_type (to_type) >= 0;
+  from_interesting_type = 
+    ipa_type_escape_star_count_of_interesting_type (from_type) >= 0;
+
+  if (to_interesting_type) 
+    if (from_interesting_type)
+      {
+	/* Both types are interesting. This can be one of four types
+	   of cast: useless, up, down, or sideways.  We do not care
+	   about up or useless.  Sideways casts are always bad and
+	   both sides get marked as escaping.  Downcasts are not
+	   interesting here because if type is marked as escaping, all
+	   of its subtypes escape.  */
+	switch (check_cast_type (to_type, from_type)) 
+	  {
+	  case CT_UP:
+	  case CT_USELESS:
+	  case CT_DOWN:
+	    mark_type_seen (to_type);
+	    mark_type_seen (from_type);
+	    break;
+
+	  case CT_SIDEWAYS:
+	    mark_type (to_type, FULL_ESCAPE);
+	    mark_type (from_type, FULL_ESCAPE);
+	    break;
+	  }
+      }
+    else
+      {
+	/* If this is a cast from the local that is a result from a
+	   call to malloc, do not mark the cast as bad.  */
+	if (DECL_P (from) && !bitmap_bit_p (results_of_malloc, DECL_UID (from)))
+	  mark_type (to_type, FULL_ESCAPE);
+	else
+	  mark_type_seen (to_type);
+      }
+  else if (from_interesting_type)
+    mark_type (from_type, FULL_ESCAPE);
+}
+
+/* Register the parameter and return types of function FN as
+   escaping.  */
+static void 
+check_function_parameter_and_return_types (tree fn, bool escapes) 
+{
+  tree arg;
+  
+  if (TYPE_ARG_TYPES (TREE_TYPE (fn)))
+    {
+      for (arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
+	   arg && TREE_VALUE (arg) != void_type_node;
+	   arg = TREE_CHAIN (arg))
+	{
+	  if (escapes)
+	    mark_interesting_type (TREE_VALUE (arg), EXPOSED_PARAMETER);
+	  else
+	    mark_type_seen (TREE_VALUE (arg));
+	}
+    }
+  else
+    {
+      /* FIXME - According to Geoff Keating, we should never have to
+	 do this; the front ends should always process the arg list
+	 from the TYPE_ARG_LIST. */
+
+      for (arg = DECL_ARGUMENTS (fn); arg; arg = TREE_CHAIN (arg))
+	{
+	  if (escapes)
+	    mark_interesting_type (TREE_TYPE (arg), EXPOSED_PARAMETER);
+	  else
+	    mark_type_seen (TREE_TYPE (arg));
+	}
+    }
+  if (escapes)
+    mark_interesting_type (TREE_TYPE (TREE_TYPE (fn)), EXPOSED_PARAMETER); 
+  else 
+    mark_type_seen (TREE_TYPE (TREE_TYPE (fn)));
+}
+
+/* Check whether the variable T is the right kind of static variable
+   for escape analysis; if not, mark its type as escaping.  */
+
+static inline void
+has_proper_scope_for_analysis (tree t)
+{
+  /* If the variable has the "used" attribute, treat it as if it had a
+     been touched by the devil.  */
+  if (lookup_attribute ("used", DECL_ATTRIBUTES (t)))
+    {
+      mark_interesting_type (TREE_TYPE (t), FULL_ESCAPE);
+      return;
+    }
+
+  /* Do not want to do anything with volatile except mark any
+     function that uses one to be not const or pure.  */
+  if (TREE_THIS_VOLATILE (t)) 
+    return;
+
+  /* Do not care about a local automatic that is not static.  */
+  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
+    return;
+
+  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
+    {
+      /* If the front end set the variable to be READONLY and
+	 constant, we can allow this variable in pure or const
+	 functions but the scope is too large for our analysis to set
+	 these bits ourselves.  */
+      
+      if (TREE_READONLY (t)
+	  && DECL_INITIAL (t)
+	  && is_gimple_min_invariant (DECL_INITIAL (t)))
+	; /* Read of a constant, do not change the function state.  */
+      else 
+	{
+	  /* The type escapes for all public and externs. */
+	  mark_interesting_type (TREE_TYPE (t), FULL_ESCAPE);
+	}
+    }
+}
+
+/* If T is a VAR_DECL for a static that we are interested in, add the
+   uid to the bitmap.  */
+
+static void
+check_operand (tree t)
+{
+  if (!t) return;
+
+  /* This is an assignment from a function, register the types as
+     escaping.  */
+  if (TREE_CODE (t) == FUNCTION_DECL)
+    check_function_parameter_and_return_types (t, true);
+
+  else if (TREE_CODE (t) == VAR_DECL)
+    {
+      mark_type_seen (TREE_TYPE (t));
+      has_proper_scope_for_analysis (t); 
+    }
+}
+
+/* Examine tree T for references.   */
+
+static void
+check_tree (tree t)
+{
+  if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
+    return;
+
+  while (TREE_CODE (t) == REALPART_EXPR 
+	 || TREE_CODE (t) == IMAGPART_EXPR
+	 || handled_component_p (t))
+    {
+      if (TREE_CODE (t) == ARRAY_REF)
+	check_operand (TREE_OPERAND (t, 1));
+      t = TREE_OPERAND (t, 0);
+    }
+
+  /* The bottom of an indirect reference can only be read, not
+     written.  So just recurse and whatever we find, check it against
+     the read bitmaps.  */
+  if (INDIRECT_REF_P (t))
+/*  || TREE_CODE (t) == MEM_REF) */
+    check_tree (TREE_OPERAND (t, 0));
+
+  if (SSA_VAR_P (t) || (TREE_CODE (t) == FUNCTION_DECL))
+    check_operand (t);
+}
+
+/* Given a memory reference T, will return the variable at the bottom
+   of the access.  Unlike get_base_address, this will recurse thru
+   INDIRECT_REFS.  */
+
+static tree
+get_base_var (tree t)
+{
+  if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
+    return t;
+
+  while (!SSA_VAR_P (t) 
+	 && (!CONSTANT_CLASS_P (t))
+	 && TREE_CODE (t) != LABEL_DECL
+	 && TREE_CODE (t) != FUNCTION_DECL
+	 && TREE_CODE (t) != CONST_DECL)
+    {
+      t = TREE_OPERAND (t, 0);
+    }
+  return t;
+} 
+
+/* Create an address_of edge FROM_TYPE.TO_TYPE.  */
+static void
+mark_interesting_addressof (tree to_type, tree from_type)
+{
+  from_type = TYPE_MAIN_VARIANT (from_type);
+  to_type = TYPE_MAIN_VARIANT (to_type);
+  if (ipa_type_escape_star_count_of_interesting_type (from_type) == 0)
+    {
+      int uid = TYPE_UID (from_type);
+      bitmap from_type_map;
+      splay_tree_node result = 
+	splay_tree_lookup (uid_to_addressof_map, (splay_tree_key) uid);
+ 
+      if (result) 
+	from_type_map = (bitmap) result->value;  
+      else 
+	{
+	  from_type_map = BITMAP_ALLOC (&ipa_obstack);
+	  splay_tree_insert (uid_to_addressof_map,
+			     uid, 
+			     (splay_tree_value)from_type_map);
+	}
+      bitmap_set_bit (from_type_map, TYPE_UID (to_type));
+      mark_type_seen (from_type);
+      mark_any_type_seen (to_type);
+    }
+  else 
+    {
+      fprintf(stderr, "trying to mark the address of pointer type ");
+      print_generic_expr (stderr, from_type, 0);
+      fprintf(stderr, "\n");
+      gcc_unreachable ();
+    }
+}
+
+/* Scan tree T to see if there are any addresses taken in within T.  */
+
+static void 
+look_for_address_of (tree t)
+{
+  if (TREE_CODE (t) == ADDR_EXPR)
+    {
+      tree x = get_base_var (t);
+      tree cref = TREE_OPERAND (t, 0);
+
+      /* If we have an expression of the form "&a.b.c.d", mark a.b,
+	 b.c and c.d as having their addresses taken.  */ 
+      tree fielddecl = NULL_TREE;
+      while (cref!= x)
+	{
+	  if (TREE_CODE (cref) == COMPONENT_REF)
+	    {
+	      fielddecl =  TREE_OPERAND (cref, 1);
+	      mark_interesting_addressof (TREE_TYPE (fielddecl), 
+					  DECL_FIELD_CONTEXT (fielddecl));
+	    }
+	  else if (TREE_CODE (cref) == ARRAY_REF)
+	    mark_type_seen (TREE_TYPE (cref));
+
+	  cref = TREE_OPERAND (cref, 0);
+	}
+
+      if (TREE_CODE (x) == VAR_DECL) 
+	has_proper_scope_for_analysis (x);
+    }
+}
+
+
+/* Scan tree T to see if there are any casts within it.
+   LHS Is the LHS of the expression involving the cast.  */
+
+static void 
+look_for_casts (tree lhs __attribute__((unused)), tree t)
+{
+  if (is_gimple_cast (t) || TREE_CODE (t) == VIEW_CONVERT_EXPR)
+    {
+      tree castfromvar = TREE_OPERAND (t, 0);
+      check_cast (TREE_TYPE (t), castfromvar);
+    }
+  else if (TREE_CODE (t) == COMPONENT_REF
+	   || TREE_CODE (t) == INDIRECT_REF
+	   || TREE_CODE (t) == BIT_FIELD_REF)
+    {
+      tree base = get_base_address (t);
+      while (t != base)
+	{
+	  t = TREE_OPERAND (t, 0);
+	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
+	    {
+	      /* This may be some part of a component ref.
+		 IE it may be a.b.VIEW_CONVERT_EXPR<weird_type>(c).d, AFAIK.
+		 castfromref will give you a.b.c, not a. */
+	      tree castfromref = TREE_OPERAND (t, 0);
+	      check_cast (TREE_TYPE (t), castfromref);
+	    }
+	  else if (TREE_CODE (t) == COMPONENT_REF)
+	    mark_type_seen (TREE_TYPE (TREE_OPERAND (t, 1)));
+	}
+    } 
+} 
+
+/* Check to see if T is a read or address of operation on a static var
+   we are interested in analyzing.  */
+
+static void
+check_rhs_var (tree t)
+{
+  look_for_address_of (t);
+  check_tree(t);
+}
+
+/* Check to see if T is an assignment to a static var we are
+   interested in analyzing.  */
+
+static void
+check_lhs_var (tree t)
+{
+  check_tree(t);
+}
+
+/* This is a scaled down version of get_asm_expr_operands from
+   tree_ssa_operands.c.  The version there runs much later and assumes
+   that aliasing information is already available. Here we are just
+   trying to find if the set of inputs and outputs contain references
+   or address of operations to local.  FN is the function being
+   analyzed and STMT is the actual asm statement.  */
+
+static void
+get_asm_expr_operands (tree stmt)
+{
+  int noutputs = list_length (ASM_OUTPUTS (stmt));
+  const char **oconstraints
+    = (const char **) alloca ((noutputs) * sizeof (const char *));
+  int i;
+  tree link;
+  const char *constraint;
+  bool allows_mem, allows_reg, is_inout;
+  
+  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
+    {
+      oconstraints[i] = constraint
+	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+      parse_output_constraint (&constraint, i, 0, 0,
+			       &allows_mem, &allows_reg, &is_inout);
+      
+      check_lhs_var (TREE_VALUE (link));
+    }
+
+  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
+    {
+      constraint
+	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
+			      oconstraints, &allows_mem, &allows_reg);
+      
+      check_rhs_var (TREE_VALUE (link));
+    }
+  
+  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
+    if (simple_cst_equal(TREE_VALUE (link), memory_identifier_string) == 1) 
+      {
+	/* Abandon all hope, ye who enter here. */
+	/* FIXME I think something drastic needs to be done here. */
+      }      
+}
+
+/* Check the parameters of the function call CALL_EXPR to see if any
+   of them are static vars.  Also check to see if this is either an
+   indirect call, a call outside the compilation unit, or has special
+   attributes that affect the clobbers, and check for implicit casts
+   in the argument passing.  Returns true iff the call has the
+   ECF_MALLOC flag (a malloc-like function).  */
+
+static bool
+check_call (tree call_expr) 
+{
+  int flags = call_expr_flags(call_expr);
+  tree operand_list = TREE_OPERAND (call_expr, 1);
+  tree operand;
+  tree callee_t = get_callee_fndecl (call_expr);
+  tree argument;
+  struct cgraph_node* callee;
+  enum availability avail = AVAIL_NOT_AVAILABLE;
+
+  /* Every actual argument is a use.  (The loop-local ARGUMENT below
+     shadows the outer declaration above, which is only used in the
+     cast-checking loops further down.)  */
+  for (operand = operand_list;
+       operand != NULL_TREE;
+       operand = TREE_CHAIN (operand))
+    {
+      tree argument = TREE_VALUE (operand);
+      check_rhs_var (argument);
+    }
+  
+  if (callee_t)
+    {
+      tree arg_type;
+      tree last_arg_type = NULL;
+      callee = cgraph_node(callee_t);
+      avail = cgraph_function_body_availability (callee);
+      
+      /* Check that there are no implicit casts in the passing of
+	 parameters.  Walk the formal argument types in parallel with
+	 the actual arguments.  */
+      if (TYPE_ARG_TYPES (TREE_TYPE (callee_t)))
+	{
+	  operand = operand_list;
+	  for (arg_type = TYPE_ARG_TYPES (TREE_TYPE (callee_t));
+	       arg_type && TREE_VALUE (arg_type) != void_type_node;
+	       arg_type = TREE_CHAIN (arg_type))
+	    {
+	      if (operand)
+		{
+		  argument = TREE_VALUE (operand);
+		  last_arg_type = TREE_VALUE(arg_type);
+		  check_cast (last_arg_type, argument);
+		  operand = TREE_CHAIN (operand);
+		}
+	      else 
+		/* The code reaches here for some unfortunate
+		   builtin functions that do not have a list of
+		   argument types.  */
+		break; 
+	    }
+	} 
+      else  
+	{ 
+	  /* FIXME - According to Geoff Keating, we should never
+	     have to do this; the front ends should always process
+	     the arg list from the TYPE_ARG_LIST. */
+	  /* No TYPE_ARG_TYPES: fall back to the DECL_ARGUMENTS chain,
+	     whose elements are PARM_DECLs rather than type list nodes
+	     (hence TREE_TYPE instead of TREE_VALUE below).  */
+	  operand = operand_list;
+	  for (arg_type = DECL_ARGUMENTS (callee_t); 
+	       arg_type;
+	       arg_type = TREE_CHAIN (arg_type))
+	    {
+	      if (operand)
+		{
+		  argument = TREE_VALUE (operand);
+		  last_arg_type = TREE_TYPE(arg_type);
+		  check_cast (last_arg_type, argument);
+		  operand = TREE_CHAIN (operand);
+		} 
+	      else 
+		/* The code reaches here for some unfortunate
+		   builtin functions that do not have a list of
+		   argument types.  */
+		break; 
+	    }
+	}
+      
+      /* In the case where we have a var_args function, we need to
+	 check the remaining parameters against the last argument.  */
+      arg_type = last_arg_type;
+      for (;
+	   operand != NULL_TREE;
+	   operand = TREE_CHAIN (operand))
+	{
+	  argument = TREE_VALUE (operand);
+	  if (arg_type)
+	    check_cast (arg_type, argument);
+	  else 
+	    {
+	      /* The code reaches here for some unfortunate
+		 builtin functions that do not have a list of
+		 argument types.  Most of these functions have
+		 been marked as having their parameters not
+		 escape, but for the rest, the type is doomed.  */
+	      mark_interesting_type (TREE_TYPE (argument), FULL_ESCAPE);
+	    }
+	}
+    }
+
+  /* The callee is either unknown (indirect call) or there is just no
+     scannable code for it (external call) .  We look to see if there
+     are any bits available for the callee (such as by declaration or
+     because it is builtin) and process solely on the basis of those
+     bits. */
+
+  if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
+    {
+      /* If this is a direct call to an external function, mark all of
+	 the parameter and return types.  */
+      for (operand = operand_list;
+	   operand != NULL_TREE;
+	   operand = TREE_CHAIN (operand))
+	{
+	  mark_interesting_type (TREE_TYPE (TREE_VALUE (operand)), 
+				 EXPOSED_PARAMETER);
+	}
+	  
+      if (callee_t) 
+	mark_interesting_type (TREE_TYPE (TREE_TYPE (callee_t)), 
+			       EXPOSED_PARAMETER);
+    }
+  return (flags & ECF_MALLOC);
+}
+
+/* Return true iff applying CODE to OP0 and OP1 is a "safe" pointer
+   operation for escape purposes.  OP0 is the one we *know* is a
+   pointer type; OP1 may be a pointer type.  */
+static bool 
+okay_pointer_operation (enum tree_code code,  tree op0, tree op1)
+{
+  tree op0type = TYPE_MAIN_VARIANT (TREE_TYPE (op0));
+  tree op1type = TYPE_MAIN_VARIANT (TREE_TYPE (op1));
+
+  /* Pointer combined with pointer is never safe.  */
+  if (POINTER_TYPE_P (op1type))
+    return false;
+
+  /* Only scaling or offsetting by exactly the size of the pointed-to
+     type is accepted.  TODO: Handle multiples of op0 size as well.  */
+  if (code == MULT_EXPR || code == PLUS_EXPR || code == MINUS_EXPR)
+    return operand_equal_p (size_in_bytes (op0type), op1, 0) != 0;
+
+  /* Every other operator is treated as unsafe.  */
+  return false;
+}
+
+/* Walk tree and record all calls.  Called via walk_tree.  The data is
+   the function that is being scanned.  */
+/* TP is the part of the tree currently under the
+   microscope. WALK_SUBTREES is part of the walk_tree api but is
+   unused here.  DATA is cgraph_node of the function being walked.  */
+
+static tree
+scan_for_refs (tree *tp, int *walk_subtrees, void *data)
+{
+  struct cgraph_node *fn = data;
+  tree t = *tp;
+
+  switch (TREE_CODE (t))  
+    {
+    case VAR_DECL:
+      /* Scan the initializer once; VISITED_NODES keeps shared
+	 initializers from being rescanned.  */
+      if (DECL_INITIAL (t))
+	walk_tree (&DECL_INITIAL (t), scan_for_refs, fn, visited_nodes);
+      *walk_subtrees = 0;
+      break;
+
+    case MODIFY_EXPR:
+      {
+	/* First look on the lhs and see what variable is stored to */
+	tree lhs = TREE_OPERAND (t, 0);
+	tree rhs = TREE_OPERAND (t, 1);
+
+	check_lhs_var (lhs);
+	check_cast (TREE_TYPE (lhs), rhs);
+
+	/* For the purposes of figuring out what the cast affects */
+
+	/* Next check the operands on the rhs to see if they are ok. */
+	switch (TREE_CODE_CLASS (TREE_CODE (rhs))) 
+	  {
+	  case tcc_binary:	    
+	    {
+	      tree op0 = TREE_OPERAND (rhs, 0);
+	      tree op1 = TREE_OPERAND (rhs, 1);
+
+	      /* If this is pointer arithmetic of any bad sort, then
+		 we need to mark the types as bad.  For binary
+		 operations, no binary operator we currently support
+		 is always "safe" in regard to what it would do to
+		 pointers for purposes of determining which types
+		 escape, except operations of the size of the type.
+		 It is possible that min and max under the right set
+		 of circumstances and if the moon is in the correct
+		 place could be safe, but it is hard to see how this
+		 is worth the effort.  */
+
+	      if (POINTER_TYPE_P (TREE_TYPE (op0))
+		  && !okay_pointer_operation (TREE_CODE (rhs), op0, op1))
+		mark_interesting_type (TREE_TYPE (op0), FULL_ESCAPE);
+	      if (POINTER_TYPE_P (TREE_TYPE (op1))
+		  && !okay_pointer_operation (TREE_CODE (rhs), op1, op0))
+		mark_interesting_type (TREE_TYPE (op1), FULL_ESCAPE);
+	      
+	      look_for_casts (lhs, op0);
+	      look_for_casts (lhs, op1);
+	      check_rhs_var (op0);
+	      check_rhs_var (op1);
+	    }
+	    break;
+	  case tcc_unary:
+	    {
+	      tree op0 = TREE_OPERAND (rhs, 0);
+	      /* For unary operations, if the operation is NEGATE or
+		 ABS on a pointer, this is also considered pointer
+		 arithmetic and thus, bad for business.  Test the code
+		 of RHS itself: RHS is the unary expression and OP0 is
+		 merely its operand, which in gimplified code is never
+		 itself a NEGATE_EXPR or ABS_EXPR.  (The previous
+		 version tested TREE_CODE (op0), so the condition
+		 could never be true.)  */
+	      if ((TREE_CODE (rhs) == NEGATE_EXPR
+		   || TREE_CODE (rhs) == ABS_EXPR)
+		  && POINTER_TYPE_P (TREE_TYPE (op0)))
+		{
+		  mark_interesting_type (TREE_TYPE (op0), FULL_ESCAPE);
+		}
+	      check_rhs_var (op0);
+	      look_for_casts (lhs, op0);
+	      look_for_casts (lhs, rhs);
+	    }
+
+	    break;
+	  case tcc_reference:
+	    look_for_casts (lhs, rhs);
+	    check_rhs_var (rhs);
+	    break;
+	  case tcc_declaration:
+	    check_rhs_var (rhs);
+	    break;
+	  case tcc_expression:
+	    switch (TREE_CODE (rhs)) 
+	      {
+	      case ADDR_EXPR:
+		look_for_casts (lhs, TREE_OPERAND (rhs, 0));
+		check_rhs_var (rhs);
+		break;
+	      case CALL_EXPR: 
+		/* If this is a call to malloc, squirrel away the
+		   result so we do not mark the resulting cast as
+		   being bad.  */
+		if (check_call (rhs))
+		  bitmap_set_bit (results_of_malloc, DECL_UID (lhs));
+		break;
+	      default:
+		break;
+	      }
+	    break;
+	  default:
+	    break;
+	  }
+	*walk_subtrees = 0;
+      }
+      break;
+
+    case ADDR_EXPR:
+      /* This case is here to find addresses on rhs of constructors in
+	 decl_initial of static variables. */
+      check_rhs_var (t);
+      *walk_subtrees = 0;
+      break;
+
+    case CALL_EXPR: 
+      /* A call statement whose value is unused.  */
+      check_call (t);
+      *walk_subtrees = 0;
+      break;
+      
+    case ASM_EXPR:
+      get_asm_expr_operands (t);
+      *walk_subtrees = 0;
+      break;
+      
+    default:
+      break;
+    }
+  return NULL;
+}
+
+
+/* The init routine for analyzing global static variable usage.  See
+   comments at top for description.  Allocates the global bitmaps and
+   the TYPE_UID-keyed splay trees used throughout the pass.  */
+static void 
+ipa_init (void) 
+{
+  bitmap_obstack_initialize (&ipa_obstack);
+  global_types_seen = BITMAP_ALLOC (&ipa_obstack);
+  global_types_exposed_parameter = BITMAP_ALLOC (&ipa_obstack);
+  global_types_full_escape = BITMAP_ALLOC (&ipa_obstack);
+  results_of_malloc = BITMAP_ALLOC (&ipa_obstack);
+  /* Side maps keyed by TYPE_UID; see type_for_uid,
+     subtype_map_for_uid and close_addressof for their uses.  */
+  uid_to_type = splay_tree_new (splay_tree_compare_ints, 0, 0);
+  uid_to_subtype_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
+  uid_to_addressof_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
+
+  /* There are some shared nodes, in particular the initializers on
+     static declarations.  We do not need to scan them more than once
+     since all we would be interested in are the addressof
+     operations.  */
+  visited_nodes = pointer_set_create ();
+  initialized = true;
+}
+
+/* Check out the rhs of a static or global initialization VNODE to see
+   if any of them contain addressof operations.  Note that some of
+   these variables may  not even be referenced in the code in this
+   compilation unit but their right hand sides may contain references
+   to variables defined within this unit.  */
+
+static void 
+analyze_variable (struct cgraph_varpool_node *vnode)
+{
+  tree global = vnode->decl;
+
+  /* If this variable has exposure beyond the compilation unit, add
+     its type to the global types.  */
+  if (vnode->externally_visible)
+    mark_interesting_type (TREE_TYPE (global), FULL_ESCAPE);
+  else 
+    mark_type_seen (TREE_TYPE (global));
+
+  /* Only VAR_DECLs are expected to come through the varpool queue;
+     anything else indicates an internal error.  Use gcc_unreachable
+     rather than a bare abort, per current GCC convention.  */
+  if (TREE_CODE (global) == VAR_DECL)
+    {
+      if (DECL_INITIAL (global)) 
+	walk_tree (&DECL_INITIAL (global), scan_for_refs, 
+		   NULL, visited_nodes);
+    } 
+  else gcc_unreachable ();
+}
+
+/* This is the main routine for finding the reference patterns for
+   global variables within a function FN.  */
+
+static void
+analyze_function (struct cgraph_node *fn)
+{
+  tree decl = fn->decl;
+  /* Externally visible functions expose their parameter and return
+     types outside the compilation unit.  */
+  check_function_parameter_and_return_types (decl, 
+					     fn->local.externally_visible);
+  if (dump_file)
+    fprintf (dump_file, "\n local analysis of %s", cgraph_node_name (fn));
+  
+  {
+    struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
+    basic_block this_block;
+
+    /* Scan every statement of every basic block for references.  */
+    FOR_EACH_BB_FN (this_block, this_cfun)
+      {
+	block_stmt_iterator bsi;
+	for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
+	  walk_tree (bsi_stmt_ptr (bsi), scan_for_refs, 
+		     fn, visited_nodes);
+      }
+  }
+
+  /* Walk over any private statics that may take addresses of functions.  */
+  if (TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
+    {
+      tree step;
+      for (step = BLOCK_VARS (DECL_INITIAL (decl));
+	   step;
+	   step = TREE_CHAIN (step))
+	{
+	  if (DECL_INITIAL (step))
+	    walk_tree (&DECL_INITIAL (step), scan_for_refs, 
+		       fn, visited_nodes);
+	  mark_type_seen (TREE_TYPE (step));
+	}
+
+    }
+  
+  /* Also look here for private statics.  */
+  if (DECL_STRUCT_FUNCTION (decl))
+    {
+      tree step;
+      for (step = DECL_STRUCT_FUNCTION (decl)->unexpanded_var_list;
+	   step;
+	   step = TREE_CHAIN (step))
+	{
+	  tree var = TREE_VALUE (step);
+	  /* Only static variables' initializers are walked, but every
+	     variable's type is recorded as seen.  */
+	  if (DECL_INITIAL (var) && TREE_STATIC (var))
+	    walk_tree (&DECL_INITIAL (var), scan_for_refs, 
+		       fn, visited_nodes);
+	  mark_type_seen (TREE_TYPE (var));
+	}
+    }
+}
+
+
+
+/* Return the type whose TYPE_UID is UID, or NULL if no such type has
+   been registered in the uid_to_type map.  */
+static tree
+type_for_uid (int uid)
+{
+  splay_tree_node n = splay_tree_lookup (uid_to_type, (splay_tree_key) uid);
+
+  if (!n)
+    return NULL;
+  return (tree) n->value;
+}
+
+/* Return the bitmap holding the subtypes of the type with UID.  If
+   no map exists yet, either allocate and register a fresh one (when
+   CREATE is true) or return NULL.  */
+
+static bitmap
+subtype_map_for_uid (int uid, bool create)
+{
+  bitmap map;
+  splay_tree_node n = 
+    splay_tree_lookup (uid_to_subtype_map, (splay_tree_key) uid);
+
+  if (n)
+    return (bitmap) n->value;
+  if (!create)
+    return NULL;
+
+  map = BITMAP_ALLOC (&ipa_obstack);
+  splay_tree_insert (uid_to_subtype_map, uid, (splay_tree_value) map);
+  return map;
+}
+
+/* Mark all of the supertypes and field types of TYPE as being seen.
+   Also accumulate the subtypes for each type so that
+   close_types_full_escape can mark a subtype as escaping if the
+   supertype escapes.  */
+
+static void
+close_type_seen (tree type)
+{
+  tree field;
+  int i, uid;
+  tree binfo, base_binfo;
+
+  /* See thru all pointer tos and array ofs. */
+  type = TYPE_MAIN_VARIANT (type);
+  while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
+    type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
+  
+  uid = TYPE_UID (type);
+
+  /* BEEN_THERE_DONE_THAT terminates the recursion on cyclic type
+     graphs.  */
+  if (bitmap_bit_p (been_there_done_that, uid))
+    return;
+  bitmap_set_bit (been_there_done_that, uid);
+
+  /* If we are doing a language with a type hierarchy, mark all of
+     the superclasses.  Record this type as a subtype of each base so
+     close_type_full_escape can propagate escapes downward.  */
+  if (TYPE_BINFO (type)) 
+    for (binfo = TYPE_BINFO (type), i = 0;
+	 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
+      {
+	tree binfo_type = BINFO_TYPE (base_binfo);
+	bitmap subtype_map = subtype_map_for_uid 
+	  (TYPE_UID (TYPE_MAIN_VARIANT (binfo_type)), true);
+	bitmap_set_bit (subtype_map, uid);
+	if (mark_type_seen (binfo_type))
+	  close_type_seen (binfo_type);
+      }
+      
+  /* If the field is a struct or union type, mark all of the
+     subfields.  */
+  for (field = TYPE_FIELDS (type);
+       field;
+       field = TREE_CHAIN (field))
+    {
+      tree field_type;
+      if (TREE_CODE (field) != FIELD_DECL)
+	continue;
+
+      field_type = TREE_TYPE (field);
+      if (ipa_type_escape_star_count_of_interesting_or_array_type (field_type) >= 0)
+	if (mark_type_seen (field_type))
+	  close_type_seen (field_type);
+    }
+}
+
+/* Key for the all_unique_types splay tree: a type name plus a
+   sequence number distinguishing incompatible types that share the
+   same name.  See discover_unique_type.  */
+struct type_brand_s 
+{
+  char* name;   /* Name of the type, from get_name_of_type.  */
+  int seq;      /* Sequence number for this name, starting at 0.  */
+};
+
+/* Splay tree comparison function on type_brand_s structures: order
+   primarily by name, and for equal names by sequence number.  */
+
+static int 
+compare_type_brand (splay_tree_key sk1, splay_tree_key sk2)
+{
+  struct type_brand_s *b1 = (struct type_brand_s *) sk1;
+  struct type_brand_s *b2 = (struct type_brand_s *) sk2;
+  int cmp = strcmp (b1->name, b2->name);
+
+  if (cmp != 0)
+    return cmp;
+  return b2->seq - b1->seq;
+}
+
+/* Return the name of TYPE as a C string, or "<UNNAMED>" when no
+   usable name can be recovered.  */
+static char*
+get_name_of_type (tree type)
+{
+  tree name = TYPE_NAME (type);
+
+  if (name)
+    {
+      /* For a typedef the name is wrapped in a TYPE_DECL whose
+	 DECL_NAME holds the IDENTIFIER_NODE (some decls, most often
+	 labels, may have zero as the DECL_NAME); otherwise the name
+	 is the IDENTIFIER_NODE itself.  */
+      if (TREE_CODE (name) == TYPE_DECL && DECL_NAME (name))
+	return (char*) IDENTIFIER_POINTER (DECL_NAME (name));
+      if (TREE_CODE (name) == IDENTIFIER_NODE)
+	return (char*) IDENTIFIER_POINTER (name);
+    }
+
+  /* Unnamed type, do what you like here.  */
+  return (char*)"<UNNAMED>";
+}
+
+
+/* Use a completely lame algorithm for removing duplicate types.  This
+   code should not be here except for a bad implementation of whole
+   program compilation.  */
+/* Return either TYPE, if this is the first time TYPE has been seen,
+   or a compatible type with the same name that has already been
+   processed.  */ 
+
+static tree
+discover_unique_type (tree type)
+{
+  struct type_brand_s * brand = xmalloc (sizeof (struct type_brand_s));
+  int i = 0;
+  splay_tree_node result;
+  
+  /* Probe (name, 0), (name, 1), ... until either a compatible type
+     registered under this name is found or the instances with this
+     name are exhausted.  */
+  while (1)
+  {
+    brand->name = get_name_of_type (type);
+    brand->seq = i;
+    result = splay_tree_lookup (all_unique_types, (splay_tree_key) brand);
+    if (result)
+      {
+	tree other_type = (tree) result->value;
+	if (lang_hooks.types_compatible_p (type, other_type) == 1)
+	  {
+	    /* TYPE duplicates an already registered type; the probe
+	       key is no longer needed.  */
+	    free (brand);
+	    return other_type;
+	  }
+	/* Not compatible, look for next instance with same name.  */
+      }
+    else 
+      {
+	/* No more instances, create new one.  BRAND->seq already
+	   equals I (the old "brand->seq = i++" redundantly restored
+	   the same value, and its increment was dead because of the
+	   return).  BRAND is now owned by the splay tree.  */
+	splay_tree_insert (all_unique_types, 
+			   (splay_tree_key) brand,
+			   (splay_tree_value) type);	  
+	
+	return type;
+      }
+    i++;
+  } 
+}
+
+/* Take a TYPE that has been passed by value to an external function
+   and mark all of the fields that have pointer types as escaping. For
+   any of the non pointer types that are structures or unions,
+   recurse.  TYPE is never a pointer type.  */ 
+
+static void
+close_type_exposed_parameter (tree type)
+{
+  tree field;
+  int uid = TYPE_UID (TYPE_MAIN_VARIANT (type));
+
+  /* BEEN_THERE_DONE_THAT terminates the recursion on cyclic type
+     graphs.  */
+  if (bitmap_bit_p (been_there_done_that, uid))
+    return;
+  bitmap_set_bit (been_there_done_that, uid);
+
+  /* If the field is a struct or union type, mark all of the
+     subfields.  */
+  for (field = TYPE_FIELDS (type);
+       field;
+       field = TREE_CHAIN (field))
+    {
+      tree field_type;
+
+      if (TREE_CODE (field) != FIELD_DECL)
+	continue;
+
+      field_type = TREE_TYPE (field);
+      mark_interesting_type (field_type, EXPOSED_PARAMETER);
+
+      /* A star count of 0 means a non-pointer record/union field:
+	 recurse into its own fields.  */
+      if (ipa_type_escape_star_count_of_interesting_type (field_type) == 0) 
+	close_type_exposed_parameter (field_type);
+    }
+}
+
+/* The next function handles the case where a type fully escapes.
+   This means that not only does the type itself escape, 
+
+   a) the type of every field recursively escapes
+   b) the type of every subtype escapes as well as the super as well
+   as all of the pointer to types for each field.
+
+   Note that pointer to types are not marked as escaping.  If the
+   pointed to type escapes, the pointer to type also escapes.
+
+   Take a TYPE that has had the address taken for an instance of it
+   and mark all of the types for its fields as having their addresses
+   taken. */ 
+
+static void
+close_type_full_escape (tree type)
+{
+  tree field;
+  /* Unsigned because it serves both BINFO_BASE_ITERATE and the bitmap
+     iterator below.  */
+  unsigned int i;
+  int uid;
+  tree binfo, base_binfo;
+  bitmap_iterator bi;
+  bitmap subtype_map;
+
+  /* Strip off any pointer or array types.  */
+  type = TYPE_MAIN_VARIANT (type);
+  while (POINTER_TYPE_P (type) || TREE_CODE(type) == ARRAY_TYPE)
+    type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
+
+  uid = TYPE_UID (type);
+
+  /* BEEN_THERE_DONE_THAT terminates the recursion on cyclic type
+     graphs.  */
+  if (bitmap_bit_p (been_there_done_that, uid))
+    return;
+  bitmap_set_bit (been_there_done_that, uid);
+
+  subtype_map = subtype_map_for_uid (uid, false);
+
+  /* If we are doing a language with a type hierarchy, mark all of
+     the superclasses.  */
+  if (TYPE_BINFO (type)) 
+    for (binfo = TYPE_BINFO (type), i = 0;
+	 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
+      {
+	tree binfotype = BINFO_TYPE (base_binfo);
+	if (mark_type (binfotype, FULL_ESCAPE))
+	  close_type_full_escape (binfotype);
+      }
+      
+  /* Mark as escaped any types that have been down casted to
+     this type. */
+  if (subtype_map)
+    EXECUTE_IF_SET_IN_BITMAP (subtype_map, 0, i, bi)
+      {
+	tree subtype = type_for_uid (i); 
+	if (mark_type (subtype, FULL_ESCAPE))
+	  close_type_full_escape (subtype);
+      }
+
+  /* If the field is a struct or union type, mark all of the
+     subfields.  */
+  for (field = TYPE_FIELDS (type);
+       field;
+       field = TREE_CHAIN (field))
+    {
+      tree field_type;
+      if (TREE_CODE (field) != FIELD_DECL)
+	continue;
+
+      field_type = TREE_TYPE (field);
+      if (ipa_type_escape_star_count_of_interesting_or_array_type (field_type) >= 0)
+	if (mark_type (field_type, FULL_ESCAPE))
+	  close_type_full_escape (field_type);
+    }
+}
+
+/* It is not necessary to carry around the addressof map for any
+   escaping TYPE; release and unregister the map for FROM_TYPE if one
+   was ever recorded.  */
+ 
+static void 
+delete_addressof_map (tree from_type) 
+{
+  int uid = TYPE_UID (TYPE_MAIN_VARIANT (from_type));
+  bitmap map;
+  splay_tree_node n = 
+    splay_tree_lookup (uid_to_addressof_map, (splay_tree_key) uid);
+
+  if (!n)
+    return;
+
+  map = (bitmap) n->value;
+  BITMAP_FREE (map);
+  splay_tree_remove (uid_to_addressof_map, (splay_tree_key) uid);
+}
+
+/* Transitively close the addressof bitmap for the type with UID.
+   This means that if we had a.b and b.c, a would have both b an c in
+   it's maps.  Returns the closed map, or NULL if none was recorded
+   for UID.  */ 
+
+static bitmap
+close_addressof (int uid) 
+{
+  bitmap_iterator bi;
+  splay_tree_node result = 
+    splay_tree_lookup (uid_to_addressof_map, (splay_tree_key) uid);
+  bitmap map = NULL;
+  bitmap new_map;
+  unsigned int i;
+  
+  if (result) 
+    map = (bitmap) result->value;
+  else 
+    return NULL;
+
+  /* BEEN_THERE_DONE_THAT terminates the recursion; a revisited uid
+     returns its (possibly not yet rewritten) map as-is.  */
+  if (bitmap_bit_p (been_there_done_that, uid))
+    return map;
+  bitmap_set_bit (been_there_done_that, uid);
+
+  /* The new_map will have all of the bits for the enclosed fields and
+     will have the unique id version of the old map.  */
+  new_map = BITMAP_ALLOC (&ipa_obstack);
+
+  EXECUTE_IF_SET_IN_BITMAP (map, 0, i, bi)
+    {
+      int new_uid = unique_type_id_for (i, false);
+      bitmap submap = close_addressof (new_uid);
+      bitmap_set_bit (new_map, new_uid);
+      if (submap) 
+	bitmap_ior_into (new_map, submap);
+    }      
+  /* Replace the old map with its closed, uniquified version.  */
+  result->value = (splay_tree_value) new_map;
+
+  BITMAP_FREE (map);
+  return new_map;
+}
+
+
+/* The main entry point for type escape analysis.  */
+
+static void
+type_escape_execute (void)
+{
+  struct cgraph_node *node;
+  struct cgraph_varpool_node *vnode;
+  unsigned int i;
+  bitmap_iterator bi;
+  splay_tree_node result;
+
+  ipa_init ();
+
+  /* Process all of the variables first.  */
+  for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
+    analyze_variable (vnode);
+
+  /* Process all of the functions.
+
+     We do not want to process any of the clones so we check that this
+     is a master clone.  However, we do need to process any
+     AVAIL_OVERWRITABLE functions (these are never clones) because
+     they may cause a type variable to escape.  
+  */
+  for (node = cgraph_nodes; node; node = node->next)
+    if (node->analyzed 
+	&& (cgraph_is_master_clone (node)
+	    || (cgraph_function_body_availability (node) == AVAIL_OVERWRITABLE)))
+      analyze_function (node);
+
+
+  pointer_set_destroy (visited_nodes);
+  visited_nodes = NULL;
+
+  /* Do all of the closures to discover which types escape the
+     compilation unit.  */
+
+  been_there_done_that = BITMAP_ALLOC (&ipa_obstack);
+
+  /* Examine the types that we have directly seen in scanning the code
+     and add to that any contained types or superclasses.  */
+
+  EXECUTE_IF_SET_IN_BITMAP (global_types_seen, 0, i, bi)
+    {
+      tree type = type_for_uid (i);
+      /* Only look at records and unions with no pointer tos.  */
+      if (ipa_type_escape_star_count_of_interesting_or_array_type (type) == 0)
+	close_type_seen (type);
+    }
+  bitmap_clear (been_there_done_that);
+
+  /* Map the duplicate types to a single unique type.  This is a hack;
+     it is not a general algorithm.  */
+  uid_to_unique_type = splay_tree_new (splay_tree_compare_ints, 0, 0);
+  all_unique_types = splay_tree_new (compare_type_brand, 0, 0);
+  
+  EXECUTE_IF_SET_IN_BITMAP (global_types_seen, 0, i, bi)
+    {
+      tree unique_type = discover_unique_type (type_for_uid (i));
+      splay_tree_insert (uid_to_unique_type, 
+			 (splay_tree_key) i, 
+			 (splay_tree_value) unique_type);
+      if (dump_file)
+	fprintf(dump_file, "dta i=%d,%d j=%d,%s\n", i, 
+		TYPE_UID(type_for_uid(i)),
+		TYPE_UID(unique_type), get_name_of_type(unique_type));
+    }
+
+  /* Get rid of the temporary data structures used to find the unique
+     type.  */
+  result = splay_tree_min (all_unique_types);
+  while (result)
+    {
+      struct type_brand_s * b = (struct type_brand_s *) result->key;
+      splay_tree_remove (all_unique_types, result->key);
+      free (b);
+      result = splay_tree_min (all_unique_types);
+    }
+  splay_tree_delete (all_unique_types);
+  all_unique_types = NULL;
+
+  /* Examine all of the types passed by value and mark any enclosed
+     pointer types as escaping.  */
+
+  EXECUTE_IF_SET_IN_BITMAP (global_types_exposed_parameter, 0, i, bi)
+    {
+      close_type_exposed_parameter (type_for_uid (i));
+    }
+  bitmap_clear (been_there_done_that);
+
+  /* Close the types for escape.  If something escapes, then any
+     enclosed types escape as well as any subtypes.  */
+
+  EXECUTE_IF_SET_IN_BITMAP (global_types_full_escape, 0, i, bi)
+    {
+      tree type = type_for_uid (i);
+      close_type_full_escape (type);
+    }
+  bitmap_clear (been_there_done_that);
+
+  /* Prune the addressof maps: keep only closed maps for
+     non-escaping, unique-designate types.  */
+  result = splay_tree_min (uid_to_addressof_map);
+  while (result)
+    {
+      /* NOTE(review): splay_tree_key may be wider than int on some
+	 hosts; the narrowing here assumes TYPE_UIDs fit — confirm.  */
+      int uid = result->key;
+      tree type = type_for_uid (uid);
+      if (bitmap_bit_p (global_types_full_escape, uid))
+	/* If the type escaped, we will never use the map, so get rid
+	   of it.  */ 
+	delete_addressof_map (type);
+      else  
+	{
+	  if (unique_type_id_p (uid))
+	    /* Close the addressof map, i.e. copy all of the
+	       transitive substructures up to this level.  */
+	    close_addressof (uid);
+	  else
+	    /* This type is not the unique designate, so get rid of
+	       it.  */
+	    delete_addressof_map (type);
+	}
+      result = splay_tree_successor (uid_to_addressof_map, uid);
+    }
+  
+  /* If a type is set in global_types_full_escape, make sure that the
+     unique type is also set in that map.  */
+  bitmap_copy (been_there_done_that, global_types_full_escape);
+  EXECUTE_IF_SET_IN_BITMAP (been_there_done_that, 0, i, bi)
+    {
+      unsigned int j = unique_type_id_for (i, false);
+      if (i != j)
+	{
+	  bitmap_set_bit(global_types_full_escape, j);
+	  bitmap_clear_bit(global_types_full_escape, i);
+	}
+    }
+  bitmap_clear (been_there_done_that);
+
+  if (dump_file)
+    { 
+      EXECUTE_IF_SET_IN_BITMAP (global_types_seen, 0, i, bi)
+	{
+	  /* The pointer types are in the global_types_full_escape
+	     bitmap but not in the backwards map.  They also contain
+	     no useful information since they are not marked.  */
+	  tree type = type_for_uid (i);
+	  if (!POINTER_TYPE_P (type))
+	    {
+	      fprintf(dump_file, "type %d ", i);
+	      print_generic_expr (dump_file, type, 0);
+	      if (bitmap_bit_p (global_types_full_escape, i))
+		fprintf(dump_file, " escaped\n");
+	      else if (unique_type_id_p (i))
+		fprintf(dump_file, " contained\n");
+	      else
+		fprintf(dump_file, " replaced\n");
+	    }
+	}
+    }
+
+  /* Get rid of the subtype map.  */
+  result = splay_tree_min (uid_to_subtype_map);
+  while (result)
+    {
+      bitmap b = (bitmap)result->value;
+      BITMAP_FREE(b);
+      splay_tree_remove (uid_to_subtype_map, result->key);
+      result = splay_tree_min (uid_to_subtype_map);
+    }
+  splay_tree_delete (uid_to_subtype_map);
+  uid_to_subtype_map = NULL;
+
+  /* The remaining bitmaps (global_types_seen,
+     global_types_full_escape) are kept: the query functions exported
+     by this pass read them after the pass has run.  */
+  BITMAP_FREE (global_types_exposed_parameter);
+  BITMAP_FREE (been_there_done_that);
+  BITMAP_FREE (results_of_malloc);
+}
+
+/* Gate: run type-escape analysis only under unit-at-a-time with
+   -fipa-type-escape enabled.  */
+static bool
+gate_type_escape_vars (void)
+{
+  if (!flag_unit_at_a_time || !flag_ipa_type_escape)
+    return false;
+  /* Don't bother doing anything if the program has errors.  */
+  return errorcount == 0 && sorrycount == 0;
+}
+
+/* Pass descriptor for the IPA type-escape pass; field meanings are
+   defined by struct tree_opt_pass in tree-pass.h.  */
+struct tree_opt_pass pass_ipa_type_escape =
+{
+  "type-escape-var",			/* name */
+  gate_type_escape_vars,		/* gate */
+  type_escape_execute,			/* execute */
+  NULL,					/* sub */
+  NULL,					/* next */
+  0,					/* static_pass_number */
+  TV_IPA_TYPE_ESCAPE,	        	/* tv_id */
+  0,	                                /* properties_required */
+  0,					/* properties_provided */
+  0,					/* properties_destroyed */
+  0,					/* todo_flags_start */
+  0,                                    /* todo_flags_finish */
+  0					/* letter */
+};
+
Index: ipa-type-escape.h
===================================================================
RCS file: ipa-type-escape.h
diff -N ipa-type-escape.h
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ipa-type-escape.h	8 Jun 2005 20:16:32 -0000
@@ -0,0 +1,33 @@
+/* Type based alias analysis.
+   Copyright (C) 2004 Free Software Foundation, Inc.
+   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING.  If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA.  */
+
+#ifndef GCC_IPA_TYPE_ESCAPE_H
+#define GCC_IPA_TYPE_ESCAPE_H
+#include "tree.h"
+
+/* Queries exported by the type-escape pass.  The star-count
+   functions return a negative value when TYPE is not "interesting"
+   (callers test the result against >= 0 or == 0).  */
+bool   ipa_type_escape_type_contained_p (tree type);
+bool   ipa_type_escape_field_does_not_clobber_p (tree record_type, tree field_type);
+int    ipa_type_escape_star_count_of_interesting_type (tree type); 
+int    ipa_type_escape_star_count_of_interesting_or_array_type (tree type);
+
+
+#endif  /* GCC_IPA_TYPE_ESCAPE_H  */
+
Index: ipa-utils.c
===================================================================
RCS file: ipa-utils.c
diff -N ipa-utils.c
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ipa-utils.c	8 Jun 2005 20:16:32 -0000
@@ -0,0 +1,206 @@
+/* Utilities for ipa analysis.
+   Copyright (C) 2005 Free Software Foundation, Inc.
+   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING.  If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA.  
+*/
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "tree.h"
+#include "tree-flow.h"
+#include "tree-inline.h"
+#include "tree-pass.h"
+#include "langhooks.h"
+#include "pointer-set.h"
+#include "ggc.h"
+#include "ipa-utils.h"
+#include "ipa-reference.h"
+#include "c-common.h"
+#include "tree-gimple.h"
+#include "cgraph.h"
+#include "output.h"
+#include "flags.h"
+#include "timevar.h"
+#include "diagnostic.h"
+#include "langhooks.h"
+
+/* Debugging function for postorder and inorder code.  NOTE is a string
+   printed before the nodes; ORDER is an array of COUNT cgraph nodes.
+   All output, including the per-node dumps, is written to OUT.  */
+
+void 
+ipa_utils_print_order (FILE* out, 
+		       const char * note, 
+		       struct cgraph_node** order, 
+		       int count) 
+{
+  int i;
+  fprintf (out, "\n\n ordered call graph: %s\n", note);
+  
+  for (i = count - 1; i >= 0; i--)
+    dump_cgraph_node (out, order[i]);
+  fprintf (out, "\n");
+  fflush (out);
+}
+
+/* Traversal state shared by the recursive searchc calls.  */
+struct searchc_env {
+  struct cgraph_node **stack;	 /* SCC candidate stack.  */
+  int stack_size;		 /* Number of nodes currently on STACK.  */
+  struct cgraph_node **result;	 /* Output array for finished nodes.  */
+  int order_pos;		 /* Next free slot in RESULT.  */
+  splay_tree nodes_marked_new;	 /* Worklist of not-yet-visited nodes.  */
+  bool reduce;			 /* Collapse each cycle to one node.  */
+  int count;			 /* Next depth-first number to assign.  */
+};
+
+/* This is an implementation of Tarjan's strongly connected region
+   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
+   Analysis of Computer Algorithms (1974), pages 192-193.  This version
+   has been customized for cgraph_nodes.  ENV carries the traversal
+   state (stack, counters, worklist) and exists only because this
+   function is recursive and C has no nested functions; it should be
+   called only from itself or from ipa_utils_reduced_inorder.  V is
+   the node to start searching from.  When the root of an SCC is
+   found, its members are popped and emitted (or chained together).  */
+
+static void
+searchc (struct searchc_env* env, struct cgraph_node *v) 
+{
+  struct cgraph_edge *edge;
+  struct ipa_dfs_info *v_info = v->aux;
+  
+  /* Mark V as old and remove it from the not-yet-visited worklist.  */
+  v_info->new = false;
+  splay_tree_remove (env->nodes_marked_new, v->uid);
+  
+  v_info->dfn_number = env->count;
+  v_info->low_link = env->count;
+  env->count++;
+  env->stack[(env->stack_size)++] = v;
+  v_info->on_stack = true;
+  
+  for (edge = v->callees; edge; edge = edge->next_callee)
+    {
+      struct ipa_dfs_info * w_info;
+      struct cgraph_node *w = edge->callee;
+      /* Bypass the clones and only look at the master node.  Skip
+	 external and other bogus nodes.  */
+      w = cgraph_master_clone (w);
+      if (w && w->aux) 
+	{
+	  w_info = w->aux;
+	  if (w_info->new) 
+	    {
+	      searchc (env, w);
+	      v_info->low_link =
+		(v_info->low_link < w_info->low_link) ?
+		v_info->low_link : w_info->low_link;
+	    } 
+	  else 
+	    if ((w_info->dfn_number < v_info->dfn_number) 
+		&& (w_info->on_stack)) 
+	      v_info->low_link =
+		(w_info->dfn_number < v_info->low_link) ?
+		w_info->dfn_number : v_info->low_link;
+	}
+    }
+
+
+  if (v_info->low_link == v_info->dfn_number)  /* V is the root of an SCC.  */
+    {
+      struct cgraph_node *last = NULL;
+      struct cgraph_node *x;
+      struct ipa_dfs_info *x_info;
+      do {
+	x = env->stack[--(env->stack_size)];
+	x_info = x->aux;
+	x_info->on_stack = false;
+	
+	if (env->reduce) 
+	  {
+	    x_info->next_cycle = last;
+	    last = x;
+	  } 
+	else 
+	  env->result[env->order_pos++] = x;
+      } 
+      while (v != x);  /* Stop once the SCC root V itself is popped.  */
+      if (env->reduce) 
+	env->result[env->order_pos++] = v;
+    }
+}
+
+/* Topsort the call graph by caller relation.  Put the result in ORDER
+   and return the number of entries written.  The REDUCE flag requests
+   that each cycle be collapsed to a single representative node.  Only
+   analyzed master clones (or overwritable bodies, if allowed) are used.  */
+
+int
+ipa_utils_reduced_inorder (struct cgraph_node **order, 
+			   bool reduce, bool allow_overwritable)
+{
+  struct cgraph_node *node;
+  struct searchc_env env;
+  splay_tree_node result;
+  env.stack = xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
+  env.stack_size = 0;
+  env.result = order;
+  env.order_pos = 0;
+  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
+  env.count = 1;
+  env.reduce = reduce;
+  
+  for (node = cgraph_nodes; node; node = node->next) 
+    if ((node->analyzed)
+	&& (cgraph_is_master_clone (node) 
+	 || (allow_overwritable 
+	     && (cgraph_function_body_availability (node) == AVAIL_OVERWRITABLE))))
+      {
+	/* Reuse the info if it is already there.  */
+	struct ipa_dfs_info *info = node->aux;
+	if (!info)
+	  info = xcalloc (1, sizeof (struct ipa_dfs_info));
+	info->new = true;
+	info->on_stack = false;
+	info->next_cycle = NULL;
+	node->aux = info;
+	
+	splay_tree_insert (env.nodes_marked_new,
+			   (splay_tree_key)node->uid, 
+			   (splay_tree_value)node);
+      } 
+    else 
+      node->aux = NULL;
+  result = splay_tree_min (env.nodes_marked_new);
+  while (result)  /* Run a DFS from each node still marked new.  */
+    {
+      node = (struct cgraph_node *)result->value;
+      searchc (&env, node);
+      result = splay_tree_min (env.nodes_marked_new);
+    }
+  splay_tree_delete (env.nodes_marked_new);
+  free (env.stack);
+
+  return env.order_pos;
+}
+
+
Index: ipa-utils.h
===================================================================
RCS file: ipa-utils.h
diff -N ipa-utils.h
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ipa-utils.h	8 Jun 2005 20:16:32 -0000
@@ -0,0 +1,47 @@
+/* Utilities for ipa analysis.
+   Copyright (C) 2004-2005 Free Software Foundation, Inc.
+   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING.  If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA.  */
+
+#ifndef GCC_IPA_UTILS_H
+#define GCC_IPA_UTILS_H
+#include "tree.h"
+#include "cgraph.h"
+
+/* Used for parsing attributes of asm code.  */
+extern tree memory_identifier_string;
+/* Per-node state for the DFS/SCC walk in ipa_utils_reduced_inorder.  */
+struct ipa_dfs_info {
+  int dfn_number;		   /* Depth-first search number.  */
+  int low_link;			   /* Smallest dfn reachable (Tarjan).  */
+  bool new;			   /* Not yet visited.  NOTE(review): `new' is a C++ keyword.  */
+  bool on_stack;		   /* Currently on the SCC stack.  */
+  struct cgraph_node* next_cycle;  /* Links other members of this cycle.  */
+  PTR aux;			   /* Scratch; not used by ipa-utils itself.  */
+};
+
+
+
+/* In ipa-utils.c  */
+void ipa_utils_print_order (FILE*, const char *, struct cgraph_node**, int);
+int ipa_utils_reduced_inorder (struct cgraph_node **, bool, bool);
+ 
+#endif  /* GCC_IPA_UTILS_H  */
+
+
Index: opts.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/opts.c,v
retrieving revision 1.113
diff -u -p -r1.113 opts.c
--- opts.c	1 Jun 2005 07:02:17 -0000	1.113
+++ opts.c	8 Jun 2005 20:16:33 -0000
@@ -522,6 +522,8 @@ decode_options (unsigned int argc, const
       flag_loop_optimize = 1;
       flag_if_conversion = 1;
       flag_if_conversion2 = 1;
+      flag_ipa_pure_const = 1;
+      flag_ipa_reference = 1;
       flag_tree_ccp = 1;
       flag_tree_dce = 1;
       flag_tree_dom = 1;
@@ -554,6 +556,7 @@ decode_options (unsigned int argc, const
       flag_cse_skip_blocks = 1;
       flag_gcse = 1;
       flag_expensive_optimizations = 1;
+      flag_ipa_type_escape = 1;
       flag_strength_reduce = 1;
       flag_rerun_cse_after_loop = 1;
       flag_rerun_loop_opt = 1;
Index: passes.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/passes.c,v
retrieving revision 2.91
diff -u -p -r2.91 passes.c
--- passes.c	25 May 2005 12:33:32 -0000	2.91
+++ passes.c	8 Jun 2005 20:16:33 -0000
@@ -836,28 +836,6 @@ rest_of_handle_cfg (void)
     cleanup_cfg (CLEANUP_EXPENSIVE
 		 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
 
-  /* It may make more sense to mark constant functions after dead code is
-     eliminated by life_analysis, but we need to do it early, as -fprofile-arcs
-     may insert code making function non-constant, but we still must consider
-     it as constant, otherwise -fbranch-probabilities will not read data back.
-
-     life_analysis rarely eliminates modification of external memory.
-
-     FIXME: now with tree based profiling we are in the trap described above
-     again.  It seems to be easiest to disable the optimization for time
-     being before the problem is either solved by moving the transformation
-     to the IPA level (we need the CFG for this) or the very early optimization
-     passes are made to ignore the const/pure flags so code does not change.  */
-  if (optimize
-      && (!flag_tree_based_profiling
-	  || (!profile_arc_flag && !flag_branch_probabilities)))
-    {
-      /* Alias analysis depends on this information and mark_constant_function
-       depends on alias analysis.  */
-      reg_scan (get_insns (), max_reg_num ());
-      mark_constant_function ();
-    }
-
   close_dump_file (DFI_cfg, print_rtl_with_bb, get_insns ());
 }
 
Index: timevar.def
===================================================================
RCS file: /cvs/gcc/gcc/gcc/timevar.def,v
retrieving revision 1.50
diff -u -p -r1.50 timevar.def
--- timevar.def	6 Jun 2005 18:55:58 -0000	1.50
+++ timevar.def	8 Jun 2005 20:16:33 -0000
@@ -42,6 +42,9 @@ DEFTIMEVAR (TV_DUMP                  , "
 
 DEFTIMEVAR (TV_CGRAPH                , "callgraph construction")
 DEFTIMEVAR (TV_CGRAPHOPT             , "callgraph optimization")
+DEFTIMEVAR (TV_IPA_REFERENCE         , "ipa reference")
+DEFTIMEVAR (TV_IPA_PURE_CONST        , "ipa pure const")
+DEFTIMEVAR (TV_IPA_TYPE_ESCAPE       , "ipa type escape")
 /* Time spent by constructing CFG.  */
 DEFTIMEVAR (TV_CFG                   , "cfg construction")
 /* Time spent by cleaning up CFG.  */
Index: tree-dfa.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-dfa.c,v
retrieving revision 2.56
diff -u -p -r2.56 tree-dfa.c
--- tree-dfa.c	7 Jun 2005 12:01:25 -0000	2.56
+++ tree-dfa.c	8 Jun 2005 20:16:34 -0000
@@ -46,6 +46,7 @@ Boston, MA 02111-1307, USA.  */
 #include "convert.h"
 #include "params.h"
 #include "cgraph.h"
+#include "ipa-reference.h"
 
 /* Build and maintain data flow information for trees.  */
 
@@ -105,6 +106,8 @@ find_referenced_vars (void)
   block_stmt_iterator si;
   struct walk_state walk_state;
 
+  ipa_reference_reset_maps ();
+
   vars_found = htab_create (50, htab_hash_pointer, htab_eq_pointer, NULL);
   memset (&walk_state, 0, sizeof (walk_state));
   walk_state.vars_found = vars_found;
Index: tree-flow.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-flow.h,v
retrieving revision 2.119
diff -u -p -r2.119 tree-flow.h
--- tree-flow.h	7 Jun 2005 19:51:23 -0000	2.119
+++ tree-flow.h	8 Jun 2005 20:16:34 -0000
@@ -29,6 +29,7 @@ Boston, MA 02111-1307, USA.  */
 #include "tree-gimple.h"
 #include "tree-ssa-operands.h"
 #include "cgraph.h"
+#include "ipa-reference.h"
 
 /* Forward declare structures for the garbage collector GTY markers.  */
 #ifndef GCC_BASIC_BLOCK_H
@@ -228,6 +229,11 @@ struct var_ann_d GTY(())
      current version of this variable (an SSA_NAME).  */
   tree current_def;
   
+  /* Pointer to the structure that contains the sets of global
+     variables modified by function calls.  This field is only used
+     for FUNCTION_DECLs.  */
+  ipa_reference_vars_info_t GTY ((skip)) reference_vars_info;
+
   /* If this variable is a structure, this fields holds a list of
      symbols representing each of the fields of the structure.  */
   subvar_t subvars;
@@ -732,6 +738,10 @@ bool is_hidden_global_store (tree);
 
 /* In tree-sra.c  */
 void insert_edge_copies (tree, basic_block);
+void sra_insert_before (block_stmt_iterator *, tree);
+void sra_insert_after (block_stmt_iterator *, tree);
+void sra_init_cache (void);
+bool sra_type_can_be_decomposed_p (tree);
 
 /* In tree-loop-linear.c  */
 extern void linear_transform_loops (struct loops *);
Index: tree-optimize.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-optimize.c,v
retrieving revision 2.105
diff -u -p -r2.105 tree-optimize.c
--- tree-optimize.c	6 Jun 2005 18:55:59 -0000	2.105
+++ tree-optimize.c	8 Jun 2005 20:16:34 -0000
@@ -364,6 +364,9 @@ init_tree_optimization_passes (void)
   /* Intraprocedural optimization passes.  */
   p = &all_ipa_passes;
   NEXT_PASS (pass_ipa_inline);
+  NEXT_PASS (pass_ipa_reference);
+  NEXT_PASS (pass_ipa_pure_const); 
+  NEXT_PASS (pass_ipa_type_escape);
   *p = NULL;
 
   /* All passes needed to lower the function into shape optimizers can operate
@@ -394,6 +397,7 @@ init_tree_optimization_passes (void)
 
   p = &pass_all_optimizations.sub;
   NEXT_PASS (pass_referenced_vars);
+  NEXT_PASS (pass_promote_statics);
   NEXT_PASS (pass_create_structure_vars);
   NEXT_PASS (pass_build_ssa);
   NEXT_PASS (pass_may_alias);
Index: tree-pass.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-pass.h,v
retrieving revision 2.40
diff -u -p -r2.40 tree-pass.h
--- tree-pass.h	6 Jun 2005 18:55:59 -0000	2.40
+++ tree-pass.h	8 Jun 2005 20:16:34 -0000
@@ -221,9 +221,13 @@ extern struct tree_opt_pass pass_store_c
 extern struct tree_opt_pass pass_vrp;
 extern struct tree_opt_pass pass_create_structure_vars;
 extern struct tree_opt_pass pass_uncprop;
+extern struct tree_opt_pass pass_promote_statics;
 extern struct tree_opt_pass pass_reassoc;
 
 /* IPA Passes */
 extern struct tree_opt_pass pass_ipa_inline;
+extern struct tree_opt_pass pass_ipa_reference;
+extern struct tree_opt_pass pass_ipa_pure_const;
+extern struct tree_opt_pass pass_ipa_type_escape;
 
 #endif /* GCC_TREE_PASS_H */
Index: tree-promote-statics.c
===================================================================
RCS file: tree-promote-statics.c
diff -N tree-promote-statics.c
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ tree-promote-statics.c	8 Jun 2005 20:16:34 -0000
@@ -0,0 +1,612 @@
+/* Promotion of static variables to ssa registers
+   Copyright (C) 2004-2005 Free Software Foundation, Inc.
+   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2, or (at your option)
+any later version.
+
+GCC is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING.  If not, write to
+the Free Software Foundation, 59 Temple Place - Suite 330,
+Boston, MA 02111-1307, USA.  */
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "tree.h"
+#include "basic-block.h"
+#include "tree-flow.h"
+#include "ipa-utils.h"
+#include "ipa-reference.h"
+#include "bitmap.h"
+#include "tree-pass.h"
+#include "flags.h"
+
+#include "langhooks.h"
+
+/*
+The main idea is to promote some static variables from memory to SSA
+registers.  This transformation is only applied to those static
+variables for which the effects of subroutine calls can be understood.
+Such information is provided by functions in cgraphunit.c.
+
+The following table shows the actions that are taken to promote
+variables.  The analysis in cgraphunit constructs information about
+both local usage and the effect of any particular call.  Variables are
+broken into 4 categories: only-read, only-write, read-write, and no
+information.  (No-information variables are never promoted.)  
+
+All information is of the "may" variety: if a function is marked read,
+it means the call may read the variable, but it also may not read the
+variable.
+
+There are two possible ways to perform the promotion: assume that the
+static is live everywhere or compute the minimal live range for the
+static variable.
+
+The minimal live range path has a lot of problems:
+
+1) live variables and upwards exposed uses must first be computed.
+2) new machinery must be invented to prevent code motion algorithms
+from floating a use of the surrogate register across a register
+function call that clobbers the variable, but was not in any minimal
+live range at the time of this analysis.
+
+While the first problem is simply a lot of code, the second problem
+requires a new mechanism for pinning code and teaching all passes that
+can move code to obey these new fence posts.  
+
+The maximum live range path has the problem that this technique can
+create many false live ranges where the register is loaded after one
+call only to be stored back right before the next call.  This will eat
+a certain amount of space and requires special smarts to get rid of them.
+
+There are really 7 situations to cover in the following table.  
+
+action             read                    write                   read-write
+
+             -+---------------------------------------------------------------
+
+entry         |  load                    load                    load
+              |
+load          |  getfromreg              xxxxx                   getfromreg
+              |
+store         |  xxxx                    puttoreg                puttoreg
+              |
+call-read     |  noaction                store before            store before
+              |
+call-write    |  load after              store before            store before
+              |                          load after              load after  
+call-readwrite|  load after              store before            store before
+              |                          load after              load after  
+              |
+return        |  no action               store                   store         
+
+
+l-r	l-w	c-r	c-w	store-b	load-a
+
+0	0	0	0   |	0	0
+0	0	0	1   |	0	0
+0	0       1      	0   |	0	0
+0	0	1	1   |	0	0
+0	1	0	0   |	0	0
+0	1	0	1   |	1	1	 
+0	1       1      	0   |	1	0
+0	1	1	1   |	1	1
+1	0	0	0   |	0	0
+1	0	0	1   |   0       1
+1	0       1      	0   |	0	0	
+1	0	1	1   |	0	1
+1	1	0	0   |	0	0
+1	1	0	1   |	1	1
+1	1       1      	0   |	1	0
+1	1	1	1   |	1	1
+
+store_before = local_written & (callee_read | callee_written)
+load_after = (local_read | local_written) & callee_written
+
+
+*/
+
+static bitmap_obstack promote_obstack;
+
+/* All of the static variables under consideration by this pass that
+   are read or written within this function.   */
+static bitmap local_read;
+static bitmap local_written;
+static bitmap local_all;
+
+/* Map indexed by the var_ann uid that contains the shadow register
+   for each static variable to be promoted. */
+static tree *static_map;
+
+
+/* Return true if the asm STMT clobbers memory, i.e. its clobber list contains memory_identifier_string.  */
+
+static bool
+asm_clobbers_mem (tree stmt)
+{
+  tree link;
+  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
+    if (simple_cst_equal(TREE_VALUE (link), memory_identifier_string) == 1) 
+      return true;
+
+  return false;
+}
+
+/* Set, in INPUT_BITMAP, the uids of variables read via memory inputs
+   of asm STMT and, in OUTPUT_BITMAP, those written via its memory outputs.  */
+
+static void
+get_asm_read_and_write (bitmap input_bitmap, bitmap output_bitmap, tree stmt) 
+{
+  int noutputs = list_length (ASM_OUTPUTS (stmt));
+  const char **oconstraints
+    = (const char **) alloca ((noutputs) * sizeof (const char *));
+  int i;
+  tree link;
+  const char *constraint;
+  bool allows_mem, allows_reg, is_inout;
+
+  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
+    {
+      oconstraints[i] = constraint
+	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+      parse_output_constraint (&constraint, i, 0, 0,
+	  &allows_mem, &allows_reg, &is_inout);
+
+      /* Do not actually check if the variable is a static variable,
+         since it will be anded with the set of variables we are
+         interested in later.  */
+      if (!allows_reg && allows_mem)
+	{
+	  tree var = TREE_VALUE (link);
+	  var = get_base_address (var);
+	  if (TREE_CODE (var) == VAR_DECL)
+	    {
+	      var_ann_t va = var_ann (var);
+	      if (va) 
+		bitmap_set_bit(output_bitmap, va->uid);
+	    }
+	}
+    }
+
+  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
+    {
+      constraint
+	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
+			      oconstraints, &allows_mem, &allows_reg);
+      
+      if (!allows_reg && allows_mem)
+	{
+	  tree var = TREE_VALUE (link);
+	  var = get_base_address (var);
+	  if (TREE_CODE (var) == VAR_DECL)
+	    {
+	      var_ann_t va = var_ann (var);
+	      if (va) 
+		bitmap_set_bit(input_bitmap, va->uid);
+	    }
+	}
+    }
+}
+
+/* Generate a series of loads into the shadow registers of the static
+   variables in B1 AND B2 (their intersection), or in just B1 when B2
+   is NULL, and insert them after BSI.  */
+
+static void
+gen_loads (bitmap b1, bitmap b2, block_stmt_iterator *bsi) 
+{
+  bitmap result;
+  bitmap_iterator bi;
+  unsigned int index;
+  tree list = NULL;
+
+  if (b2) 
+    {
+      result = BITMAP_ALLOC (&promote_obstack);
+      bitmap_and (result, b1, b2);
+    }
+  else 
+    result = b1;
+
+  EXECUTE_IF_SET_IN_BITMAP(result, 0, index, bi) 
+      {
+	tree src = referenced_var (index);
+	tree dest = static_map[index];
+	tree stmt = build (MODIFY_EXPR, TREE_TYPE (src), dest, src);
+	append_to_statement_list (stmt, &list);
+      }
+
+  if (list)
+    sra_insert_after (bsi, list);
+			   
+  if (b2)
+    BITMAP_FREE (result);
+}
+
+/* Generate a series of stores from the shadow registers back to the
+   static variables in B1 AND B2 (their intersection), or in just B1
+   when B2 is NULL, and insert them before BSI.  */
+
+static void
+gen_stores (bitmap b1, bitmap b2, block_stmt_iterator *bsi) 
+{
+  bitmap result;
+  bitmap_iterator bi;
+  unsigned int index;
+  tree list = NULL;
+
+  if (b2) 
+    {
+      result = BITMAP_ALLOC (&promote_obstack);
+      bitmap_and (result, b1, b2);
+    }
+  else 
+    result = b1;
+
+  EXECUTE_IF_SET_IN_BITMAP(result, 0, index, bi) 
+      {
+	tree src = static_map[index];
+	tree dest = referenced_var (index);
+	tree stmt = build (MODIFY_EXPR, TREE_TYPE (src), dest, src);
+	append_to_statement_list (stmt, &list);
+      }
+
+  if (list)
+    sra_insert_before (bsi, list);
+			   
+  if (b2)
+    BITMAP_FREE (result);
+}
+
+/* If *TPTR is a VAR_DECL with a shadow register, substitute the register.  */
+
+static void 
+try_replace_operand(tree * tptr)
+{
+  tree t = *tptr;
+  if (TREE_CODE (t) == VAR_DECL)
+    {
+      var_ann_t va = var_ann (t);
+      tree replacement = static_map[va->uid];
+      if (replacement)
+	*tptr = replacement;
+    }
+}
+
+/* Walk down the expression *TPTR and replace its static references.  */
+
+static void
+try_replace (tree *tptr) 
+{
+  tree t = *tptr;
+  if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
+    return;
+
+  /* CONSTANT_CLASS_P (covering INTEGER_CST) is tested because some
+     people use cute things like &0->a for offsetof.  */
+  while (!SSA_VAR_P (t) 
+	 && (!CONSTANT_CLASS_P (t)) 
+	 && TREE_CODE (t) != LABEL_DECL
+	 && TREE_CODE (t) != CONST_DECL
+	 && TREE_CODE (t) != FUNCTION_DECL)
+    {
+      if (TREE_CODE (t) == ARRAY_REF)  /* Also rewrite the array index.  */
+	try_replace_operand (&TREE_OPERAND (t, 1));
+
+      tptr = &TREE_OPERAND (t, 0);
+      t = *tptr;
+    }
+  try_replace_operand (tptr);
+}
+
+/* Rewrite every static reference appearing in the argument list of
+   the call expression CALL_EXPR.  */
+
+static void
+try_replace_call_operands (tree call_expr) 
+{
+  tree arg;
+
+  /* Walk the TREE_LIST of actual arguments; function-valued
+     arguments are left alone.  */
+  for (arg = TREE_OPERAND (call_expr, 1);
+       arg != NULL_TREE;
+       arg = TREE_CHAIN (arg))
+    if (TREE_CODE (TREE_VALUE (arg)) != FUNCTION_DECL)
+      try_replace (&TREE_VALUE (arg));
+}
+
+/* Conservatively rewrite the call expression FN at SI: store every
+   written shadow register before the call and reload all of them
+   after.  Used when no information is available about the callee.  */
+
+static void 
+gen_dumb_call (tree fn, block_stmt_iterator si) 
+{
+  gen_stores (local_written, NULL, &si);
+  try_replace (&TREE_OPERAND (fn, 0));
+  try_replace_call_operands (fn);
+  gen_loads (local_all, NULL, &si);
+}
+
+
+/* Rewrite the call expression FN at SI: store dirty shadow registers
+   before the call and reload after it, guided by callee usage info.  */
+
+static void 
+try_replace_call (tree fn, block_stmt_iterator si)
+{
+  /* Store intersection of call_read and local_written
+     registers back to memory before calling.  */
+  /* int call_flags = call_expr_flags (fn); */
+  tree callee = get_callee_fndecl (fn);
+  if (callee) 
+    {
+      bitmap callee_all = BITMAP_ALLOC (&promote_obstack);
+      bitmap callee_written = ipa_reference_get_written_global (callee);
+      if (callee_written) 
+	{
+	  bitmap_ior (callee_all, 
+			 ipa_reference_get_read_global (callee), 
+			 callee_written);
+	  
+	  gen_stores (local_written, callee_all, &si);
+	  
+	  if (TREE_CODE (callee) != FUNCTION_DECL) /* NOTE(review): looks always false here.  */
+	    try_replace (&TREE_OPERAND (fn, 0));
+	  try_replace_call_operands (fn);
+	  
+	  /* This is a hack required because the call_flags are set on a
+	     function by function basis during compilation.  Thus these
+	     flags are only set if the callee has already been compiled.  */
+	  /* if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN))) */
+	  gen_loads (local_all, callee_written, &si);
+	  BITMAP_FREE (callee_all);
+	}
+      else 
+	gen_dumb_call (fn, si);
+    }
+  else
+    gen_dumb_call (fn, si);
+}
+
+
+/* Walk the entire function looking for uses of or stores to static
+   variables and changing them to use ssa shadow registers.  */ 
+
+static void
+walk_function (void)
+{
+  basic_block bb;
+  block_stmt_iterator si, ni;
+
+  FOR_EACH_BB (bb)
+    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
+      {
+	tree stmt = bsi_stmt (si);
+
+	ni = si;
+	bsi_next (&ni);
+
+	switch (TREE_CODE (stmt))
+	  {
+	  case RETURN_EXPR:
+	    /* Store all of the local_written registers back to memory
+	       before returning. */
+	    gen_stores (local_written, NULL, &si);
+	    break;
+
+	  case MODIFY_EXPR:
+	    /* Change load of static to use of reg.  Change store of
+	       static to store of reg.  */
+	    {
+	      tree t = TREE_OPERAND (stmt, 1);
+	      tree *tptr = &TREE_OPERAND (stmt, 1);
+	      
+	      /* Check to see if the bottom of the right hand side is
+		 a reference to a static variable that we care about.
+		 If it is replace the static part with the ssa
+		 register.  */
+	      
+	      /* Next check the operands on the rhs to see if they are ok. */
+	      switch (TREE_CODE_CLASS (TREE_CODE (t))) 
+		{
+		case tcc_binary:
+		  try_replace (&TREE_OPERAND (t, 0));
+		  try_replace (&TREE_OPERAND (t, 1));
+		  break;
+		case tcc_unary:
+		case tcc_reference:
+		  try_replace (&TREE_OPERAND (t, 0));
+		  break;
+		case tcc_declaration:
+		  try_replace (tptr);
+		  break;
+		case tcc_expression:
+		  switch (TREE_CODE (t)) {
+		  case ADDR_EXPR:
+		    try_replace (tptr);
+		    break;
+		  case CALL_EXPR: 
+		    try_replace_call (t, si);
+		    break;
+		  default:
+		    break;
+		  }
+		  break;
+		default:
+		  break;
+		}
+
+	      /* Check to see if the bottom of the left hand side is a
+		 reference to a static variable that we care about.
+		 If it is replace the static part with the ssa
+		 register.  */
+	      try_replace(&TREE_OPERAND (stmt, 0));
+	    }
+	    break;
+	  case CALL_EXPR:
+	    try_replace_call (stmt, si);
+   
+	    break;
+	  case ASM_EXPR:
+	    /* If the asm clobbers memory, just store everything and
+	       load it back.  */
+	    if (asm_clobbers_mem (stmt)) 
+	      { 
+		gen_stores (local_written, NULL, &si);
+		gen_loads (local_all, NULL, &si);
+	      }
+	    else 
+	      {
+		bitmap store_bitmap = BITMAP_ALLOC (&promote_obstack);
+		bitmap load_bitmap = BITMAP_ALLOC (&promote_obstack);
+		bitmap all_bitmap = BITMAP_ALLOC (&promote_obstack);
+		/* The asm reads generate stores before, and the asm
+		   writes generate loads after.  */
+		get_asm_read_and_write (store_bitmap, load_bitmap, stmt);
+		bitmap_ior (all_bitmap, store_bitmap, load_bitmap);
+		
+		gen_stores (local_written, all_bitmap , &si);
+		gen_loads (local_all, load_bitmap, &si);
+		
+		BITMAP_FREE (store_bitmap);
+		BITMAP_FREE (load_bitmap);
+		BITMAP_FREE (all_bitmap);
+	      } 
+	    break;
+	    
+	  default:
+	    break;
+	  }
+      }
+}
+
+/* Main entry point for the promotion of statics to ssa registers.  */
+
+static void
+execute_promote_statics (void)
+{
+  unsigned int index;
+  bitmap_iterator bi;
+  bitmap tb = ipa_reference_get_read_local (current_function_decl);
+
+
+  /* There are some options that cause this pass to run even if file
+     at a time is not set.  */
+  if (!tb)
+    return;
+
+  bitmap_obstack_initialize (&promote_obstack);
+  sra_init_cache ();
+
+  local_read = BITMAP_ALLOC (&promote_obstack);
+  bitmap_copy (local_read, tb);
+  tb = ipa_reference_get_written_local (current_function_decl);
+  local_written = BITMAP_ALLOC (&promote_obstack);
+  bitmap_copy (local_written, tb);
+
+  /* Create the shadow registers for each static variable that is
+     either read or written in this function.  */
+  static_map = xcalloc (num_referenced_vars, sizeof (tree));
+
+  local_all = BITMAP_ALLOC (&promote_obstack);
+  tb = BITMAP_ALLOC (&promote_obstack);  /* Reused below: vars to ignore.  */
+  bitmap_ior (local_all, local_read, local_written);
+
+  if (dump_file)
+    fprintf (dump_file, "promoting in %s\n", 
+	     lang_hooks.decl_printable_name (current_function_decl, 2)); 
+
+  EXECUTE_IF_SET_IN_BITMAP (local_all, 0, index, bi) 
+      {
+	tree svar = referenced_var (index);
+	tree type = TREE_TYPE (svar);
+	/* We only promote variables that are either scalars or if
+	   they are aggregates, they must be a type that sra is
+	   willing to scalarize.  Otherwise there is no reason to
+	   promote it to a register.  
+
+	   We also do not promote anything that is marked READONLY
+	   since there is little gain.  The optimizations should
+	   generally be able to look through the operations and find the
+	   constants.  */
+	if ((!TREE_READONLY(svar)) 
+	    && (TREE_CODE (type) != ARRAY_TYPE)
+	    && ((!AGGREGATE_TYPE_P (type))
+		|| (sra_type_can_be_decomposed_p (type))))
+	  {
+	    tree tmp = create_tmp_var (type, get_name (svar));
+	    add_referenced_tmp_var (tmp);
+	    static_map[index] = tmp;
+
+	    /* Insert loads from all read statics in the entry
+	       block.  */
+	    insert_edge_copies (build (MODIFY_EXPR, TREE_TYPE (svar), 
+				       tmp, svar), 
+				ENTRY_BLOCK_PTR);
+	    if (dump_file) 
+	      fprintf (dump_file, "  var=%s, read=%d,write=%d\n", 
+		       get_name (svar), 
+		       bitmap_bit_p (local_read, index),  
+		       bitmap_bit_p (local_written, index)); 
+	  }
+	else 
+	  {
+	    /* There is nothing to be done with this variable.  */ 
+	    bitmap_set_bit (tb, index);
+	  }
+      }
+
+  /* Clear the to be ignored variables from the local maps.  */
+  bitmap_and_compl_into (local_read, tb);
+  bitmap_and_compl_into (local_written, tb);
+  bitmap_and_compl_into (local_all, tb);
+
+  
+
+  walk_function ();
+  bsi_commit_edge_inserts ();
+
+  bitmap_obstack_release (&promote_obstack);
+  free (static_map);
+}
+/* Gate: run the promotion pass only when compiling unit-at-a-time.  */
+static bool
+gate_promote_statics (void)
+{
+  return flag_unit_at_a_time != 0;
+}
+/* Pass descriptor for static promotion (dump file suffix "pstatic").  */
+struct tree_opt_pass pass_promote_statics = 
+{
+  "pstatic",				/* name */
+  gate_promote_statics,			/* gate */
+  execute_promote_statics,		/* execute */
+  NULL,					/* sub */
+  NULL,					/* next */
+  0,					/* static_pass_number */
+  0,					/* tv_id: no timevar assigned */
+  PROP_cfg,                    		/* properties_required */
+  0,					/* properties_provided */
+  0,					/* properties_destroyed */
+  0,					/* todo_flags_start */
+  TODO_dump_func,			/* todo_flags_finish */
+  0                                     /* letter */
+};
+
+
Index: tree-sra.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-sra.c,v
retrieving revision 2.62
diff -u -p -r2.62 tree-sra.c
--- tree-sra.c	6 Jun 2005 21:14:29 -0000	2.62
+++ tree-sra.c	8 Jun 2005 20:16:35 -0000
@@ -172,8 +172,8 @@ is_sra_scalar_type (tree type)
    instantiated, just that if we decide to break up the type into
    separate pieces that it can be done.  */
 
-static bool
-type_can_be_decomposed_p (tree type)
+bool
+sra_type_can_be_decomposed_p (tree type)
 {
   unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
   tree t;
@@ -275,7 +275,7 @@ decl_can_be_decomposed_p (tree var)
     }
 
   /* We must be able to decompose the variable's type.  */
-  if (!type_can_be_decomposed_p (TREE_TYPE (var)))
+  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
     {
       if (dump_file && (dump_flags & TDF_DETAILS))
 	{
@@ -296,7 +296,7 @@ type_can_instantiate_all_elements (tree 
 {
   if (is_sra_scalar_type (type))
     return true;
-  if (!type_can_be_decomposed_p (type))
+  if (!sra_type_can_be_decomposed_p (type))
     return false;
 
   switch (TREE_CODE (type))
@@ -1761,7 +1761,7 @@ insert_edge_copies (tree stmt, basic_blo
 
 /* Helper function to insert LIST before BSI, and set up line number info.  */
 
-static void
+void
 sra_insert_before (block_stmt_iterator *bsi, tree list)
 {
   tree stmt = bsi_stmt (*bsi);
@@ -1773,7 +1773,7 @@ sra_insert_before (block_stmt_iterator *
 
 /* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */
 
-static void
+void
 sra_insert_after (block_stmt_iterator *bsi, tree list)
 {
   tree stmt = bsi_stmt (*bsi);
@@ -2130,6 +2130,16 @@ debug_sra_elt_name (struct sra_elt *elt)
   fputc ('\n', stderr);
 }
 
+void 
+sra_init_cache (void)
+{
+  if (sra_type_decomp_cache) 
+    return;
+
+  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
+  sra_type_inst_cache = BITMAP_ALLOC (NULL);
+}
+
 /* Main entry point.  */
 
 static void
@@ -2139,8 +2149,7 @@ tree_sra (void)
   gcc_obstack_init (&sra_obstack);
   sra_candidates = BITMAP_ALLOC (NULL);
   needs_copy_in = BITMAP_ALLOC (NULL);
-  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
-  sra_type_inst_cache = BITMAP_ALLOC (NULL);
+  sra_init_cache ();
   sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);
 
   /* Scan.  If we find anything, instantiate and scalarize.  */
Index: tree-ssa-alias.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-ssa-alias.c,v
retrieving revision 2.94
diff -u -p -r2.94 tree-ssa-alias.c
--- tree-ssa-alias.c	7 Jun 2005 14:30:23 -0000	2.94
+++ tree-ssa-alias.c	8 Jun 2005 20:16:36 -0000
@@ -42,6 +42,7 @@ Boston, MA 02111-1307, USA.  */
 #include "tree-pass.h"
 #include "convert.h"
 #include "params.h"
+#include "ipa-type-escape.h"
 #include "vec.h"
 
 /* 'true' after aliases have been computed (see compute_may_aliases).  */
@@ -130,6 +131,8 @@ struct alias_stats_d
   unsigned int simple_resolved;
   unsigned int tbaa_queries;
   unsigned int tbaa_resolved;
+  unsigned int structnoaddress_queries;
+  unsigned int structnoaddress_resolved;
 };
 
 
@@ -139,7 +142,7 @@ static struct alias_stats_d alias_stats;
 /* Local functions.  */
 static void compute_flow_insensitive_aliasing (struct alias_info *);
 static void dump_alias_stats (FILE *);
-static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT);
+static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
 static tree create_memory_tag (tree type, bool is_type_tag);
 static tree get_tmt_for (tree, struct alias_info *);
 static tree get_nmt_for (tree);
@@ -396,6 +399,7 @@ count_ptr_derefs (tree *tp, int *walk_su
   struct count_ptr_d *count_p = (struct count_ptr_d *) data;
 
   if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
+/*       || (TREE_CODE (*tp) == MEM_REF && MEM_REF_SYMBOL (*tp) == count_p->ptr)) */
     count_p->count++;
 
   return NULL_TREE;
@@ -483,7 +487,6 @@ count_uses_and_derefs (tree ptr, tree st
   gcc_assert (*num_uses_p >= *num_derefs_p);
 }
 
-
 /* Initialize the data structures used for alias analysis.  */
 
 static struct alias_info *
@@ -981,8 +984,8 @@ compute_flow_insensitive_aliasing (struc
 			 || bitmap_bit_p (ai->written_vars, v_ann->uid);
 	  if (!tag_stored_p && !var_stored_p)
 	    continue;
-
-	  if (may_alias_p (p_map->var, p_map->set, var, v_map->set))
+	     
+	  if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
 	    {
 	      subvar_t svars;
 	      size_t num_tag_refs, num_var_refs;
@@ -1063,7 +1066,7 @@ compute_flow_insensitive_aliasing (struc
 	  sbitmap may_aliases2 = p_map2->may_aliases;
 
 	  /* If the pointers may not point to each other, do nothing.  */
-	  if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set))
+	  if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
 	    continue;
 
 	  /* The two pointers may alias each other.  If they already have
@@ -1671,7 +1674,8 @@ maybe_create_global_var (struct alias_in
 
 static bool
 may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
-	     tree var, HOST_WIDE_INT var_alias_set)
+	     tree var, HOST_WIDE_INT var_alias_set,
+	     bool alias_set_only)
 {
   tree mem;
   var_ann_t m_ann;
@@ -1738,6 +1742,65 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem
       alias_stats.tbaa_resolved++;
       return false;
     }
+
+  /* If var is a record or union type, ptr cannot point into var
+     unless there is some explicit address operation in the
+     program that can reference a field of the ptr's dereferenced
+     type.  This also assumes that the types of both var and ptr are
+     contained within the compilation unit, and that there is no fancy
+     addressing arithmetic associated with any of the types
+     involved.  */
+
+  if ((mem_alias_set != 0) && (var_alias_set != 0))
+    {
+      tree ptr_type = TREE_TYPE (ptr);
+      tree var_type = TREE_TYPE (var);
+      
+      /* The star count is -1 if the type at the end of the pointer_to 
+	 chain is not a record or union type. */ 
+      if ((!alias_set_only) && 
+	  ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
+	{
+	  int ptr_star_count = 0;
+	  
+	  /* Ipa_type_escape_star_count_of_interesting_type is a little too
+	     restrictive for the pointer type, need to allow pointers to
+	     primitive types as long as those types cannot be pointers
+	     to everything.  */
+	  while (POINTER_TYPE_P (ptr_type))
+	    /* Strip the *'s off.  */ 
+	    {
+	      ptr_type = TREE_TYPE (ptr_type);
+	      ptr_star_count++;
+	    }
+	  
+	  /* There does not appear to be a better test to see if the 
+	     pointer type was one of the pointer to everything 
+	     types.  */
+	  
+	  if (ptr_star_count > 0)
+	    {
+	      alias_stats.structnoaddress_queries++;
+	      if (ipa_type_escape_field_does_not_clobber_p (var_type, 
+							    TREE_TYPE (ptr))) 
+		{
+		  alias_stats.structnoaddress_resolved++;
+		  alias_stats.alias_noalias++;
+		  return false;
+		}
+	    }
+	  else if (ptr_star_count == 0)
+	    {
+	      /* If ptr_type was not really a pointer type, it cannot
+		 alias.  */ 
+	      alias_stats.structnoaddress_queries++;
+	      alias_stats.structnoaddress_resolved++;
+	      alias_stats.alias_noalias++;
+	      return false;
+	    }
+	}
+    }
+
   alias_stats.alias_mayalias++;
   return true;
 }
@@ -2399,6 +2462,10 @@ dump_alias_stats (FILE *file)
 	   alias_stats.tbaa_queries);
   fprintf (file, "Total TBAA resolved:\t%u\n",
 	   alias_stats.tbaa_resolved);
+  fprintf (file, "Total non-addressable structure type queries:\t%u\n",
+	   alias_stats.structnoaddress_queries);
+  fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
+	   alias_stats.structnoaddress_resolved);
 }
   
 
Index: tree-ssa-operands.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-ssa-operands.c,v
retrieving revision 2.87
diff -u -p -r2.87 tree-ssa-operands.c
--- tree-ssa-operands.c	7 Jun 2005 12:01:29 -0000	2.87
+++ tree-ssa-operands.c	8 Jun 2005 20:16:36 -0000
@@ -34,6 +34,7 @@ Boston, MA 02111-1307, USA.  */
 #include "toplev.h"
 
 #include "langhooks.h"
+#include "ipa-reference.h"
 
 /* This file contains the code required to manage the operands cache of the 
    SSA optimizer.  For every stmt, we maintain an operand cache in the stmt 
@@ -158,7 +159,7 @@ static inline void append_def (tree *);
 static inline void append_use (tree *);
 static void append_v_may_def (tree);
 static void append_v_must_def (tree);
-static void add_call_clobber_ops (tree);
+static void add_call_clobber_ops (tree, tree);
 static void add_call_read_ops (tree);
 static void add_stmt_operand (tree *, stmt_ann_t, int);
 static void build_ssa_operands (tree stmt);
@@ -1728,7 +1729,7 @@ get_call_expr_operands (tree stmt, tree 
 	 there is no point in recording that.  */ 
       if (TREE_SIDE_EFFECTS (expr)
 	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
-	add_call_clobber_ops (stmt);
+	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
       else if (!(call_flags & ECF_CONST))
 	add_call_read_ops (stmt);
     }
@@ -1952,7 +1953,7 @@ note_addressable (tree var, stmt_ann_t s
    clobbered variables in the function.  */
 
 static void
-add_call_clobber_ops (tree stmt)
+add_call_clobber_ops (tree stmt, tree callee)
 {
   int i;
   unsigned u;
@@ -1960,6 +1961,7 @@ add_call_clobber_ops (tree stmt)
   bitmap_iterator bi;
   stmt_ann_t s_ann = stmt_ann (stmt);
   struct stmt_ann_d empty_ann;
+  bitmap not_read_b, not_written_b;
 
   /* Functions that are not const, pure or never return may clobber
      call-clobbered variables.  */
@@ -1974,8 +1976,22 @@ add_call_clobber_ops (tree stmt)
       return;
     }
 
+  /* FIXME - if we have better information from the static vars
+     analysis, we need to make the cache call site specific.  This way
+     we can have the performance benefits even if we are doing good
+     optimization.  */
+
+  /* Get info for local and module level statics.  There is a bit
+     set for each static if the call being processed does not read
+     or write that variable.  */
+
+  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL; 
+  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL; 
+
   /* If cache is valid, copy the elements into the build vectors.  */
-  if (ssa_call_clobbered_cache_valid)
+  if (ssa_call_clobbered_cache_valid
+      && (!not_read_b || bitmap_empty_p (not_read_b))
+      && (!not_written_b || bitmap_empty_p (not_written_b)))
     {
       /* Process the caches in reverse order so we are always inserting at
          the head of the list.  */
@@ -2010,43 +2026,62 @@ add_call_clobber_ops (tree stmt)
       if (unmodifiable_var_p (var))
 	add_stmt_operand (&var, &empty_ann, opf_none);
       else
-	add_stmt_operand (&var, &empty_ann, opf_is_def | opf_non_specific);
+	{
+	  bool not_read
+	    = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
+	  bool not_written
+	    = not_written_b ? bitmap_bit_p (not_written_b, u) : false;
+
+	  if ((TREE_READONLY (var)
+	       && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
+	      || not_written)
+	    {
+	      if (!not_read)
+		add_stmt_operand (&var, &empty_ann, opf_none);
+	    }
+	  else
+	    add_stmt_operand (&var, &empty_ann, opf_is_def);
+	}
     }
 
-  clobbered_aliased_loads = empty_ann.makes_aliased_loads;
-  clobbered_aliased_stores = empty_ann.makes_aliased_stores;
-
-  /* Set the flags for a stmt's annotation.  */
-  if (s_ann)
+  if ((!not_read_b || bitmap_empty_p (not_read_b))
+      && (!not_written_b || bitmap_empty_p (not_written_b)))
     {
-      s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
-      s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
-    }
-
-  /* Prepare empty cache vectors.  */
-  VEC_truncate (tree, clobbered_vuses, 0);
-  VEC_truncate (tree, clobbered_v_may_defs, 0);
+      clobbered_aliased_loads = empty_ann.makes_aliased_loads;
+      clobbered_aliased_stores = empty_ann.makes_aliased_stores;
 
-  /* Now fill the clobbered cache with the values that have been found.  */
-  for (i = opbuild_first (&build_vuses);
-       i != OPBUILD_LAST;
-       i = opbuild_next (&build_vuses, i))
-    VEC_safe_push (tree, heap, clobbered_vuses,
-		   opbuild_elem_virtual (&build_vuses, i));
-
-  gcc_assert (opbuild_num_elems (&build_vuses) 
-	      == VEC_length (tree, clobbered_vuses));
+      /* Set the flags for a stmt's annotation.  */
+      if (s_ann)
+	{
+	  s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
+	  s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
+	}
 
-  for (i = opbuild_first (&build_v_may_defs);
-       i != OPBUILD_LAST;
-       i = opbuild_next (&build_v_may_defs, i))
-    VEC_safe_push (tree, heap, clobbered_v_may_defs, 
-		   opbuild_elem_virtual (&build_v_may_defs, i));
+      /* Prepare empty cache vectors.  */
+      VEC_truncate (tree, clobbered_vuses, 0);
+      VEC_truncate (tree, clobbered_v_may_defs, 0);
+
+      /* Now fill the clobbered cache with the values that have been found.  */
+      for (i = opbuild_first (&build_vuses);
+	   i != OPBUILD_LAST;
+	   i = opbuild_next (&build_vuses, i))
+	VEC_safe_push (tree, heap, clobbered_vuses,
+		       opbuild_elem_virtual (&build_vuses, i));
+
+      gcc_assert (opbuild_num_elems (&build_vuses) 
+		  == VEC_length (tree, clobbered_vuses));
+
+      for (i = opbuild_first (&build_v_may_defs);
+	   i != OPBUILD_LAST;
+	   i = opbuild_next (&build_v_may_defs, i))
+	VEC_safe_push (tree, heap, clobbered_v_may_defs, 
+		       opbuild_elem_virtual (&build_v_may_defs, i));
 
-  gcc_assert (opbuild_num_elems (&build_v_may_defs) 
-	      == VEC_length (tree, clobbered_v_may_defs));
+      gcc_assert (opbuild_num_elems (&build_v_may_defs) 
+		  == VEC_length (tree, clobbered_v_may_defs));
 
-  ssa_call_clobbered_cache_valid = true;
+      ssa_call_clobbered_cache_valid = true;
+    }
 }
 
 
Index: testsuite/gcc.dg/tree-ssa/ssa-dce-2.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/testsuite/gcc.dg/tree-ssa/ssa-dce-2.c,v
retrieving revision 1.3
diff -u -p -r1.3 ssa-dce-2.c
--- testsuite/gcc.dg/tree-ssa/ssa-dce-2.c	31 Mar 2005 18:34:16 -0000	1.3
+++ testsuite/gcc.dg/tree-ssa/ssa-dce-2.c	8 Jun 2005 20:16:43 -0000
@@ -1,5 +1,5 @@
 /* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-dce3" } */
+/* { dg-options "-O2 -fdump-tree-dce3" } */
 
 /* We should notice constantness of this function. */
 int t(int a) 

Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]