This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

Re: Stream ODR types


Hi,
this patch renames types reported by -Wodr during LTO bootstrap.

Bootstrapping/regtesting in progress, OK if it passes?

Honza

	* tree-ssa-ccp.c (prop_value_d): Rename to ...
	(ccp_prop_value_t): ... this one to avoid ODR violation; update uses.
	* ipa-prop.c (struct type_change_info): Rename to ...
	(prop_type_change_info): ... this; update uses.
	* ggc-page.c (globals): Rename to ...
	(ggc_globals): ... this; update uses.
	* tree-ssa-loop-im.c (mem_ref): Rename to ...
	(im_mem_ref): ... this; update uses.
	* ggc-common.c (loc_descriptor): Rename to ...
	(ggc_loc_descriptor): ... this; update uses.
	* lra-eliminations.c (elim_table): Rename to ...
	(lra_elim_table): ... this; update uses.
	* bitmap.c (output_info): Rename to ...
	(bitmap_output_info): ... this; update uses.
	* gcse.c (expr): Rename to ...
	(gcse_expr): ... this; update uses.
	(occr): Rename to ...
	(gcse_occr): ... this; update uses.
	* tree-ssa-copy.c (prop_value_d): Rename to ...
	(prop_value_t): ... this.
	* predict.c (block_info_def): Rename to ...
	(block_info): ... this; update uses.
	(edge_info_def): Rename to ...
	(edge_info): ... this; update uses.
	* profile.c (bb_info): Rename to ...
	(bb_profile_info): ... this; update uses.
	* alloc-pool.c (output_info): Rename to ...
	(pool_output_info): ... this; update uses.
	
Index: tree-ssa-ccp.c
===================================================================
--- tree-ssa-ccp.c	(revision 215328)
+++ tree-ssa-ccp.c	(working copy)
@@ -166,7 +166,7 @@ typedef enum
   VARYING
 } ccp_lattice_t;
 
-struct prop_value_d {
+struct ccp_prop_value_t {
     /* Lattice value.  */
     ccp_lattice_t lattice_val;
 
@@ -180,24 +180,22 @@ struct prop_value_d {
     widest_int mask;
 };
 
-typedef struct prop_value_d prop_value_t;
-
 /* Array of propagated constant values.  After propagation,
    CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
    the constant is held in an SSA name representing a memory store
    (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
    memory reference used to store (i.e., the LHS of the assignment
    doing the store).  */
-static prop_value_t *const_val;
+static ccp_prop_value_t *const_val;
 static unsigned n_const_val;
 
-static void canonicalize_value (prop_value_t *);
+static void canonicalize_value (ccp_prop_value_t *);
 static bool ccp_fold_stmt (gimple_stmt_iterator *);
 
 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */
 
 static void
-dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
+dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
 {
   switch (val.lattice_val)
     {
@@ -236,10 +234,10 @@ dump_lattice_value (FILE *outf, const ch
 
 /* Print lattice value VAL to stderr.  */
 
-void debug_lattice_value (prop_value_t val);
+void debug_lattice_value (ccp_prop_value_t val);
 
 DEBUG_FUNCTION void
-debug_lattice_value (prop_value_t val)
+debug_lattice_value (ccp_prop_value_t val)
 {
   dump_lattice_value (stderr, "", val);
   fprintf (stderr, "\n");
@@ -272,10 +270,10 @@ extend_mask (const wide_int &nonzero_bit
    4- Initial values of variables that are not GIMPLE registers are
       considered VARYING.  */
 
-static prop_value_t
+static ccp_prop_value_t
 get_default_value (tree var)
 {
-  prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
+  ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
   gimple stmt;
 
   stmt = SSA_NAME_DEF_STMT (var);
@@ -343,10 +341,10 @@ get_default_value (tree var)
 
 /* Get the constant value associated with variable VAR.  */
 
-static inline prop_value_t *
+static inline ccp_prop_value_t *
 get_value (tree var)
 {
-  prop_value_t *val;
+  ccp_prop_value_t *val;
 
   if (const_val == NULL
       || SSA_NAME_VERSION (var) >= n_const_val)
@@ -366,7 +364,7 @@ get_value (tree var)
 static inline tree
 get_constant_value (tree var)
 {
-  prop_value_t *val;
+  ccp_prop_value_t *val;
   if (TREE_CODE (var) != SSA_NAME)
     {
       if (is_gimple_min_invariant (var))
@@ -387,7 +385,7 @@ get_constant_value (tree var)
 static inline void
 set_value_varying (tree var)
 {
-  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
+  ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
 
   val->lattice_val = VARYING;
   val->value = NULL_TREE;
@@ -413,7 +411,7 @@ set_value_varying (tree var)
   For other constants, make sure to drop TREE_OVERFLOW.  */
 
 static void
-canonicalize_value (prop_value_t *val)
+canonicalize_value (ccp_prop_value_t *val)
 {
   enum machine_mode mode;
   tree type;
@@ -451,7 +449,7 @@ canonicalize_value (prop_value_t *val)
 /* Return whether the lattice transition is valid.  */
 
 static bool
-valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
+valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
 {
   /* Lattice transitions must always be monotonically increasing in
      value.  */
@@ -486,10 +484,10 @@ valid_lattice_transition (prop_value_t o
    value is different from VAR's previous value.  */
 
 static bool
-set_lattice_value (tree var, prop_value_t new_val)
+set_lattice_value (tree var, ccp_prop_value_t new_val)
 {
   /* We can deal with old UNINITIALIZED values just fine here.  */
-  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
+  ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
 
   canonicalize_value (&new_val);
 
@@ -534,8 +532,8 @@ set_lattice_value (tree var, prop_value_
   return false;
 }
 
-static prop_value_t get_value_for_expr (tree, bool);
-static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
+static ccp_prop_value_t get_value_for_expr (tree, bool);
+static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
 static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
 			       tree, const widest_int &, const widest_int &,
 			       tree, const widest_int &, const widest_int &);
@@ -544,7 +542,7 @@ static void bit_value_binop_1 (enum tree
    from VAL.  */
 
 static widest_int
-value_to_wide_int (prop_value_t val)
+value_to_wide_int (ccp_prop_value_t val)
 {
   if (val.value
       && TREE_CODE (val.value) == INTEGER_CST)
@@ -556,11 +554,11 @@ value_to_wide_int (prop_value_t val)
 /* Return the value for the address expression EXPR based on alignment
    information.  */
 
-static prop_value_t
+static ccp_prop_value_t
 get_value_from_alignment (tree expr)
 {
   tree type = TREE_TYPE (expr);
-  prop_value_t val;
+  ccp_prop_value_t val;
   unsigned HOST_WIDE_INT bitpos;
   unsigned int align;
 
@@ -583,10 +581,10 @@ get_value_from_alignment (tree expr)
    return constant bits extracted from alignment information for
    invariant addresses.  */
 
-static prop_value_t
+static ccp_prop_value_t
 get_value_for_expr (tree expr, bool for_bits_p)
 {
-  prop_value_t val;
+  ccp_prop_value_t val;
 
   if (TREE_CODE (expr) == SSA_NAME)
     {
@@ -654,7 +652,7 @@ likely_value (gimple stmt)
   all_undefined_operands = true;
   FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
     {
-      prop_value_t *val = get_value (use);
+      ccp_prop_value_t *val = get_value (use);
 
       if (val->lattice_val == UNDEFINED)
 	has_undefined_operand = true;
@@ -792,7 +790,7 @@ ccp_initialize (void)
   basic_block bb;
 
   n_const_val = num_ssa_names;
-  const_val = XCNEWVEC (prop_value_t, n_const_val);
+  const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);
 
   /* Initialize simulation flags for PHI nodes and statements.  */
   FOR_EACH_BB_FN (bb, cfun)
@@ -884,7 +882,7 @@ ccp_finalize (void)
   for (i = 1; i < num_ssa_names; ++i)
     {
       tree name = ssa_name (i);
-      prop_value_t *val;
+      ccp_prop_value_t *val;
       unsigned int tem, align;
 
       if (!name
@@ -941,7 +939,7 @@ ccp_finalize (void)
    */
 
 static void
-ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
+ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
 {
   if (val1->lattice_val == UNDEFINED)
     {
@@ -997,7 +995,7 @@ ccp_lattice_meet (prop_value_t *val1, pr
     {
       /* When not equal addresses are involved try meeting for
 	 alignment.  */
-      prop_value_t tem = *val2;
+      ccp_prop_value_t tem = *val2;
       if (TREE_CODE (val1->value) == ADDR_EXPR)
 	*val1 = get_value_for_expr (val1->value, true);
       if (TREE_CODE (val2->value) == ADDR_EXPR)
@@ -1023,7 +1021,7 @@ static enum ssa_prop_result
 ccp_visit_phi_node (gimple phi)
 {
   unsigned i;
-  prop_value_t *old_val, new_val;
+  ccp_prop_value_t *old_val, new_val;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
@@ -1069,7 +1067,7 @@ ccp_visit_phi_node (gimple phi)
       if (e->flags & EDGE_EXECUTABLE)
 	{
 	  tree arg = gimple_phi_arg (phi, i)->def;
-	  prop_value_t arg_val = get_value_for_expr (arg, false);
+	  ccp_prop_value_t arg_val = get_value_for_expr (arg, false);
 
 	  ccp_lattice_meet (&new_val, &arg_val);
 
@@ -1449,12 +1447,12 @@ bit_value_binop_1 (enum tree_code code,
 /* Return the propagation value when applying the operation CODE to
    the value RHS yielding type TYPE.  */
 
-static prop_value_t
+static ccp_prop_value_t
 bit_value_unop (enum tree_code code, tree type, tree rhs)
 {
-  prop_value_t rval = get_value_for_expr (rhs, true);
+  ccp_prop_value_t rval = get_value_for_expr (rhs, true);
   widest_int value, mask;
-  prop_value_t val;
+  ccp_prop_value_t val;
 
   if (rval.lattice_val == UNDEFINED)
     return rval;
@@ -1483,13 +1481,13 @@ bit_value_unop (enum tree_code code, tre
 /* Return the propagation value when applying the operation CODE to
    the values RHS1 and RHS2 yielding type TYPE.  */
 
-static prop_value_t
+static ccp_prop_value_t
 bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
 {
-  prop_value_t r1val = get_value_for_expr (rhs1, true);
-  prop_value_t r2val = get_value_for_expr (rhs2, true);
+  ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
+  ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
   widest_int value, mask;
-  prop_value_t val;
+  ccp_prop_value_t val;
 
   if (r1val.lattice_val == UNDEFINED
       || r2val.lattice_val == UNDEFINED)
@@ -1532,15 +1530,15 @@ bit_value_binop (enum tree_code code, tr
    is false, for alloc_aligned attribute ATTR is non-NULL and
    ALLOC_ALIGNED is true.  */
 
-static prop_value_t
-bit_value_assume_aligned (gimple stmt, tree attr, prop_value_t ptrval,
+static ccp_prop_value_t
+bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
 			  bool alloc_aligned)
 {
   tree align, misalign = NULL_TREE, type;
   unsigned HOST_WIDE_INT aligni, misaligni = 0;
-  prop_value_t alignval;
+  ccp_prop_value_t alignval;
   widest_int value, mask;
-  prop_value_t val;
+  ccp_prop_value_t val;
 
   if (attr == NULL_TREE)
     {
@@ -1632,10 +1630,10 @@ bit_value_assume_aligned (gimple stmt, t
 /* Evaluate statement STMT.
    Valid only for assignments, calls, conditionals, and switches. */
 
-static prop_value_t
+static ccp_prop_value_t
 evaluate_stmt (gimple stmt)
 {
-  prop_value_t val;
+  ccp_prop_value_t val;
   tree simplified = NULL_TREE;
   ccp_lattice_t likelyvalue = likely_value (stmt);
   bool is_constant = false;
@@ -2062,7 +2060,7 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi
     {
     case GIMPLE_COND:
       {
-	prop_value_t val;
+	ccp_prop_value_t val;
 	/* Statement evaluation will handle type mismatches in constants
 	   more gracefully than the final propagation.  This allows us to
 	   fold more conditionals here.  */
@@ -2197,7 +2195,7 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi
 static enum ssa_prop_result
 visit_assignment (gimple stmt, tree *output_p)
 {
-  prop_value_t val;
+  ccp_prop_value_t val;
   enum ssa_prop_result retval;
 
   tree lhs = gimple_get_lhs (stmt);
@@ -2242,7 +2240,7 @@ visit_assignment (gimple stmt, tree *out
 static enum ssa_prop_result
 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
 {
-  prop_value_t val;
+  ccp_prop_value_t val;
   basic_block block;
 
   block = gimple_bb (stmt);
@@ -2320,7 +2318,7 @@ ccp_visit_stmt (gimple stmt, edge *taken
      Mark them VARYING.  */
   FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
     {
-      prop_value_t v = { VARYING, NULL_TREE, -1 };
+      ccp_prop_value_t v = { VARYING, NULL_TREE, -1 };
       set_lattice_value (def, v);
     }
 
Index: ipa-prop.c
===================================================================
--- ipa-prop.c	(revision 215328)
+++ ipa-prop.c	(working copy)
@@ -592,7 +592,7 @@ ipa_get_bb_info (struct func_body_info *
 /* Structure to be passed in between detect_type_change and
    check_stmt_for_type_change.  */
 
-struct type_change_info
+struct prop_type_change_info
 {
   /* Offset into the object where there is the virtual method pointer we are
      looking for.  */
@@ -680,7 +680,7 @@ stmt_may_be_vtbl_ptr_store (gimple stmt)
    identified, return the type.  Otherwise return NULL_TREE.  */
 
 static tree
-extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
+extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
 {
   HOST_WIDE_INT offset, size, max_size;
   tree lhs, rhs, base, binfo;
@@ -726,13 +726,13 @@ extr_type_from_vtbl_ptr_store (gimple st
    detect_type_change to check whether a particular statement may modify
    the virtual table pointer, and if possible also determine the new type of
    the (sub-)object.  It stores its result into DATA, which points to a
-   type_change_info structure.  */
+   prop_type_change_info structure.  */
 
 static bool
 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
 {
   gimple stmt = SSA_NAME_DEF_STMT (vdef);
-  struct type_change_info *tci = (struct type_change_info *) data;
+  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
 
   if (stmt_may_be_vtbl_ptr_store (stmt))
     {
@@ -830,7 +830,7 @@ detect_type_change_from_memory_writes (t
 				       gimple call, struct ipa_jump_func *jfunc,
 				       HOST_WIDE_INT offset)
 {
-  struct type_change_info tci;
+  struct prop_type_change_info tci;
   ao_ref ao;
   bool entry_reached = false;
 
Index: ggc-page.c
===================================================================
--- ggc-page.c	(revision 215328)
+++ ggc-page.c	(working copy)
@@ -378,7 +378,7 @@ struct free_object
 #endif
 
 /* The rest of the global variables.  */
-static struct globals
+static struct ggc_globals
 {
   /* The Nth element in this array is a page with objects of size 2^N.
      If there are any pages with free objects, they will be at the
Index: tree-ssa-loop-im.c
===================================================================
--- tree-ssa-loop-im.c	(revision 215328)
+++ tree-ssa-loop-im.c	(working copy)
@@ -116,7 +116,7 @@ typedef struct mem_ref_loc
 
 /* Description of a memory reference.  */
 
-typedef struct mem_ref
+typedef struct im_mem_ref
 {
   unsigned id;			/* ID assigned to the memory reference
 				   (its index in memory_accesses.refs_list)  */
@@ -153,15 +153,15 @@ typedef struct mem_ref
 
 /* Mem_ref hashtable helpers.  */
 
-struct mem_ref_hasher : typed_noop_remove <mem_ref>
+struct mem_ref_hasher : typed_noop_remove <im_mem_ref>
 {
-  typedef mem_ref value_type;
+  typedef im_mem_ref value_type;
   typedef tree_node compare_type;
   static inline hashval_t hash (const value_type *);
   static inline bool equal (const value_type *, const compare_type *);
 };
 
-/* A hash function for struct mem_ref object OBJ.  */
+/* A hash function for struct im_mem_ref object OBJ.  */
 
 inline hashval_t
 mem_ref_hasher::hash (const value_type *mem)
@@ -169,7 +169,7 @@ mem_ref_hasher::hash (const value_type *
   return mem->hash;
 }
 
-/* An equality function for struct mem_ref object MEM1 with
+/* An equality function for struct im_mem_ref object MEM1 with
    memory reference OBJ2.  */
 
 inline bool
@@ -1395,7 +1395,7 @@ force_move_till (tree ref, tree *index,
 /* A function to free the mem_ref object OBJ.  */
 
 static void
-memref_free (struct mem_ref *mem)
+memref_free (struct im_mem_ref *mem)
 {
   mem->accesses_in_loop.release ();
 }
@@ -1406,7 +1406,7 @@ memref_free (struct mem_ref *mem)
 static mem_ref_p
 mem_ref_alloc (tree mem, unsigned hash, unsigned id)
 {
-  mem_ref_p ref = XOBNEW (&mem_ref_obstack, struct mem_ref);
+  mem_ref_p ref = XOBNEW (&mem_ref_obstack, struct im_mem_ref);
   ao_ref_init (&ref->mem, mem);
   ref->id = id;
   ref->hash = hash;
@@ -1461,7 +1461,7 @@ gather_mem_refs_stmt (struct loop *loop,
 {
   tree *mem = NULL;
   hashval_t hash;
-  mem_ref **slot;
+  im_mem_ref **slot;
   mem_ref_p ref;
   bool is_stored;
   unsigned id;
@@ -1578,7 +1578,7 @@ analyze_memory_references (void)
 
   /* Sort the location list of gathered memory references after their
      loop postorder number.  */
-  mem_ref *ref;
+  im_mem_ref *ref;
   FOR_EACH_VEC_ELT (memory_accesses.refs_list, i, ref)
     ref->accesses_in_loop.qsort (sort_locs_in_loop_postorder_cmp);
 
Index: ggc-common.c
===================================================================
--- ggc-common.c	(revision 215328)
+++ ggc-common.c	(working copy)
@@ -904,7 +904,7 @@ init_ggc_heuristics (void)
 }
 
 /* Datastructure used to store per-call-site statistics.  */
-struct loc_descriptor
+struct ggc_loc_descriptor
 {
   const char *file;
   int line;
@@ -918,42 +918,42 @@ struct loc_descriptor
 
 /* Hash table helper.  */
 
-struct loc_desc_hasher : typed_noop_remove <loc_descriptor>
+struct ggc_loc_desc_hasher : typed_noop_remove <ggc_loc_descriptor>
 {
-  typedef loc_descriptor value_type;
-  typedef loc_descriptor compare_type;
+  typedef ggc_loc_descriptor value_type;
+  typedef ggc_loc_descriptor compare_type;
   static inline hashval_t hash (const value_type *);
   static inline bool equal (const value_type *, const compare_type *);
 };
 
 inline hashval_t
-loc_desc_hasher::hash (const value_type *d)
+ggc_loc_desc_hasher::hash (const value_type *d)
 {
   return htab_hash_pointer (d->function) | d->line;
 }
 
 inline bool
-loc_desc_hasher::equal (const value_type *d, const compare_type *d2)
+ggc_loc_desc_hasher::equal (const value_type *d, const compare_type *d2)
 {
   return (d->file == d2->file && d->line == d2->line
 	  && d->function == d2->function);
 }
 
 /* Hashtable used for statistics.  */
-static hash_table<loc_desc_hasher> *loc_hash;
+static hash_table<ggc_loc_desc_hasher> *loc_hash;
 
-struct ptr_hash_entry
+struct ggc_ptr_hash_entry
 {
   void *ptr;
-  struct loc_descriptor *loc;
+  struct ggc_loc_descriptor *loc;
   size_t size;
 };
 
 /* Helper for ptr_hash table.  */
 
-struct ptr_hash_hasher : typed_noop_remove <ptr_hash_entry>
+struct ptr_hash_hasher : typed_noop_remove <ggc_ptr_hash_entry>
 {
-  typedef ptr_hash_entry value_type;
+  typedef ggc_ptr_hash_entry value_type;
   typedef void compare_type;
   static inline hashval_t hash (const value_type *);
   static inline bool equal (const value_type *, const compare_type *);
@@ -975,22 +975,22 @@ ptr_hash_hasher::equal (const value_type
 static hash_table<ptr_hash_hasher> *ptr_hash;
 
 /* Return descriptor for given call site, create new one if needed.  */
-static struct loc_descriptor *
+static struct ggc_loc_descriptor *
 make_loc_descriptor (const char *name, int line, const char *function)
 {
-  struct loc_descriptor loc;
-  struct loc_descriptor **slot;
+  struct ggc_loc_descriptor loc;
+  struct ggc_loc_descriptor **slot;
 
   loc.file = name;
   loc.line = line;
   loc.function = function;
   if (!loc_hash)
-    loc_hash = new hash_table<loc_desc_hasher> (10);
+    loc_hash = new hash_table<ggc_loc_desc_hasher> (10);
 
   slot = loc_hash->find_slot (&loc, INSERT);
   if (*slot)
     return *slot;
-  *slot = XCNEW (struct loc_descriptor);
+  *slot = XCNEW (struct ggc_loc_descriptor);
   (*slot)->file = name;
   (*slot)->line = line;
   (*slot)->function = function;
@@ -1002,9 +1002,9 @@ void
 ggc_record_overhead (size_t allocated, size_t overhead, void *ptr,
 		     const char *name, int line, const char *function)
 {
-  struct loc_descriptor *loc = make_loc_descriptor (name, line, function);
-  struct ptr_hash_entry *p = XNEW (struct ptr_hash_entry);
-  ptr_hash_entry **slot;
+  struct ggc_loc_descriptor *loc = make_loc_descriptor (name, line, function);
+  struct ggc_ptr_hash_entry *p = XNEW (struct ggc_ptr_hash_entry);
+  ggc_ptr_hash_entry **slot;
 
   p->ptr = ptr;
   p->loc = loc;
@@ -1023,9 +1023,9 @@ ggc_record_overhead (size_t allocated, s
 /* Helper function for prune_overhead_list.  See if SLOT is still marked and
    remove it from hashtable if it is not.  */
 int
-ggc_prune_ptr (ptr_hash_entry **slot, void *b ATTRIBUTE_UNUSED)
+ggc_prune_ptr (ggc_ptr_hash_entry **slot, void *b ATTRIBUTE_UNUSED)
 {
-  struct ptr_hash_entry *p = *slot;
+  struct ggc_ptr_hash_entry *p = *slot;
   if (!ggc_marked_p (p->ptr))
     {
       p->loc->collected += p->size;
@@ -1047,15 +1047,15 @@ ggc_prune_overhead_list (void)
 void
 ggc_free_overhead (void *ptr)
 {
-  ptr_hash_entry **slot
+  ggc_ptr_hash_entry **slot
     = ptr_hash->find_slot_with_hash (ptr, htab_hash_pointer (ptr), NO_INSERT);
-  struct ptr_hash_entry *p;
+  struct ggc_ptr_hash_entry *p;
   /* The pointer might be not found if a PCH read happened between allocation
      and ggc_free () call.  FIXME: account memory properly in the presence of
      PCH. */
   if (!slot)
       return;
-  p = (struct ptr_hash_entry *) *slot;
+  p = (struct ggc_ptr_hash_entry *) *slot;
   p->loc->freed += p->size;
   ptr_hash->clear_slot (slot);
   free (p);
@@ -1065,10 +1065,10 @@ ggc_free_overhead (void *ptr)
 static int
 final_cmp_statistic (const void *loc1, const void *loc2)
 {
-  const struct loc_descriptor *const l1 =
-    *(const struct loc_descriptor *const *) loc1;
-  const struct loc_descriptor *const l2 =
-    *(const struct loc_descriptor *const *) loc2;
+  const struct ggc_loc_descriptor *const l1 =
+    *(const struct ggc_loc_descriptor *const *) loc1;
+  const struct ggc_loc_descriptor *const l2 =
+    *(const struct ggc_loc_descriptor *const *) loc2;
   long diff;
   diff = ((long)(l1->allocated + l1->overhead - l1->freed) -
 	  (l2->allocated + l2->overhead - l2->freed));
@@ -1079,10 +1079,10 @@ final_cmp_statistic (const void *loc1, c
 static int
 cmp_statistic (const void *loc1, const void *loc2)
 {
-  const struct loc_descriptor *const l1 =
-    *(const struct loc_descriptor *const *) loc1;
-  const struct loc_descriptor *const l2 =
-    *(const struct loc_descriptor *const *) loc2;
+  const struct ggc_loc_descriptor *const l1 =
+    *(const struct ggc_loc_descriptor *const *) loc1;
+  const struct ggc_loc_descriptor *const l2 =
+    *(const struct ggc_loc_descriptor *const *) loc2;
   long diff;
 
   diff = ((long)(l1->allocated + l1->overhead - l1->freed - l1->collected) -
@@ -1095,9 +1095,9 @@ cmp_statistic (const void *loc1, const v
 }
 
 /* Collect array of the descriptors from hashtable.  */
-static struct loc_descriptor **loc_array;
+static struct ggc_loc_descriptor **loc_array;
 int
-ggc_add_statistics (loc_descriptor **slot, int *n)
+ggc_add_statistics (ggc_loc_descriptor **slot, int *n)
 {
   loc_array[*n] = *slot;
   (*n)++;
@@ -1120,7 +1120,7 @@ dump_ggc_loc_statistics (bool final)
   ggc_force_collect = true;
   ggc_collect ();
 
-  loc_array = XCNEWVEC (struct loc_descriptor *,
+  loc_array = XCNEWVEC (struct ggc_loc_descriptor *,
 			loc_hash->elements_with_deleted ());
   fprintf (stderr, "-------------------------------------------------------\n");
   fprintf (stderr, "\n%-48s %10s       %10s       %10s       %10s       %10s\n",
@@ -1131,7 +1131,7 @@ dump_ggc_loc_statistics (bool final)
 	 final ? final_cmp_statistic : cmp_statistic);
   for (i = 0; i < nentries; i++)
     {
-      struct loc_descriptor *d = loc_array[i];
+      struct ggc_loc_descriptor *d = loc_array[i];
       allocated += d->allocated;
       times += d->times;
       freed += d->freed;
@@ -1140,7 +1140,7 @@ dump_ggc_loc_statistics (bool final)
     }
   for (i = 0; i < nentries; i++)
     {
-      struct loc_descriptor *d = loc_array[i];
+      struct ggc_loc_descriptor *d = loc_array[i];
       if (d->allocated)
 	{
 	  const char *s1 = d->file;
Index: lra-eliminations.c
===================================================================
--- lra-eliminations.c	(revision 215328)
+++ lra-eliminations.c	(working copy)
@@ -77,7 +77,7 @@ along with GCC; see the file COPYING3.	I
 
 /* This structure is used to record information about hard register
    eliminations.  */
-struct elim_table
+struct lra_elim_table
 {
   /* Hard register number to be eliminated.  */
   int from;
@@ -105,7 +105,7 @@ struct elim_table
    of eliminating a register in favor of another.  If there is more
    than one way of eliminating a particular register, the most
    preferred should be specified first.	 */
-static struct elim_table *reg_eliminate = 0;
+static struct lra_elim_table *reg_eliminate = 0;
 
 /* This is an intermediate structure to initialize the table.  It has
    exactly the members provided by ELIMINABLE_REGS.  */
@@ -131,7 +131,7 @@ static const struct elim_table_1
 static void
 print_elim_table (FILE *f)
 {
-  struct elim_table *ep;
+  struct lra_elim_table *ep;
 
   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
     fprintf (f, "%s eliminate %d to %d (offset=" HOST_WIDE_INT_PRINT_DEC
@@ -151,7 +151,7 @@ lra_debug_elim_table (void)
    VALUE.  Setup FRAME_POINTER_NEEDED if elimination from frame
    pointer to stack pointer is not possible anymore.  */
 static void
-setup_can_eliminate (struct elim_table *ep, bool value)
+setup_can_eliminate (struct lra_elim_table *ep, bool value)
 {
   ep->can_eliminate = ep->prev_can_eliminate = value;
   if (! value
@@ -163,12 +163,12 @@ setup_can_eliminate (struct elim_table *
    or NULL if none.  The elimination table may contain more than
    one elimination for the same hard register, but this map specifies
    the one that we are currently using.  */
-static struct elim_table *elimination_map[FIRST_PSEUDO_REGISTER];
+static struct lra_elim_table *elimination_map[FIRST_PSEUDO_REGISTER];
 
 /* When an eliminable hard register becomes not eliminable, we use the
    following special structure to restore original offsets for the
    register.  */
-static struct elim_table self_elim_table;
+static struct lra_elim_table self_elim_table;
 
 /* Offsets should be used to restore original offsets for eliminable
    hard register which just became not eliminable.  Zero,
@@ -184,7 +184,7 @@ static void
 setup_elimination_map (void)
 {
   int i;
-  struct elim_table *ep;
+  struct lra_elim_table *ep;
 
   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
     elimination_map[i] = NULL;
@@ -249,7 +249,7 @@ form_sum (rtx x, rtx y)
 int
 lra_get_elimination_hard_regno (int hard_regno)
 {
-  struct elim_table *ep;
+  struct lra_elim_table *ep;
 
   if (hard_regno < 0 || hard_regno >= FIRST_PSEUDO_REGISTER)
     return hard_regno;
@@ -260,11 +260,11 @@ lra_get_elimination_hard_regno (int hard
 
 /* Return elimination which will be used for hard reg REG, NULL
    otherwise.  */
-static struct elim_table *
+static struct lra_elim_table *
 get_elimination (rtx reg)
 {
   int hard_regno;
-  struct elim_table *ep;
+  struct lra_elim_table *ep;
   HOST_WIDE_INT offset;
 
   lra_assert (REG_P (reg));
@@ -306,7 +306,7 @@ lra_eliminate_regs_1 (rtx_insn *insn, rt
 		      bool subst_p, bool update_p, bool full_p)
 {
   enum rtx_code code = GET_CODE (x);
-  struct elim_table *ep;
+  struct lra_elim_table *ep;
   rtx new_rtx;
   int i, j;
   const char *fmt;
@@ -674,7 +674,7 @@ static void
 mark_not_eliminable (rtx x, enum machine_mode mem_mode)
 {
   enum rtx_code code = GET_CODE (x);
-  struct elim_table *ep;
+  struct lra_elim_table *ep;
   int i, j;
   const char *fmt;
 
@@ -856,7 +856,7 @@ eliminate_regs_in_insn (rtx_insn *insn,
   int i;
   rtx substed_operand[MAX_RECOG_OPERANDS];
   rtx orig_operand[MAX_RECOG_OPERANDS];
-  struct elim_table *ep;
+  struct lra_elim_table *ep;
   rtx plus_src, plus_cst_src;
   lra_insn_recog_data_t id;
   struct lra_static_insn_data *static_id;
@@ -1130,7 +1130,7 @@ static bool
 update_reg_eliminate (bitmap insns_with_changed_offsets)
 {
   bool prev, result;
-  struct elim_table *ep, *ep1;
+  struct lra_elim_table *ep, *ep1;
   HARD_REG_SET temp_hard_reg_set;
 
   /* Clear self elimination offsets.  */
@@ -1235,14 +1235,14 @@ update_reg_eliminate (bitmap insns_with_
 static void
 init_elim_table (void)
 {
-  struct elim_table *ep;
+  struct lra_elim_table *ep;
 #ifdef ELIMINABLE_REGS
   bool value_p;
   const struct elim_table_1 *ep1;
 #endif
 
   if (!reg_eliminate)
-    reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
+    reg_eliminate = XCNEWVEC (struct lra_elim_table, NUM_ELIMINABLE_REGS);
 
   memset (self_elim_offsets, 0, sizeof (self_elim_offsets));
   /* Initiate member values which will be never changed.  */
@@ -1291,7 +1291,7 @@ init_elimination (void)
   bool stop_to_sp_elimination_p;
   basic_block bb;
   rtx_insn *insn;
-  struct elim_table *ep;
+  struct lra_elim_table *ep;
 
   init_elim_table ();
   FOR_EACH_BB_FN (bb, cfun)
@@ -1325,7 +1325,7 @@ void
 lra_eliminate_reg_if_possible (rtx *loc)
 {
   int regno;
-  struct elim_table *ep;
+  struct lra_elim_table *ep;
 
   lra_assert (REG_P (*loc));
   if ((regno = REGNO (*loc)) >= FIRST_PSEUDO_REGISTER
@@ -1369,7 +1369,7 @@ lra_eliminate (bool final_p, bool first_
   unsigned int uid;
   bitmap_head insns_with_changed_offsets;
   bitmap_iterator bi;
-  struct elim_table *ep;
+  struct lra_elim_table *ep;
 
   gcc_assert (! final_p || ! first_p);
 
Index: bitmap.c
===================================================================
--- bitmap.c	(revision 215328)
+++ bitmap.c	(working copy)
@@ -2143,7 +2143,7 @@ bitmap_print (FILE *file, const_bitmap h
 
 
 /* Used to accumulate statistics about bitmap sizes.  */
-struct output_info
+struct bitmap_output_info
 {
   uint64_t size;
   uint64_t count;
@@ -2152,7 +2152,7 @@ struct output_info
 /* Called via hash_table::traverse.  Output bitmap descriptor pointed out by
    SLOT and update statistics.  */
 int
-print_statistics (bitmap_descriptor_d **slot, output_info *i)
+print_statistics (bitmap_descriptor_d **slot, bitmap_output_info *i)
 {
   bitmap_descriptor d = *slot;
   char s[4096];
@@ -2181,7 +2181,7 @@ print_statistics (bitmap_descriptor_d **
 void
 dump_bitmap_statistics (void)
 {
-  struct output_info info;
+  struct bitmap_output_info info;
 
   if (! GATHER_STATISTICS)
     return;
@@ -2197,7 +2197,7 @@ dump_bitmap_statistics (void)
   fprintf (stderr, "---------------------------------------------------------------------------------\n");
   info.count = 0;
   info.size = 0;
-  bitmap_desc_hash->traverse <output_info *, print_statistics> (&info);
+  bitmap_desc_hash->traverse <bitmap_output_info *, print_statistics> (&info);
   fprintf (stderr, "---------------------------------------------------------------------------------\n");
   fprintf (stderr,
 	   "%-41s %9"PRId64" %15"PRId64"\n",
Index: gcse.c
===================================================================
--- gcse.c	(revision 215328)
+++ gcse.c	(working copy)
@@ -256,25 +256,25 @@ static struct obstack gcse_obstack;
 
 /* Hash table of expressions.  */
 
-struct expr
+struct gcse_expr
 {
   /* The expression.  */
   rtx expr;
   /* Index in the available expression bitmaps.  */
   int bitmap_index;
   /* Next entry with the same hash.  */
-  struct expr *next_same_hash;
+  struct gcse_expr *next_same_hash;
   /* List of anticipatable occurrences in basic blocks in the function.
      An "anticipatable occurrence" is one that is the first occurrence in the
      basic block, the operands are not modified in the basic block prior
      to the occurrence and the output is not used between the start of
      the block and the occurrence.  */
-  struct occr *antic_occr;
+  struct gcse_occr *antic_occr;
   /* List of available occurrence in basic blocks in the function.
      An "available occurrence" is one that is the last occurrence in the
      basic block and the operands are not modified by following statements in
      the basic block [including this insn].  */
-  struct occr *avail_occr;
+  struct gcse_occr *avail_occr;
   /* Non-null if the computation is PRE redundant.
      The value is the newly created pseudo-reg to record a copy of the
      expression in all the places that reach the redundant copy.  */
@@ -291,10 +291,10 @@ struct expr
    There is one per basic block.  If a pattern appears more than once the
    last appearance is used [or first for anticipatable expressions].  */
 
-struct occr
+struct gcse_occr
 {
   /* Next occurrence of this expression.  */
-  struct occr *next;
+  struct gcse_occr *next;
   /* The insn that computes the expression.  */
   rtx_insn *insn;
   /* Nonzero if this [anticipatable] occurrence has been deleted.  */
@@ -306,7 +306,7 @@ struct occr
   char copied_p;
 };
 
-typedef struct occr *occr_t;
+typedef struct gcse_occr *occr_t;
 
 /* Expression hash tables.
    Each hash table is an array of buckets.
@@ -317,11 +317,11 @@ typedef struct occr *occr_t;
    [one could build a mapping table without holes afterwards though].
    Someday I'll perform the computation and figure it out.  */
 
-struct hash_table_d
+struct gcse_hash_table_d
 {
   /* The table itself.
      This is an array of `expr_hash_table_size' elements.  */
-  struct expr **table;
+  struct gcse_expr **table;
 
   /* Size of the hash table, in elements.  */
   unsigned int size;
@@ -331,7 +331,7 @@ struct hash_table_d
 };
 
 /* Expression hash table.  */
-static struct hash_table_d expr_hash_table;
+static struct gcse_hash_table_d expr_hash_table;
 
 /* This is a list of expressions which are MEMs and will be used by load
    or store motion.
@@ -344,7 +344,7 @@ static struct hash_table_d expr_hash_tab
 
 struct ls_expr
 {
-  struct expr * expr;		/* Gcse expression reference for LM.  */
+  struct gcse_expr * expr;	/* Gcse expression reference for LM.  */
   rtx pattern;			/* Pattern of this mem.  */
   rtx pattern_regs;		/* List of registers mentioned by the mem.  */
   rtx_insn_list *loads;		/* INSN list of loads seen.  */
@@ -462,38 +462,38 @@ static void *gcalloc (size_t, size_t) AT
 static void *gcse_alloc (unsigned long);
 static void alloc_gcse_mem (void);
 static void free_gcse_mem (void);
-static void hash_scan_insn (rtx_insn *, struct hash_table_d *);
-static void hash_scan_set (rtx, rtx_insn *, struct hash_table_d *);
-static void hash_scan_clobber (rtx, rtx_insn *, struct hash_table_d *);
-static void hash_scan_call (rtx, rtx_insn *, struct hash_table_d *);
+static void hash_scan_insn (rtx_insn *, struct gcse_hash_table_d *);
+static void hash_scan_set (rtx, rtx_insn *, struct gcse_hash_table_d *);
+static void hash_scan_clobber (rtx, rtx_insn *, struct gcse_hash_table_d *);
+static void hash_scan_call (rtx, rtx_insn *, struct gcse_hash_table_d *);
 static int want_to_gcse_p (rtx, int *);
 static int oprs_unchanged_p (const_rtx, const rtx_insn *, int);
 static int oprs_anticipatable_p (const_rtx, const rtx_insn *);
 static int oprs_available_p (const_rtx, const rtx_insn *);
 static void insert_expr_in_table (rtx, enum machine_mode, rtx_insn *, int, int,
-				  int, struct hash_table_d *);
+				  int, struct gcse_hash_table_d *);
 static unsigned int hash_expr (const_rtx, enum machine_mode, int *, int);
 static void record_last_reg_set_info (rtx, int);
 static void record_last_mem_set_info (rtx_insn *);
 static void record_last_set_info (rtx, const_rtx, void *);
-static void compute_hash_table (struct hash_table_d *);
-static void alloc_hash_table (struct hash_table_d *);
-static void free_hash_table (struct hash_table_d *);
-static void compute_hash_table_work (struct hash_table_d *);
-static void dump_hash_table (FILE *, const char *, struct hash_table_d *);
+static void compute_hash_table (struct gcse_hash_table_d *);
+static void alloc_hash_table (struct gcse_hash_table_d *);
+static void free_hash_table (struct gcse_hash_table_d *);
+static void compute_hash_table_work (struct gcse_hash_table_d *);
+static void dump_hash_table (FILE *, const char *, struct gcse_hash_table_d *);
 static void compute_transp (const_rtx, int, sbitmap *);
 static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
-				      struct hash_table_d *);
+				      struct gcse_hash_table_d *);
 static void mems_conflict_for_gcse_p (rtx, const_rtx, void *);
 static int load_killed_in_block_p (const_basic_block, int, const_rtx, int);
 static void canon_list_insert (rtx, const_rtx, void *);
 static void alloc_pre_mem (int, int);
 static void free_pre_mem (void);
 static struct edge_list *compute_pre_data (void);
-static int pre_expr_reaches_here_p (basic_block, struct expr *,
+static int pre_expr_reaches_here_p (basic_block, struct gcse_expr *,
 				    basic_block);
-static void insert_insn_end_basic_block (struct expr *, basic_block);
-static void pre_insert_copy_insn (struct expr *, rtx_insn *);
+static void insert_insn_end_basic_block (struct gcse_expr *, basic_block);
+static void pre_insert_copy_insn (struct gcse_expr *, rtx_insn *);
 static void pre_insert_copies (void);
 static int pre_delete (void);
 static int pre_gcse (struct edge_list *);
@@ -503,16 +503,16 @@ static void alloc_code_hoist_mem (int, i
 static void free_code_hoist_mem (void);
 static void compute_code_hoist_vbeinout (void);
 static void compute_code_hoist_data (void);
-static int should_hoist_expr_to_dom (basic_block, struct expr *, basic_block,
+static int should_hoist_expr_to_dom (basic_block, struct gcse_expr *, basic_block,
 				     sbitmap, int, int *, enum reg_class,
 				     int *, bitmap, rtx_insn *);
 static int hoist_code (void);
 static enum reg_class get_regno_pressure_class (int regno, int *nregs);
 static enum reg_class get_pressure_class_and_nregs (rtx_insn *insn, int *nregs);
 static int one_code_hoisting_pass (void);
-static rtx_insn *process_insert_insn (struct expr *);
-static int pre_edge_insert (struct edge_list *, struct expr **);
-static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
+static rtx_insn *process_insert_insn (struct gcse_expr *);
+static int pre_edge_insert (struct edge_list *, struct gcse_expr **);
+static int pre_expr_reaches_here_p_work (basic_block, struct gcse_expr *,
 					 basic_block, char *);
 static struct ls_expr * ldst_entry (rtx);
 static void free_ldst_entry (struct ls_expr *);
@@ -523,7 +523,7 @@ static int simple_mem (const_rtx);
 static void invalidate_any_buried_refs (rtx);
 static void compute_ld_motion_mems (void);
 static void trim_ld_motion_mems (void);
-static void update_ld_motion_stores (struct expr *);
+static void update_ld_motion_stores (struct gcse_expr *);
 static void clear_modify_mem_tables (void);
 static void free_modify_mem_tables (void);
 static rtx gcse_emit_move_after (rtx, rtx, rtx_insn *);
@@ -679,7 +679,7 @@ free_gcse_mem (void)
 
 static void
 compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
-			  struct hash_table_d *table)
+			  struct gcse_hash_table_d *table)
 {
   unsigned int i;
 
@@ -696,12 +696,12 @@ compute_local_properties (sbitmap *trans
 
   for (i = 0; i < table->size; i++)
     {
-      struct expr *expr;
+      struct gcse_expr *expr;
 
       for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
 	{
 	  int indx = expr->bitmap_index;
-	  struct occr *occr;
+	  struct gcse_occr *occr;
 
 	  /* The expression is transparent in this block if it is not killed.
 	     We start by assuming all are transparent [none are killed], and
@@ -1128,12 +1128,12 @@ expr_equiv_p (const_rtx x, const_rtx y)
 static void
 insert_expr_in_table (rtx x, enum machine_mode mode, rtx_insn *insn,
 		      int antic_p,
-		      int avail_p, int max_distance, struct hash_table_d *table)
+		      int avail_p, int max_distance, struct gcse_hash_table_d *table)
 {
   int found, do_not_record_p;
   unsigned int hash;
-  struct expr *cur_expr, *last_expr = NULL;
-  struct occr *antic_occr, *avail_occr;
+  struct gcse_expr *cur_expr, *last_expr = NULL;
+  struct gcse_occr *antic_occr, *avail_occr;
 
   hash = hash_expr (x, mode, &do_not_record_p, table->size);
 
@@ -1156,8 +1156,8 @@ insert_expr_in_table (rtx x, enum machin
 
   if (! found)
     {
-      cur_expr = GOBNEW (struct expr);
-      bytes_used += sizeof (struct expr);
+      cur_expr = GOBNEW (struct gcse_expr);
+      bytes_used += sizeof (struct gcse_expr);
       if (table->table[hash] == NULL)
 	/* This is the first pattern that hashed to this index.  */
 	table->table[hash] = cur_expr;
@@ -1194,8 +1194,8 @@ insert_expr_in_table (rtx x, enum machin
       else
 	{
 	  /* First occurrence of this expression in this basic block.  */
-	  antic_occr = GOBNEW (struct occr);
-	  bytes_used += sizeof (struct occr);
+	  antic_occr = GOBNEW (struct gcse_occr);
+	  bytes_used += sizeof (struct gcse_occr);
 	  antic_occr->insn = insn;
 	  antic_occr->next = cur_expr->antic_occr;
 	  antic_occr->deleted_p = 0;
@@ -1219,8 +1219,8 @@ insert_expr_in_table (rtx x, enum machin
       else
 	{
 	  /* First occurrence of this expression in this basic block.  */
-	  avail_occr = GOBNEW (struct occr);
-	  bytes_used += sizeof (struct occr);
+	  avail_occr = GOBNEW (struct gcse_occr);
+	  bytes_used += sizeof (struct gcse_occr);
 	  avail_occr->insn = insn;
 	  avail_occr->next = cur_expr->avail_occr;
 	  avail_occr->deleted_p = 0;
@@ -1232,7 +1232,7 @@ insert_expr_in_table (rtx x, enum machin
 /* Scan SET present in INSN and add an entry to the hash TABLE.  */
 
 static void
-hash_scan_set (rtx set, rtx_insn *insn, struct hash_table_d *table)
+hash_scan_set (rtx set, rtx_insn *insn, struct gcse_hash_table_d *table)
 {
   rtx src = SET_SRC (set);
   rtx dest = SET_DEST (set);
@@ -1352,14 +1352,14 @@ hash_scan_set (rtx set, rtx_insn *insn,
 
 static void
 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx_insn *insn ATTRIBUTE_UNUSED,
-		   struct hash_table_d *table ATTRIBUTE_UNUSED)
+		   struct gcse_hash_table_d *table ATTRIBUTE_UNUSED)
 {
   /* Currently nothing to do.  */
 }
 
 static void
 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx_insn *insn ATTRIBUTE_UNUSED,
-		struct hash_table_d *table ATTRIBUTE_UNUSED)
+		struct gcse_hash_table_d *table ATTRIBUTE_UNUSED)
 {
   /* Currently nothing to do.  */
 }
@@ -1367,7 +1367,7 @@ hash_scan_call (rtx x ATTRIBUTE_UNUSED,
 /* Process INSN and add hash table entries as appropriate.  */
 
 static void
-hash_scan_insn (rtx_insn *insn, struct hash_table_d *table)
+hash_scan_insn (rtx_insn *insn, struct gcse_hash_table_d *table)
 {
   rtx pat = PATTERN (insn);
   int i;
@@ -1401,15 +1401,15 @@ hash_scan_insn (rtx_insn *insn, struct h
 /* Dump the hash table TABLE to file FILE under the name NAME.  */
 
 static void
-dump_hash_table (FILE *file, const char *name, struct hash_table_d *table)
+dump_hash_table (FILE *file, const char *name, struct gcse_hash_table_d *table)
 {
   int i;
   /* Flattened out table, so it's printed in proper order.  */
-  struct expr **flat_table;
+  struct gcse_expr **flat_table;
   unsigned int *hash_val;
-  struct expr *expr;
+  struct gcse_expr *expr;
 
-  flat_table = XCNEWVEC (struct expr *, table->n_elems);
+  flat_table = XCNEWVEC (struct gcse_expr *, table->n_elems);
   hash_val = XNEWVEC (unsigned int, table->n_elems);
 
   for (i = 0; i < (int) table->size; i++)
@@ -1553,7 +1553,7 @@ record_last_set_info (rtx dest, const_rt
    TABLE is the table computed.  */
 
 static void
-compute_hash_table_work (struct hash_table_d *table)
+compute_hash_table_work (struct gcse_hash_table_d *table)
 {
   int i;
 
@@ -1605,7 +1605,7 @@ compute_hash_table_work (struct hash_tab
    It is used to determine the number of buckets to use.  */
 
 static void
-alloc_hash_table (struct hash_table_d *table)
+alloc_hash_table (struct gcse_hash_table_d *table)
 {
   int n;
 
@@ -1619,14 +1619,14 @@ alloc_hash_table (struct hash_table_d *t
      Making it an odd number is simplest for now.
      ??? Later take some measurements.  */
   table->size |= 1;
-  n = table->size * sizeof (struct expr *);
-  table->table = GNEWVAR (struct expr *, n);
+  n = table->size * sizeof (struct gcse_expr *);
+  table->table = GNEWVAR (struct gcse_expr *, n);
 }
 
 /* Free things allocated by alloc_hash_table.  */
 
 static void
-free_hash_table (struct hash_table_d *table)
+free_hash_table (struct gcse_hash_table_d *table)
 {
   free (table->table);
 }
@@ -1634,11 +1634,11 @@ free_hash_table (struct hash_table_d *ta
 /* Compute the expression hash table TABLE.  */
 
 static void
-compute_hash_table (struct hash_table_d *table)
+compute_hash_table (struct gcse_hash_table_d *table)
 {
   /* Initialize count of number of entries in hash table.  */
   table->n_elems = 0;
-  memset (table->table, 0, table->size * sizeof (struct expr *));
+  memset (table->table, 0, table->size * sizeof (struct gcse_expr *));
 
   compute_hash_table_work (table);
 }
@@ -1864,7 +1864,7 @@ static void
 prune_expressions (bool pre_p)
 {
   sbitmap prune_exprs;
-  struct expr *expr;
+  struct gcse_expr *expr;
   unsigned int ui;
   basic_block bb;
 
@@ -2063,7 +2063,7 @@ compute_pre_data (void)
    the closest such expression.  */
 
 static int
-pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr,
+pre_expr_reaches_here_p_work (basic_block occr_bb, struct gcse_expr *expr,
 			      basic_block bb, char *visited)
 {
   edge pred;
@@ -2110,7 +2110,7 @@ pre_expr_reaches_here_p_work (basic_bloc
    memory allocated for that function is returned.  */
 
 static int
-pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
+pre_expr_reaches_here_p (basic_block occr_bb, struct gcse_expr *expr, basic_block bb)
 {
   int rval;
   char *visited = XCNEWVEC (char, last_basic_block_for_fn (cfun));
@@ -2124,7 +2124,7 @@ pre_expr_reaches_here_p (basic_block occ
 /* Generate RTL to copy an EXPR to its `reaching_reg' and return it.  */
 
 static rtx_insn *
-process_insert_insn (struct expr *expr)
+process_insert_insn (struct gcse_expr *expr)
 {
   rtx reg = expr->reaching_reg;
   /* Copy the expression to make sure we don't have any sharing issues.  */
@@ -2159,7 +2159,7 @@ process_insert_insn (struct expr *expr)
    This is used by both the PRE and code hoisting.  */
 
 static void
-insert_insn_end_basic_block (struct expr *expr, basic_block bb)
+insert_insn_end_basic_block (struct gcse_expr *expr, basic_block bb)
 {
   rtx_insn *insn = BB_END (bb);
   rtx_insn *new_insn;
@@ -2259,7 +2259,7 @@ insert_insn_end_basic_block (struct expr
    the expressions fully redundant.  */
 
 static int
-pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
+pre_edge_insert (struct edge_list *edge_list, struct gcse_expr **index_map)
 {
   int e, i, j, num_edges, set_size, did_insert = 0;
   sbitmap *inserted;
@@ -2286,8 +2286,8 @@ pre_edge_insert (struct edge_list *edge_
 	       j++, insert >>= 1)
 	    if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
 	      {
-		struct expr *expr = index_map[j];
-		struct occr *occr;
+		struct gcse_expr *expr = index_map[j];
+		struct gcse_occr *occr;
 
 		/* Now look at each deleted occurrence of this expression.  */
 		for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
@@ -2356,7 +2356,7 @@ pre_edge_insert (struct edge_list *edge_
      MEM          <- reaching_reg.  */
 
 static void
-pre_insert_copy_insn (struct expr *expr, rtx_insn *insn)
+pre_insert_copy_insn (struct gcse_expr *expr, rtx_insn *insn)
 {
   rtx reg = expr->reaching_reg;
   int regno = REGNO (reg);
@@ -2448,9 +2448,9 @@ static void
 pre_insert_copies (void)
 {
   unsigned int i, added_copy;
-  struct expr *expr;
-  struct occr *occr;
-  struct occr *avail;
+  struct gcse_expr *expr;
+  struct gcse_occr *occr;
+  struct gcse_occr *avail;
 
   /* For each available expression in the table, copy the result to
      `reaching_reg' if the expression reaches a deleted one.
@@ -2614,8 +2614,8 @@ pre_delete (void)
 {
   unsigned int i;
   int changed;
-  struct expr *expr;
-  struct occr *occr;
+  struct gcse_expr *expr;
+  struct gcse_occr *occr;
 
   changed = 0;
   for (i = 0; i < expr_hash_table.size; i++)
@@ -2687,13 +2687,13 @@ pre_gcse (struct edge_list *edge_list)
 {
   unsigned int i;
   int did_insert, changed;
-  struct expr **index_map;
-  struct expr *expr;
+  struct gcse_expr **index_map;
+  struct gcse_expr *expr;
 
   /* Compute a mapping from expression number (`bitmap_index') to
      hash table entry.  */
 
-  index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
+  index_map = XCNEWVEC (struct gcse_expr *, expr_hash_table.n_elems);
   for (i = 0; i < expr_hash_table.size; i++)
     for (expr = expr_hash_table.table[i]; expr; expr = expr->next_same_hash)
       index_map[expr->bitmap_index] = expr;
@@ -3042,7 +3042,7 @@ update_bb_reg_pressure (basic_block bb,
    paths.  */
 
 static int
-should_hoist_expr_to_dom (basic_block expr_bb, struct expr *expr,
+should_hoist_expr_to_dom (basic_block expr_bb, struct gcse_expr *expr,
 			  basic_block bb, sbitmap visited, int distance,
 			  int *bb_size, enum reg_class pressure_class,
 			  int *nregs, bitmap hoisted_bbs, rtx_insn *from)
@@ -3150,8 +3150,8 @@ should_hoist_expr_to_dom (basic_block ex
 
 /* Find occurrence in BB.  */
 
-static struct occr *
-find_occr_in_bb (struct occr *occr, basic_block bb)
+static struct gcse_occr *
+find_occr_in_bb (struct gcse_occr *occr, basic_block bb)
 {
   /* Find the right occurrence of this expression.  */
   while (occr && BLOCK_FOR_INSN (occr->insn) != bb)
@@ -3212,8 +3212,8 @@ hoist_code (void)
   unsigned int dom_tree_walk_index;
   vec<basic_block> domby;
   unsigned int i, j, k;
-  struct expr **index_map;
-  struct expr *expr;
+  struct gcse_expr **index_map;
+  struct gcse_expr *expr;
   int *to_bb_head;
   int *bb_size;
   int changed = 0;
@@ -3227,7 +3227,7 @@ hoist_code (void)
   /* Compute a mapping from expression number (`bitmap_index') to
      hash table entry.  */
 
-  index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
+  index_map = XCNEWVEC (struct gcse_expr *, expr_hash_table.n_elems);
   for (i = 0; i < expr_hash_table.size; i++)
     for (expr = expr_hash_table.table[i]; expr; expr = expr->next_same_hash)
       index_map[expr->bitmap_index] = expr;
@@ -3285,7 +3285,7 @@ hoist_code (void)
 	      int nregs = 0;
 	      enum reg_class pressure_class = NO_REGS;
 	      /* Current expression.  */
-	      struct expr *expr = index_map[i];
+	      struct gcse_expr *expr = index_map[i];
 	      /* Number of occurrences of EXPR that can be hoisted to BB.  */
 	      int hoistable = 0;
 	      /* Occurrences reachable from BB.  */
@@ -4028,7 +4028,7 @@ trim_ld_motion_mems (void)
 
   while (ptr != NULL)
     {
-      struct expr * expr;
+      struct gcse_expr * expr;
 
       /* Delete if entry has been made invalid.  */
       if (! ptr->invalid)
@@ -4043,7 +4043,7 @@ trim_ld_motion_mems (void)
 	      break;
 	}
       else
-	expr = (struct expr *) 0;
+	expr = (struct gcse_expr *) 0;
 
       if (expr)
 	{
@@ -4074,7 +4074,7 @@ trim_ld_motion_mems (void)
    correct value in the reaching register for the loads.  */
 
 static void
-update_ld_motion_stores (struct expr * expr)
+update_ld_motion_stores (struct gcse_expr * expr)
 {
   struct ls_expr * mem_ptr;
 
Index: tree-ssa-copy.c
===================================================================
--- tree-ssa-copy.c	(revision 215328)
+++ tree-ssa-copy.c	(working copy)
@@ -76,11 +76,10 @@ along with GCC; see the file COPYING3.
    When visiting a statement or PHI node the lattice value for an
    SSA name can transition from UNDEFINED to COPY to VARYING.  */
 
-struct prop_value_d {
+struct prop_value_t {
     /* Copy-of value.  */
     tree value;
 };
-typedef struct prop_value_d prop_value_t;
 
 static prop_value_t *copy_of;
 static unsigned n_copy_of;
Index: predict.c
===================================================================
--- predict.c	(revision 215328)
+++ predict.c	(working copy)
@@ -2496,7 +2496,7 @@ predict_paths_leading_to_edge (edge e, e
 /* This is used to carry information about basic blocks.  It is
    attached to the AUX field of the standard CFG block.  */
 
-typedef struct block_info_def
+struct block_info
 {
   /* Estimated frequency of execution of basic_block.  */
   sreal frequency;
@@ -2506,10 +2506,10 @@ typedef struct block_info_def
 
   /* Number of predecessors we need to visit first.  */
   int npredecessors;
-} *block_info;
+};
 
 /* Similar information for edges.  */
-typedef struct edge_info_def
+struct edge_info
 {
   /* In case edge is a loopback edge, the probability edge will be reached
      in case header is.  Estimated number of iterations of the loop can be
@@ -2517,10 +2517,10 @@ typedef struct edge_info_def
   sreal back_edge_prob;
   /* True if the edge is a loopback edge in the natural loop.  */
   unsigned int back_edge:1;
-} *edge_info;
+};
 
-#define BLOCK_INFO(B)	((block_info) (B)->aux)
-#define EDGE_INFO(E)	((edge_info) (E)->aux)
+#define BLOCK_INFO(B)	((block_info *) (B)->aux)
+#define EDGE_INFO(E)	((edge_info *) (E)->aux)
 
 /* Helper function for estimate_bb_frequencies.
    Propagate the frequencies in blocks marked in
@@ -2935,8 +2935,8 @@ estimate_bb_frequencies (bool force)
 	 REG_BR_PROB_BASE;
 
       /* Set up block info for each basic block.  */
-      alloc_aux_for_blocks (sizeof (struct block_info_def));
-      alloc_aux_for_edges (sizeof (struct edge_info_def));
+      alloc_aux_for_blocks (sizeof (block_info));
+      alloc_aux_for_edges (sizeof (edge_info));
       FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
 	{
 	  edge e;
Index: profile.c
===================================================================
--- profile.c	(revision 215328)
+++ profile.c	(working copy)
@@ -74,7 +74,7 @@ along with GCC; see the file COPYING3.
 
 #include "profile.h"
 
-struct bb_info {
+struct bb_profile_info {
   unsigned int count_valid : 1;
 
   /* Number of successor and predecessor edges.  */
@@ -82,7 +82,7 @@ struct bb_info {
   gcov_type pred_count;
 };
 
-#define BB_INFO(b)  ((struct bb_info *) (b)->aux)
+#define BB_INFO(b)  ((struct bb_profile_info *) (b)->aux)
 
 
 /* Counter summary from the last set of coverage counts read.  */
@@ -128,7 +128,7 @@ instrument_edges (struct edge_list *el)
 
       FOR_EACH_EDGE (e, ei, bb->succs)
 	{
-	  struct edge_info *inf = EDGE_INFO (e);
+	  struct edge_profile_info *inf = EDGE_INFO (e);
 
 	  if (!inf->ignore && !inf->on_tree)
 	    {
@@ -542,7 +542,7 @@ compute_branch_probabilities (unsigned c
     }
 
   /* Attach extra info block to each bb.  */
-  alloc_aux_for_blocks (sizeof (struct bb_info));
+  alloc_aux_for_blocks (sizeof (struct bb_profile_info));
   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     {
       edge e;
@@ -590,7 +590,7 @@ compute_branch_probabilities (unsigned c
       changes = 0;
       FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, prev_bb)
 	{
-	  struct bb_info *bi = BB_INFO (bb);
+	  struct bb_profile_info *bi = BB_INFO (bb);
 	  if (! bi->count_valid)
 	    {
 	      if (bi->succ_count == 0)
@@ -1129,7 +1129,7 @@ branch_prob (void)
 
   el = create_edge_list ();
   num_edges = NUM_EDGES (el);
-  alloc_aux_for_edges (sizeof (struct edge_info));
+  alloc_aux_for_edges (sizeof (struct edge_profile_info));
 
   /* The basic blocks are expected to be numbered sequentially.  */
   compact_blocks ();
@@ -1161,7 +1161,7 @@ branch_prob (void)
   for (num_instrumented = i = 0; i < num_edges; i++)
     {
       edge e = INDEX_EDGE (el, i);
-      struct edge_info *inf = EDGE_INFO (e);
+      struct edge_profile_info *inf = EDGE_INFO (e);
 
       if (inf->ignore || inf->on_tree)
 	/*NOP*/;
@@ -1221,7 +1221,7 @@ branch_prob (void)
 
 	  FOR_EACH_EDGE (e, ei, bb->succs)
 	    {
-	      struct edge_info *i = EDGE_INFO (e);
+	      struct edge_profile_info *i = EDGE_INFO (e);
 	      if (!i->ignore)
 		{
 		  unsigned flag_bits = 0;
Index: profile.h
===================================================================
--- profile.h	(revision 215328)
+++ profile.h	(working copy)
@@ -23,7 +23,7 @@ along with GCC; see the file COPYING3.
 #define PROFILE_H
 
 /* Additional information about edges. */
-struct edge_info
+struct edge_profile_info
 {
   unsigned int count_valid:1;
 
@@ -35,7 +35,7 @@ struct edge_info
   unsigned int ignore:1;
 };
 
-#define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
+#define EDGE_INFO(e)  ((struct edge_profile_info *) (e)->aux)
 
 /* Smoothes the initial assigned basic block and edge counts using
    a minimum cost flow algorithm. */
Index: alloc-pool.c
===================================================================
--- alloc-pool.c	(revision 215328)
+++ alloc-pool.c	(working copy)
@@ -339,7 +339,7 @@ pool_free (alloc_pool pool, void *ptr)
 /* Output per-alloc_pool statistics.  */
 
 /* Used to accumulate statistics about alloc_pool sizes.  */
-struct output_info
+struct pool_output_info
 {
   unsigned long total_created;
   unsigned long total_allocated;
@@ -350,7 +350,7 @@ struct output_info
 bool
 print_alloc_pool_statistics (const char *const &name,
 			     const alloc_pool_descriptor &d,
-			     struct output_info *i)
+			     struct pool_output_info *i)
 {
   if (d.allocated)
     {
@@ -369,7 +369,7 @@ print_alloc_pool_statistics (const char
 void
 dump_alloc_pool_statistics (void)
 {
-  struct output_info info;
+  struct pool_output_info info;
 
   if (! GATHER_STATISTICS)
     return;
@@ -381,7 +381,7 @@ dump_alloc_pool_statistics (void)
   fprintf (stderr, "--------------------------------------------------------------------------------------------------------------\n");
   info.total_created = 0;
   info.total_allocated = 0;
-  alloc_pool_hash->traverse <struct output_info *,
+  alloc_pool_hash->traverse <struct pool_output_info *,
 			     print_alloc_pool_statistics> (&info);
   fprintf (stderr, "--------------------------------------------------------------------------------------------------------------\n");
   fprintf (stderr, "%-22s           %7lu %10lu\n",


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]