This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


[PATCH] remove many typedefs


From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

Hi,

Just more work getting rid of typedefs that are unnecessary and that sometimes
hide the pointerness of a type.
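
The kind of change involved is roughly the following (a made-up sketch, not
lifted from any of the files below):

  /* Illustrative only; the names here are invented and do not come from
     any of the touched files.  */

  /* Before: the typedef hides that elements of the vector are pointers.  */
  struct foo_d
  {
    int bar;
  };
  typedef struct foo_d *foo_p;
  static vec<foo_p> worklist;

  /* After: name the struct plainly and spell the pointer out.  */
  struct foo
  {
    int bar;
  };
  static vec<foo *> worklist;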

Bootstrapped and regtested on x86_64-linux-gnu.  I believe this is all
preapproved, so I'll commit it in a day or so if nobody decides to bikeshed
anything.

Trev


gcc/ChangeLog:

2015-09-29  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* cfganal.c, compare-elim.c, coverage.c, cprop.c, df-scan.c,
	function.c, read-rtl.c, statistics.c, trans-mem.c, tree-if-conv.c,
	tree-into-ssa.c, tree-loop-distribution.c, tree-ssa-coalesce.c,
	tree-ssa-loop-ivopts.c, tree-ssa-reassoc.c, tree-ssa-strlen.c,
	tree-ssa-tail-merge.c, tree-vrp.c, var-tracking.c: Remove
	unneeded typedefs.
---
 gcc/cfganal.c                |  21 ++-
 gcc/compare-elim.c           |   4 +-
 gcc/coverage.c               |  10 +-
 gcc/cprop.c                  |   2 -
 gcc/df-scan.c                |  14 +-
 gcc/function.c               |   6 +-
 gcc/read-rtl.c               |   5 +-
 gcc/statistics.c             |  50 +++---
 gcc/trans-mem.c              |  44 +++--
 gcc/tree-if-conv.c           |  16 +-
 gcc/tree-into-ssa.c          |  88 ++++-----
 gcc/tree-loop-distribution.c |  72 ++++----
 gcc/tree-ssa-coalesce.c      | 139 +++++++-------
 gcc/tree-ssa-loop-ivopts.c   |  18 +-
 gcc/tree-ssa-reassoc.c       | 147 ++++++++-------
 gcc/tree-ssa-strlen.c        | 100 +++++------
 gcc/tree-ssa-tail-merge.c    | 100 +++++------
 gcc/tree-vrp.c               | 254 +++++++++++++-------------
 gcc/var-tracking.c           | 419 ++++++++++++++++++++++---------------------
 19 files changed, 733 insertions(+), 776 deletions(-)

diff --git a/gcc/cfganal.c b/gcc/cfganal.c
index 3a9174c..279c3b5 100644
--- a/gcc/cfganal.c
+++ b/gcc/cfganal.c
@@ -29,7 +29,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "timevar.h"
 
 /* Store the data structures necessary for depth-first search.  */
-struct depth_first_search_dsS {
+struct depth_first_search_ds {
   /* stack for backtracking during the algorithm */
   basic_block *stack;
 
@@ -40,14 +40,13 @@ struct depth_first_search_dsS {
   /* record of basic blocks already seen by depth-first search */
   sbitmap visited_blocks;
 };
-typedef struct depth_first_search_dsS *depth_first_search_ds;
 
-static void flow_dfs_compute_reverse_init (depth_first_search_ds);
-static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds,
+static void flow_dfs_compute_reverse_init (depth_first_search_ds *);
+static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds *,
 					     basic_block);
-static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds,
+static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds *,
 						     basic_block);
-static void flow_dfs_compute_reverse_finish (depth_first_search_ds);
+static void flow_dfs_compute_reverse_finish (depth_first_search_ds *);
 
 /* Mark the back edges in DFS traversal.
    Return nonzero if a loop (natural or otherwise) is present.
@@ -575,7 +574,7 @@ connect_infinite_loops_to_exit (void)
 {
   basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
   basic_block deadend_block;
-  struct depth_first_search_dsS dfs_ds;
+  depth_first_search_ds dfs_ds;
 
   /* Perform depth-first search in the reverse graph to find nodes
      reachable from the exit block.  */
@@ -1055,7 +1054,7 @@ pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
    element on the stack.  */
 
 static void
-flow_dfs_compute_reverse_init (depth_first_search_ds data)
+flow_dfs_compute_reverse_init (depth_first_search_ds *data)
 {
   /* Allocate stack for back-tracking up CFG.  */
   data->stack = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
@@ -1075,7 +1074,7 @@ flow_dfs_compute_reverse_init (depth_first_search_ds data)
    block.  */
 
 static void
-flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
+flow_dfs_compute_reverse_add_bb (depth_first_search_ds *data, basic_block bb)
 {
   data->stack[data->sp++] = bb;
   bitmap_set_bit (data->visited_blocks, bb->index);
@@ -1087,7 +1086,7 @@ flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
    available.  */
 
 static basic_block
-flow_dfs_compute_reverse_execute (depth_first_search_ds data,
+flow_dfs_compute_reverse_execute (depth_first_search_ds *data,
 				  basic_block last_unvisited)
 {
   basic_block bb;
@@ -1116,7 +1115,7 @@ flow_dfs_compute_reverse_execute (depth_first_search_ds data,
    reverse graph.  */
 
 static void
-flow_dfs_compute_reverse_finish (depth_first_search_ds data)
+flow_dfs_compute_reverse_finish (depth_first_search_ds *data)
 {
   free (data->stack);
   sbitmap_free (data->visited_blocks);
diff --git a/gcc/compare-elim.c b/gcc/compare-elim.c
index b65d09e..08e070c 100644
--- a/gcc/compare-elim.c
+++ b/gcc/compare-elim.c
@@ -121,9 +121,7 @@ struct comparison
   bool inputs_valid;
 };
   
-typedef struct comparison *comparison_struct_p;
-
-static vec<comparison_struct_p> all_compares;
+static vec<comparison *> all_compares;
 
 /* Look for a "conforming" comparison, as defined above.  If valid, return
    the rtx for the COMPARE itself.  */
diff --git a/gcc/coverage.c b/gcc/coverage.c
index 3c165a4..4e08e5f 100644
--- a/gcc/coverage.c
+++ b/gcc/coverage.c
@@ -77,7 +77,7 @@ struct GTY((chain_next ("%h.next"))) coverage_data
 };
 
 /* Counts information for a function.  */
-typedef struct counts_entry : pointer_hash <counts_entry>
+struct counts_entry : pointer_hash <counts_entry>
 {
   /* We hash by  */
   unsigned ident;
@@ -93,7 +93,7 @@ typedef struct counts_entry : pointer_hash <counts_entry>
   static inline hashval_t hash (const counts_entry *);
   static int equal (const counts_entry *, const counts_entry *);
   static void remove (counts_entry *);
-} counts_entry_t;
+};
 
 static GTY(()) struct coverage_data *functions_head = 0;
 static struct coverage_data **functions_tail = &functions_head;
@@ -279,7 +279,7 @@ read_counts_file (void)
 	}
       else if (GCOV_TAG_IS_COUNTER (tag) && fn_ident)
 	{
-	  counts_entry_t **slot, *entry, elt;
+	  counts_entry **slot, *entry, elt;
 	  unsigned n_counts = GCOV_TAG_COUNTER_NUM (length);
 	  unsigned ix;
 
@@ -290,7 +290,7 @@ read_counts_file (void)
 	  entry = *slot;
 	  if (!entry)
 	    {
-	      *slot = entry = XCNEW (counts_entry_t);
+	      *slot = entry = XCNEW (counts_entry);
 	      entry->ident = fn_ident;
 	      entry->ctr = elt.ctr;
 	      entry->lineno_checksum = lineno_checksum;
@@ -358,7 +358,7 @@ get_coverage_counts (unsigned counter, unsigned expected,
                      unsigned cfg_checksum, unsigned lineno_checksum,
 		     const struct gcov_ctr_summary **summary)
 {
-  counts_entry_t *entry, elt;
+  counts_entry *entry, elt;
 
   /* No hash table, no counts.  */
   if (!counts_hash)
diff --git a/gcc/cprop.c b/gcc/cprop.c
index 147ab16..28e9e54 100644
--- a/gcc/cprop.c
+++ b/gcc/cprop.c
@@ -71,8 +71,6 @@ struct cprop_occr
   rtx_insn *insn;
 };
 
-typedef struct cprop_occr *occr_t;
-
 /* Hash table entry for assignment expressions.  */
 
 struct cprop_expr
diff --git a/gcc/df-scan.c b/gcc/df-scan.c
index eea93df..7a22b10 100644
--- a/gcc/df-scan.c
+++ b/gcc/df-scan.c
@@ -40,9 +40,6 @@ along with GCC; see the file COPYING3.  If not see
 #include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
 
 
-typedef struct df_mw_hardreg *df_mw_hardreg_ptr;
-
-
 /* The set of hard registers in eliminables[i].from. */
 
 static HARD_REG_SET elim_reg_set;
@@ -55,7 +52,7 @@ struct df_collection_rec
   auto_vec<df_ref, 128> def_vec;
   auto_vec<df_ref, 32> use_vec;
   auto_vec<df_ref, 32> eq_use_vec;
-  auto_vec<df_mw_hardreg_ptr, 32> mw_vec;
+  auto_vec<df_mw_hardreg *, 32> mw_vec;
 };
 
 static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
@@ -147,9 +144,6 @@ struct df_scan_problem_data
   bitmap_obstack insn_bitmaps;
 };
 
-typedef struct df_scan_bb_info *df_scan_bb_info_t;
-
-
 /* Internal function to shut down the scanning problem.  */
 static void
 df_scan_free_internal (void)
@@ -2241,7 +2235,7 @@ df_mw_ptr_compare (const void *m1, const void *m2)
 /* Sort and compress a set of refs.  */
 
 static void
-df_sort_and_compress_mws (vec<df_mw_hardreg_ptr, va_heap> *mw_vec)
+df_sort_and_compress_mws (vec<df_mw_hardreg *, va_heap> *mw_vec)
 {
   unsigned int count;
   struct df_scan_problem_data *problem_data
@@ -2405,7 +2399,7 @@ df_install_refs (basic_block bb,
    insn.  */
 
 static struct df_mw_hardreg *
-df_install_mws (const vec<df_mw_hardreg_ptr, va_heap> *old_vec)
+df_install_mws (const vec<df_mw_hardreg *, va_heap> *old_vec)
 {
   unsigned int count = old_vec->length ();
   if (count)
@@ -4059,7 +4053,7 @@ df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref old_rec,
 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
 
 static bool
-df_mws_verify (const vec<df_mw_hardreg_ptr, va_heap> *new_rec,
+df_mws_verify (const vec<df_mw_hardreg *, va_heap> *new_rec,
 	       struct df_mw_hardreg *old_rec,
 	       bool abort_if_fail)
 {
diff --git a/gcc/function.c b/gcc/function.c
index 9b4c2b9..2448158 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -160,9 +160,7 @@ static bool parm_in_unassigned_mem_p (tree, rtx);
 /* Stack of nested functions.  */
 /* Keep track of the cfun stack.  */
 
-typedef struct function *function_p;
-
-static vec<function_p> function_context_stack;
+static vec<function *> function_context_stack;
 
 /* Save the current context for compilation of a nested function.
    This is called from language-specific code.  */
@@ -4899,7 +4897,7 @@ set_cfun (struct function *new_cfun)
 
 /* Initialized with NOGC, making this poisonous to the garbage collector.  */
 
-static vec<function_p> cfun_stack;
+static vec<function *> cfun_stack;
 
 /* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
    current_function_decl accordingly.  */
diff --git a/gcc/read-rtl.c b/gcc/read-rtl.c
index ff08505..36e42cd 100644
--- a/gcc/read-rtl.c
+++ b/gcc/read-rtl.c
@@ -54,9 +54,6 @@ struct mapping {
   struct map_value *current_value;
 };
 
-/* Vector definitions for the above.  */
-typedef struct mapping *mapping_ptr;
-
 /* A structure for abstracting the common parts of iterators.  */
 struct iterator_group {
   /* Tables of "mapping" structures, one for attributes and one for
@@ -117,7 +114,7 @@ static rtx read_rtx_variadic (rtx);
 static struct iterator_group modes, codes, ints, substs;
 
 /* All iterators used in the current rtx.  */
-static vec<mapping_ptr> current_iterators;
+static vec<mapping *> current_iterators;
 
 /* The list of all iterator uses in the current rtx.  */
 static vec<iterator_use> iterator_uses;
diff --git a/gcc/statistics.c b/gcc/statistics.c
index 97884fa..8e3dc14 100644
--- a/gcc/statistics.c
+++ b/gcc/statistics.c
@@ -36,28 +36,28 @@ static FILE *statistics_dump_file;
 /* Statistics entry.  A integer counter associated to a string ID
    and value.  */
 
-typedef struct statistics_counter_s {
+struct statistics_counter {
   const char *id;
   int val;
   bool histogram_p;
   unsigned HOST_WIDE_INT count;
   unsigned HOST_WIDE_INT prev_dumped_count;
-} statistics_counter_t;
+};
 
 /* Hashtable helpers.  */
 
-struct stats_counter_hasher : pointer_hash <statistics_counter_t>
+struct stats_counter_hasher : pointer_hash <statistics_counter>
 {
-  static inline hashval_t hash (const statistics_counter_t *);
-  static inline bool equal (const statistics_counter_t *,
-			    const statistics_counter_t *);
-  static inline void remove (statistics_counter_t *);
+  static inline hashval_t hash (const statistics_counter *);
+  static inline bool equal (const statistics_counter *,
+			    const statistics_counter *);
+  static inline void remove (statistics_counter *);
 };
 
 /* Hash a statistic counter by its string ID.  */
 
 inline hashval_t
-stats_counter_hasher::hash (const statistics_counter_t *c)
+stats_counter_hasher::hash (const statistics_counter *c)
 {
   return htab_hash_string (c->id) + c->val;
 }
@@ -65,8 +65,8 @@ stats_counter_hasher::hash (const statistics_counter_t *c)
 /* Compare two statistic counters by their string IDs.  */
 
 inline bool
-stats_counter_hasher::equal (const statistics_counter_t *c1,
-			     const statistics_counter_t *c2)
+stats_counter_hasher::equal (const statistics_counter *c1,
+			     const statistics_counter *c2)
 {
   return c1->val == c2->val && strcmp (c1->id, c2->id) == 0;
 }
@@ -74,7 +74,7 @@ stats_counter_hasher::equal (const statistics_counter_t *c1,
 /* Free a statistics entry.  */
 
 inline void
-stats_counter_hasher::remove (statistics_counter_t *v)
+stats_counter_hasher::remove (statistics_counter *v)
 {
   free (CONST_CAST (char *, v->id));
   free (v);
@@ -120,10 +120,10 @@ curr_statistics_hash (void)
    since the last dump for the pass dump files.  */
 
 int
-statistics_fini_pass_1 (statistics_counter_t **slot,
+statistics_fini_pass_1 (statistics_counter **slot,
 			void *data ATTRIBUTE_UNUSED)
 {
-  statistics_counter_t *counter = *slot;
+  statistics_counter *counter = *slot;
   unsigned HOST_WIDE_INT count = counter->count - counter->prev_dumped_count;
   if (count == 0)
     return 1;
@@ -141,10 +141,10 @@ statistics_fini_pass_1 (statistics_counter_t **slot,
    since the last dump for the statistics dump.  */
 
 int
-statistics_fini_pass_2 (statistics_counter_t **slot,
+statistics_fini_pass_2 (statistics_counter **slot,
 			void *data ATTRIBUTE_UNUSED)
 {
-  statistics_counter_t *counter = *slot;
+  statistics_counter *counter = *slot;
   unsigned HOST_WIDE_INT count = counter->count - counter->prev_dumped_count;
   if (count == 0)
     return 1;
@@ -172,10 +172,10 @@ statistics_fini_pass_2 (statistics_counter_t **slot,
 /* Helper for statistics_fini_pass, reset the counters.  */
 
 int
-statistics_fini_pass_3 (statistics_counter_t **slot,
+statistics_fini_pass_3 (statistics_counter **slot,
 			void *data ATTRIBUTE_UNUSED)
 {
-  statistics_counter_t *counter = *slot;
+  statistics_counter *counter = *slot;
   counter->prev_dumped_count = counter->count;
   return 1;
 }
@@ -210,9 +210,9 @@ statistics_fini_pass (void)
 /* Helper for printing summary information.  */
 
 int
-statistics_fini_1 (statistics_counter_t **slot, opt_pass *pass)
+statistics_fini_1 (statistics_counter **slot, opt_pass *pass)
 {
-  statistics_counter_t *counter = *slot;
+  statistics_counter *counter = *slot;
   if (counter->count == 0)
     return 1;
   if (counter->histogram_p)
@@ -280,18 +280,18 @@ statistics_init (void)
 /* Lookup or add a statistics counter in the hashtable HASH with ID, VAL
    and HISTOGRAM_P.  */
 
-static statistics_counter_t *
+static statistics_counter *
 lookup_or_add_counter (stats_counter_table_type *hash, const char *id, int val,
 		       bool histogram_p)
 {
-  statistics_counter_t **counter;
-  statistics_counter_t c;
+  statistics_counter **counter;
+  statistics_counter c;
   c.id = id;
   c.val = val;
   counter = hash->find_slot (&c, INSERT);
   if (!*counter)
     {
-      *counter = XNEW (struct statistics_counter_s);
+      *counter = XNEW (statistics_counter);
       (*counter)->id = xstrdup (id);
       (*counter)->val = val;
       (*counter)->histogram_p = histogram_p;
@@ -308,7 +308,7 @@ lookup_or_add_counter (stats_counter_table_type *hash, const char *id, int val,
 void
 statistics_counter_event (struct function *fn, const char *id, int incr)
 {
-  statistics_counter_t *counter;
+  statistics_counter *counter;
 
   if ((!(dump_flags & TDF_STATS)
        && !statistics_dump_file)
@@ -342,7 +342,7 @@ statistics_counter_event (struct function *fn, const char *id, int incr)
 void
 statistics_histogram_event (struct function *fn, const char *id, int val)
 {
-  statistics_counter_t *counter;
+  statistics_counter *counter;
 
   if (!(dump_flags & TDF_STATS)
       && !statistics_dump_file)
diff --git a/gcc/trans-mem.c b/gcc/trans-mem.c
index d9a681f..5b43d86 100644
--- a/gcc/trans-mem.c
+++ b/gcc/trans-mem.c
@@ -927,7 +927,7 @@ make_pass_diagnose_tm_blocks (gcc::context *ctxt)
 /* One individual log entry.  We may have multiple statements for the
    same location if neither dominate each other (on different
    execution paths).  */
-typedef struct tm_log_entry
+struct tm_log_entry
 {
   /* Address to save.  */
   tree addr;
@@ -940,7 +940,7 @@ typedef struct tm_log_entry
      save/restore sequence.  Later, when generating the save sequence
      we place the SSA temp generated here.  */
   tree save_var;
-} *tm_log_entry_t;
+};
 
 
 /* Log entry hashtable helpers.  */
@@ -1009,29 +1009,29 @@ enum thread_memory_type
     mem_max
   };
 
-typedef struct tm_new_mem_map
+struct tm_new_mem_map
 {
   /* SSA_NAME being dereferenced.  */
   tree val;
   enum thread_memory_type local_new_memory;
-} tm_new_mem_map_t;
+};
 
 /* Hashtable helpers.  */
 
-struct tm_mem_map_hasher : free_ptr_hash <tm_new_mem_map_t>
+struct tm_mem_map_hasher : free_ptr_hash <tm_new_mem_map>
 {
-  static inline hashval_t hash (const tm_new_mem_map_t *);
-  static inline bool equal (const tm_new_mem_map_t *, const tm_new_mem_map_t *);
+  static inline hashval_t hash (const tm_new_mem_map *);
+  static inline bool equal (const tm_new_mem_map *, const tm_new_mem_map *);
 };
 
 inline hashval_t
-tm_mem_map_hasher::hash (const tm_new_mem_map_t *v)
+tm_mem_map_hasher::hash (const tm_new_mem_map *v)
 {
   return (intptr_t)v->val >> 4;
 }
 
 inline bool
-tm_mem_map_hasher::equal (const tm_new_mem_map_t *v, const tm_new_mem_map_t *c)
+tm_mem_map_hasher::equal (const tm_new_mem_map *v, const tm_new_mem_map *c)
 {
   return v->val == c->val;
 }
@@ -1362,8 +1362,8 @@ thread_private_new_memory (basic_block entry_block, tree x)
 {
   gimple *stmt = NULL;
   enum tree_code code;
-  tm_new_mem_map_t **slot;
-  tm_new_mem_map_t elt, *elt_p;
+  tm_new_mem_map **slot;
+  tm_new_mem_map elt, *elt_p;
   tree val = x;
   enum thread_memory_type retval = mem_transaction_local;
 
@@ -1383,7 +1383,7 @@ thread_private_new_memory (basic_block entry_block, tree x)
 
   /* Optimistically assume the memory is transaction local during
      processing.  This catches recursion into this variable.  */
-  *slot = elt_p = XNEW (tm_new_mem_map_t);
+  *slot = elt_p = XNEW (tm_new_mem_map);
   elt_p->val = val;
   elt_p->local_new_memory = mem_transaction_local;
 
@@ -1864,8 +1864,6 @@ public:
   bitmap irr_blocks;
 };
 
-typedef struct tm_region *tm_region_p;
-
 /* True if there are pending edge statements to be committed for the
    current function being scanned in the tmmark pass.  */
 bool pending_edge_inserts_p;
@@ -1970,7 +1968,7 @@ tm_region_init (struct tm_region *region)
   auto_vec<basic_block> queue;
   bitmap visited_blocks = BITMAP_ALLOC (NULL);
   struct tm_region *old_region;
-  auto_vec<tm_region_p> bb_regions;
+  auto_vec<tm_region *> bb_regions;
 
   all_tm_regions = region;
   bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
@@ -2594,7 +2592,7 @@ get_tm_region_blocks (basic_block entry_block,
 // Callback data for collect_bb2reg.
 struct bb2reg_stuff
 {
-  vec<tm_region_p> *bb2reg;
+  vec<tm_region *> *bb2reg;
   bool include_uninstrumented_p;
 };
 
@@ -2603,7 +2601,7 @@ static void *
 collect_bb2reg (struct tm_region *region, void *data)
 {
   struct bb2reg_stuff *stuff = (struct bb2reg_stuff *)data;
-  vec<tm_region_p> *bb2reg = stuff->bb2reg;
+  vec<tm_region *> *bb2reg = stuff->bb2reg;
   vec<basic_block> queue;
   unsigned int i;
   basic_block bb;
@@ -2647,13 +2645,13 @@ collect_bb2reg (struct tm_region *region, void *data)
 // ??? There is currently a hack inside tree-ssa-pre.c to work around the
 // only known instance of this block sharing.
 
-static vec<tm_region_p>
+static vec<tm_region *>
 get_bb_regions_instrumented (bool traverse_clones,
 			     bool include_uninstrumented_p)
 {
   unsigned n = last_basic_block_for_fn (cfun);
   struct bb2reg_stuff stuff;
-  vec<tm_region_p> ret;
+  vec<tm_region *> ret;
 
   ret.create (n);
   ret.safe_grow_cleared (n);
@@ -2986,7 +2984,7 @@ execute_tm_mark (void)
 
   tm_log_init ();
 
-  vec<tm_region_p> bb_regions
+  vec<tm_region *> bb_regions
     = get_bb_regions_instrumented (/*traverse_clones=*/true,
 				   /*include_uninstrumented_p=*/false);
   struct tm_region *r;
@@ -3223,7 +3221,7 @@ public:
 unsigned int
 pass_tm_edges::execute (function *fun)
 {
-  vec<tm_region_p> bb_regions
+  vec<tm_region *> bb_regions
     = get_bb_regions_instrumented (/*traverse_clones=*/false,
 				   /*include_uninstrumented_p=*/true);
   struct tm_region *r;
@@ -3307,13 +3305,13 @@ expand_regions (struct tm_region *region,
 
 
 /* A unique TM memory operation.  */
-typedef struct tm_memop
+struct tm_memop
 {
   /* Unique ID that all memory operations to the same location have.  */
   unsigned int value_id;
   /* Address of load/store.  */
   tree addr;
-} *tm_memop_t;
+};
 
 /* TM memory operation hashtable helpers.  */
 
diff --git a/gcc/tree-if-conv.c b/gcc/tree-if-conv.c
index 25c9599..f201ab5 100644
--- a/gcc/tree-if-conv.c
+++ b/gcc/tree-if-conv.c
@@ -131,7 +131,7 @@ static bool aggressive_if_conv;
 
 /* Structure used to predicate basic blocks.  This is attached to the
    ->aux field of the BBs in the loop to be if-converted.  */
-typedef struct bb_predicate_s {
+struct bb_predicate {
 
   /* The condition under which this basic block is executed.  */
   tree predicate;
@@ -140,7 +140,7 @@ typedef struct bb_predicate_s {
      recorded here, in order to avoid the duplication of computations
      that occur in previous conditions.  See PR44483.  */
   gimple_seq predicate_gimplified_stmts;
-} *bb_predicate_p;
+};
 
 /* Returns true when the basic block BB has a predicate.  */
 
@@ -155,7 +155,7 @@ bb_has_predicate (basic_block bb)
 static inline tree
 bb_predicate (basic_block bb)
 {
-  return ((bb_predicate_p) bb->aux)->predicate;
+  return ((struct bb_predicate *) bb->aux)->predicate;
 }
 
 /* Sets the gimplified predicate COND for basic block BB.  */
@@ -166,7 +166,7 @@ set_bb_predicate (basic_block bb, tree cond)
   gcc_assert ((TREE_CODE (cond) == TRUTH_NOT_EXPR
 	       && is_gimple_condexpr (TREE_OPERAND (cond, 0)))
 	      || is_gimple_condexpr (cond));
-  ((bb_predicate_p) bb->aux)->predicate = cond;
+  ((struct bb_predicate *) bb->aux)->predicate = cond;
 }
 
 /* Returns the sequence of statements of the gimplification of the
@@ -175,7 +175,7 @@ set_bb_predicate (basic_block bb, tree cond)
 static inline gimple_seq
 bb_predicate_gimplified_stmts (basic_block bb)
 {
-  return ((bb_predicate_p) bb->aux)->predicate_gimplified_stmts;
+  return ((struct bb_predicate *) bb->aux)->predicate_gimplified_stmts;
 }
 
 /* Sets the sequence of statements STMTS of the gimplification of the
@@ -184,7 +184,7 @@ bb_predicate_gimplified_stmts (basic_block bb)
 static inline void
 set_bb_predicate_gimplified_stmts (basic_block bb, gimple_seq stmts)
 {
-  ((bb_predicate_p) bb->aux)->predicate_gimplified_stmts = stmts;
+  ((struct bb_predicate *) bb->aux)->predicate_gimplified_stmts = stmts;
 }
 
 /* Adds the sequence of statements STMTS to the sequence of statements
@@ -194,7 +194,7 @@ static inline void
 add_bb_predicate_gimplified_stmts (basic_block bb, gimple_seq stmts)
 {
   gimple_seq_add_seq
-    (&(((bb_predicate_p) bb->aux)->predicate_gimplified_stmts), stmts);
+    (&(((struct bb_predicate *) bb->aux)->predicate_gimplified_stmts), stmts);
 }
 
 /* Initializes to TRUE the predicate of basic block BB.  */
@@ -202,7 +202,7 @@ add_bb_predicate_gimplified_stmts (basic_block bb, gimple_seq stmts)
 static inline void
 init_bb_predicate (basic_block bb)
 {
-  bb->aux = XNEW (struct bb_predicate_s);
+  bb->aux = XNEW (struct bb_predicate);
   set_bb_predicate_gimplified_stmts (bb, NULL);
   set_bb_predicate (bb, boolean_true_node);
 }
diff --git a/gcc/tree-into-ssa.c b/gcc/tree-into-ssa.c
index a19f4e3..9fd698d 100644
--- a/gcc/tree-into-ssa.c
+++ b/gcc/tree-into-ssa.c
@@ -65,7 +65,7 @@ along with GCC; see the file COPYING3.  If not see
 
 /* Structure to map a variable VAR to the set of blocks that contain
    definitions for VAR.  */
-struct def_blocks_d
+struct def_blocks
 {
   /* Blocks that contain definitions of VAR.  Bit I will be set if the
      Ith block contains a definition of VAR.  */
@@ -79,9 +79,6 @@ struct def_blocks_d
   bitmap livein_blocks;
 };
 
-typedef struct def_blocks_d *def_blocks_p;
-
-
 /* Stack of trees used to restore the global currdefs to its original
    state after completing rewriting of a block and its dominator
    children.  Its elements have the following properties:
@@ -169,7 +166,7 @@ enum need_phi_state {
 };
 
 /* Information stored for both SSA names and decls.  */
-struct common_info_d
+struct common_info
 {
   /* This field indicates whether or not the variable may need PHI nodes.
      See the enum's definition for more detailed information about the
@@ -180,29 +177,23 @@ struct common_info_d
   tree current_def;
 
   /* Definitions for this var.  */
-  struct def_blocks_d def_blocks;
+  struct def_blocks def_blocks;
 };
 
-/* The information associated with decls and SSA names.  */
-typedef struct common_info_d *common_info_p;
-
 /* Information stored for decls.  */
-struct var_info_d
+struct var_info
 {
   /* The variable.  */
   tree var;
 
   /* Information stored for both SSA names and decls.  */
-  struct common_info_d info;
+  common_info info;
 };
 
-/* The information associated with decls.  */
-typedef struct var_info_d *var_info_p;
-
 
 /* VAR_INFOS hashtable helpers.  */
 
-struct var_info_hasher : free_ptr_hash <var_info_d>
+struct var_info_hasher : free_ptr_hash <var_info>
 {
   static inline hashval_t hash (const value_type &);
   static inline bool equal (const value_type &, const compare_type &);
@@ -238,13 +229,10 @@ struct ssa_name_info
   bitmap repl_set;
 
   /* Information stored for both SSA names and decls.  */
-  struct common_info_d info;
+  common_info info;
 };
 
-/* The information associated with names.  */
-typedef struct ssa_name_info *ssa_name_info_p;
-
-static vec<ssa_name_info_p> info_for_ssa_name;
+static vec<ssa_name_info *> info_for_ssa_name;
 static unsigned current_info_for_ssa_name_age;
 
 static bitmap_obstack update_ssa_obstack;
@@ -339,7 +327,7 @@ set_register_defs (gimple *stmt, bool register_defs_p)
 
 /* Get the information associated with NAME.  */
 
-static inline ssa_name_info_p
+static inline ssa_name_info *
 get_ssa_name_ann (tree name)
 {
   unsigned ver = SSA_NAME_VERSION (name);
@@ -376,16 +364,16 @@ get_ssa_name_ann (tree name)
 
 /* Return and allocate the auxiliar information for DECL.  */
 
-static inline var_info_p
+static inline var_info *
 get_var_info (tree decl)
 {
-  struct var_info_d vi;
-  var_info_d **slot;
+  var_info vi;
+  var_info **slot;
   vi.var = decl;
   slot = var_infos->find_slot_with_hash (&vi, DECL_UID (decl), INSERT);
   if (*slot == NULL)
     {
-      var_info_p v = XCNEW (struct var_info_d);
+      var_info *v = XCNEW (var_info);
       v->var = decl;
       *slot = v;
       return v;
@@ -409,7 +397,7 @@ clear_ssa_name_info (void)
 
 /* Get access to the auxiliar information stored per SSA name or decl.  */
 
-static inline common_info_p
+static inline common_info *
 get_common_info (tree var)
 {
   if (TREE_CODE (var) == SSA_NAME)
@@ -480,10 +468,10 @@ mark_block_for_update (basic_block bb)
    where VAR is live on entry (livein).  If no entry is found in
    DEF_BLOCKS, a new one is created and returned.  */
 
-static inline struct def_blocks_d *
-get_def_blocks_for (common_info_p info)
+static inline def_blocks *
+get_def_blocks_for (common_info *info)
 {
-  struct def_blocks_d *db_p = &info->def_blocks;
+  def_blocks *db_p = &info->def_blocks;
   if (!db_p->def_blocks)
     {
       db_p->def_blocks = BITMAP_ALLOC (&update_ssa_obstack);
@@ -501,8 +489,8 @@ get_def_blocks_for (common_info_p info)
 static void
 set_def_block (tree var, basic_block bb, bool phi_p)
 {
-  struct def_blocks_d *db_p;
-  common_info_p info;
+  def_blocks *db_p;
+  common_info *info;
 
   info = get_common_info (var);
   db_p = get_def_blocks_for (info);
@@ -536,8 +524,8 @@ set_def_block (tree var, basic_block bb, bool phi_p)
 static void
 set_livein_block (tree var, basic_block bb)
 {
-  common_info_p info;
-  struct def_blocks_d *db_p;
+  common_info *info;
+  def_blocks *db_p;
 
   info = get_common_info (var);
   db_p = get_def_blocks_for (info);
@@ -935,10 +923,10 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
    where VAR is live on entry (livein).  Return NULL, if no entry is
    found in DEF_BLOCKS.  */
 
-static inline struct def_blocks_d *
+static inline def_blocks *
 find_def_blocks_for (tree var)
 {
-  def_blocks_p p = &get_common_info (var)->def_blocks;
+  def_blocks *p = &get_common_info (var)->def_blocks;
   if (!p->def_blocks)
     return NULL;
   return p;
@@ -992,7 +980,7 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
   gphi *phi;
   basic_block bb;
   bitmap_iterator bi;
-  struct def_blocks_d *def_map = find_def_blocks_for (var);
+  def_blocks *def_map = find_def_blocks_for (var);
 
   /* Remove the blocks where we already have PHI nodes for VAR.  */
   bitmap_and_compl_into (phi_insertion_points, def_map->phi_blocks);
@@ -1068,8 +1056,8 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
 static int
 insert_phi_nodes_compare_var_infos (const void *a, const void *b)
 {
-  const struct var_info_d *defa = *(struct var_info_d * const *)a;
-  const struct var_info_d *defb = *(struct var_info_d * const *)b;
+  const var_info *defa = *(var_info * const *)a;
+  const var_info *defb = *(var_info * const *)b;
   if (DECL_UID (defa->var) < DECL_UID (defb->var))
     return -1;
   else
@@ -1085,11 +1073,11 @@ insert_phi_nodes (bitmap_head *dfs)
 {
   hash_table<var_info_hasher>::iterator hi;
   unsigned i;
-  var_info_p info;
+  var_info *info;
 
   timevar_push (TV_TREE_INSERT_PHI_NODES);
 
-  auto_vec<var_info_p> vars (var_infos->elements ());
+  auto_vec<var_info *> vars (var_infos->elements ());
   FOR_EACH_HASH_TABLE_ELEMENT (*var_infos, info, var_info_p, hi)
     if (info->info.need_phi_state != NEED_PHI_STATE_NO)
       vars.quick_push (info);
@@ -1115,7 +1103,7 @@ insert_phi_nodes (bitmap_head *dfs)
 static void
 register_new_def (tree def, tree sym)
 {
-  common_info_p info = get_common_info (sym);
+  common_info *info = get_common_info (sym);
   tree currdef;
 
   /* If this variable is set in a single basic block and all uses are
@@ -1183,7 +1171,7 @@ register_new_def (tree def, tree sym)
 static tree
 get_reaching_def (tree var)
 {
-  common_info_p info = get_common_info (var);
+  common_info *info = get_common_info (var);
   tree currdef;
 
   /* Lookup the current reaching definition for VAR.  */
@@ -1215,7 +1203,7 @@ rewrite_debug_stmt_uses (gimple *stmt)
   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
     {
       tree var = USE_FROM_PTR (use_p), def;
-      common_info_p info = get_common_info (var);
+      common_info *info = get_common_info (var);
       gcc_checking_assert (DECL_P (var));
       def = info->current_def;
       if (!def)
@@ -1282,7 +1270,7 @@ rewrite_debug_stmt_uses (gimple *stmt)
 	    ;
 	  else
 	    {
-	      struct def_blocks_d *db_p = get_def_blocks_for (info);
+	      def_blocks *db_p = get_def_blocks_for (info);
 
 	      /* If there are some non-debug uses in the current bb,
 		 it is fine.  */
@@ -1602,7 +1590,7 @@ dump_currdefs (FILE *file)
   fprintf (file, "\n\nCurrent reaching definitions\n\n");
   FOR_EACH_VEC_ELT (symbols_to_rename, i, var)
     {
-      common_info_p info = get_common_info (var);
+      common_info *info = get_common_info (var);
       fprintf (file, "CURRDEF (");
       print_generic_expr (file, var, 0);
       fprintf (file, ") = ");
@@ -1689,9 +1677,9 @@ debug_tree_ssa_stats (void)
 /* Callback for htab_traverse to dump the VAR_INFOS hash table.  */
 
 int
-debug_var_infos_r (var_info_d **slot, FILE *file)
+debug_var_infos_r (var_info **slot, FILE *file)
 {
-  struct var_info_d *info = *slot;
+  var_info *info = *slot;
 
   fprintf (file, "VAR: ");
   print_generic_expr (file, info->var, dump_flags);
@@ -1731,7 +1719,7 @@ debug_var_infos (void)
 static inline void
 register_new_update_single (tree new_name, tree old_name)
 {
-  common_info_p info = get_common_info (old_name);
+  common_info *info = get_common_info (old_name);
   tree currdef = info->current_def;
 
   /* Push the current reaching definition into BLOCK_DEFS_STACK.
@@ -2487,7 +2475,7 @@ mark_use_interesting (tree var, gimple *stmt, basic_block bb,
      replace it).  */
   if (insert_phi_p)
     {
-      struct def_blocks_d *db_p = get_def_blocks_for (get_common_info (var));
+      def_blocks *db_p = get_def_blocks_for (get_common_info (var));
       if (!bitmap_bit_p (db_p->def_blocks, bb->index))
 	set_livein_block (var, bb);
     }
@@ -3006,7 +2994,7 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
                               unsigned update_flags)
 {
   basic_block entry;
-  struct def_blocks_d *db;
+  def_blocks *db;
   bitmap idf, pruned_idf;
   bitmap_iterator bi;
   unsigned i;
diff --git a/gcc/tree-loop-distribution.c b/gcc/tree-loop-distribution.c
index d9380fd..6f86d53 100644
--- a/gcc/tree-loop-distribution.c
+++ b/gcc/tree-loop-distribution.c
@@ -73,7 +73,7 @@ along with GCC; see the file COPYING3.  If not see
 
 
 /* A Reduced Dependence Graph (RDG) vertex representing a statement.  */
-typedef struct rdg_vertex
+struct rdg_vertex
 {
   /* The statement represented by this vertex.  */
   gimple *stmt;
@@ -86,7 +86,7 @@ typedef struct rdg_vertex
 
   /* True when the statement contains a read from memory.  */
   bool has_mem_reads;
-} *rdg_vertex_p;
+};
 
 #define RDGV_STMT(V)     ((struct rdg_vertex *) ((V)->data))->stmt
 #define RDGV_DATAREFS(V) ((struct rdg_vertex *) ((V)->data))->datarefs
@@ -110,11 +110,11 @@ enum rdg_dep_type
 
 /* Dependence information attached to an edge of the RDG.  */
 
-typedef struct rdg_edge
+struct rdg_edge
 {
   /* Type of the dependence.  */
   enum rdg_dep_type type;
-} *rdg_edge_p;
+};
 
 #define RDGE_TYPE(E)        ((struct rdg_edge *) ((E)->data))->type
 
@@ -474,7 +474,7 @@ enum partition_kind {
     PKIND_NORMAL, PKIND_MEMSET, PKIND_MEMCPY
 };
 
-typedef struct partition_s
+struct partition
 {
   bitmap stmts;
   bitmap loops;
@@ -485,15 +485,15 @@ typedef struct partition_s
   data_reference_p secondary_dr;
   tree niter;
   bool plus_one;
-} *partition_t;
+};
 
 
 /* Allocate and initialize a partition from BITMAP.  */
 
-static partition_t
+static partition *
 partition_alloc (bitmap stmts, bitmap loops)
 {
-  partition_t partition = XCNEW (struct partition_s);
+  partition *partition = XCNEW (struct partition);
   partition->stmts = stmts ? stmts : BITMAP_ALLOC (NULL);
   partition->loops = loops ? loops : BITMAP_ALLOC (NULL);
   partition->reduction_p = false;
@@ -504,7 +504,7 @@ partition_alloc (bitmap stmts, bitmap loops)
 /* Free PARTITION.  */
 
 static void
-partition_free (partition_t partition)
+partition_free (partition *partition)
 {
   BITMAP_FREE (partition->stmts);
   BITMAP_FREE (partition->loops);
@@ -514,7 +514,7 @@ partition_free (partition_t partition)
 /* Returns true if the partition can be generated as a builtin.  */
 
 static bool
-partition_builtin_p (partition_t partition)
+partition_builtin_p (partition *partition)
 {
   return partition->kind != PKIND_NORMAL;
 }
@@ -522,7 +522,7 @@ partition_builtin_p (partition_t partition)
 /* Returns true if the partition contains a reduction.  */
 
 static bool
-partition_reduction_p (partition_t partition)
+partition_reduction_p (partition *partition)
 {
   return partition->reduction_p;
 }
@@ -530,7 +530,7 @@ partition_reduction_p (partition_t partition)
 /* Merge PARTITION into the partition DEST.  */
 
 static void
-partition_merge_into (partition_t dest, partition_t partition)
+partition_merge_into (partition *dest, partition *partition)
 {
   dest->kind = PKIND_NORMAL;
   bitmap_ior_into (dest->stmts, partition->stmts);
@@ -615,7 +615,7 @@ create_bb_after_loop (struct loop *loop)
    basic blocks of a loop are taken in dom order.  */
 
 static void
-generate_loops_for_partition (struct loop *loop, partition_t partition,
+generate_loops_for_partition (struct loop *loop, partition *partition,
 			      bool copy_p)
 {
   unsigned i;
@@ -776,7 +776,7 @@ const_with_all_bytes_same (tree val)
 /* Generate a call to memset for PARTITION in LOOP.  */
 
 static void
-generate_memset_builtin (struct loop *loop, partition_t partition)
+generate_memset_builtin (struct loop *loop, partition *partition)
 {
   gimple_stmt_iterator gsi;
   gimple *stmt, *fn_call;
@@ -832,7 +832,7 @@ generate_memset_builtin (struct loop *loop, partition_t partition)
 /* Generate a call to memcpy for PARTITION in LOOP.  */
 
 static void
-generate_memcpy_builtin (struct loop *loop, partition_t partition)
+generate_memcpy_builtin (struct loop *loop, partition *partition)
 {
   gimple_stmt_iterator gsi;
   gimple *stmt, *fn_call;
@@ -927,7 +927,7 @@ destroy_loop (struct loop *loop)
 
 static void
 generate_code_for_partition (struct loop *loop,
-			     partition_t partition, bool copy_p)
+			     partition *partition, bool copy_p)
 {
   switch (partition->kind)
     {
@@ -960,10 +960,10 @@ generate_code_for_partition (struct loop *loop,
 /* Returns a partition with all the statements needed for computing
    the vertex V of the RDG, also including the loop exit conditions.  */
 
-static partition_t
+static partition *
 build_rdg_partition_for_vertex (struct graph *rdg, int v)
 {
-  partition_t partition = partition_alloc (NULL, NULL);
+  partition *partition = partition_alloc (NULL, NULL);
   auto_vec<int, 3> nodes;
   unsigned i;
   int x;
@@ -984,7 +984,7 @@ build_rdg_partition_for_vertex (struct graph *rdg, int v)
    For the moment we detect only the memset zero pattern.  */
 
 static void
-classify_partition (loop_p loop, struct graph *rdg, partition_t partition)
+classify_partition (loop_p loop, struct graph *rdg, partition *partition)
 {
   bitmap_iterator bi;
   unsigned i;
@@ -1167,8 +1167,8 @@ ref_base_address (data_reference_p dr)
    accesses in RDG.  */
 
 static bool
-similar_memory_accesses (struct graph *rdg, partition_t partition1,
-			 partition_t partition2)
+similar_memory_accesses (struct graph *rdg, partition *partition1,
+			 partition *partition2)
 {
   unsigned i, j, k, l;
   bitmap_iterator bi, bj;
@@ -1210,7 +1210,7 @@ similar_memory_accesses (struct graph *rdg, partition_t partition1,
 static void
 rdg_build_partitions (struct graph *rdg,
 		      vec<gimple *> starting_stmts,
-		      vec<partition_t> *partitions)
+		      vec<partition *> *partitions)
 {
   bitmap processed = BITMAP_ALLOC (NULL);
   int i;
@@ -1229,7 +1229,7 @@ rdg_build_partitions (struct graph *rdg,
       if (bitmap_bit_p (processed, v))
 	continue;
 
-      partition_t partition = build_rdg_partition_for_vertex (rdg, v);
+      partition *partition = build_rdg_partition_for_vertex (rdg, v);
       bitmap_ior_into (processed, partition->stmts);
 
       if (dump_file && (dump_flags & TDF_DETAILS))
@@ -1250,20 +1250,20 @@ rdg_build_partitions (struct graph *rdg,
 /* Dump to FILE the PARTITIONS.  */
 
 static void
-dump_rdg_partitions (FILE *file, vec<partition_t> partitions)
+dump_rdg_partitions (FILE *file, vec<partition *> partitions)
 {
   int i;
-  partition_t partition;
+  partition *partition;
 
   FOR_EACH_VEC_ELT (partitions, i, partition)
     debug_bitmap_file (file, partition->stmts);
 }
 
 /* Debug PARTITIONS.  */
-extern void debug_rdg_partitions (vec<partition_t> );
+extern void debug_rdg_partitions (vec<partition *> );
 
 DEBUG_FUNCTION void
-debug_rdg_partitions (vec<partition_t> partitions)
+debug_rdg_partitions (vec<partition *> partitions)
 {
   dump_rdg_partitions (stderr, partitions);
 }
@@ -1291,7 +1291,7 @@ number_of_rw_in_rdg (struct graph *rdg)
    the RDG.  */
 
 static int
-number_of_rw_in_partition (struct graph *rdg, partition_t partition)
+number_of_rw_in_partition (struct graph *rdg, partition *partition)
 {
   int res = 0;
   unsigned i;
@@ -1314,10 +1314,10 @@ number_of_rw_in_partition (struct graph *rdg, partition_t partition)
 
 static bool
 partition_contains_all_rw (struct graph *rdg,
-			   vec<partition_t> partitions)
+			   vec<partition *> partitions)
 {
   int i;
-  partition_t partition;
+  partition *partition;
   int nrw = number_of_rw_in_rdg (rdg);
 
   FOR_EACH_VEC_ELT (partitions, i, partition)
@@ -1410,7 +1410,7 @@ distribute_loop (struct loop *loop, vec<gimple *> stmts,
 		 control_dependences *cd, int *nb_calls)
 {
   struct graph *rdg;
-  partition_t partition;
+  partition *partition;
   bool any_builtin;
   int i, nbp;
   graph *pg = NULL;
@@ -1435,7 +1435,7 @@ distribute_loop (struct loop *loop, vec<gimple *> stmts,
   if (dump_file && (dump_flags & TDF_DETAILS))
     dump_rdg (dump_file, rdg);
 
-  auto_vec<partition_t, 3> partitions;
+  auto_vec<struct partition *, 3> partitions;
   rdg_build_partitions (rdg, stmts, &partitions);
 
   any_builtin = false;
@@ -1458,7 +1458,7 @@ distribute_loop (struct loop *loop, vec<gimple *> stmts,
      were not classified as builtins.  This also avoids chopping
      a loop into pieces, separated by builtin calls.  That is, we
      only want no or a single loop body remaining.  */
-  partition_t into;
+  struct partition *into;
   if (!flag_tree_loop_distribution)
     {
       for (i = 0; partitions.iterate (i, &into); ++i)
@@ -1535,7 +1535,7 @@ distribute_loop (struct loop *loop, vec<gimple *> stmts,
     {
       pg = new_graph (partitions.length ());
       struct pgdata {
-	  partition_t partition;
+	  struct partition *partition;
 	  vec<data_reference_p> writes;
 	  vec<data_reference_p> reads;
       };
@@ -1559,7 +1559,7 @@ distribute_loop (struct loop *loop, vec<gimple *> stmts,
 	      else
 		data->writes.safe_push (dr);
 	}
-      partition_t partition1, partition2;
+      struct partition *partition1, *partition2;
       for (i = 0; partitions.iterate (i, &partition1); ++i)
 	for (int j = i + 1; partitions.iterate (j, &partition2); ++j)
 	  {
@@ -1599,7 +1599,7 @@ distribute_loop (struct loop *loop, vec<gimple *> stmts,
       num_sccs = graphds_scc (pg, NULL);
       for (i = 0; i < num_sccs; ++i)
 	{
-	  partition_t first;
+	  struct partition *first;
 	  int j;
 	  for (j = 0; partitions.iterate (j, &first); ++j)
 	    if (pg->vertices[j].component == i)
diff --git a/gcc/tree-ssa-coalesce.c b/gcc/tree-ssa-coalesce.c
index 8af6583..4e656e2 100644
--- a/gcc/tree-ssa-coalesce.c
+++ b/gcc/tree-ssa-coalesce.c
@@ -49,13 +49,12 @@ along with GCC; see the file COPYING3.  If not see
 
 /* This structure defines a pair entry.  */
 
-typedef struct coalesce_pair
+struct coalesce_pair
 {
   int first_element;
   int second_element;
   int cost;
-} * coalesce_pair_p;
-typedef const struct coalesce_pair *const_coalesce_pair_p;
+};
 
 /* Coalesce pair hashtable helpers.  */
 
@@ -90,22 +89,22 @@ typedef hash_table<coalesce_pair_hasher> coalesce_table_type;
 typedef coalesce_table_type::iterator coalesce_iterator_type;
 
 
-typedef struct cost_one_pair_d
+struct cost_one_pair
 {
   int first_element;
   int second_element;
-  struct cost_one_pair_d *next;
-} * cost_one_pair_p;
+  cost_one_pair *next;
+};
 
 /* This structure maintains the list of coalesce pairs.  */
 
-typedef struct coalesce_list_d
+struct coalesce_list
 {
   coalesce_table_type *list;	/* Hash table.  */
-  coalesce_pair_p *sorted;	/* List when sorted.  */
+  coalesce_pair **sorted;	/* List when sorted.  */
   int num_sorted;		/* Number in the sorted list.  */
-  cost_one_pair_p cost_one_list;/* Single use coalesces with cost 1.  */
-} *coalesce_list_p;
+  cost_one_pair *cost_one_list;/* Single use coalesces with cost 1.  */
+};
 
 #define NO_BEST_COALESCE	-1
 #define MUST_COALESCE_COST	INT_MAX
@@ -183,9 +182,9 @@ coalesce_cost_edge (edge e)
    NO_BEST_COALESCE is returned if there aren't any.  */
 
 static inline int
-pop_cost_one_pair (coalesce_list_p cl, int *p1, int *p2)
+pop_cost_one_pair (coalesce_list *cl, int *p1, int *p2)
 {
-  cost_one_pair_p ptr;
+  cost_one_pair *ptr;
 
   ptr = cl->cost_one_list;
   if (!ptr)
@@ -205,9 +204,9 @@ pop_cost_one_pair (coalesce_list_p cl, int *p1, int *p2)
    NO_BEST_COALESCE is returned if the coalesce list is empty.  */
 
 static inline int
-pop_best_coalesce (coalesce_list_p cl, int *p1, int *p2)
+pop_best_coalesce (coalesce_list *cl, int *p1, int *p2)
 {
-  coalesce_pair_p node;
+  coalesce_pair *node;
   int ret;
 
   if (cl->sorted == NULL)
@@ -228,16 +227,16 @@ pop_best_coalesce (coalesce_list_p cl, int *p1, int *p2)
 
 /* Create a new empty coalesce list object and return it.  */
 
-static inline coalesce_list_p
+static inline coalesce_list *
 create_coalesce_list (void)
 {
-  coalesce_list_p list;
+  coalesce_list *list;
   unsigned size = num_ssa_names * 3;
 
   if (size < 40)
     size = 40;
 
-  list = (coalesce_list_p) xmalloc (sizeof (struct coalesce_list_d));
+  list = (coalesce_list *) xmalloc (sizeof (struct coalesce_list));
   list->list = new coalesce_table_type (size);
   list->sorted = NULL;
   list->num_sorted = 0;
@@ -249,7 +248,7 @@ create_coalesce_list (void)
 /* Delete coalesce list CL.  */
 
 static inline void
-delete_coalesce_list (coalesce_list_p cl)
+delete_coalesce_list (coalesce_list *cl)
 {
   gcc_assert (cl->cost_one_list == NULL);
   delete cl->list;
@@ -264,8 +263,8 @@ delete_coalesce_list (coalesce_list_p cl)
    one isn't found, return NULL if CREATE is false, otherwise create a new
    coalesce pair object and return it.  */
 
-static coalesce_pair_p
-find_coalesce_pair (coalesce_list_p cl, int p1, int p2, bool create)
+static coalesce_pair *
+find_coalesce_pair (coalesce_list *cl, int p1, int p2, bool create)
 {
   struct coalesce_pair p;
   coalesce_pair **slot;
@@ -302,11 +301,11 @@ find_coalesce_pair (coalesce_list_p cl, int p1, int p2, bool create)
 }
 
 static inline void
-add_cost_one_coalesce (coalesce_list_p cl, int p1, int p2)
+add_cost_one_coalesce (coalesce_list *cl, int p1, int p2)
 {
-  cost_one_pair_p pair;
+  cost_one_pair *pair;
 
-  pair = XNEW (struct cost_one_pair_d);
+  pair = XNEW (cost_one_pair);
   pair->first_element = p1;
   pair->second_element = p2;
   pair->next = cl->cost_one_list;
@@ -317,9 +316,9 @@ add_cost_one_coalesce (coalesce_list_p cl, int p1, int p2)
 /* Add a coalesce between P1 and P2 in list CL with a cost of VALUE.  */
 
 static inline void
-add_coalesce (coalesce_list_p cl, int p1, int p2, int value)
+add_coalesce (coalesce_list *cl, int p1, int p2, int value)
 {
-  coalesce_pair_p node;
+  coalesce_pair *node;
 
   gcc_assert (cl->sorted == NULL);
   if (p1 == p2)
@@ -343,8 +342,8 @@ add_coalesce (coalesce_list_p cl, int p1, int p2, int value)
 static int
 compare_pairs (const void *p1, const void *p2)
 {
-  const_coalesce_pair_p const *const pp1 = (const_coalesce_pair_p const *) p1;
-  const_coalesce_pair_p const *const pp2 = (const_coalesce_pair_p const *) p2;
+  const coalesce_pair *const *const pp1 = (const coalesce_pair *const *) p1;
+  const coalesce_pair *const *const pp2 = (const coalesce_pair *const *) p2;
   int result;
 
   result = (* pp1)->cost - (* pp2)->cost;
@@ -365,7 +364,7 @@ compare_pairs (const void *p1, const void *p2)
 /* Return the number of unique coalesce pairs in CL.  */
 
 static inline int
-num_coalesce_pairs (coalesce_list_p cl)
+num_coalesce_pairs (coalesce_list *cl)
 {
   return cl->list->elements ();
 }
@@ -381,10 +380,10 @@ num_coalesce_pairs (coalesce_list_p cl)
    in order from most important coalesce to least important.  */
 
 static void
-sort_coalesce_list (coalesce_list_p cl)
+sort_coalesce_list (coalesce_list *cl)
 {
   unsigned x, num;
-  coalesce_pair_p p;
+  coalesce_pair *p;
   coalesce_iterator_type ppi;
 
   gcc_assert (cl->sorted == NULL);
@@ -395,7 +394,7 @@ sort_coalesce_list (coalesce_list_p cl)
     return;
 
   /* Allocate a vector for the pair pointers.  */
-  cl->sorted = XNEWVEC (coalesce_pair_p, num);
+  cl->sorted = XNEWVEC (coalesce_pair *, num);
 
   /* Populate the vector with pointers to the pairs.  */
   x = 0;
@@ -419,16 +418,16 @@ sort_coalesce_list (coalesce_list_p cl)
      ??? Maybe std::sort will do better, provided that compare_pairs
      can be inlined.  */
   if (num > 2)
-      qsort (cl->sorted, num, sizeof (coalesce_pair_p), compare_pairs);
+      qsort (cl->sorted, num, sizeof (coalesce_pair *), compare_pairs);
 }
 
 
 /* Send debug info for coalesce list CL to file F.  */
 
 static void
-dump_coalesce_list (FILE *f, coalesce_list_p cl)
+dump_coalesce_list (FILE *f, coalesce_list *cl)
 {
-  coalesce_pair_p node;
+  coalesce_pair *node;
   coalesce_iterator_type ppi;
 
   int x;
@@ -470,20 +469,20 @@ dump_coalesce_list (FILE *f, coalesce_list_p cl)
    A full matrix is used for conflicts rather than just upper triangular form.
    this make sit much simpler and faster to perform conflict merges.  */
 
-typedef struct ssa_conflicts_d
+struct ssa_conflicts
 {
   bitmap_obstack obstack;	/* A place to allocate our bitmaps.  */
   vec<bitmap> conflicts;
-} * ssa_conflicts_p;
+};
 
 /* Return an empty new conflict graph for SIZE elements.  */
 
-static inline ssa_conflicts_p
+static inline ssa_conflicts *
 ssa_conflicts_new (unsigned size)
 {
-  ssa_conflicts_p ptr;
+  ssa_conflicts *ptr;
 
-  ptr = XNEW (struct ssa_conflicts_d);
+  ptr = XNEW (ssa_conflicts);
   bitmap_obstack_initialize (&ptr->obstack);
   ptr->conflicts.create (size);
   ptr->conflicts.safe_grow_cleared (size);
@@ -494,7 +493,7 @@ ssa_conflicts_new (unsigned size)
 /* Free storage for conflict graph PTR.  */
 
 static inline void
-ssa_conflicts_delete (ssa_conflicts_p ptr)
+ssa_conflicts_delete (ssa_conflicts *ptr)
 {
   bitmap_obstack_release (&ptr->obstack);
   ptr->conflicts.release ();
@@ -505,7 +504,7 @@ ssa_conflicts_delete (ssa_conflicts_p ptr)
 /* Test if elements X and Y conflict in graph PTR.  */
 
 static inline bool
-ssa_conflicts_test_p (ssa_conflicts_p ptr, unsigned x, unsigned y)
+ssa_conflicts_test_p (ssa_conflicts *ptr, unsigned x, unsigned y)
 {
   bitmap bx = ptr->conflicts[x];
   bitmap by = ptr->conflicts[y];
@@ -523,7 +522,7 @@ ssa_conflicts_test_p (ssa_conflicts_p ptr, unsigned x, unsigned y)
 /* Add a conflict with Y to the bitmap for X in graph PTR.  */
 
 static inline void
-ssa_conflicts_add_one (ssa_conflicts_p ptr, unsigned x, unsigned y)
+ssa_conflicts_add_one (ssa_conflicts *ptr, unsigned x, unsigned y)
 {
   bitmap bx = ptr->conflicts[x];
   /* If there are no conflicts yet, allocate the bitmap and set bit.  */
@@ -536,7 +535,7 @@ ssa_conflicts_add_one (ssa_conflicts_p ptr, unsigned x, unsigned y)
 /* Add conflicts between X and Y in graph PTR.  */
 
 static inline void
-ssa_conflicts_add (ssa_conflicts_p ptr, unsigned x, unsigned y)
+ssa_conflicts_add (ssa_conflicts *ptr, unsigned x, unsigned y)
 {
   gcc_checking_assert (x != y);
   ssa_conflicts_add_one (ptr, x, y);
@@ -547,7 +546,7 @@ ssa_conflicts_add (ssa_conflicts_p ptr, unsigned x, unsigned y)
 /* Merge all Y's conflict into X in graph PTR.  */
 
 static inline void
-ssa_conflicts_merge (ssa_conflicts_p ptr, unsigned x, unsigned y)
+ssa_conflicts_merge (ssa_conflicts *ptr, unsigned x, unsigned y)
 {
   unsigned z;
   bitmap_iterator bi;
@@ -587,7 +586,7 @@ ssa_conflicts_merge (ssa_conflicts_p ptr, unsigned x, unsigned y)
 /* Dump a conflicts graph.  */
 
 static void
-ssa_conflicts_dump (FILE *file, ssa_conflicts_p ptr)
+ssa_conflicts_dump (FILE *file, ssa_conflicts *ptr)
 {
   unsigned x;
   bitmap b;
@@ -615,28 +614,28 @@ ssa_conflicts_dump (FILE *file, ssa_conflicts_p ptr)
    marked as being live.  This delays clearing of these bitmaps until
    they are actually needed again.  */
 
-typedef struct live_track_d
+struct live_track
 {
   bitmap_obstack obstack;	/* A place to allocate our bitmaps.  */
   bitmap live_base_var;		/* Indicates if a basevar is live.  */
   bitmap *live_base_partitions;	/* Live partitions for each basevar.  */
   var_map map;			/* Var_map being used for partition mapping.  */
-} * live_track_p;
+};
 
 
 /* This routine will create a new live track structure based on the partitions
    in MAP.  */
 
-static live_track_p
+static live_track *
 new_live_track (var_map map)
 {
-  live_track_p ptr;
+  live_track *ptr;
   int lim, x;
 
   /* Make sure there is a partition view in place.  */
   gcc_assert (map->partition_to_base_index != NULL);
 
-  ptr = (live_track_p) xmalloc (sizeof (struct live_track_d));
+  ptr = (live_track *) xmalloc (sizeof (live_track));
   ptr->map = map;
   lim = num_basevars (map);
   bitmap_obstack_initialize (&ptr->obstack);
@@ -651,7 +650,7 @@ new_live_track (var_map map)
 /* This routine will free the memory associated with PTR.  */
 
 static void
-delete_live_track (live_track_p ptr)
+delete_live_track (live_track *ptr)
 {
   bitmap_obstack_release (&ptr->obstack);
   free (ptr->live_base_partitions);
@@ -662,7 +661,7 @@ delete_live_track (live_track_p ptr)
 /* This function will remove PARTITION from the live list in PTR.  */
 
 static inline void
-live_track_remove_partition (live_track_p ptr, int partition)
+live_track_remove_partition (live_track *ptr, int partition)
 {
   int root;
 
@@ -677,7 +676,7 @@ live_track_remove_partition (live_track_p ptr, int partition)
 /* This function will adds PARTITION to the live list in PTR.  */
 
 static inline void
-live_track_add_partition (live_track_p ptr, int partition)
+live_track_add_partition (live_track *ptr, int partition)
 {
   int root;
 
@@ -694,7 +693,7 @@ live_track_add_partition (live_track_p ptr, int partition)
 /* Clear the live bit for VAR in PTR.  */
 
 static inline void
-live_track_clear_var (live_track_p ptr, tree var)
+live_track_clear_var (live_track *ptr, tree var)
 {
   int p;
 
@@ -707,7 +706,7 @@ live_track_clear_var (live_track_p ptr, tree var)
 /* Return TRUE if VAR is live in PTR.  */
 
 static inline bool
-live_track_live_p (live_track_p ptr, tree var)
+live_track_live_p (live_track *ptr, tree var)
 {
   int p, root;
 
@@ -726,7 +725,7 @@ live_track_live_p (live_track_p ptr, tree var)
    ssa live map and the live bitmap for the root of USE.  */
 
 static inline void
-live_track_process_use (live_track_p ptr, tree use)
+live_track_process_use (live_track *ptr, tree use)
 {
   int p;
 
@@ -744,7 +743,7 @@ live_track_process_use (live_track_p ptr, tree use)
    variable, conflicts will be added to GRAPH.  */
 
 static inline void
-live_track_process_def (live_track_p ptr, tree def, ssa_conflicts_p graph)
+live_track_process_def (live_track *ptr, tree def, ssa_conflicts *graph)
 {
   int p, root;
   bitmap b;
@@ -772,7 +771,7 @@ live_track_process_def (live_track_p ptr, tree def, ssa_conflicts_p graph)
 /* Initialize PTR with the partitions set in INIT.  */
 
 static inline void
-live_track_init (live_track_p ptr, bitmap init)
+live_track_init (live_track *ptr, bitmap init)
 {
   unsigned p;
   bitmap_iterator bi;
@@ -786,7 +785,7 @@ live_track_init (live_track_p ptr, bitmap init)
 /* This routine will clear all live partitions in PTR.   */
 
 static inline void
-live_track_clear_base_vars (live_track_p ptr)
+live_track_clear_base_vars (live_track *ptr)
 {
   /* Simply clear the live base list.  Anything marked as live in the element
      lists will be cleared later if/when the base variable ever comes alive
@@ -800,14 +799,14 @@ live_track_clear_base_vars (live_track_p ptr)
    conflict graph.  Only conflicts between ssa_name partitions with the same
    base variable are added.  */
 
-static ssa_conflicts_p
+static ssa_conflicts *
 build_ssa_conflict_graph (tree_live_info_p liveinfo)
 {
-  ssa_conflicts_p graph;
+  ssa_conflicts *graph;
   var_map map;
   basic_block bb;
   ssa_op_iter iter;
-  live_track_p live;
+  live_track *live;
   basic_block entry;
 
   /* If inter-variable coalescing is enabled, we may attempt to
@@ -942,7 +941,7 @@ fail_abnormal_edge_coalesce (int x, int y)
    a coalesce list for use later in the out of ssa process.  */
 
 static var_map
-create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
+create_outofssa_var_map (coalesce_list *cl, bitmap used_in_copy)
 {
   gimple_stmt_iterator gsi;
   basic_block bb;
@@ -1135,7 +1134,7 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
    DEBUG, if it is nun-NULL.  */
 
 static inline bool
-attempt_coalesce (var_map map, ssa_conflicts_p graph, int x, int y,
+attempt_coalesce (var_map map, ssa_conflicts *graph, int x, int y,
 		  FILE *debug)
 {
   int z;
@@ -1201,7 +1200,7 @@ attempt_coalesce (var_map map, ssa_conflicts_p graph, int x, int y,
    GRAPH.  Debug output is sent to DEBUG if it is non-NULL.  */
 
 static void
-coalesce_partitions (var_map map, ssa_conflicts_p graph, coalesce_list_p cl,
+coalesce_partitions (var_map map, ssa_conflicts *graph, coalesce_list *cl,
 		     FILE *debug)
 {
   int x = 0, y = 0;
@@ -1413,7 +1412,7 @@ gimple_can_coalesce_p (tree name1, tree name2)
 
 static void
 compute_optimized_partition_bases (var_map map, bitmap used_in_copies,
-				   coalesce_list_p cl)
+				   coalesce_list *cl)
 {
   int parts = num_var_partitions (map);
   partition tentative = partition_new (parts);
@@ -1422,7 +1421,7 @@ compute_optimized_partition_bases (var_map map, bitmap used_in_copies,
      pair, both of its members are in the same partition in
      TENTATIVE.  */
   gcc_assert (!cl->sorted);
-  coalesce_pair_p node;
+  coalesce_pair *node;
   coalesce_iterator_type ppi;
   FOR_EACH_PARTITION_PAIR (node, ppi, cl)
     {
@@ -1438,7 +1437,7 @@ compute_optimized_partition_bases (var_map map, bitmap used_in_copies,
     }
 
   /* We have to deal with cost one pairs too.  */
-  for (cost_one_pair_d *co = cl->cost_one_list; co; co = co->next)
+  for (cost_one_pair *co = cl->cost_one_list; co; co = co->next)
     {
       tree v1 = ssa_name (co->first_element);
       int p1 = partition_find (tentative, var_to_partition (map, v1));
@@ -1616,8 +1615,8 @@ extern var_map
 coalesce_ssa_name (void)
 {
   tree_live_info_p liveinfo;
-  ssa_conflicts_p graph;
-  coalesce_list_p cl;
+  ssa_conflicts *graph;
+  coalesce_list *cl;
   bitmap used_in_copies = BITMAP_ALLOC (NULL);
   var_map map;
   unsigned int i;
diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index 1ddd8bd..945d34b 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -267,10 +267,6 @@ struct iv_inv_expr_ent
 
 /* The data used by the induction variable optimizations.  */
 
-typedef struct iv_use *iv_use_p;
-
-typedef struct iv_cand *iv_cand_p;
-
 /* Hashtable helpers.  */
 
 struct iv_inv_expr_hasher : free_ptr_hash <iv_inv_expr_ent>
@@ -326,10 +322,10 @@ struct ivopts_data
   bitmap relevant;
 
   /* The uses of induction variables.  */
-  vec<iv_use_p> iv_uses;
+  vec<iv_use *> iv_uses;
 
   /* The candidates.  */
-  vec<iv_cand_p> iv_candidates;
+  vec<iv_cand *> iv_candidates;
 
   /* A bitmap of important candidates.  */
   bitmap important_candidates;
@@ -3747,12 +3743,12 @@ enum ainc_type
   AINC_NONE		/* Also the number of auto increment types.  */
 };
 
-typedef struct address_cost_data_s
+struct address_cost_data
 {
   HOST_WIDE_INT min_offset, max_offset;
   unsigned costs[2][2][2][2];
   unsigned ainc_costs[AINC_NONE];
-} *address_cost_data;
+};
 
 
 static comp_cost
@@ -3763,9 +3759,9 @@ get_address_cost (bool symbol_present, bool var_present,
 		  bool stmt_after_inc, bool *may_autoinc)
 {
   machine_mode address_mode = targetm.addr_space.address_mode (as);
-  static vec<address_cost_data> address_cost_data_list;
+  static vec<address_cost_data *> address_cost_data_list;
   unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mem_mode;
-  address_cost_data data;
+  address_cost_data *data;
   static bool has_preinc[MAX_MACHINE_MODE], has_postinc[MAX_MACHINE_MODE];
   static bool has_predec[MAX_MACHINE_MODE], has_postdec[MAX_MACHINE_MODE];
   unsigned cost, acost, complexity;
@@ -3789,7 +3785,7 @@ get_address_cost (bool symbol_present, bool var_present,
       rtx addr, base;
       rtx reg0, reg1;
 
-      data = (address_cost_data) xcalloc (1, sizeof (*data));
+      data = (address_cost_data *) xcalloc (1, sizeof (*data));
 
       reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
 
diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index 34f3d64..5efee21 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -70,7 +70,7 @@ along with GCC; see the file COPYING3.  If not see
     2. Left linearization of the expression trees, so that (A+B)+(C+D)
     becomes (((A+B)+C)+D), which is easier for us to rewrite later.
     During linearization, we place the operands of the binary
-    expressions into a vector of operand_entry_t
+    expressions into a vector of operand_entry_*
 
     3. Optimization of the operand lists, eliminating things like a +
     -a, a & a, etc.
@@ -192,13 +192,13 @@ static struct
 } reassociate_stats;
 
 /* Operator, rank pair.  */
-typedef struct operand_entry
+struct operand_entry
 {
   unsigned int rank;
   int id;
   tree op;
   unsigned int count;
-} *operand_entry_t;
+};
 
 static object_allocator<operand_entry> operand_entry_pool
   ("operand entry pool");
@@ -493,8 +493,8 @@ constant_type (tree t)
 static int
 sort_by_operand_rank (const void *pa, const void *pb)
 {
-  const operand_entry_t oea = *(const operand_entry_t *)pa;
-  const operand_entry_t oeb = *(const operand_entry_t *)pb;
+  const operand_entry *oea = *(const operand_entry *const *)pa;
+  const operand_entry *oeb = *(const operand_entry *const *)pb;
 
   /* It's nicer for optimize_expression if constants that are likely
      to fold when added/multiplied//whatever are put next to each
@@ -556,9 +556,9 @@ sort_by_operand_rank (const void *pa, const void *pb)
 /* Add an operand entry to *OPS for the tree operand OP.  */
 
 static void
-add_to_ops_vec (vec<operand_entry_t> *ops, tree op)
+add_to_ops_vec (vec<operand_entry *> *ops, tree op)
 {
-  operand_entry_t oe = operand_entry_pool.allocate ();
+  operand_entry *oe = operand_entry_pool.allocate ();
 
   oe->op = op;
   oe->rank = get_rank (op);
@@ -571,10 +571,10 @@ add_to_ops_vec (vec<operand_entry_t> *ops, tree op)
    count REPEAT.  */
 
 static void
-add_repeat_to_ops_vec (vec<operand_entry_t> *ops, tree op,
+add_repeat_to_ops_vec (vec<operand_entry *> *ops, tree op,
 		       HOST_WIDE_INT repeat)
 {
-  operand_entry_t oe = operand_entry_pool.allocate ();
+  operand_entry *oe = operand_entry_pool.allocate ();
 
   oe->op = op;
   oe->rank = get_rank (op);
@@ -630,11 +630,11 @@ get_unary_op (tree name, enum tree_code opcode)
 
 static bool
 eliminate_duplicate_pair (enum tree_code opcode,
-			  vec<operand_entry_t> *ops,
+			  vec<operand_entry *> *ops,
 			  bool *all_done,
 			  unsigned int i,
-			  operand_entry_t curr,
-			  operand_entry_t last)
+			  operand_entry *curr,
+			  operand_entry *last)
 {
 
   /* If we have two of the same op, and the opcode is & |, min, or max,
@@ -708,14 +708,14 @@ static vec<tree> plus_negates;
 
 static bool
 eliminate_plus_minus_pair (enum tree_code opcode,
-			   vec<operand_entry_t> *ops,
+			   vec<operand_entry *> *ops,
 			   unsigned int currindex,
-			   operand_entry_t curr)
+			   operand_entry *curr)
 {
   tree negateop;
   tree notop;
   unsigned int i;
-  operand_entry_t oe;
+  operand_entry *oe;
 
   if (opcode != PLUS_EXPR || TREE_CODE (curr->op) != SSA_NAME)
     return false;
@@ -791,13 +791,13 @@ eliminate_plus_minus_pair (enum tree_code opcode,
 
 static bool
 eliminate_not_pairs (enum tree_code opcode,
-		     vec<operand_entry_t> *ops,
+		     vec<operand_entry *> *ops,
 		     unsigned int currindex,
-		     operand_entry_t curr)
+		     operand_entry *curr)
 {
   tree notop;
   unsigned int i;
-  operand_entry_t oe;
+  operand_entry *oe;
 
   if ((opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
       || TREE_CODE (curr->op) != SSA_NAME)
@@ -857,9 +857,9 @@ eliminate_not_pairs (enum tree_code opcode,
 
 static void
 eliminate_using_constants (enum tree_code opcode,
-			   vec<operand_entry_t> *ops)
+			   vec<operand_entry *> *ops)
 {
-  operand_entry_t oelast = ops->last ();
+  operand_entry *oelast = ops->last ();
   tree type = TREE_TYPE (oelast->op);
 
   if (oelast->rank == 0
@@ -978,7 +978,7 @@ eliminate_using_constants (enum tree_code opcode,
 }
 
 
-static void linearize_expr_tree (vec<operand_entry_t> *, gimple *,
+static void linearize_expr_tree (vec<operand_entry *> *, gimple *,
 				 bool, bool);
 
 /* Structure for tracking and counting operands.  */
@@ -1365,15 +1365,15 @@ build_and_add_sum (tree type, tree op1, tree op2, enum tree_code opcode)
 
 static bool
 undistribute_ops_list (enum tree_code opcode,
-		       vec<operand_entry_t> *ops, struct loop *loop)
+		       vec<operand_entry *> *ops, struct loop *loop)
 {
   unsigned int length = ops->length ();
-  operand_entry_t oe1;
+  operand_entry *oe1;
   unsigned i, j;
   sbitmap candidates, candidates2;
   unsigned nr_candidates, nr_candidates2;
   sbitmap_iterator sbi0;
-  vec<operand_entry_t> *subops;
+  vec<operand_entry *> *subops;
   bool changed = false;
   int next_oecount_id = 0;
 
@@ -1426,7 +1426,7 @@ undistribute_ops_list (enum tree_code opcode,
 
   /* ??? Macro arguments cannot have multi-argument template types in
      them.  This typedef is needed to work around that limitation.  */
-  typedef vec<operand_entry_t> vec_operand_entry_t_heap;
+  typedef vec<operand_entry *> vec_operand_entry_t_heap;
   subops = XCNEWVEC (vec_operand_entry_t_heap, ops->length ());
   EXECUTE_IF_SET_IN_BITMAP (candidates, 0, i, sbi0)
     {
@@ -1522,7 +1522,7 @@ undistribute_ops_list (enum tree_code opcode,
 
       if (nr_candidates2 >= 2)
 	{
-	  operand_entry_t oe1, oe2;
+	  operand_entry *oe1, *oe2;
 	  gimple *prod;
 	  int first = bitmap_first_set_bit (candidates2);
 
@@ -1590,15 +1590,15 @@ undistribute_ops_list (enum tree_code opcode,
 
 static bool
 eliminate_redundant_comparison (enum tree_code opcode,
-				vec<operand_entry_t> *ops,
+				vec<operand_entry *> *ops,
 				unsigned int currindex,
-				operand_entry_t curr)
+				operand_entry *curr)
 {
   tree op1, op2;
   enum tree_code lcode, rcode;
   gimple *def1, *def2;
   int i;
-  operand_entry_t oe;
+  operand_entry *oe;
 
   if (opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
     return false;
@@ -1715,12 +1715,12 @@ eliminate_redundant_comparison (enum tree_code opcode,
 
 static void
 optimize_ops_list (enum tree_code opcode,
-		   vec<operand_entry_t> *ops)
+		   vec<operand_entry *> *ops)
 {
   unsigned int length = ops->length ();
   unsigned int i;
-  operand_entry_t oe;
-  operand_entry_t oelast = NULL;
+  operand_entry *oe;
+  operand_entry *oelast = NULL;
   bool iterate = false;
 
   if (length == 1)
@@ -1732,7 +1732,7 @@ optimize_ops_list (enum tree_code opcode,
      and try the next two.  */
   if (oelast->rank == 0 && is_gimple_min_invariant (oelast->op))
     {
-      operand_entry_t oelm1 = (*ops)[length - 2];
+      operand_entry *oelm1 = (*ops)[length - 2];
 
       if (oelm1->rank == 0
 	  && is_gimple_min_invariant (oelm1->op)
@@ -2052,10 +2052,10 @@ static bool
 update_range_test (struct range_entry *range, struct range_entry *otherrange,
 		   struct range_entry **otherrangep,
 		   unsigned int count, enum tree_code opcode,
-		   vec<operand_entry_t> *ops, tree exp, gimple_seq seq,
+		   vec<operand_entry *> *ops, tree exp, gimple_seq seq,
 		   bool in_p, tree low, tree high, bool strict_overflow_p)
 {
-  operand_entry_t oe = (*ops)[range->idx];
+  operand_entry *oe = (*ops)[range->idx];
   tree op = oe->op;
   gimple *stmt = op ? SSA_NAME_DEF_STMT (op) :
     last_stmt (BASIC_BLOCK_FOR_FN (cfun, oe->id));
@@ -2199,7 +2199,7 @@ update_range_test (struct range_entry *range, struct range_entry *otherrange,
 static bool
 optimize_range_tests_xor (enum tree_code opcode, tree type,
 			  tree lowi, tree lowj, tree highi, tree highj,
-			  vec<operand_entry_t> *ops,
+			  vec<operand_entry *> *ops,
 			  struct range_entry *rangei,
 			  struct range_entry *rangej)
 {
@@ -2240,7 +2240,7 @@ optimize_range_tests_xor (enum tree_code opcode, tree type,
 static bool
 optimize_range_tests_diff (enum tree_code opcode, tree type,
 			    tree lowi, tree lowj, tree highi, tree highj,
-			    vec<operand_entry_t> *ops,
+			    vec<operand_entry *> *ops,
 			    struct range_entry *rangei,
 			    struct range_entry *rangej)
 {
@@ -2283,7 +2283,7 @@ optimize_range_tests_diff (enum tree_code opcode, tree type,
 
 static bool
 optimize_range_tests_1 (enum tree_code opcode, int first, int length,
-			bool optimize_xor, vec<operand_entry_t> *ops,
+			bool optimize_xor, vec<operand_entry *> *ops,
 			struct range_entry *ranges)
 {
   int i, j;
@@ -2420,7 +2420,7 @@ extract_bit_test_mask (tree exp, int prec, tree totallow, tree low, tree high,
 
 static bool
 optimize_range_tests_to_bit_test (enum tree_code opcode, int first, int length,
-				  vec<operand_entry_t> *ops,
+				  vec<operand_entry *> *ops,
 				  struct range_entry *ranges)
 {
   int i, j;
@@ -2497,7 +2497,7 @@ optimize_range_tests_to_bit_test (enum tree_code opcode, int first, int length,
 	  tree high = wide_int_to_tree (TREE_TYPE (lowi),
 					wi::to_widest (lowi)
 					+ prec - 1 - wi::clz (mask));
-	  operand_entry_t oe = (*ops)[ranges[i].idx];
+	  operand_entry *oe = (*ops)[ranges[i].idx];
 	  tree op = oe->op;
 	  gimple *stmt = op ? SSA_NAME_DEF_STMT (op)
 			   : last_stmt (BASIC_BLOCK_FOR_FN (cfun, oe->id));
@@ -2594,10 +2594,10 @@ optimize_range_tests_to_bit_test (enum tree_code opcode, int first, int length,
 
 static bool
 optimize_range_tests (enum tree_code opcode,
-		      vec<operand_entry_t> *ops)
+		      vec<operand_entry *> *ops)
 {
   unsigned int length = ops->length (), i, j, first;
-  operand_entry_t oe;
+  operand_entry *oe;
   struct range_entry *ranges;
   bool any_changes = false;
 
@@ -2904,7 +2904,7 @@ no_side_effect_bb (basic_block bb)
    return true and fill in *OPS recursively.  */
 
 static bool
-get_ops (tree var, enum tree_code code, vec<operand_entry_t> *ops,
+get_ops (tree var, enum tree_code code, vec<operand_entry *> *ops,
 	 struct loop *loop)
 {
   gimple *stmt = SSA_NAME_DEF_STMT (var);
@@ -2922,7 +2922,7 @@ get_ops (tree var, enum tree_code code, vec<operand_entry_t> *ops,
 	&& !get_ops (rhs[i], code, ops, loop)
 	&& has_single_use (rhs[i]))
       {
-	operand_entry_t oe = operand_entry_pool.allocate ();
+	operand_entry *oe = operand_entry_pool.allocate ();
 
 	oe->op = rhs[i];
 	oe->rank = code;
@@ -2938,7 +2938,7 @@ get_ops (tree var, enum tree_code code, vec<operand_entry_t> *ops,
    stmts.  */
 
 static tree
-update_ops (tree var, enum tree_code code, vec<operand_entry_t> ops,
+update_ops (tree var, enum tree_code code, vec<operand_entry *> ops,
 	    unsigned int *pidx, struct loop *loop)
 {
   gimple *stmt = SSA_NAME_DEF_STMT (var);
@@ -2998,7 +2998,7 @@ maybe_optimize_range_tests (gimple *stmt)
   basic_block bb;
   edge_iterator ei;
   edge e;
-  auto_vec<operand_entry_t> ops;
+  auto_vec<operand_entry *> ops;
   auto_vec<inter_bb_range_test_entry> bbinfo;
   bool any_changes = false;
 
@@ -3155,7 +3155,7 @@ maybe_optimize_range_tests (gimple *stmt)
 	      && has_single_use (rhs))
 	    {
 	      /* Otherwise, push the _234 range test itself.  */
-	      operand_entry_t oe = operand_entry_pool.allocate ();
+	      operand_entry *oe = operand_entry_pool.allocate ();
 
 	      oe->op = rhs;
 	      oe->rank = code;
@@ -3187,7 +3187,7 @@ maybe_optimize_range_tests (gimple *stmt)
 			   loop_containing_stmt (stmt))))
 	{
 	  /* Or push the GIMPLE_COND stmt itself.  */
-	  operand_entry_t oe = operand_entry_pool.allocate ();
+	  operand_entry *oe = operand_entry_pool.allocate ();
 
 	  oe->op = NULL;
 	  oe->rank = (e->flags & EDGE_TRUE_VALUE)
@@ -3395,10 +3395,10 @@ remove_visited_stmt_chain (tree var)
    cases, but it is unlikely to be worth it.  */
 
 static void
-swap_ops_for_binary_stmt (vec<operand_entry_t> ops,
+swap_ops_for_binary_stmt (vec<operand_entry *> ops,
 			  unsigned int opindex, gimple *stmt)
 {
-  operand_entry_t oe1, oe2, oe3;
+  operand_entry *oe1, *oe2, *oe3;
 
   oe1 = ops[opindex];
   oe2 = ops[opindex + 1];
@@ -3410,7 +3410,7 @@ swap_ops_for_binary_stmt (vec<operand_entry_t> ops,
 	  && !is_phi_for_stmt (stmt, oe1->op)
 	  && !is_phi_for_stmt (stmt, oe2->op)))
     {
-      struct operand_entry temp = *oe3;
+      operand_entry temp = *oe3;
       oe3->op = oe1->op;
       oe3->rank = oe1->rank;
       oe1->op = temp.op;
@@ -3422,7 +3422,7 @@ swap_ops_for_binary_stmt (vec<operand_entry_t> ops,
 	       && !is_phi_for_stmt (stmt, oe1->op)
 	       && !is_phi_for_stmt (stmt, oe3->op)))
     {
-      struct operand_entry temp = *oe2;
+      operand_entry temp = *oe2;
       oe2->op = oe1->op;
       oe2->rank = oe1->rank;
       oe1->op = temp.op;
@@ -3451,12 +3451,12 @@ find_insert_point (gimple *stmt, tree rhs1, tree rhs2)
 
 static tree
 rewrite_expr_tree (gimple *stmt, unsigned int opindex,
-		   vec<operand_entry_t> ops, bool changed)
+		   vec<operand_entry *> ops, bool changed)
 {
   tree rhs1 = gimple_assign_rhs1 (stmt);
   tree rhs2 = gimple_assign_rhs2 (stmt);
   tree lhs = gimple_assign_lhs (stmt);
-  operand_entry_t oe;
+  operand_entry *oe;
 
   /* The final recursion case for this function is that you have
      exactly two operations left.
@@ -3465,7 +3465,7 @@ rewrite_expr_tree (gimple *stmt, unsigned int opindex,
      rewrites them one at a time.  */
   if (opindex + 2 == ops.length ())
     {
-      operand_entry_t oe1, oe2;
+      operand_entry *oe1, *oe2;
 
       oe1 = ops[opindex];
       oe2 = ops[opindex + 1];
@@ -3661,7 +3661,7 @@ get_reassociation_width (int ops_num, enum tree_code opc,
 
 static void
 rewrite_expr_tree_parallel (gassign *stmt, int width,
-			    vec<operand_entry_t> ops)
+			    vec<operand_entry *> ops)
 {
   enum tree_code opcode = gimple_assign_rhs_code (stmt);
   int op_num = ops.length ();
@@ -4010,7 +4010,7 @@ acceptable_pow_call (gimple *stmt, tree *base, HOST_WIDE_INT *exponent)
    Place the operands of the expression tree in the vector named OPS.  */
 
 static void
-linearize_expr_tree (vec<operand_entry_t> *ops, gimple *stmt,
+linearize_expr_tree (vec<operand_entry *> *ops, gimple *stmt,
 		     bool is_associative, bool set_visited)
 {
   tree binlhs = gimple_assign_rhs1 (stmt);
@@ -4287,7 +4287,7 @@ break_up_subtract_bb (basic_block bb)
 }
 
 /* Used for repeated factor analysis.  */
-struct repeat_factor_d
+struct repeat_factor
 {
   /* An SSA name that occurs in a multiply chain.  */
   tree factor;
@@ -4303,9 +4303,6 @@ struct repeat_factor_d
   tree repr;
 };
 
-typedef struct repeat_factor_d repeat_factor, *repeat_factor_t;
-typedef const struct repeat_factor_d *const_repeat_factor_t;
-
 
 static vec<repeat_factor> repeat_factor_vec;
 
@@ -4315,8 +4312,8 @@ static vec<repeat_factor> repeat_factor_vec;
 static int
 compare_repeat_factors (const void *x1, const void *x2)
 {
-  const_repeat_factor_t rf1 = (const_repeat_factor_t) x1;
-  const_repeat_factor_t rf2 = (const_repeat_factor_t) x2;
+  const repeat_factor *rf1 = (const repeat_factor *) x1;
+  const repeat_factor *rf2 = (const repeat_factor *) x2;
 
   if (rf1->count != rf2->count)
     return rf1->count - rf2->count;
@@ -4330,12 +4327,12 @@ compare_repeat_factors (const void *x1, const void *x2)
    SSA name representing the value of the replacement sequence.  */
 
 static tree
-attempt_builtin_powi (gimple *stmt, vec<operand_entry_t> *ops)
+attempt_builtin_powi (gimple *stmt, vec<operand_entry *> *ops)
 {
   unsigned i, j, vec_len;
   int ii;
-  operand_entry_t oe;
-  repeat_factor_t rf1, rf2;
+  operand_entry *oe;
+  repeat_factor *rf1, *rf2;
   repeat_factor rfnew;
   tree result = NULL_TREE;
   tree target_ssa, iter_result;
@@ -4441,7 +4438,7 @@ attempt_builtin_powi (gimple *stmt, vec<operand_entry_t> *ops)
 	      if (dump_file && (dump_flags & TDF_DETAILS))
 		{
 		  unsigned elt;
-		  repeat_factor_t rf;
+		  repeat_factor *rf;
 		  fputs ("Multiplying by cached product ", dump_file);
 		  for (elt = j; elt < vec_len; elt++)
 		    {
@@ -4466,7 +4463,7 @@ attempt_builtin_powi (gimple *stmt, vec<operand_entry_t> *ops)
 	      if (dump_file && (dump_flags & TDF_DETAILS))
 		{
 		  unsigned elt;
-		  repeat_factor_t rf;
+		  repeat_factor *rf;
 		  fputs ("Building __builtin_pow call for cached product (",
 			 dump_file);
 		  for (elt = j; elt < vec_len; elt++)
@@ -4501,7 +4498,7 @@ attempt_builtin_powi (gimple *stmt, vec<operand_entry_t> *ops)
 	  if (dump_file && (dump_flags & TDF_DETAILS))
 	    {
 	      unsigned elt;
-	      repeat_factor_t rf;
+	      repeat_factor *rf;
 	      fputs ("Building __builtin_pow call for (", dump_file);
 	      for (elt = j; elt < vec_len; elt++)
 		{
@@ -4745,7 +4742,7 @@ reassociate_bb (basic_block bb)
 
 	  if (associative_tree_code (rhs_code))
 	    {
-	      auto_vec<operand_entry_t> ops;
+	      auto_vec<operand_entry *> ops;
 	      tree powi_result = NULL_TREE;
 
 	      /* There may be no immediate uses left by the time we
@@ -4918,15 +4915,15 @@ branch_fixup (void)
   reassoc_branch_fixups.release ();
 }
 
-void dump_ops_vector (FILE *file, vec<operand_entry_t> ops);
-void debug_ops_vector (vec<operand_entry_t> ops);
+void dump_ops_vector (FILE *file, vec<operand_entry *> ops);
+void debug_ops_vector (vec<operand_entry *> ops);
 
 /* Dump the operand entry vector OPS to FILE.  */
 
 void
-dump_ops_vector (FILE *file, vec<operand_entry_t> ops)
+dump_ops_vector (FILE *file, vec<operand_entry *> ops)
 {
-  operand_entry_t oe;
+  operand_entry *oe;
   unsigned int i;
 
   FOR_EACH_VEC_ELT (ops, i, oe)
@@ -4939,7 +4936,7 @@ dump_ops_vector (FILE *file, vec<operand_entry_t> ops)
 /* Dump the operand entry vector OPS to STDERR.  */
 
 DEBUG_FUNCTION void
-debug_ops_vector (vec<operand_entry_t> ops)
+debug_ops_vector (vec<operand_entry *> ops)
 {
   dump_ops_vector (stderr, ops);
 }
diff --git a/gcc/tree-ssa-strlen.c b/gcc/tree-ssa-strlen.c
index 874d8c3..9430fac 100644
--- a/gcc/tree-ssa-strlen.c
+++ b/gcc/tree-ssa-strlen.c
@@ -66,7 +66,7 @@ static vec<int> ssa_ver_to_stridx;
 static int max_stridx;
 
 /* String information record.  */
-typedef struct strinfo_struct
+struct strinfo
 {
   /* String length of this string.  */
   tree length;
@@ -110,10 +110,10 @@ typedef struct strinfo_struct
   /* A flag for the next maybe_invalidate that this strinfo shouldn't
      be invalidated.  Always cleared by maybe_invalidate.  */
   bool dont_invalidate;
-} *strinfo;
+};
 
 /* Pool for allocating strinfo_struct entries.  */
-static object_allocator<strinfo_struct> strinfo_pool ("strinfo_struct pool");
+static object_allocator<strinfo> strinfo_pool ("strinfo pool");
 
 /* Vector mapping positive string indexes to strinfo, for the
    current basic block.  The first pointer in the vector is special,
@@ -121,7 +121,7 @@ static object_allocator<strinfo_struct> strinfo_pool ("strinfo_struct pool");
    a basic block pointer to the owner basic_block if shared.
    If some other bb wants to modify the vector, the vector needs
    to be unshared first, and only the owner bb is supposed to free it.  */
-static vec<strinfo, va_heap, vl_embed> *stridx_to_strinfo;
+static vec<strinfo *, va_heap, vl_embed> *stridx_to_strinfo;
 
 /* One OFFSET->IDX mapping.  */
 struct stridxlist
@@ -155,11 +155,11 @@ struct laststmt_struct
   int stridx;
 } laststmt;
 
-static int get_stridx_plus_constant (strinfo, HOST_WIDE_INT, tree);
+static int get_stridx_plus_constant (strinfo *, HOST_WIDE_INT, tree);
 
 /* Return strinfo vector entry IDX.  */
 
-static inline strinfo
+static inline strinfo *
 get_strinfo (int idx)
 {
   if (vec_safe_length (stridx_to_strinfo) <= (unsigned int) idx)
@@ -230,7 +230,7 @@ get_stridx (tree exp)
 	    return 0;
 	  if (ssa_ver_to_stridx[SSA_NAME_VERSION (rhs1)])
 	    {
-	      strinfo si
+	      strinfo *si
 		= get_strinfo (ssa_ver_to_stridx[SSA_NAME_VERSION (rhs1)]);
 	      if (si
 		  && si->length
@@ -279,7 +279,7 @@ strinfo_shared (void)
 static void
 unshare_strinfo_vec (void)
 {
-  strinfo si;
+  strinfo *si;
   unsigned int i = 0;
 
   gcc_assert (strinfo_shared ());
@@ -383,10 +383,10 @@ new_addr_stridx (tree exp)
 
 /* Create a new strinfo.  */
 
-static strinfo
+static strinfo *
 new_strinfo (tree ptr, int idx, tree length)
 {
-  strinfo si = strinfo_pool.allocate ();
+  strinfo *si = strinfo_pool.allocate ();
   si->length = length;
   si->ptr = ptr;
   si->stmt = NULL;
@@ -404,7 +404,7 @@ new_strinfo (tree ptr, int idx, tree length)
 /* Decrease strinfo refcount and free it if not referenced anymore.  */
 
 static inline void
-free_strinfo (strinfo si)
+free_strinfo (strinfo *si)
 {
   if (si && --si->refcount == 0)
     strinfo_pool.remove (si);
@@ -413,7 +413,7 @@ free_strinfo (strinfo si)
 /* Set strinfo in the vector entry IDX to SI.  */
 
 static inline void
-set_strinfo (int idx, strinfo si)
+set_strinfo (int idx, strinfo *si)
 {
   if (vec_safe_length (stridx_to_strinfo) && (*stridx_to_strinfo)[0])
     unshare_strinfo_vec ();
@@ -425,7 +425,7 @@ set_strinfo (int idx, strinfo si)
 /* Return string length, or NULL if it can't be computed.  */
 
 static tree
-get_string_length (strinfo si)
+get_string_length (strinfo *si)
 {
   if (si->length)
     return si->length;
@@ -542,7 +542,7 @@ get_string_length (strinfo si)
 static bool
 maybe_invalidate (gimple *stmt)
 {
-  strinfo si;
+  strinfo *si;
   unsigned int i;
   bool nonempty = false;
 
@@ -571,10 +571,10 @@ maybe_invalidate (gimple *stmt)
    if stridx_to_strinfo vector is shared with some other
    bbs.  */
 
-static strinfo
-unshare_strinfo (strinfo si)
+static strinfo *
+unshare_strinfo (strinfo *si)
 {
-  strinfo nsi;
+  strinfo *nsi;
 
   if (si->refcount == 1 && !strinfo_shared ())
     return si;
@@ -595,10 +595,10 @@ unshare_strinfo (strinfo si)
    if all strinfos in between belong to the chain, otherwise
    NULL.  */
 
-static strinfo
-verify_related_strinfos (strinfo origsi)
+static strinfo *
+verify_related_strinfos (strinfo *origsi)
 {
-  strinfo si = origsi, psi;
+  strinfo *si = origsi, *psi;
 
   if (origsi->first == 0)
     return NULL;
@@ -622,7 +622,7 @@ verify_related_strinfos (strinfo origsi)
    been created.  */
 
 static int
-get_stridx_plus_constant (strinfo basesi, HOST_WIDE_INT off, tree ptr)
+get_stridx_plus_constant (strinfo *basesi, HOST_WIDE_INT off, tree ptr)
 {
   gcc_checking_assert (TREE_CODE (ptr) == SSA_NAME);
 
@@ -636,7 +636,7 @@ get_stridx_plus_constant (strinfo basesi, HOST_WIDE_INT off, tree ptr)
     return 0;
 
   HOST_WIDE_INT len = tree_to_shwi (basesi->length) - off;
-  strinfo si = basesi, chainsi;
+  strinfo *si = basesi, *chainsi;
   if (si->first || si->prev || si->next)
     si = verify_related_strinfos (basesi);
   if (si == NULL
@@ -676,7 +676,7 @@ get_stridx_plus_constant (strinfo basesi, HOST_WIDE_INT off, tree ptr)
   set_strinfo (idx, si);
   if (chainsi->next)
     {
-      strinfo nextsi = unshare_strinfo (get_strinfo (chainsi->next));
+      strinfo *nextsi = unshare_strinfo (get_strinfo (chainsi->next));
       si->next = nextsi->idx;
       nextsi->prev = idx;
     }
@@ -697,10 +697,10 @@ get_stridx_plus_constant (strinfo basesi, HOST_WIDE_INT off, tree ptr)
    to a zero-length string and if possible chain it to a related strinfo
    chain whose part is or might be CHAINSI.  */
 
-static strinfo
-zero_length_string (tree ptr, strinfo chainsi)
+static strinfo *
+zero_length_string (tree ptr, strinfo *chainsi)
 {
-  strinfo si;
+  strinfo *si;
   int idx;
   if (ssa_ver_to_stridx.length () <= SSA_NAME_VERSION (ptr))
     ssa_ver_to_stridx.safe_grow_cleared (num_ssa_names);
@@ -779,16 +779,16 @@ zero_length_string (tree ptr, strinfo chainsi)
    but don't adjust ORIGSI).  */
 
 static void
-adjust_related_strinfos (location_t loc, strinfo origsi, tree adj)
+adjust_related_strinfos (location_t loc, strinfo *origsi, tree adj)
 {
-  strinfo si = verify_related_strinfos (origsi);
+  strinfo *si = verify_related_strinfos (origsi);
 
   if (si == NULL)
     return;
 
   while (1)
     {
-      strinfo nsi;
+      strinfo *nsi;
 
       if (si != origsi)
 	{
@@ -878,11 +878,11 @@ find_equal_ptrs (tree ptr, int idx)
    strinfo.  */
 
 static void
-adjust_last_stmt (strinfo si, gimple *stmt, bool is_strcat)
+adjust_last_stmt (strinfo *si, gimple *stmt, bool is_strcat)
 {
   tree vuse, callee, len;
   struct laststmt_struct last = laststmt;
-  strinfo lastsi, firstsi;
+  strinfo *lastsi, *firstsi;
   unsigned len_arg_no = 2;
 
   laststmt.stmt = NULL;
@@ -913,7 +913,7 @@ adjust_last_stmt (strinfo si, gimple *stmt, bool is_strcat)
 	return;
       while (firstsi != lastsi)
 	{
-	  strinfo nextsi;
+	  strinfo *nextsi;
 	  if (firstsi->next == 0)
 	    return;
 	  nextsi = get_strinfo (firstsi->next);
@@ -1010,7 +1010,7 @@ handle_builtin_strlen (gimple_stmt_iterator *gsi)
   idx = get_stridx (src);
   if (idx)
     {
-      strinfo si = NULL;
+      strinfo *si = NULL;
       tree rhs;
 
       if (idx < 0)
@@ -1061,7 +1061,7 @@ handle_builtin_strlen (gimple_stmt_iterator *gsi)
     return;
   if (idx)
     {
-      strinfo si = new_strinfo (src, idx, lhs);
+      strinfo *si = new_strinfo (src, idx, lhs);
       set_strinfo (idx, si);
       find_equal_ptrs (src, idx);
     }
@@ -1090,7 +1090,7 @@ handle_builtin_strchr (gimple_stmt_iterator *gsi)
   idx = get_stridx (src);
   if (idx)
     {
-      strinfo si = NULL;
+      strinfo *si = NULL;
       tree rhs;
 
       if (idx < 0)
@@ -1165,7 +1165,7 @@ handle_builtin_strchr (gimple_stmt_iterator *gsi)
 	  tree srcu = fold_convert_loc (loc, size_type_node, src);
 	  tree length = fold_build2_loc (loc, MINUS_EXPR,
 					 size_type_node, lhsu, srcu);
-	  strinfo si = new_strinfo (src, idx, length);
+	  strinfo *si = new_strinfo (src, idx, length);
 	  si->endptr = lhs;
 	  set_strinfo (idx, si);
 	  find_equal_ptrs (src, idx);
@@ -1188,7 +1188,7 @@ handle_builtin_strcpy (enum built_in_function bcode, gimple_stmt_iterator *gsi)
   tree src, dst, srclen, len, lhs, args, type, fn, oldlen;
   bool success;
   gimple *stmt = gsi_stmt (*gsi);
-  strinfo si, dsi, olddsi, zsi;
+  strinfo *si, *dsi, *olddsi, *zsi;
   location_t loc;
   bool with_bounds = gimple_call_with_bounds_p (stmt);
 
@@ -1274,7 +1274,7 @@ handle_builtin_strcpy (enum built_in_function bcode, gimple_stmt_iterator *gsi)
 
   if (dsi->length == NULL_TREE)
     {
-      strinfo chainsi;
+      strinfo *chainsi;
 
       /* If string length of src is unknown, use delayed length
 	 computation.  If string length of dst will be needed, it
@@ -1439,7 +1439,7 @@ handle_builtin_memcpy (enum built_in_function bcode, gimple_stmt_iterator *gsi)
   int idx, didx;
   tree src, dst, len, lhs, oldlen, newlen;
   gimple *stmt = gsi_stmt (*gsi);
-  strinfo si, dsi, olddsi;
+  strinfo *si, *dsi, *olddsi;
   bool with_bounds = gimple_call_with_bounds_p (stmt);
 
   len = gimple_call_arg (stmt, with_bounds ? 4 : 2);
@@ -1582,7 +1582,7 @@ handle_builtin_strcat (enum built_in_function bcode, gimple_stmt_iterator *gsi)
   tree src, dst, srclen, dstlen, len, lhs, args, type, fn, objsz, endptr;
   bool success;
   gimple *stmt = gsi_stmt (*gsi);
-  strinfo si, dsi;
+  strinfo *si, *dsi;
   location_t loc;
   bool with_bounds = gimple_call_with_bounds_p (stmt);
 
@@ -1792,7 +1792,7 @@ handle_builtin_malloc (enum built_in_function bcode, gimple_stmt_iterator *gsi)
   tree length = NULL_TREE;
   if (bcode == BUILT_IN_CALLOC)
     length = build_int_cst (size_type_node, 0);
-  strinfo si = new_strinfo (lhs, idx, length);
+  strinfo *si = new_strinfo (lhs, idx, length);
   if (bcode == BUILT_IN_CALLOC)
     si->endptr = lhs;
   set_strinfo (idx, si);
@@ -1815,7 +1815,7 @@ handle_builtin_memset (gimple_stmt_iterator *gsi)
   int idx1 = get_stridx (ptr);
   if (idx1 <= 0)
     return true;
-  strinfo si1 = get_strinfo (idx1);
+  strinfo *si1 = get_strinfo (idx1);
   if (!si1)
     return true;
   gimple *stmt1 = si1->stmt;
@@ -1866,7 +1866,7 @@ handle_pointer_plus (gimple_stmt_iterator *gsi)
   gimple *stmt = gsi_stmt (*gsi);
   tree lhs = gimple_assign_lhs (stmt), off;
   int idx = get_stridx (gimple_assign_rhs1 (stmt));
-  strinfo si, zsi;
+  strinfo *si, *zsi;
 
   if (idx == 0)
     return;
@@ -1916,7 +1916,7 @@ static bool
 handle_char_store (gimple_stmt_iterator *gsi)
 {
   int idx = -1;
-  strinfo si = NULL;
+  strinfo *si = NULL;
   gimple *stmt = gsi_stmt (*gsi);
   tree ssaname = NULL_TREE, lhs = gimple_assign_lhs (stmt);
 
@@ -2224,7 +2224,7 @@ strlen_dom_walker::before_dom_children (basic_block bb)
     stridx_to_strinfo = NULL;
   else
     {
-      stridx_to_strinfo = ((vec<strinfo, va_heap, vl_embed> *) dombb->aux);
+      stridx_to_strinfo = ((vec<strinfo *, va_heap, vl_embed> *) dombb->aux);
       if (stridx_to_strinfo)
 	{
 	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
@@ -2246,7 +2246,7 @@ strlen_dom_walker::before_dom_children (basic_block bb)
 		      if (!strinfo_shared ())
 			{
 			  unsigned int i;
-			  strinfo si;
+			  strinfo *si;
 
 			  for (i = 1;
 			       vec_safe_iterate (stridx_to_strinfo, i, &si);
@@ -2294,7 +2294,7 @@ strlen_dom_walker::before_dom_children (basic_block bb)
 
   bb->aux = stridx_to_strinfo;
   if (vec_safe_length (stridx_to_strinfo) && !strinfo_shared ())
-    (*stridx_to_strinfo)[0] = (strinfo) bb;
+    (*stridx_to_strinfo)[0] = (strinfo *) bb;
 }
 
 /* Callback for walk_dominator_tree.  Free strinfo vector if it is
@@ -2305,12 +2305,12 @@ strlen_dom_walker::after_dom_children (basic_block bb)
 {
   if (bb->aux)
     {
-      stridx_to_strinfo = ((vec<strinfo, va_heap, vl_embed> *) bb->aux);
+      stridx_to_strinfo = ((vec<strinfo *, va_heap, vl_embed> *) bb->aux);
       if (vec_safe_length (stridx_to_strinfo)
-	  && (*stridx_to_strinfo)[0] == (strinfo) bb)
+	  && (*stridx_to_strinfo)[0] == (strinfo *) bb)
 	{
 	  unsigned int i;
-	  strinfo si;
+	  strinfo *si;
 
 	  for (i = 1; vec_safe_iterate (stridx_to_strinfo, i, &si); ++i)
 	    free_strinfo (si);
diff --git a/gcc/tree-ssa-tail-merge.c b/gcc/tree-ssa-tail-merge.c
index 0ce59e8..6c8a4f4 100644
--- a/gcc/tree-ssa-tail-merge.c
+++ b/gcc/tree-ssa-tail-merge.c
@@ -222,7 +222,7 @@ along with GCC; see the file COPYING3.  If not see
    Additionally, the hash value for the struct is cached in hashval, and
    in_worklist indicates whether it's currently part of worklist.  */
 
-struct same_succ_def : pointer_hash <same_succ_def>
+struct same_succ : pointer_hash <same_succ>
 {
   /* The bbs that have the same successor bbs.  */
   bitmap bbs;
@@ -239,24 +239,22 @@ struct same_succ_def : pointer_hash <same_succ_def>
   hashval_t hashval;
 
   /* hash_table support.  */
-  static inline hashval_t hash (const same_succ_def *);
-  static int equal (const same_succ_def *, const same_succ_def *);
-  static void remove (same_succ_def *);
+  static inline hashval_t hash (const same_succ *);
+  static int equal (const same_succ *, const same_succ *);
+  static void remove (same_succ *);
 };
-typedef struct same_succ_def *same_succ;
-typedef const struct same_succ_def *const_same_succ;
 
 /* hash routine for hash_table support, returns hashval of E.  */
 
 inline hashval_t
-same_succ_def::hash (const same_succ_def *e)
+same_succ::hash (const same_succ *e)
 {
   return e->hashval;
 }
 
 /* A group of bbs where 1 bb from bbs can replace the other bbs.  */
 
-struct bb_cluster_def
+struct bb_cluster
 {
   /* The bbs in the cluster.  */
   bitmap bbs;
@@ -267,8 +265,6 @@ struct bb_cluster_def
   /* The bb to replace the cluster with.  */
   basic_block rep_bb;
 };
-typedef struct bb_cluster_def *bb_cluster;
-typedef const struct bb_cluster_def *const_bb_cluster;
 
 /* Per bb-info.  */
 
@@ -277,9 +273,9 @@ struct aux_bb_info
   /* The number of non-debug statements in the bb.  */
   int size;
   /* The same_succ that this bb is a member of.  */
-  same_succ bb_same_succ;
+  same_succ *bb_same_succ;
   /* The cluster that this bb is a member of.  */
-  bb_cluster cluster;
+  bb_cluster *cluster;
   /* The vop state at the exit of a bb.  This is shortlived data, used to
      communicate data between update_block_by and update_vuses.  */
   tree vop_at_exit;
@@ -383,7 +379,7 @@ gvn_uses_equal (tree val1, tree val2)
 /* Prints E to FILE.  */
 
 static void
-same_succ_print (FILE *file, const same_succ e)
+same_succ_print (FILE *file, const same_succ *e)
 {
   unsigned int i;
   bitmap_print (file, e->bbs, "bbs:", "\n");
@@ -398,9 +394,9 @@ same_succ_print (FILE *file, const same_succ e)
 /* Prints same_succ VE to VFILE.  */
 
 inline int
-ssa_same_succ_print_traverse (same_succ *pe, FILE *file)
+ssa_same_succ_print_traverse (same_succ **pe, FILE *file)
 {
-  const same_succ e = *pe;
+  const same_succ *e = *pe;
   same_succ_print (file, e);
   return 1;
 }
@@ -445,7 +441,7 @@ stmt_update_dep_bb (gimple *stmt)
 /* Calculates hash value for same_succ VE.  */
 
 static hashval_t
-same_succ_hash (const_same_succ e)
+same_succ_hash (const same_succ *e)
 {
   inchash::hash hstate (bitmap_hash (e->succs));
   int flags;
@@ -523,7 +519,7 @@ same_succ_hash (const_same_succ e)
    the other edge flags.  */
 
 static bool
-inverse_flags (const_same_succ e1, const_same_succ e2)
+inverse_flags (const same_succ *e1, const same_succ *e2)
 {
   int f1a, f1b, f2a, f2b;
   int mask = ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
@@ -545,7 +541,7 @@ inverse_flags (const_same_succ e1, const_same_succ e2)
 /* Compares SAME_SUCCs E1 and E2.  */
 
 int
-same_succ_def::equal (const same_succ_def *e1, const same_succ_def *e2)
+same_succ::equal (const same_succ *e1, const same_succ *e2)
 {
   unsigned int i, first1, first2;
   gimple_stmt_iterator gsi1, gsi2;
@@ -600,10 +596,10 @@ same_succ_def::equal (const same_succ_def *e1, const same_succ_def *e2)
 
 /* Alloc and init a new SAME_SUCC.  */
 
-static same_succ
+static same_succ *
 same_succ_alloc (void)
 {
-  same_succ same = XNEW (struct same_succ_def);
+  same_succ *same = XNEW (struct same_succ);
 
   same->bbs = BITMAP_ALLOC (NULL);
   same->succs = BITMAP_ALLOC (NULL);
@@ -617,7 +613,7 @@ same_succ_alloc (void)
 /* Delete same_succ E.  */
 
 void
-same_succ_def::remove (same_succ e)
+same_succ::remove (same_succ *e)
 {
   BITMAP_FREE (e->bbs);
   BITMAP_FREE (e->succs);
@@ -630,7 +626,7 @@ same_succ_def::remove (same_succ e)
 /* Reset same_succ SAME.  */
 
 static void
-same_succ_reset (same_succ same)
+same_succ_reset (same_succ *same)
 {
   bitmap_clear (same->bbs);
   bitmap_clear (same->succs);
@@ -638,7 +634,7 @@ same_succ_reset (same_succ same)
   same->succ_flags.truncate (0);
 }
 
-static hash_table<same_succ_def> *same_succ_htab;
+static hash_table<same_succ> *same_succ_htab;
 
 /* Array that is used to store the edge flags for a successor.  */
 
@@ -665,7 +661,7 @@ debug_same_succ ( void)
 
 /* Vector of bbs to process.  */
 
-static vec<same_succ> worklist;
+static vec<same_succ *> worklist;
 
 /* Prints worklist to FILE.  */
 
@@ -680,7 +676,7 @@ print_worklist (FILE *file)
 /* Adds SAME to worklist.  */
 
 static void
-add_to_worklist (same_succ same)
+add_to_worklist (same_succ *same)
 {
   if (same->in_worklist)
     return;
@@ -695,12 +691,12 @@ add_to_worklist (same_succ same)
 /* Add BB to same_succ_htab.  */
 
 static void
-find_same_succ_bb (basic_block bb, same_succ *same_p)
+find_same_succ_bb (basic_block bb, same_succ **same_p)
 {
   unsigned int j;
   bitmap_iterator bj;
-  same_succ same = *same_p;
-  same_succ *slot;
+  same_succ *same = *same_p;
+  same_succ **slot;
   edge_iterator ei;
   edge e;
 
@@ -750,7 +746,7 @@ find_same_succ_bb (basic_block bb, same_succ *same_p)
 static void
 find_same_succ (void)
 {
-  same_succ same = same_succ_alloc ();
+  same_succ *same = same_succ_alloc ();
   basic_block bb;
 
   FOR_EACH_BB_FN (bb, cfun)
@@ -760,7 +756,7 @@ find_same_succ (void)
 	same = same_succ_alloc ();
     }
 
-  same_succ_def::remove (same);
+  same_succ::remove (same);
 }
 
 /* Initializes worklist administration.  */
@@ -769,7 +765,7 @@ static void
 init_worklist (void)
 {
   alloc_aux_for_blocks (sizeof (struct aux_bb_info));
-  same_succ_htab = new hash_table<same_succ_def> (n_basic_blocks_for_fn (cfun));
+  same_succ_htab = new hash_table<same_succ> (n_basic_blocks_for_fn (cfun));
   same_succ_edge_flags = XCNEWVEC (int, last_basic_block_for_fn (cfun));
   deleted_bbs = BITMAP_ALLOC (NULL);
   deleted_bb_preds = BITMAP_ALLOC (NULL);
@@ -817,7 +813,7 @@ mark_basic_block_deleted (basic_block bb)
 static void
 same_succ_flush_bb (basic_block bb)
 {
-  same_succ same = BB_SAME_SUCC (bb);
+  same_succ *same = BB_SAME_SUCC (bb);
   BB_SAME_SUCC (bb) = NULL;
   if (bitmap_single_bit_set_p (same->bbs))
     same_succ_htab->remove_elt_with_hash (same, same->hashval);
@@ -875,7 +871,7 @@ update_worklist (void)
   unsigned int i;
   bitmap_iterator bi;
   basic_block bb;
-  same_succ same;
+  same_succ *same;
 
   bitmap_and_compl_into (deleted_bb_preds, deleted_bbs);
   bitmap_clear (deleted_bbs);
@@ -892,14 +888,14 @@ update_worklist (void)
       if (same == NULL)
 	same = same_succ_alloc ();
     }
-  same_succ_def::remove (same);
+  same_succ::remove (same);
   bitmap_clear (deleted_bb_preds);
 }
 
 /* Prints cluster C to FILE.  */
 
 static void
-print_cluster (FILE *file, bb_cluster c)
+print_cluster (FILE *file, bb_cluster *c)
 {
   if (c == NULL)
     return;
@@ -909,9 +905,9 @@ print_cluster (FILE *file, bb_cluster c)
 
 /* Prints cluster C to stderr.  */
 
-extern void debug_cluster (bb_cluster);
+extern void debug_cluster (bb_cluster *);
 DEBUG_FUNCTION void
-debug_cluster (bb_cluster c)
+debug_cluster (bb_cluster *c)
 {
   print_cluster (stderr, c);
 }
@@ -919,7 +915,7 @@ debug_cluster (bb_cluster c)
 /* Update C->rep_bb, given that BB is added to the cluster.  */
 
 static void
-update_rep_bb (bb_cluster c, basic_block bb)
+update_rep_bb (bb_cluster *c, basic_block bb)
 {
   /* Initial.  */
   if (c->rep_bb == NULL)
@@ -953,7 +949,7 @@ update_rep_bb (bb_cluster c, basic_block bb)
 /* Add BB to cluster C.  Sets BB in C->bbs, and preds of BB in C->preds.  */
 
 static void
-add_bb_to_cluster (bb_cluster c, basic_block bb)
+add_bb_to_cluster (bb_cluster *c, basic_block bb)
 {
   edge e;
   edge_iterator ei;
@@ -968,11 +964,11 @@ add_bb_to_cluster (bb_cluster c, basic_block bb)
 
 /* Allocate and init new cluster.  */
 
-static bb_cluster
+static bb_cluster *
 new_cluster (void)
 {
-  bb_cluster c;
-  c = XCNEW (struct bb_cluster_def);
+  bb_cluster *c;
+  c = XCNEW (bb_cluster);
   c->bbs = BITMAP_ALLOC (NULL);
   c->preds = BITMAP_ALLOC (NULL);
   c->rep_bb = NULL;
@@ -982,7 +978,7 @@ new_cluster (void)
 /* Delete clusters.  */
 
 static void
-delete_cluster (bb_cluster c)
+delete_cluster (bb_cluster *c)
 {
   if (c == NULL)
     return;
@@ -994,7 +990,7 @@ delete_cluster (bb_cluster c)
 
 /* Array that contains all clusters.  */
 
-static vec<bb_cluster> all_clusters;
+static vec<bb_cluster *> all_clusters;
 
 /* Allocate all cluster vectors.  */
 
@@ -1032,7 +1028,7 @@ delete_cluster_vectors (void)
 /* Merge cluster C2 into C1.  */
 
 static void
-merge_clusters (bb_cluster c1, bb_cluster c2)
+merge_clusters (bb_cluster *c1, bb_cluster *c2)
 {
   bitmap_ior_into (c1->bbs, c2->bbs);
   bitmap_ior_into (c1->preds, c2->preds);
@@ -1045,7 +1041,7 @@ static void
 set_cluster (basic_block bb1, basic_block bb2)
 {
   basic_block merge_bb, other_bb;
-  bb_cluster merge, old, c;
+  bb_cluster *merge, *old, *c;
 
   if (BB_CLUSTER (bb1) == NULL && BB_CLUSTER (bb2) == NULL)
     {
@@ -1105,7 +1101,7 @@ gimple_operand_equal_value_p (tree t1, tree t2)
    gimple_bb (s2) are members of SAME_SUCC.  */
 
 static bool
-gimple_equal_p (same_succ same_succ, gimple *s1, gimple *s2)
+gimple_equal_p (same_succ *same_succ, gimple *s1, gimple *s2)
 {
   unsigned int i;
   tree lhs1, lhs2;
@@ -1225,7 +1221,7 @@ gsi_advance_bw_nondebug_nonlocal (gimple_stmt_iterator *gsi, tree *vuse,
    clusters them.  */
 
 static void
-find_duplicate (same_succ same_succ, basic_block bb1, basic_block bb2)
+find_duplicate (same_succ *same_succ, basic_block bb1, basic_block bb2)
 {
   gimple_stmt_iterator gsi1 = gsi_last_nondebug_bb (bb1);
   gimple_stmt_iterator gsi2 = gsi_last_nondebug_bb (bb2);
@@ -1307,7 +1303,7 @@ same_phi_alternatives_1 (basic_block dest, edge e1, edge e2)
    phi alternatives for BB1 and BB2 are equal.  */
 
 static bool
-same_phi_alternatives (same_succ same_succ, basic_block bb1, basic_block bb2)
+same_phi_alternatives (same_succ *same_succ, basic_block bb1, basic_block bb2)
 {
   unsigned int s;
   bitmap_iterator bs;
@@ -1392,7 +1388,7 @@ deps_ok_for_redirect (basic_block bb1, basic_block bb2)
 /* Within SAME_SUCC->bbs, find clusters of bbs which can be merged.  */
 
 static void
-find_clusters_1 (same_succ same_succ)
+find_clusters_1 (same_succ *same_succ)
 {
   basic_block bb1, bb2;
   unsigned int i, j;
@@ -1444,7 +1440,7 @@ find_clusters_1 (same_succ same_succ)
 static void
 find_clusters (void)
 {
-  same_succ same;
+  same_succ *same;
 
   while (!worklist.is_empty ())
     {
@@ -1552,7 +1548,7 @@ static int
 apply_clusters (void)
 {
   basic_block bb1, bb2;
-  bb_cluster c;
+  bb_cluster *c;
   unsigned int i, j;
   bitmap_iterator bj;
   int nr_bbs_removed = 0;
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index 2cd71a2..3bc3b03 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -63,7 +63,7 @@ along with GCC; see the file COPYING3.  If not see
 
 /* Range of values that can be associated with an SSA_NAME after VRP
    has executed.  */
-struct value_range_d
+struct value_range
 {
   /* Lattice value represented by this range.  */
   enum value_range_type type;
@@ -87,8 +87,6 @@ struct value_range_d
   bitmap equiv;
 };
 
-typedef struct value_range_d value_range_t;
-
 #define VR_INITIALIZER { VR_UNDEFINED, NULL_TREE, NULL_TREE, NULL }
 
 /* Set of SSA names found live during the RPO traversal of the function
@@ -107,8 +105,8 @@ live_on_edge (edge e, tree name)
 /* Local functions.  */
 static int compare_values (tree val1, tree val2);
 static int compare_values_warnv (tree val1, tree val2, bool *);
-static void vrp_meet (value_range_t *, value_range_t *);
-static void vrp_intersect_ranges (value_range_t *, value_range_t *);
+static void vrp_meet (value_range *, value_range *);
+static void vrp_intersect_ranges (value_range *, value_range *);
 static tree vrp_evaluate_conditional_warnv_with_ops (enum tree_code,
 						     tree, tree, bool, bool *,
 						     bool *);
@@ -155,7 +153,7 @@ static assert_locus **asserts_for;
 /* Value range array.  After propagation, VR_VALUE[I] holds the range
    of values that SSA name N_I may take.  */
 static unsigned num_vr_values;
-static value_range_t **vr_value;
+static value_range **vr_value;
 static bool values_propagated;
 
 /* For a PHI node which sets SSA name N_I, VR_COUNTS[I] holds the
@@ -348,7 +346,7 @@ avoid_overflow_infinity (tree val)
 /* Set value range VR to VR_UNDEFINED.  */
 
 static inline void
-set_value_range_to_undefined (value_range_t *vr)
+set_value_range_to_undefined (value_range *vr)
 {
   vr->type = VR_UNDEFINED;
   vr->min = vr->max = NULL_TREE;
@@ -360,7 +358,7 @@ set_value_range_to_undefined (value_range_t *vr)
 /* Set value range VR to VR_VARYING.  */
 
 static inline void
-set_value_range_to_varying (value_range_t *vr)
+set_value_range_to_varying (value_range *vr)
 {
   vr->type = VR_VARYING;
   vr->min = vr->max = NULL_TREE;
@@ -372,7 +370,7 @@ set_value_range_to_varying (value_range_t *vr)
 /* Set value range VR to {T, MIN, MAX, EQUIV}.  */
 
 static void
-set_value_range (value_range_t *vr, enum value_range_type t, tree min,
+set_value_range (value_range *vr, enum value_range_type t, tree min,
 		 tree max, bitmap equiv)
 {
 #if defined ENABLE_CHECKING
@@ -434,7 +432,7 @@ set_value_range (value_range_t *vr, enum value_range_type t, tree min,
    extract ranges from var + CST op limit.  */
 
 static void
-set_and_canonicalize_value_range (value_range_t *vr, enum value_range_type t,
+set_and_canonicalize_value_range (value_range *vr, enum value_range_type t,
 				  tree min, tree max, bitmap equiv)
 {
   /* Use the canonical setters for VR_UNDEFINED and VR_VARYING.  */
@@ -547,7 +545,7 @@ set_and_canonicalize_value_range (value_range_t *vr, enum value_range_type t,
 /* Copy value range FROM into value range TO.  */
 
 static inline void
-copy_value_range (value_range_t *to, value_range_t *from)
+copy_value_range (value_range *to, value_range *from)
 {
   set_value_range (to, from->type, from->min, from->max, from->equiv);
 }
@@ -558,7 +556,7 @@ copy_value_range (value_range_t *to, value_range_t *from)
    infinity when we shouldn't.  */
 
 static inline void
-set_value_range_to_value (value_range_t *vr, tree val, bitmap equiv)
+set_value_range_to_value (value_range *vr, tree val, bitmap equiv)
 {
   gcc_assert (is_gimple_min_invariant (val));
   if (TREE_OVERFLOW_P (val))
@@ -573,7 +571,7 @@ set_value_range_to_value (value_range_t *vr, tree val, bitmap equiv)
    overflow does not occur.  */
 
 static inline void
-set_value_range_to_nonnegative (value_range_t *vr, tree type,
+set_value_range_to_nonnegative (value_range *vr, tree type,
 				bool overflow_infinity)
 {
   tree zero;
@@ -595,7 +593,7 @@ set_value_range_to_nonnegative (value_range_t *vr, tree type,
 /* Set value range VR to a non-NULL range of type TYPE.  */
 
 static inline void
-set_value_range_to_nonnull (value_range_t *vr, tree type)
+set_value_range_to_nonnull (value_range *vr, tree type)
 {
   tree zero = build_int_cst (type, 0);
   set_value_range (vr, VR_ANTI_RANGE, zero, zero, vr->equiv);
@@ -605,7 +603,7 @@ set_value_range_to_nonnull (value_range_t *vr, tree type)
 /* Set value range VR to a NULL range of type TYPE.  */
 
 static inline void
-set_value_range_to_null (value_range_t *vr, tree type)
+set_value_range_to_null (value_range *vr, tree type)
 {
   set_value_range_to_value (vr, build_int_cst (type, 0), vr->equiv);
 }
@@ -614,7 +612,7 @@ set_value_range_to_null (value_range_t *vr, tree type)
 /* Set value range VR to a range of a truthvalue of type TYPE.  */
 
 static inline void
-set_value_range_to_truthvalue (value_range_t *vr, tree type)
+set_value_range_to_truthvalue (value_range *vr, tree type)
 {
   if (TYPE_PRECISION (type) == 1)
     set_value_range_to_varying (vr);
@@ -629,7 +627,7 @@ set_value_range_to_truthvalue (value_range_t *vr, tree type)
    abs (min) >= abs (max), set VR to [-min, min].  */
 
 static void
-abs_extent_range (value_range_t *vr, tree min, tree max)
+abs_extent_range (value_range *vr, tree min, tree max)
 {
   int cmp;
 
@@ -666,12 +664,12 @@ abs_extent_range (value_range_t *vr, tree min, tree max)
    If we have no value ranges recorded (i.e., VRP is not running), then
    return NULL.  Otherwise create an empty range if none existed for VAR.  */
 
-static value_range_t *
+static value_range *
 get_value_range (const_tree var)
 {
-  static const struct value_range_d vr_const_varying
+  static const value_range vr_const_varying
     = { VR_VARYING, NULL_TREE, NULL_TREE, NULL };
-  value_range_t *vr;
+  value_range *vr;
   tree sym;
   unsigned ver = SSA_NAME_VERSION (var);
 
@@ -683,7 +681,7 @@ get_value_range (const_tree var)
      We should get here at most from the substitute-and-fold stage which
      will never try to change values.  */
   if (ver >= num_vr_values)
-    return CONST_CAST (value_range_t *, &vr_const_varying);
+    return CONST_CAST (value_range *, &vr_const_varying);
 
   vr = vr_value[ver];
   if (vr)
@@ -691,10 +689,10 @@ get_value_range (const_tree var)
 
   /* After propagation finished do not allocate new value-ranges.  */
   if (values_propagated)
-    return CONST_CAST (value_range_t *, &vr_const_varying);
+    return CONST_CAST (value_range *, &vr_const_varying);
 
   /* Create a default value range.  */
-  vr_value[ver] = vr = XCNEW (value_range_t);
+  vr_value[ver] = vr = XCNEW (value_range);
 
   /* Defer allocating the equivalence set.  */
   vr->equiv = NULL;
@@ -758,9 +756,9 @@ vrp_bitmap_equal_p (const_bitmap b1, const_bitmap b2)
    is the range object associated with another SSA name.  */
 
 static inline bool
-update_value_range (const_tree var, value_range_t *new_vr)
+update_value_range (const_tree var, value_range *new_vr)
 {
-  value_range_t *old_vr;
+  value_range *old_vr;
   bool is_new;
 
   /* If there is a value-range on the SSA name from earlier analysis
@@ -771,7 +769,7 @@ update_value_range (const_tree var, value_range_t *new_vr)
       value_range_type rtype = get_range_info (var, &min, &max);
       if (rtype == VR_RANGE || rtype == VR_ANTI_RANGE)
 	{
-	  value_range_d nr;
+	  value_range nr;
 	  nr.type = rtype;
 	  nr.min = wide_int_to_tree (TREE_TYPE (var), min);
 	  nr.max = wide_int_to_tree (TREE_TYPE (var), max);
@@ -820,7 +818,7 @@ static void
 add_equivalence (bitmap *equiv, const_tree var)
 {
   unsigned ver = SSA_NAME_VERSION (var);
-  value_range_t *vr = vr_value[ver];
+  value_range *vr = vr_value[ver];
 
   if (*equiv == NULL)
     *equiv = BITMAP_ALLOC (NULL);
@@ -833,7 +831,7 @@ add_equivalence (bitmap *equiv, const_tree var)
 /* Return true if VR is ~[0, 0].  */
 
 static inline bool
-range_is_nonnull (value_range_t *vr)
+range_is_nonnull (value_range *vr)
 {
   return vr->type == VR_ANTI_RANGE
 	 && integer_zerop (vr->min)
@@ -844,7 +842,7 @@ range_is_nonnull (value_range_t *vr)
 /* Return true if VR is [0, 0].  */
 
 static inline bool
-range_is_null (value_range_t *vr)
+range_is_null (value_range *vr)
 {
   return vr->type == VR_RANGE
 	 && integer_zerop (vr->min)
@@ -855,7 +853,7 @@ range_is_null (value_range_t *vr)
    a singleton.  */
 
 static inline bool
-range_int_cst_p (value_range_t *vr)
+range_int_cst_p (value_range *vr)
 {
   return (vr->type == VR_RANGE
 	  && TREE_CODE (vr->max) == INTEGER_CST
@@ -865,7 +863,7 @@ range_int_cst_p (value_range_t *vr)
 /* Return true if VR is an INTEGER_CST singleton.  */
 
 static inline bool
-range_int_cst_singleton_p (value_range_t *vr)
+range_int_cst_singleton_p (value_range *vr)
 {
   return (range_int_cst_p (vr)
 	  && !is_overflow_infinity (vr->min)
@@ -876,7 +874,7 @@ range_int_cst_singleton_p (value_range_t *vr)
 /* Return true if value range VR involves at least one symbol.  */
 
 static inline bool
-symbolic_range_p (value_range_t *vr)
+symbolic_range_p (value_range *vr)
 {
   return (!is_gimple_min_invariant (vr->min)
           || !is_gimple_min_invariant (vr->max));
@@ -952,7 +950,7 @@ build_symbolic_expr (tree type, tree sym, bool neg, tree inv)
 /* Return true if value range VR involves exactly one symbol SYM.  */
 
 static bool
-symbolic_range_based_on_p (value_range_t *vr, const_tree sym)
+symbolic_range_based_on_p (value_range *vr, const_tree sym)
 {
   bool neg, min_has_symbol, max_has_symbol;
   tree inv;
@@ -977,7 +975,7 @@ symbolic_range_based_on_p (value_range_t *vr, const_tree sym)
 /* Return true if value range VR uses an overflow infinity.  */
 
 static inline bool
-overflow_infinity_range_p (value_range_t *vr)
+overflow_infinity_range_p (value_range *vr)
 {
   return (vr->type == VR_RANGE
 	  && (is_overflow_infinity (vr->min)
@@ -991,7 +989,7 @@ overflow_infinity_range_p (value_range_t *vr)
    uses an overflow infinity.  */
 
 static bool
-usable_range_p (value_range_t *vr, bool *strict_overflow_p)
+usable_range_p (value_range *vr, bool *strict_overflow_p)
 {
   gcc_assert (vr->type == VR_RANGE);
   if (is_overflow_infinity (vr->min))
@@ -1173,7 +1171,7 @@ vrp_stmt_computes_nonzero (gimple *stmt, bool *strict_overflow_p)
 	  && TREE_CODE (base) == MEM_REF
 	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
 	{
-	  value_range_t *vr = get_value_range (TREE_OPERAND (base, 0));
+	  value_range *vr = get_value_range (TREE_OPERAND (base, 0));
 	  if (range_is_nonnull (vr))
 	    return true;
 	}
@@ -1497,7 +1495,7 @@ value_inside_range (tree val, tree min, tree max)
    */
 
 static inline bool
-value_ranges_intersect_p (value_range_t *vr0, value_range_t *vr1)
+value_ranges_intersect_p (value_range *vr0, value_range *vr1)
 {
   /* The value ranges do not intersect if the maximum of the first range is
      less than the minimum of the second range or vice versa.
@@ -1523,7 +1521,7 @@ range_includes_zero_p (tree min, tree max)
 /* Return true if *VR is known to only contain nonnegative values.  */
 
 static inline bool
-value_range_nonnegative_p (value_range_t *vr)
+value_range_nonnegative_p (value_range *vr)
 {
   /* Testing for VR_ANTI_RANGE is not useful here as any anti-range
      which would return a useful value should be encoded as a 
@@ -1541,7 +1539,7 @@ value_range_nonnegative_p (value_range_t *vr)
    otherwise return NULL_TREE.  */
 
 static tree
-value_range_constant_singleton (value_range_t *vr)
+value_range_constant_singleton (value_range *vr)
 {
   if (vr->type == VR_RANGE
       && operand_equal_p (vr->min, vr->max, 0)
@@ -1572,7 +1570,7 @@ op_with_constant_singleton_value_range (tree op)
 static bool
 op_with_boolean_value_range_p (tree op)
 {
-  value_range_t *vr;
+  value_range *vr;
 
   if (TYPE_PRECISION (TREE_TYPE (op)) == 1)
     return true;
@@ -1594,10 +1592,10 @@ op_with_boolean_value_range_p (tree op)
    it in *VR_P.  */
 
 static void
-extract_range_from_assert (value_range_t *vr_p, tree expr)
+extract_range_from_assert (value_range *vr_p, tree expr)
 {
   tree var, cond, limit, min, max, type;
-  value_range_t *limit_vr;
+  value_range *limit_vr;
   enum tree_code cond_code;
 
   var = ASSERT_EXPR_VAR (expr);
@@ -1877,9 +1875,9 @@ extract_range_from_assert (value_range_t *vr_p, tree expr)
     always false.  */
 
 static void
-extract_range_from_ssa_name (value_range_t *vr, tree var)
+extract_range_from_ssa_name (value_range *vr, tree var)
 {
-  value_range_t *var_vr = get_value_range (var);
+  value_range *var_vr = get_value_range (var);
 
   if (var_vr->type != VR_VARYING)
     copy_value_range (vr, var_vr);
@@ -2049,7 +2047,7 @@ vrp_int_const_binop (enum tree_code code, tree val1, tree val2)
 
 static bool
 zero_nonzero_bits_from_vr (const tree expr_type,
-			   value_range_t *vr,
+			   value_range *vr,
 			   wide_int *may_be_nonzero,
 			   wide_int *must_be_nonzero)
 {
@@ -2089,8 +2087,8 @@ zero_nonzero_bits_from_vr (const tree expr_type,
    *VR1 will be VR_UNDEFINED.  */
 
 static bool
-ranges_from_anti_range (value_range_t *ar,
-			value_range_t *vr0, value_range_t *vr1)
+ranges_from_anti_range (value_range *ar,
+			value_range *vr0, value_range *vr1)
 {
   tree type = TREE_TYPE (ar->min);
 
@@ -2129,9 +2127,9 @@ ranges_from_anti_range (value_range_t *ar,
    *VR0 CODE *VR1.  */
 
 static void
-extract_range_from_multiplicative_op_1 (value_range_t *vr,
+extract_range_from_multiplicative_op_1 (value_range *vr,
 					enum tree_code code,
-					value_range_t *vr0, value_range_t *vr1)
+					value_range *vr0, value_range *vr1)
 {
   enum value_range_type type;
   tree val[4];
@@ -2284,12 +2282,12 @@ extract_range_from_multiplicative_op_1 (value_range_t *vr,
    type EXPR_TYPE.  The resulting range is stored in *VR.  */
 
 static void
-extract_range_from_binary_expr_1 (value_range_t *vr,
+extract_range_from_binary_expr_1 (value_range *vr,
 				  enum tree_code code, tree expr_type,
-				  value_range_t *vr0_, value_range_t *vr1_)
+				  value_range *vr0_, value_range *vr1_)
 {
-  value_range_t vr0 = *vr0_, vr1 = *vr1_;
-  value_range_t vrtem0 = VR_INITIALIZER, vrtem1 = VR_INITIALIZER;
+  value_range vr0 = *vr0_, vr1 = *vr1_;
+  value_range vrtem0 = VR_INITIALIZER, vrtem1 = VR_INITIALIZER;
   enum value_range_type type;
   tree min = NULL_TREE, max = NULL_TREE;
   int cmp;
@@ -2348,7 +2346,7 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
       extract_range_from_binary_expr_1 (vr, code, expr_type, &vrtem0, vr1_);
       if (vrtem1.type != VR_UNDEFINED)
 	{
-	  value_range_t vrres = VR_INITIALIZER;
+	  value_range vrres = VR_INITIALIZER;
 	  extract_range_from_binary_expr_1 (&vrres, code, expr_type,
 					    &vrtem1, vr1_);
 	  vrp_meet (vr, &vrres);
@@ -2362,7 +2360,7 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
       extract_range_from_binary_expr_1 (vr, code, expr_type, vr0_, &vrtem0);
       if (vrtem1.type != VR_UNDEFINED)
 	{
-	  value_range_t vrres = VR_INITIALIZER;
+	  value_range vrres = VR_INITIALIZER;
 	  extract_range_from_binary_expr_1 (&vrres, code, expr_type,
 					    vr0_, &vrtem1);
 	  vrp_meet (vr, &vrres);
@@ -2908,7 +2906,7 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
 		   && range_int_cst_singleton_p (&vr1))
 	    {
 	      bool saved_flag_wrapv;
-	      value_range_t vr1p = VR_INITIALIZER;
+	      value_range vr1p = VR_INITIALIZER;
 	      vr1p.type = VR_RANGE;
 	      vr1p.min = (wide_int_to_tree
 			  (expr_type,
@@ -3284,12 +3282,12 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
    The resulting range is stored in *VR.  */
 
 static void
-extract_range_from_binary_expr (value_range_t *vr,
+extract_range_from_binary_expr (value_range *vr,
 				enum tree_code code,
 				tree expr_type, tree op0, tree op1)
 {
-  value_range_t vr0 = VR_INITIALIZER;
-  value_range_t vr1 = VR_INITIALIZER;
+  value_range vr0 = VR_INITIALIZER;
+  value_range vr1 = VR_INITIALIZER;
 
   /* Get value ranges for each operand.  For constant operands, create
      a new value range with the operand to simplify processing.  */
@@ -3321,7 +3319,7 @@ extract_range_from_binary_expr (value_range_t *vr,
       && symbolic_range_based_on_p (&vr0, op1))
     {
       const bool minus_p = (code == MINUS_EXPR);
-      value_range_t n_vr1 = VR_INITIALIZER;
+      value_range n_vr1 = VR_INITIALIZER;
 
       /* Try with VR0 and [-INF, OP1].  */
       if (is_gimple_min_invariant (minus_p ? vr0.max : vr0.min))
@@ -3345,7 +3343,7 @@ extract_range_from_binary_expr (value_range_t *vr,
       && symbolic_range_based_on_p (&vr1, op0))
     {
       const bool minus_p = (code == MINUS_EXPR);
-      value_range_t n_vr0 = VR_INITIALIZER;
+      value_range n_vr0 = VR_INITIALIZER;
 
       /* Try with [-INF, OP0] and VR1.  */
       if (is_gimple_min_invariant (minus_p ? vr1.max : vr1.min))
@@ -3368,11 +3366,11 @@ extract_range_from_binary_expr (value_range_t *vr,
    The resulting range is stored in *VR.  */
 
 static void
-extract_range_from_unary_expr_1 (value_range_t *vr,
+extract_range_from_unary_expr_1 (value_range *vr,
 				 enum tree_code code, tree type,
-				 value_range_t *vr0_, tree op0_type)
+				 value_range *vr0_, tree op0_type)
 {
-  value_range_t vr0 = *vr0_, vrtem0 = VR_INITIALIZER, vrtem1 = VR_INITIALIZER;
+  value_range vr0 = *vr0_, vrtem0 = VR_INITIALIZER, vrtem1 = VR_INITIALIZER;
 
   /* VRP only operates on integral and pointer types.  */
   if (!(INTEGRAL_TYPE_P (op0_type)
@@ -3402,7 +3400,7 @@ extract_range_from_unary_expr_1 (value_range_t *vr,
     {
       /* -X is simply 0 - X, so re-use existing code that also handles
          anti-ranges fine.  */
-      value_range_t zero = VR_INITIALIZER;
+      value_range zero = VR_INITIALIZER;
       set_value_range_to_value (&zero, build_int_cst (type, 0), NULL);
       extract_range_from_binary_expr_1 (vr, MINUS_EXPR, type, &zero, &vr0);
       return;
@@ -3411,7 +3409,7 @@ extract_range_from_unary_expr_1 (value_range_t *vr,
     {
       /* ~X is simply -1 - X, so re-use existing code that also handles
          anti-ranges fine.  */
-      value_range_t minusone = VR_INITIALIZER;
+      value_range minusone = VR_INITIALIZER;
       set_value_range_to_value (&minusone, build_int_cst (type, -1), NULL);
       extract_range_from_binary_expr_1 (vr, MINUS_EXPR,
 					type, &minusone, &vr0);
@@ -3426,7 +3424,7 @@ extract_range_from_unary_expr_1 (value_range_t *vr,
       extract_range_from_unary_expr_1 (vr, code, type, &vrtem0, op0_type);
       if (vrtem1.type != VR_UNDEFINED)
 	{
-	  value_range_t vrres = VR_INITIALIZER;
+	  value_range vrres = VR_INITIALIZER;
 	  extract_range_from_unary_expr_1 (&vrres, code, type,
 					   &vrtem1, op0_type);
 	  vrp_meet (vr, &vrres);
@@ -3669,10 +3667,10 @@ extract_range_from_unary_expr_1 (value_range_t *vr,
    The resulting range is stored in *VR.  */
 
 static void
-extract_range_from_unary_expr (value_range_t *vr, enum tree_code code,
+extract_range_from_unary_expr (value_range *vr, enum tree_code code,
 			       tree type, tree op0)
 {
-  value_range_t vr0 = VR_INITIALIZER;
+  value_range vr0 = VR_INITIALIZER;
 
   /* Get value ranges for the operand.  For constant operands, create
      a new value range with the operand to simplify processing.  */
@@ -3691,11 +3689,11 @@ extract_range_from_unary_expr (value_range_t *vr, enum tree_code code,
    the ranges of each of its operands and the expression code.  */
 
 static void
-extract_range_from_cond_expr (value_range_t *vr, gassign *stmt)
+extract_range_from_cond_expr (value_range *vr, gassign *stmt)
 {
   tree op0, op1;
-  value_range_t vr0 = VR_INITIALIZER;
-  value_range_t vr1 = VR_INITIALIZER;
+  value_range vr0 = VR_INITIALIZER;
+  value_range vr1 = VR_INITIALIZER;
 
   /* Get value ranges for each operand.  For constant operands, create
      a new value range with the operand to simplify processing.  */
@@ -3725,7 +3723,7 @@ extract_range_from_cond_expr (value_range_t *vr, gassign *stmt)
    on the range of its operand and the expression code.  */
 
 static void
-extract_range_from_comparison (value_range_t *vr, enum tree_code code,
+extract_range_from_comparison (value_range *vr, enum tree_code code,
 			       tree type, tree op0, tree op1)
 {
   bool sop = false;
@@ -3765,8 +3763,8 @@ static bool
 check_for_binary_op_overflow (enum tree_code subcode, tree type,
 			      tree op0, tree op1, bool *ovf)
 {
-  value_range_t vr0 = VR_INITIALIZER;
-  value_range_t vr1 = VR_INITIALIZER;
+  value_range vr0 = VR_INITIALIZER;
+  value_range vr1 = VR_INITIALIZER;
   if (TREE_CODE (op0) == SSA_NAME)
     vr0 = *get_value_range (op0);
   else if (TREE_CODE (op0) == INTEGER_CST)
@@ -3867,7 +3865,7 @@ check_for_binary_op_overflow (enum tree_code subcode, tree type,
    Store the result in *VR */
 
 static void
-extract_range_basic (value_range_t *vr, gimple *stmt)
+extract_range_basic (value_range *vr, gimple *stmt)
 {
   bool sop = false;
   tree type = gimple_expr_type (stmt);
@@ -3903,7 +3901,7 @@ extract_range_basic (value_range_t *vr, gimple *stmt)
 	  maxi = prec;
 	  if (TREE_CODE (arg) == SSA_NAME)
 	    {
-	      value_range_t *vr0 = get_value_range (arg);
+	      value_range *vr0 = get_value_range (arg);
 	      /* If arg is non-zero, then ffs or popcount
 		 are non-zero.  */
 	      if (((vr0->type == VR_RANGE
@@ -3949,7 +3947,7 @@ extract_range_basic (value_range_t *vr, gimple *stmt)
 	    mini = -2;
 	  if (TREE_CODE (arg) == SSA_NAME)
 	    {
-	      value_range_t *vr0 = get_value_range (arg);
+	      value_range *vr0 = get_value_range (arg);
 	      /* From clz of VR_RANGE minimum we can compute
 		 result maximum.  */
 	      if (vr0->type == VR_RANGE
@@ -4010,7 +4008,7 @@ extract_range_basic (value_range_t *vr, gimple *stmt)
 	    }
 	  if (TREE_CODE (arg) == SSA_NAME)
 	    {
-	      value_range_t *vr0 = get_value_range (arg);
+	      value_range *vr0 = get_value_range (arg);
 	      /* If arg is non-zero, then use [0, prec - 1].  */
 	      if (((vr0->type == VR_RANGE
 		    && integer_nonzerop (vr0->min))
@@ -4150,8 +4148,8 @@ extract_range_basic (value_range_t *vr, gimple *stmt)
 		    }
 		  else
 		    {
-		      value_range_t vr0 = VR_INITIALIZER;
-		      value_range_t vr1 = VR_INITIALIZER;
+		      value_range vr0 = VR_INITIALIZER;
+		      value_range vr1 = VR_INITIALIZER;
 		      bool saved_flag_wrapv = flag_wrapv;
 		      /* Pretend the arithmetics is wrapping.  If there is
 			 any overflow, IMAGPART_EXPR will be set.  */
@@ -4185,7 +4183,7 @@ extract_range_basic (value_range_t *vr, gimple *stmt)
    in *VR.  */
 
 static void
-extract_range_from_assignment (value_range_t *vr, gassign *stmt)
+extract_range_from_assignment (value_range *vr, gassign *stmt)
 {
   enum tree_code code = gimple_assign_rhs_code (stmt);
 
@@ -4224,7 +4222,7 @@ extract_range_from_assignment (value_range_t *vr, gassign *stmt)
    for VAR.  If so, update VR with the new limits.  */
 
 static void
-adjust_range_with_scev (value_range_t *vr, struct loop *loop,
+adjust_range_with_scev (value_range *vr, struct loop *loop,
 			gimple *stmt, tree var)
 {
   tree init, step, chrec, tmin, tmax, min, max, type, tem;
@@ -4302,7 +4300,7 @@ adjust_range_with_scev (value_range_t *vr, struct loop *loop,
 	 the number of latch executions is the correct thing to use.  */
       if (max_loop_iterations (loop, &nit))
 	{
-	  value_range_t maxvr = VR_INITIALIZER;
+	  value_range maxvr = VR_INITIALIZER;
 	  signop sgn = TYPE_SIGN (TREE_TYPE (step));
 	  bool overflow;
 
@@ -4407,7 +4405,7 @@ adjust_range_with_scev (value_range_t *vr, struct loop *loop,
 
 
 static tree
-compare_ranges (enum tree_code comp, value_range_t *vr0, value_range_t *vr1,
+compare_ranges (enum tree_code comp, value_range *vr0, value_range *vr1,
 		bool *strict_overflow_p)
 {
   /* VARYING or UNDEFINED ranges cannot be compared.  */
@@ -4437,7 +4435,7 @@ compare_ranges (enum tree_code comp, value_range_t *vr0, value_range_t *vr1,
       if (vr0->type == VR_RANGE)
 	{
 	  /* To simplify processing, make VR0 the anti-range.  */
-	  value_range_t *tmp = vr0;
+	  value_range *tmp = vr0;
 	  vr0 = vr1;
 	  vr1 = tmp;
 	}
@@ -4561,7 +4559,7 @@ compare_ranges (enum tree_code comp, value_range_t *vr0, value_range_t *vr1,
    infinity was used in the test.  */
 
 static tree
-compare_range_with_value (enum tree_code comp, value_range_t *vr, tree val,
+compare_range_with_value (enum tree_code comp, value_range *vr, tree val,
 			  bool *strict_overflow_p)
 {
   if (vr->type == VR_VARYING || vr->type == VR_UNDEFINED)
@@ -4683,8 +4681,8 @@ compare_range_with_value (enum tree_code comp, value_range_t *vr, tree val,
 
 /* Debugging dumps.  */
 
-void dump_value_range (FILE *, value_range_t *);
-void debug_value_range (value_range_t *);
+void dump_value_range (FILE *, value_range *);
+void debug_value_range (value_range *);
 void dump_all_value_ranges (FILE *);
 void debug_all_value_ranges (void);
 void dump_vr_equiv (FILE *, bitmap);
@@ -4694,7 +4692,7 @@ void debug_vr_equiv (bitmap);
 /* Dump value range VR to FILE.  */
 
 void
-dump_value_range (FILE *file, value_range_t *vr)
+dump_value_range (FILE *file, value_range *vr)
 {
   if (vr == NULL)
     fprintf (file, "[]");
@@ -4754,7 +4752,7 @@ dump_value_range (FILE *file, value_range_t *vr)
 /* Dump value range VR to stderr.  */
 
 DEBUG_FUNCTION void
-debug_value_range (value_range_t *vr)
+debug_value_range (value_range *vr)
 {
   dump_value_range (stderr, vr);
   fprintf (stderr, "\n");
@@ -6479,7 +6477,7 @@ insert_range_assertions (void)
 static void
 check_array_ref (location_t location, tree ref, bool ignore_off_by_one)
 {
-  value_range_t* vr = NULL;
+  value_range *vr = NULL;
   tree low_sub, up_sub;
   tree low_bound, up_bound, up_bound_p1;
   tree base;
@@ -6974,7 +6972,7 @@ vrp_initialize (void)
 
   values_propagated = false;
   num_vr_values = num_ssa_names;
-  vr_value = XCNEWVEC (value_range_t *, num_vr_values);
+  vr_value = XCNEWVEC (value_range *, num_vr_values);
   vr_phi_edge_counts = XCNEWVEC (int, num_ssa_names);
 
   FOR_EACH_BB_FN (bb, cfun)
@@ -7024,7 +7022,7 @@ vrp_valueize (tree name)
 {
   if (TREE_CODE (name) == SSA_NAME)
     {
-      value_range_t *vr = get_value_range (name);
+      value_range *vr = get_value_range (name);
       if (vr->type == VR_RANGE
 	  && (vr->min == vr->max
 	      || operand_equal_p (vr->min, vr->max, 0)))
@@ -7048,7 +7046,7 @@ vrp_valueize_1 (tree name)
       if (!gimple_nop_p (def_stmt)
 	  && prop_simulate_again_p (def_stmt))
 	return NULL_TREE;
-      value_range_t *vr = get_value_range (name);
+      value_range *vr = get_value_range (name);
       if (range_int_cst_singleton_p (vr))
 	return vr->min;
     }
@@ -7075,7 +7073,7 @@ vrp_visit_assignment_or_call (gimple *stmt, tree *output_p)
 	   && TYPE_MAX_VALUE (TREE_TYPE (lhs)))
 	  || POINTER_TYPE_P (TREE_TYPE (lhs))))
     {
-      value_range_t new_vr = VR_INITIALIZER;
+      value_range new_vr = VR_INITIALIZER;
 
       /* Try folding the statement to a constant first.  */
       tree tem = gimple_fold_stmt_to_constant_1 (stmt, vrp_valueize,
@@ -7153,9 +7151,9 @@ vrp_visit_assignment_or_call (gimple *stmt, tree *output_p)
 		   SSA_PROP_NOT_INTERESTING.  If there are no
 		   {REAL,IMAG}PART_EXPR uses at all,
 		   return SSA_PROP_VARYING.  */
-		value_range_t new_vr = VR_INITIALIZER;
+		value_range new_vr = VR_INITIALIZER;
 		extract_range_basic (&new_vr, use_stmt);
-		value_range_t *old_vr = get_value_range (use_lhs);
+		value_range *old_vr = get_value_range (use_lhs);
 		if (old_vr->type != new_vr.type
 		    || !vrp_operand_equal_p (old_vr->min, new_vr.min)
 		    || !vrp_operand_equal_p (old_vr->max, new_vr.max)
@@ -7189,10 +7187,10 @@ vrp_visit_assignment_or_call (gimple *stmt, tree *output_p)
    or a symbolic range containing the SSA_NAME only if the value range
    is varying or undefined.  */
 
-static inline value_range_t
+static inline value_range
 get_vr_for_comparison (int i)
 {
-  value_range_t vr = *get_value_range (ssa_name (i));
+  value_range vr = *get_value_range (ssa_name (i));
 
   /* If name N_i does not have a valid range, use N_i as its own
      range.  This allows us to compare against names that may
@@ -7222,7 +7220,7 @@ compare_name_with_value (enum tree_code comp, tree var, tree val,
   tree retval, t;
   int used_strict_overflow;
   bool sop;
-  value_range_t equiv_vr;
+  value_range equiv_vr;
 
   /* Get the set of equivalences for VAR.  */
   e = get_value_range (var)->equiv;
@@ -7345,14 +7343,14 @@ compare_names (enum tree_code comp, tree n1, tree n2,
      of the loop just to check N1 and N2 ranges.  */
   EXECUTE_IF_SET_IN_BITMAP (e1, 0, i1, bi1)
     {
-      value_range_t vr1 = get_vr_for_comparison (i1);
+      value_range vr1 = get_vr_for_comparison (i1);
 
       t = retval = NULL_TREE;
       EXECUTE_IF_SET_IN_BITMAP (e2, 0, i2, bi2)
 	{
 	  bool sop = false;
 
-	  value_range_t vr2 = get_vr_for_comparison (i2);
+	  value_range vr2 = get_vr_for_comparison (i2);
 
 	  t = compare_ranges (comp, &vr1, &vr2, &sop);
 	  if (t)
@@ -7402,7 +7400,7 @@ vrp_evaluate_conditional_warnv_with_ops_using_ranges (enum tree_code code,
 						      tree op0, tree op1,
 						      bool * strict_overflow_p)
 {
-  value_range_t *vr0, *vr1;
+  value_range *vr0, *vr1;
 
   vr0 = (TREE_CODE (op0) == SSA_NAME) ? get_value_range (op0) : NULL;
   vr1 = (TREE_CODE (op1) == SSA_NAME) ? get_value_range (op1) : NULL;
@@ -7522,7 +7520,7 @@ vrp_evaluate_conditional (tree_code code, tree op0, tree op1, gimple *stmt)
 	 always fold regardless of the value of OP0.  If -Wtype-limits
 	 was specified, emit a warning.  */
       tree type = TREE_TYPE (op0);
-      value_range_t *vr0 = get_value_range (op0);
+      value_range *vr0 = get_value_range (op0);
 
       if (vr0->type == VR_RANGE
 	  && INTEGRAL_TYPE_P (type)
@@ -7777,7 +7775,7 @@ find_case_label_range (gswitch *stmt, tree min, tree max, size_t *min_idx,
    Returns true if the default label is not needed.  */
 
 static bool
-find_case_label_ranges (gswitch *stmt, value_range_t *vr, size_t *min_idx1,
+find_case_label_ranges (gswitch *stmt, value_range *vr, size_t *min_idx1,
 			size_t *max_idx1, size_t *min_idx2,
 			size_t *max_idx2)
 {
@@ -7858,7 +7856,7 @@ static enum ssa_prop_result
 vrp_visit_switch_stmt (gswitch *stmt, edge *taken_edge_p)
 {
   tree op, val;
-  value_range_t *vr;
+  value_range *vr;
   size_t i = 0, j = 0, k, l;
   bool take_default;
 
@@ -8531,9 +8529,9 @@ intersect_ranges (enum value_range_type *vr0type,
    in *VR0.  This may not be the smallest possible such range.  */
 
 static void
-vrp_intersect_ranges_1 (value_range_t *vr0, value_range_t *vr1)
+vrp_intersect_ranges_1 (value_range *vr0, value_range *vr1)
 {
-  value_range_t saved;
+  value_range saved;
 
   /* If either range is VR_VARYING the other one wins.  */
   if (vr1->type == VR_VARYING)
@@ -8583,7 +8581,7 @@ vrp_intersect_ranges_1 (value_range_t *vr0, value_range_t *vr1)
 }
 
 static void
-vrp_intersect_ranges (value_range_t *vr0, value_range_t *vr1)
+vrp_intersect_ranges (value_range *vr0, value_range *vr1)
 {
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
@@ -8607,9 +8605,9 @@ vrp_intersect_ranges (value_range_t *vr0, value_range_t *vr1)
    may not be the smallest possible such range.  */
 
 static void
-vrp_meet_1 (value_range_t *vr0, value_range_t *vr1)
+vrp_meet_1 (value_range *vr0, value_range *vr1)
 {
-  value_range_t saved;
+  value_range saved;
 
   if (vr0->type == VR_UNDEFINED)
     {
@@ -8680,7 +8678,7 @@ vrp_meet_1 (value_range_t *vr0, value_range_t *vr1)
 }
 
 static void
-vrp_meet (value_range_t *vr0, value_range_t *vr1)
+vrp_meet (value_range *vr0, value_range *vr1)
 {
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
@@ -8709,8 +8707,8 @@ vrp_visit_phi_node (gphi *phi)
 {
   size_t i;
   tree lhs = PHI_RESULT (phi);
-  value_range_t *lhs_vr = get_value_range (lhs);
-  value_range_t vr_result = VR_INITIALIZER;
+  value_range *lhs_vr = get_value_range (lhs);
+  value_range vr_result = VR_INITIALIZER;
   bool first = true;
   int edges, old_edges;
   struct loop *l;
@@ -8737,7 +8735,7 @@ vrp_visit_phi_node (gphi *phi)
       if (e->flags & EDGE_EXECUTABLE)
 	{
 	  tree arg = PHI_ARG_DEF (phi, i);
-	  value_range_t vr_arg;
+	  value_range vr_arg;
 
 	  ++edges;
 
@@ -8993,7 +8991,7 @@ simplify_div_or_mod_using_ranges (gimple *stmt)
   tree val = NULL;
   tree op0 = gimple_assign_rhs1 (stmt);
   tree op1 = gimple_assign_rhs2 (stmt);
-  value_range_t *vr = get_value_range (op0);
+  value_range *vr = get_value_range (op0);
 
   if (rhs_code == TRUNC_MOD_EXPR
       && TREE_CODE (op1) == INTEGER_CST
@@ -9130,7 +9128,7 @@ static bool
 simplify_abs_using_ranges (gimple *stmt)
 {
   tree op = gimple_assign_rhs1 (stmt);
-  value_range_t *vr = get_value_range (op);
+  value_range *vr = get_value_range (op);
 
   if (vr)
     {
@@ -9187,8 +9185,8 @@ simplify_bit_ops_using_ranges (gimple_stmt_iterator *gsi, gimple *stmt)
   tree op0 = gimple_assign_rhs1 (stmt);
   tree op1 = gimple_assign_rhs2 (stmt);
   tree op = NULL_TREE;
-  value_range_t vr0 = VR_INITIALIZER;
-  value_range_t vr1 = VR_INITIALIZER;
+  value_range vr0 = VR_INITIALIZER;
+  value_range vr1 = VR_INITIALIZER;
   wide_int may_be_nonzero0, may_be_nonzero1;
   wide_int must_be_nonzero0, must_be_nonzero1;
   wide_int mask;
@@ -9267,7 +9265,7 @@ simplify_bit_ops_using_ranges (gimple_stmt_iterator *gsi, gimple *stmt)
 
 static tree
 test_for_singularity (enum tree_code cond_code, tree op0,
-		      tree op1, value_range_t *vr,
+		      tree op1, value_range *vr,
 		      bool *strict_overflow_p)
 {
   tree min = NULL;
@@ -9337,7 +9335,7 @@ test_for_singularity (enum tree_code cond_code, tree op0,
    by PRECISION and UNSIGNED_P.  */
 
 static bool
-range_fits_type_p (value_range_t *vr, unsigned dest_precision, signop dest_sgn)
+range_fits_type_p (value_range *vr, unsigned dest_precision, signop dest_sgn)
 {
   tree src_type;
   unsigned src_precision;
@@ -9402,7 +9400,7 @@ simplify_cond_using_ranges (gcond *stmt)
       && INTEGRAL_TYPE_P (TREE_TYPE (op0))
       && is_gimple_min_invariant (op1))
     {
-      value_range_t *vr = get_value_range (op0);
+      value_range *vr = get_value_range (op0);
 
       /* If we have range information for OP0, then we might be
 	 able to simplify this conditional. */
@@ -9517,7 +9515,7 @@ simplify_cond_using_ranges (gcond *stmt)
       if (TREE_CODE (innerop) == SSA_NAME
 	  && !POINTER_TYPE_P (TREE_TYPE (innerop)))
 	{
-	  value_range_t *vr = get_value_range (innerop);
+	  value_range *vr = get_value_range (innerop);
 
 	  if (range_int_cst_p (vr)
 	      && range_fits_type_p (vr,
@@ -9568,7 +9566,7 @@ static bool
 simplify_switch_using_ranges (gswitch *stmt)
 {
   tree op = gimple_switch_index (stmt);
-  value_range_t *vr;
+  value_range *vr;
   bool take_default;
   edge e;
   edge_iterator ei;
@@ -9667,7 +9665,7 @@ simplify_conversion_using_ranges (gimple *stmt)
 {
   tree innerop, middleop, finaltype;
   gimple *def_stmt;
-  value_range_t *innervr;
+  value_range *innervr;
   signop inner_sgn, middle_sgn, final_sgn;
   unsigned inner_prec, middle_prec, final_prec;
   widest_int innermin, innermed, innermax, middlemin, middlemed, middlemax;
@@ -9746,7 +9744,7 @@ simplify_float_conversion_using_ranges (gimple_stmt_iterator *gsi,
 					gimple *stmt)
 {
   tree rhs1 = gimple_assign_rhs1 (stmt);
-  value_range_t *vr = get_value_range (rhs1);
+  value_range *vr = get_value_range (rhs1);
   machine_mode fltmode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (stmt)));
   machine_mode mode;
   tree tem;
@@ -10078,7 +10076,7 @@ simplify_stmt_for_jump_threading (gimple *stmt, gimple *within_stmt,
 
   if (gassign *assign_stmt = dyn_cast <gassign *> (stmt))
     {
-      value_range_t new_vr = VR_INITIALIZER;
+      value_range new_vr = VR_INITIALIZER;
       tree lhs = gimple_assign_lhs (assign_stmt);
 
       if (TREE_CODE (lhs) == SSA_NAME
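
(Aside, not part of the patch itself: every tree-vrp.c hunk above makes the
same mechanical change -- the value_range_t alias is dropped, the plain
struct name value_range is used, and pointers are spelled with an explicit
'*'.  A minimal self-contained C++ sketch of that pattern, using made-up
names (range_d, range_t, range_ptr, range) rather than the real GCC types:

  /* Old style: the struct tag has one name, the typedefs another, and
     the second typedef hides the fact that the name is a pointer.  */
  struct range_d { int min, max; };
  typedef struct range_d range_t;
  typedef struct range_d *range_ptr;

  static int width_alias (range_t *r)   { return r->max - r->min; }
  static int width_hidden (range_ptr r) { return r->max - r->min; }

  /* New style: a single struct name, with '*' written wherever a
     pointer is meant, as in the hunks above.  */
  struct range { int min, max; };

  static int width (range *r) { return r->max - r->min; }

  int main ()
  {
    range r = { 1, 4 };
    range_d rd = { 1, 4 };
    /* All three widths are 3, so this exits with status 0.  */
    return width (&r) - width_alias (&rd) + width_hidden (&rd) - 3;
  }

The var-tracking.c hunks below apply the same pattern to attrs and variable,
where the removed typedefs previously stood for pointer types.)
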
diff --git a/gcc/var-tracking.c b/gcc/var-tracking.c
index e3542d3..8010ce1 100644
--- a/gcc/var-tracking.c
+++ b/gcc/var-tracking.c
@@ -248,10 +248,10 @@ dv_as_opaque (decl_or_value dv)
    register is described by a chain of these structures.
    The chains are pretty short (usually 1 or 2 elements) and thus
    chain is the best data structure.  */
-typedef struct attrs_def
+struct attrs
 {
   /* Pointer to next member of the list.  */
-  struct attrs_def *next;
+  attrs *next;
 
   /* The rtx of register.  */
   rtx loc;
@@ -261,7 +261,7 @@ typedef struct attrs_def
 
   /* Offset from start of DECL.  */
   HOST_WIDE_INT offset;
-} *attrs;
+};
 
 /* Structure for chaining the locations.  */
 struct location_chain
@@ -357,7 +357,7 @@ struct variable_part
 
 /* Enumeration type used to discriminate various types of one-part
    variables.  */
-typedef enum onepart_enum
+enum onepart_enum
 {
   /* Not a one-part variable.  */
   NOT_ONEPART = 0,
@@ -367,10 +367,10 @@ typedef enum onepart_enum
   ONEPART_DEXPR = 2,
   /* A VALUE.  */
   ONEPART_VALUE = 3
-} onepart_enum_t;
+};
 
 /* Structure describing where the variable is located.  */
-typedef struct variable_def
+struct variable
 {
   /* The declaration of the variable, or an RTL value being handled
      like a declaration.  */
@@ -391,11 +391,10 @@ typedef struct variable_def
 
   /* The variable parts.  */
   variable_part var_part[1];
-} *variable;
-typedef const struct variable_def *const_variable;
+};
 
 /* Pointer to the BB's information specific to variable tracking pass.  */
-#define VTI(BB) ((variable_tracking_info) (BB)->aux)
+#define VTI(BB) ((variable_tracking_info *) (BB)->aux)
 
 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT.  Evaluates MEM twice.  */
 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
@@ -405,14 +404,14 @@ typedef const struct variable_def *const_variable;
 /* Access VAR's Ith part's offset, checking that it's not a one-part
    variable.  */
 #define VAR_PART_OFFSET(var, i) __extension__			\
-(*({  variable const __v = (var);				\
+(*({  variable *const __v = (var);				\
       gcc_checking_assert (!__v->onepart);			\
       &__v->var_part[(i)].aux.offset; }))
 
 /* Access VAR's one-part auxiliary data, checking that it is a
    one-part variable.  */
 #define VAR_LOC_1PAUX(var) __extension__			\
-(*({  variable const __v = (var);				\
+(*({  variable *const __v = (var);				\
       gcc_checking_assert (__v->onepart);			\
       &__v->var_part[0].aux.onepaux; }))
 
@@ -471,19 +470,19 @@ static void variable_htab_free (void *);
 
 /* Variable hashtable helpers.  */
 
-struct variable_hasher : pointer_hash <variable_def>
+struct variable_hasher : pointer_hash <variable>
 {
   typedef void *compare_type;
-  static inline hashval_t hash (const variable_def *);
-  static inline bool equal (const variable_def *, const void *);
-  static inline void remove (variable_def *);
+  static inline hashval_t hash (const variable *);
+  static inline bool equal (const variable *, const void *);
+  static inline void remove (variable *);
 };
 
 /* The hash function for variable_htab, computes the hash value
    from the declaration of variable X.  */
 
 inline hashval_t
-variable_hasher::hash (const variable_def *v)
+variable_hasher::hash (const variable *v)
 {
   return dv_htab_hash (v->dv);
 }
@@ -491,7 +490,7 @@ variable_hasher::hash (const variable_def *v)
 /* Compare the declaration of variable X with declaration Y.  */
 
 inline bool
-variable_hasher::equal (const variable_def *v, const void *y)
+variable_hasher::equal (const variable *v, const void *y)
 {
   decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
 
@@ -501,7 +500,7 @@ variable_hasher::equal (const variable_def *v, const void *y)
 /* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */
 
 inline void
-variable_hasher::remove (variable_def *var)
+variable_hasher::remove (variable *var)
 {
   variable_htab_free (var);
 }
@@ -541,7 +540,7 @@ struct dataflow_set
   HOST_WIDE_INT stack_adjust;
 
   /* Attributes for registers (lists of attrs).  */
-  attrs regs[FIRST_PSEUDO_REGISTER];
+  attrs *regs[FIRST_PSEUDO_REGISTER];
 
   /* Variable locations.  */
   shared_hash *vars;
@@ -552,7 +551,7 @@ struct dataflow_set
 
 /* The structure (one for each basic block) containing the information
    needed for variable tracking.  */
-typedef struct variable_tracking_info_def
+struct variable_tracking_info
 {
   /* The vector of micro operations.  */
   vec<micro_operation> mos;
@@ -573,20 +572,20 @@ typedef struct variable_tracking_info_def
   /* Has the block been flooded in VTA?  */
   bool flooded;
 
-} *variable_tracking_info;
+};
 
 /* Alloc pool for struct attrs_def.  */
-object_allocator<attrs_def> attrs_def_pool ("attrs_def pool");
+object_allocator<attrs> attrs_pool ("attrs pool");
 
 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
 
 static pool_allocator var_pool
-  ("variable_def pool", sizeof (variable_def) +
-   (MAX_VAR_PARTS - 1) * sizeof (((variable)NULL)->var_part[0]));
+  ("variable_def pool", sizeof (variable) +
+   (MAX_VAR_PARTS - 1) * sizeof (((variable *)NULL)->var_part[0]));
 
 /* Alloc pool for struct variable_def with a single var_part entry.  */
 static pool_allocator valvar_pool
-  ("small variable_def pool", sizeof (variable_def));
+  ("small variable_def pool", sizeof (variable));
 
 /* Alloc pool for struct location_chain.  */
 static object_allocator<location_chain> location_chain_pool
@@ -616,14 +615,14 @@ static shared_hash *empty_shared_hash;
 static bitmap scratch_regs = NULL;
 
 #ifdef HAVE_window_save
-typedef struct GTY(()) parm_reg {
+struct GTY(()) parm_reg {
   rtx outgoing;
   rtx incoming;
-} parm_reg_t;
+};
 
 
 /* Vector of windowed parameter registers, if any.  */
-static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
+static vec<parm_reg, va_gc> *windowed_parm_regs = NULL;
 #endif
 
 /* Variable used to tell whether cselib_process_insn called our hook.  */
@@ -636,15 +635,15 @@ static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
 					       HOST_WIDE_INT *);
 static bool vt_stack_adjustments (void);
 
-static void init_attrs_list_set (attrs *);
-static void attrs_list_clear (attrs *);
-static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
-static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
-static void attrs_list_copy (attrs *, attrs);
-static void attrs_list_union (attrs *, attrs);
+static void init_attrs_list_set (attrs **);
+static void attrs_list_clear (attrs **);
+static attrs *attrs_list_member (attrs *, decl_or_value, HOST_WIDE_INT);
+static void attrs_list_insert (attrs **, decl_or_value, HOST_WIDE_INT, rtx);
+static void attrs_list_copy (attrs **, attrs *);
+static void attrs_list_union (attrs **, attrs *);
 
-static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
-					variable var, enum var_init_status);
+static variable **unshare_variable (dataflow_set *set, variable **slot,
+					variable *var, enum var_init_status);
 static void vars_copy (variable_table_type *, variable_table_type *);
 static tree var_debug_decl (tree);
 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
@@ -662,12 +661,13 @@ static void dataflow_set_clear (dataflow_set *);
 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
 static int variable_union_info_cmp_pos (const void *, const void *);
 static void dataflow_set_union (dataflow_set *, dataflow_set *);
-static location_chain *find_loc_in_1pdv (rtx, variable, variable_table_type *);
+static location_chain *find_loc_in_1pdv (rtx, variable *,
+					 variable_table_type *);
 static bool canon_value_cmp (rtx, rtx);
 static int loc_cmp (rtx, rtx);
 static bool variable_part_different_p (variable_part *, variable_part *);
-static bool onepart_variable_different_p (variable, variable);
-static bool variable_different_p (variable, variable);
+static bool onepart_variable_different_p (variable *, variable *);
+static bool variable_different_p (variable *, variable *);
 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
 static void dataflow_set_destroy (dataflow_set *);
 
@@ -679,26 +679,26 @@ static void add_stores (rtx, const_rtx, void *);
 static bool compute_bb_dataflow (basic_block);
 static bool vt_find_locations (void);
 
-static void dump_attrs_list (attrs);
-static void dump_var (variable);
+static void dump_attrs_list (attrs *);
+static void dump_var (variable *);
 static void dump_vars (variable_table_type *);
 static void dump_dataflow_set (dataflow_set *);
 static void dump_dataflow_sets (void);
 
 static void set_dv_changed (decl_or_value, bool);
-static void variable_was_changed (variable, dataflow_set *);
-static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
-				     decl_or_value, HOST_WIDE_INT,
-				     enum var_init_status, rtx);
+static void variable_was_changed (variable *, dataflow_set *);
+static variable **set_slot_part (dataflow_set *, rtx, variable **,
+				 decl_or_value, HOST_WIDE_INT,
+				 enum var_init_status, rtx);
 static void set_variable_part (dataflow_set *, rtx,
 			       decl_or_value, HOST_WIDE_INT,
 			       enum var_init_status, rtx, enum insert_option);
-static variable_def **clobber_slot_part (dataflow_set *, rtx,
-					 variable_def **, HOST_WIDE_INT, rtx);
+static variable **clobber_slot_part (dataflow_set *, rtx,
+				     variable **, HOST_WIDE_INT, rtx);
 static void clobber_variable_part (dataflow_set *, rtx,
 				   decl_or_value, HOST_WIDE_INT, rtx);
-static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
-					HOST_WIDE_INT);
+static variable **delete_slot_part (dataflow_set *, rtx, variable **,
+				    HOST_WIDE_INT);
 static void delete_variable_part (dataflow_set *, rtx,
 				  decl_or_value, HOST_WIDE_INT);
 static void emit_notes_in_bb (basic_block, dataflow_set *);
@@ -1203,7 +1203,7 @@ adjust_insn (basic_block bb, rtx_insn *insn)
     {
       unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
       rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
-      parm_reg_t *p;
+      parm_reg *p;
 
       FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
 	{
@@ -1334,7 +1334,7 @@ dv_as_rtx (decl_or_value dv)
 /* Return nonzero if a decl_or_value must not have more than one
    variable part.  The returned value discriminates among various
    kinds of one-part DVs according to enum onepart_enum.  */
-static inline onepart_enum_t
+static inline onepart_enum
 dv_onepart_p (decl_or_value dv)
 {
   tree decl;
@@ -1358,16 +1358,16 @@ dv_onepart_p (decl_or_value dv)
 
 /* Return the variable pool to be used for a dv of type ONEPART.  */
 static inline pool_allocator &
-onepart_pool (onepart_enum_t onepart)
+onepart_pool (onepart_enum onepart)
 {
   return onepart ? valvar_pool : var_pool;
 }
 
 /* Allocate a variable_def from the corresponding variable pool.  */
-static inline variable_def *
-onepart_pool_allocate (onepart_enum_t onepart)
+static inline variable *
+onepart_pool_allocate (onepart_enum onepart)
 {
-  return (variable_def*) onepart_pool (onepart).allocate ();
+  return (variable*) onepart_pool (onepart).allocate ();
 }
 
 /* Build a decl_or_value out of a decl.  */
@@ -1425,7 +1425,7 @@ debug_dv (decl_or_value dv)
     debug_generic_stmt (dv_as_decl (dv));
 }
 
-static void loc_exp_dep_clear (variable var);
+static void loc_exp_dep_clear (variable *var);
 
 /* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */
 
@@ -1433,7 +1433,7 @@ static void
 variable_htab_free (void *elem)
 {
   int i;
-  variable var = (variable) elem;
+  variable *var = (variable *) elem;
   location_chain *node, *next;
 
   gcc_checking_assert (var->refcount > 0);
@@ -1468,7 +1468,7 @@ variable_htab_free (void *elem)
 /* Initialize the set (array) SET of attrs to empty lists.  */
 
 static void
-init_attrs_list_set (attrs *set)
+init_attrs_list_set (attrs **set)
 {
   int i;
 
@@ -1479,9 +1479,9 @@ init_attrs_list_set (attrs *set)
 /* Make the list *LISTP empty.  */
 
 static void
-attrs_list_clear (attrs *listp)
+attrs_list_clear (attrs **listp)
 {
-  attrs list, next;
+  attrs *list, *next;
 
   for (list = *listp; list; list = next)
     {
@@ -1493,8 +1493,8 @@ attrs_list_clear (attrs *listp)
 
 /* Return true if the pair of DECL and OFFSET is the member of the LIST.  */
 
-static attrs
-attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
+static attrs *
+attrs_list_member (attrs *list, decl_or_value dv, HOST_WIDE_INT offset)
 {
   for (; list; list = list->next)
     if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
@@ -1505,10 +1505,10 @@ attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */
 
 static void
-attrs_list_insert (attrs *listp, decl_or_value dv,
+attrs_list_insert (attrs **listp, decl_or_value dv,
 		   HOST_WIDE_INT offset, rtx loc)
 {
-  attrs list = new attrs_def;
+  attrs *list = new attrs;
   list->loc = loc;
   list->dv = dv;
   list->offset = offset;
@@ -1519,12 +1519,12 @@ attrs_list_insert (attrs *listp, decl_or_value dv,
 /* Copy all nodes from SRC and create a list *DSTP of the copies.  */
 
 static void
-attrs_list_copy (attrs *dstp, attrs src)
+attrs_list_copy (attrs **dstp, attrs *src)
 {
   attrs_list_clear (dstp);
   for (; src; src = src->next)
     {
-      attrs n = new attrs_def;
+      attrs *n = new attrs;
       n->loc = src->loc;
       n->dv = src->dv;
       n->offset = src->offset;
@@ -1536,7 +1536,7 @@ attrs_list_copy (attrs *dstp, attrs src)
 /* Add all nodes from SRC which are not in *DSTP to *DSTP.  */
 
 static void
-attrs_list_union (attrs *dstp, attrs src)
+attrs_list_union (attrs **dstp, attrs *src)
 {
   for (; src; src = src->next)
     {
@@ -1549,7 +1549,7 @@ attrs_list_union (attrs *dstp, attrs src)
    *DSTP.  */
 
 static void
-attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
+attrs_list_mpdv_union (attrs **dstp, attrs *src, attrs *src2)
 {
   gcc_assert (!*dstp);
   for (; src; src = src->next)
@@ -1586,7 +1586,7 @@ shared_hash_htab (shared_hash *vars)
 /* Return true if VAR is shared, or maybe because VARS is shared.  */
 
 static inline bool
-shared_var_p (variable var, shared_hash *vars)
+shared_var_p (variable *var, shared_hash *vars)
 {
   /* Don't count an entry in the changed_variables table as a duplicate.  */
   return ((var->refcount > 1 + (int) var->in_changed_variables)
@@ -1633,7 +1633,7 @@ shared_hash_destroy (shared_hash *vars)
 /* Unshare *PVARS if shared and return slot for DV.  If INS is
    INSERT, insert it if not already present.  */
 
-static inline variable_def **
+static inline variable **
 shared_hash_find_slot_unshare_1 (shared_hash **pvars, decl_or_value dv,
 				 hashval_t dvhash, enum insert_option ins)
 {
@@ -1642,7 +1642,7 @@ shared_hash_find_slot_unshare_1 (shared_hash **pvars, decl_or_value dv,
   return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
 }
 
-static inline variable_def **
+static inline variable **
 shared_hash_find_slot_unshare (shared_hash **pvars, decl_or_value dv,
 			       enum insert_option ins)
 {
@@ -1653,7 +1653,7 @@ shared_hash_find_slot_unshare (shared_hash **pvars, decl_or_value dv,
    If it is not present, insert it only if VARS is not shared, otherwise
    return NULL.  */
 
-static inline variable_def **
+static inline variable **
 shared_hash_find_slot_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
 {
   return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
@@ -1661,7 +1661,7 @@ shared_hash_find_slot_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
 						       ? NO_INSERT : INSERT);
 }
 
-static inline variable_def **
+static inline variable **
 shared_hash_find_slot (shared_hash *vars, decl_or_value dv)
 {
   return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
@@ -1669,14 +1669,14 @@ shared_hash_find_slot (shared_hash *vars, decl_or_value dv)
 
 /* Return slot for DV only if it is already present in the hash table.  */
 
-static inline variable_def **
+static inline variable **
 shared_hash_find_slot_noinsert_1 (shared_hash *vars, decl_or_value dv,
 				  hashval_t dvhash)
 {
   return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
 }
 
-static inline variable_def **
+static inline variable **
 shared_hash_find_slot_noinsert (shared_hash *vars, decl_or_value dv)
 {
   return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
@@ -1685,13 +1685,13 @@ shared_hash_find_slot_noinsert (shared_hash *vars, decl_or_value dv)
 /* Return variable for DV or NULL if not already present in the hash
    table.  */
 
-static inline variable
+static inline variable *
 shared_hash_find_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
 {
   return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
 }
 
-static inline variable
+static inline variable *
 shared_hash_find (shared_hash *vars, decl_or_value dv)
 {
   return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
@@ -1717,11 +1717,11 @@ static bool dst_can_be_shared;
 
 /* Return a copy of a variable VAR and insert it to dataflow set SET.  */
 
-static variable_def **
-unshare_variable (dataflow_set *set, variable_def **slot, variable var,
+static variable **
+unshare_variable (dataflow_set *set, variable **slot, variable *var,
 		  enum var_init_status initialized)
 {
-  variable new_var;
+  variable *new_var;
   int i;
 
   new_var = onepart_pool_allocate (var->onepart);
@@ -1784,7 +1784,7 @@ unshare_variable (dataflow_set *set, variable_def **slot, variable var,
   *slot = new_var;
   if (var->in_changed_variables)
     {
-      variable_def **cslot
+      variable **cslot
 	= changed_variables->find_slot_with_hash (var->dv,
 						  dv_htab_hash (var->dv),
 						  NO_INSERT);
@@ -1803,11 +1803,11 @@ static void
 vars_copy (variable_table_type *dst, variable_table_type *src)
 {
   variable_iterator_type hi;
-  variable var;
+  variable *var;
 
   FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
     {
-      variable_def **dstp;
+      variable **dstp;
       var->refcount++;
       dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
 				       INSERT);
@@ -1838,7 +1838,7 @@ var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
 		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
 		  enum insert_option iopt)
 {
-  attrs node;
+  attrs *node;
   bool decl_p = dv_is_decl_p (dv);
 
   if (decl_p)
@@ -1869,7 +1869,7 @@ var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
 static enum var_init_status
 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
 {
-  variable var;
+  variable *var;
   int i;
   enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
 
@@ -1907,8 +1907,8 @@ var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
 {
   tree decl = REG_EXPR (loc);
   HOST_WIDE_INT offset = REG_OFFSET (loc);
-  attrs node, next;
-  attrs *nextp;
+  attrs *node, *next;
+  attrs **nextp;
 
   decl = var_debug_decl (decl);
 
@@ -1944,8 +1944,8 @@ var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
 static void
 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
 {
-  attrs *nextp = &set->regs[REGNO (loc)];
-  attrs node, next;
+  attrs **nextp = &set->regs[REGNO (loc)];
+  attrs *node, *next;
 
   if (clobber)
     {
@@ -1976,8 +1976,8 @@ var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
 static void
 var_regno_delete (dataflow_set *set, int regno)
 {
-  attrs *reg = &set->regs[regno];
-  attrs node, next;
+  attrs **reg = &set->regs[regno];
+  attrs *node, *next;
 
   for (node = *reg; node; node = next)
     {
@@ -2067,7 +2067,7 @@ get_addr_from_local_cache (dataflow_set *set, rtx const loc)
 {
   rtx x;
   decl_or_value dv;
-  variable var;
+  variable *var;
   location_chain *l;
 
   gcc_checking_assert (GET_CODE (loc) == VALUE);
@@ -2237,11 +2237,11 @@ struct overlapping_mems
    canonicalized itself.  */
 
 int
-drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
+drop_overlapping_mem_locs (variable **slot, overlapping_mems *coms)
 {
   dataflow_set *set = coms->set;
   rtx mloc = coms->loc, addr = coms->addr;
-  variable var = *slot;
+  variable *var = *slot;
 
   if (var->onepart == ONEPART_VALUE)
     {
@@ -2512,7 +2512,7 @@ local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
 static void
 val_reset (dataflow_set *set, decl_or_value dv)
 {
-  variable var = shared_hash_find (set->vars, dv) ;
+  variable *var = shared_hash_find (set->vars, dv) ;
   location_chain *node;
   rtx cval;
 
@@ -2618,7 +2618,7 @@ val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
 
   if (REG_P (loc))
     {
-      attrs node, found = NULL;
+      attrs *node, *found = NULL;
 
       for (node = set->regs[REGNO (loc)]; node; node = node->next)
 	if (dv_is_value_p (node->dv)
@@ -2734,10 +2734,10 @@ variable_union_info_cmp_pos (const void *n1, const void *n2)
    we keep the newest locations in the beginning.  */
 
 static int
-variable_union (variable src, dataflow_set *set)
+variable_union (variable *src, dataflow_set *set)
 {
-  variable dst;
-  variable_def **dstp;
+  variable *dst;
+  variable **dstp;
   int i, j, k;
 
   dstp = shared_hash_find_slot (set->vars, src->dv);
@@ -2886,7 +2886,7 @@ variable_union (variable src, dataflow_set *set)
 		{
 		  dstp = unshare_variable (set, dstp, dst,
 					   VAR_INIT_STATUS_UNKNOWN);
-		  dst = (variable)*dstp;
+		  dst = (variable *)*dstp;
 		}
 	    }
 
@@ -3118,7 +3118,7 @@ dataflow_set_union (dataflow_set *dst, dataflow_set *src)
   else
     {
       variable_iterator_type hi;
-      variable var;
+      variable *var;
 
       FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
 				   var, variable, hi)
@@ -3184,7 +3184,7 @@ dv_changed_p (decl_or_value dv)
    be in star-canonical form.  */
 
 static location_chain *
-find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
+find_loc_in_1pdv (rtx loc, variable *var, variable_table_type *vars)
 {
   location_chain *node;
   enum rtx_code loc_code;
@@ -3203,7 +3203,7 @@ find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
   for (node = var->var_part[0].loc_chain; node; node = node->next)
     {
       decl_or_value dv;
-      variable rvar;
+      variable *rvar;
 
       if (GET_CODE (node->loc) != loc_code)
 	{
@@ -3298,7 +3298,7 @@ insert_into_intersection (location_chain **nodep, rtx loc,
 
 static void
 intersect_loc_chains (rtx val, location_chain **dest, struct dfset_merge *dsm,
-		      location_chain *s1node, variable s2var)
+		      location_chain *s1node, variable *s2var)
 {
   dataflow_set *s1set = dsm->cur;
   dataflow_set *s2set = dsm->src;
@@ -3343,7 +3343,7 @@ intersect_loc_chains (rtx val, location_chain **dest, struct dfset_merge *dsm,
 	  && !VALUE_RECURSED_INTO (s1node->loc))
 	{
 	  decl_or_value dv = dv_from_value (s1node->loc);
-	  variable svar = shared_hash_find (s1set->vars, dv);
+	  variable *svar = shared_hash_find (s1set->vars, dv);
 	  if (svar)
 	    {
 	      if (svar->n_var_parts == 1)
@@ -3575,10 +3575,10 @@ loc_cmp (rtx x, rtx y)
 /* Check the order of entries in one-part variables.   */
 
 int
-canonicalize_loc_order_check (variable_def **slot,
+canonicalize_loc_order_check (variable **slot,
 			      dataflow_set *data ATTRIBUTE_UNUSED)
 {
-  variable var = *slot;
+  variable *var = *slot;
   location_chain *node, *next;
 
 #ifdef ENABLE_RTL_CHECKING
@@ -3611,9 +3611,9 @@ canonicalize_loc_order_check (variable_def **slot,
    the connections bidirectional.  */
 
 int
-canonicalize_values_mark (variable_def **slot, dataflow_set *set)
+canonicalize_values_mark (variable **slot, dataflow_set *set)
 {
-  variable var = *slot;
+  variable *var = *slot;
   decl_or_value dv = var->dv;
   rtx val;
   location_chain *node;
@@ -3633,7 +3633,7 @@ canonicalize_values_mark (variable_def **slot, dataflow_set *set)
 	else
 	  {
 	    decl_or_value odv = dv_from_value (node->loc);
-	    variable_def **oslot;
+	    variable **oslot;
 	    oslot = shared_hash_find_slot_noinsert (set->vars, odv);
 
 	    set_slot_part (set, val, oslot, odv, 0,
@@ -3650,14 +3650,14 @@ canonicalize_values_mark (variable_def **slot, dataflow_set *set)
    variables, canonicalizing equivalence sets into star shapes.  */
 
 int
-canonicalize_values_star (variable_def **slot, dataflow_set *set)
+canonicalize_values_star (variable **slot, dataflow_set *set)
 {
-  variable var = *slot;
+  variable *var = *slot;
   decl_or_value dv = var->dv;
   location_chain *node;
   decl_or_value cdv;
   rtx val, cval;
-  variable_def **cslot;
+  variable **cslot;
   bool has_value;
   bool has_marks;
 
@@ -3774,7 +3774,7 @@ canonicalize_values_star (variable_def **slot, dataflow_set *set)
 	  }
 	else if (GET_CODE (node->loc) == REG)
 	  {
-	    attrs list = set->regs[REGNO (node->loc)], *listp;
+	    attrs *list = set->regs[REGNO (node->loc)], **listp;
 
 	    /* Change an existing attribute referring to dv so that it
 	       refers to cdv, removing any duplicate this might
@@ -3871,15 +3871,15 @@ canonicalize_values_star (variable_def **slot, dataflow_set *set)
    get to a variable that references another member of the set.  */
 
 int
-canonicalize_vars_star (variable_def **slot, dataflow_set *set)
+canonicalize_vars_star (variable **slot, dataflow_set *set)
 {
-  variable var = *slot;
+  variable *var = *slot;
   decl_or_value dv = var->dv;
   location_chain *node;
   rtx cval;
   decl_or_value cdv;
-  variable_def **cslot;
-  variable cvar;
+  variable **cslot;
+  variable *cvar;
   location_chain *cnode;
 
   if (!var->onepart || var->onepart == ONEPART_VALUE)
@@ -3929,13 +3929,13 @@ canonicalize_vars_star (variable_def **slot, dataflow_set *set)
    intersection.  */
 
 static int
-variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
+variable_merge_over_cur (variable *s1var, struct dfset_merge *dsm)
 {
   dataflow_set *dst = dsm->dst;
-  variable_def **dstslot;
-  variable s2var, dvar = NULL;
+  variable **dstslot;
+  variable *s2var, *dvar = NULL;
   decl_or_value dv = s1var->dv;
-  onepart_enum_t onepart = s1var->onepart;
+  onepart_enum onepart = s1var->onepart;
   rtx val;
   hashval_t dvhash;
   location_chain *node, **nodep;
@@ -4033,7 +4033,7 @@ variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
 
       if (GET_CODE (node->loc) == REG)
 	{
-	  attrs list;
+	  attrs *list;
 
 	  for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
 	    if (GET_MODE (node->loc) == GET_MODE (list->loc)
@@ -4129,7 +4129,7 @@ variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
 	      if (GET_CODE (node->loc) == VALUE)
 		{
 		  decl_or_value dv = dv_from_value (node->loc);
-		  variable_def **slot = NULL;
+		  variable **slot = NULL;
 
 		  if (shared_hash_shared (dst->vars))
 		    slot = shared_hash_find_slot_noinsert (dst->vars, dv);
@@ -4138,7 +4138,7 @@ variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
 							  INSERT);
 		  if (!*slot)
 		    {
-		      variable var = onepart_pool_allocate (ONEPART_VALUE);
+		      variable *var = onepart_pool_allocate (ONEPART_VALUE);
 		      var->dv = dv;
 		      var->refcount = 1;
 		      var->n_var_parts = 1;
@@ -4189,14 +4189,14 @@ variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
    variable_merge_over_cur().  */
 
 static int
-variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
+variable_merge_over_src (variable *s2var, struct dfset_merge *dsm)
 {
   dataflow_set *dst = dsm->dst;
   decl_or_value dv = s2var->dv;
 
   if (!s2var->onepart)
     {
-      variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
+      variable **dstp = shared_hash_find_slot (dst->vars, dv);
       *dstp = s2var;
       s2var->refcount++;
       return 1;
@@ -4218,7 +4218,7 @@ dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
   int i;
   size_t src1_elems, src2_elems;
   variable_iterator_type hi;
-  variable var;
+  variable *var;
 
   src1_elems = shared_hash_htab (src1->vars)->elements ();
   src2_elems = shared_hash_htab (src2->vars)->elements ();
@@ -4256,7 +4256,7 @@ static void
 dataflow_set_equiv_regs (dataflow_set *set)
 {
   int i;
-  attrs list, *listp;
+  attrs *list, **listp;
 
   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
     {
@@ -4311,7 +4311,7 @@ dataflow_set_equiv_regs (dataflow_set *set)
 	if (list->offset == 0 && dv_onepart_p (list->dv))
 	  {
 	    rtx cval = canon[(int)GET_MODE (list->loc)];
-	    variable_def **slot;
+	    variable **slot;
 
 	    if (!cval)
 	      continue;
@@ -4335,7 +4335,7 @@ dataflow_set_equiv_regs (dataflow_set *set)
    be unshared and 1-part.  */
 
 static void
-remove_duplicate_values (variable var)
+remove_duplicate_values (variable *var)
 {
   location_chain *node, **nodep;
 
@@ -4383,10 +4383,10 @@ struct dfset_post_merge
    variables that don't have value numbers for them.  */
 
 int
-variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
+variable_post_merge_new_vals (variable **slot, dfset_post_merge *dfpm)
 {
   dataflow_set *set = dfpm->set;
-  variable var = *slot;
+  variable *var = *slot;
   location_chain *node;
 
   if (!var->onepart || !var->n_var_parts)
@@ -4405,7 +4405,7 @@ variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
 	    gcc_assert (!VALUE_RECURSED_INTO (node->loc));
 	  else if (GET_CODE (node->loc) == REG)
 	    {
-	      attrs att, *attp, *curp = NULL;
+	      attrs *att, **attp, **curp = NULL;
 
 	      if (var->refcount != 1)
 		{
@@ -4519,13 +4519,13 @@ variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
    chosen expression.  */
 
 int
-variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
+variable_post_merge_perm_vals (variable **pslot, dfset_post_merge *dfpm)
 {
   dataflow_set *set = dfpm->set;
-  variable pvar = *pslot, var;
+  variable *pvar = *pslot, *var;
   location_chain *pnode;
   decl_or_value dv;
-  attrs att;
+  attrs *att;
 
   gcc_assert (dv_is_value_p (pvar->dv)
 	      && pvar->n_var_parts == 1);
@@ -4606,7 +4606,7 @@ find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
 {
   location_chain *node;
   decl_or_value dv;
-  variable var;
+  variable *var;
   location_chain *where = NULL;
 
   if (!val)
@@ -4674,9 +4674,9 @@ mem_dies_at_call (rtx mem)
    the variable itself, directly or within a VALUE.  */
 
 int
-dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
+dataflow_set_preserve_mem_locs (variable **slot, dataflow_set *set)
 {
-  variable var = *slot;
+  variable *var = *slot;
 
   if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
     {
@@ -4783,9 +4783,9 @@ dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
    value.  */
 
 int
-dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
+dataflow_set_remove_mem_locs (variable **slot, dataflow_set *set)
 {
-  variable var = *slot;
+  variable *var = *slot;
 
   if (var->onepart == ONEPART_VALUE)
     {
@@ -4906,7 +4906,7 @@ variable_part_different_p (variable_part *vp1, variable_part *vp2)
    They must be in canonical order.  */
 
 static bool
-onepart_variable_different_p (variable var1, variable var2)
+onepart_variable_different_p (variable *var1, variable *var2)
 {
   location_chain *lc1, *lc2;
 
@@ -4935,7 +4935,7 @@ onepart_variable_different_p (variable var1, variable var2)
 /* Return true if variables VAR1 and VAR2 are different.  */
 
 static bool
-variable_different_p (variable var1, variable var2)
+variable_different_p (variable *var1, variable *var2)
 {
   int i;
 
@@ -4974,7 +4974,7 @@ static bool
 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
 {
   variable_iterator_type hi;
-  variable var1;
+  variable *var1;
 
   if (old_set->vars == new_set->vars)
     return false;
@@ -4987,7 +4987,7 @@ dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
 			       var1, variable, hi)
     {
       variable_table_type *htab = shared_hash_htab (new_set->vars);
-      variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
+      variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
       if (!var2)
 	{
 	  if (dump_file && (dump_flags & TDF_DETAILS))
@@ -6589,7 +6589,7 @@ find_src_set_src (dataflow_set *set, rtx src)
 {
   tree decl = NULL_TREE;   /* The variable being copied around.          */
   rtx set_src = NULL_RTX;  /* The value for "decl" stored in "src".      */
-  variable var;
+  variable *var;
   location_chain *nextp;
   int i;
   bool found;
@@ -7146,7 +7146,7 @@ vt_find_locations (void)
 /* Print the content of the LIST to dump file.  */
 
 static void
-dump_attrs_list (attrs list)
+dump_attrs_list (attrs *list)
 {
   for (; list; list = list->next)
     {
@@ -7162,9 +7162,9 @@ dump_attrs_list (attrs list)
 /* Print the information about variable *SLOT to dump file.  */
 
 int
-dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
+dump_var_tracking_slot (variable **slot, void *data ATTRIBUTE_UNUSED)
 {
-  variable var = *slot;
+  variable *var = *slot;
 
   dump_var (var);
 
@@ -7175,7 +7175,7 @@ dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
 /* Print the information about variable VAR to dump file.  */
 
 static void
-dump_var (variable var)
+dump_var (variable *var)
 {
   int i;
   location_chain *node;
@@ -7270,12 +7270,12 @@ dump_dataflow_sets (void)
 /* Return the variable for DV in dropped_values, inserting one if
    requested with INSERT.  */
 
-static inline variable
+static inline variable *
 variable_from_dropped (decl_or_value dv, enum insert_option insert)
 {
-  variable_def **slot;
-  variable empty_var;
-  onepart_enum_t onepart;
+  variable **slot;
+  variable *empty_var;
+  onepart_enum onepart;
 
   slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
 
@@ -7310,9 +7310,9 @@ variable_from_dropped (decl_or_value dv, enum insert_option insert)
 /* Recover the one-part aux from dropped_values.  */
 
 static struct onepart_aux *
-recover_dropped_1paux (variable var)
+recover_dropped_1paux (variable *var)
 {
-  variable dvar;
+  variable *dvar;
 
   gcc_checking_assert (var->onepart);
 
@@ -7337,13 +7337,13 @@ recover_dropped_1paux (variable var)
    if it has no locations delete it from SET's hash table.  */
 
 static void
-variable_was_changed (variable var, dataflow_set *set)
+variable_was_changed (variable *var, dataflow_set *set)
 {
   hashval_t hash = dv_htab_hash (var->dv);
 
   if (emit_notes)
     {
-      variable_def **slot;
+      variable **slot;
 
       /* Remember this decl or VALUE has been added to changed_variables.  */
       set_dv_changed (var->dv, true);
@@ -7352,7 +7352,7 @@ variable_was_changed (variable var, dataflow_set *set)
 
       if (*slot)
 	{
-	  variable old_var = *slot;
+	  variable *old_var = *slot;
 	  gcc_assert (old_var->in_changed_variables);
 	  old_var->in_changed_variables = false;
 	  if (var != old_var && var->onepart)
@@ -7369,9 +7369,9 @@ variable_was_changed (variable var, dataflow_set *set)
 
       if (set && var->n_var_parts == 0)
 	{
-	  onepart_enum_t onepart = var->onepart;
-	  variable empty_var = NULL;
-	  variable_def **dslot = NULL;
+	  onepart_enum onepart = var->onepart;
+	  variable *empty_var = NULL;
+	  variable **dslot = NULL;
 
 	  if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
 	    {
@@ -7433,7 +7433,7 @@ variable_was_changed (variable var, dataflow_set *set)
       gcc_assert (set);
       if (var->n_var_parts == 0)
 	{
-	  variable_def **slot;
+	  variable **slot;
 
 	drop_var:
 	  slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
@@ -7454,7 +7454,7 @@ variable_was_changed (variable var, dataflow_set *set)
    have, if it should be inserted.  */
 
 static inline int
-find_variable_location_part (variable var, HOST_WIDE_INT offset,
+find_variable_location_part (variable *var, HOST_WIDE_INT offset,
 			     int *insertion_point)
 {
   int pos, low, high;
@@ -7492,16 +7492,16 @@ find_variable_location_part (variable var, HOST_WIDE_INT offset,
   return -1;
 }
 
-static variable_def **
-set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
+static variable **
+set_slot_part (dataflow_set *set, rtx loc, variable **slot,
 	       decl_or_value dv, HOST_WIDE_INT offset,
 	       enum var_init_status initialized, rtx set_src)
 {
   int pos;
   location_chain *node, *next;
   location_chain **nextp;
-  variable var;
-  onepart_enum_t onepart;
+  variable *var;
+  onepart_enum onepart;
 
   var = *slot;
 
@@ -7752,7 +7752,7 @@ set_variable_part (dataflow_set *set, rtx loc,
 		   enum var_init_status initialized, rtx set_src,
 		   enum insert_option iopt)
 {
-  variable_def **slot;
+  variable **slot;
 
   if (iopt == NO_INSERT)
     slot = shared_hash_find_slot_noinsert (set->vars, dv);
@@ -7770,11 +7770,11 @@ set_variable_part (dataflow_set *set, rtx loc,
    The variable part is specified by variable's declaration or value
    DV and offset OFFSET.  */
 
-static variable_def **
-clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
+static variable **
+clobber_slot_part (dataflow_set *set, rtx loc, variable **slot,
 		   HOST_WIDE_INT offset, rtx set_src)
 {
-  variable var = *slot;
+  variable *var = *slot;
   int pos = find_variable_location_part (var, offset, NULL);
 
   if (pos >= 0)
@@ -7794,8 +7794,8 @@ clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
 	    {
 	      if (REG_P (node->loc))
 		{
-		  attrs anode, anext;
-		  attrs *anextp;
+		  attrs *anode, *anext;
+		  attrs **anextp;
 
 		  /* Remove the variable part from the register's
 		     list, but preserve any other variable parts
@@ -7833,7 +7833,7 @@ static void
 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
 		       HOST_WIDE_INT offset, rtx set_src)
 {
-  variable_def **slot;
+  variable **slot;
 
   if (!dv_as_opaque (dv)
       || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
@@ -7850,11 +7850,11 @@ clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
    variable part is specified by its SET->vars slot SLOT and offset
    OFFSET and the part's location by LOC.  */
 
-static variable_def **
-delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
+static variable **
+delete_slot_part (dataflow_set *set, rtx loc, variable **slot,
 		  HOST_WIDE_INT offset)
 {
-  variable var = *slot;
+  variable *var = *slot;
   int pos = find_variable_location_part (var, offset, NULL);
 
   if (pos >= 0)
@@ -7941,7 +7941,7 @@ static void
 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
 		      HOST_WIDE_INT offset)
 {
-  variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
+  variable **slot = shared_hash_find_slot_noinsert (set->vars, dv);
   if (!slot)
     return;
 
@@ -7977,7 +7977,7 @@ struct expand_loc_callback_data
    room for COUNT dependencies.  */
 
 static void
-loc_exp_dep_alloc (variable var, int count)
+loc_exp_dep_alloc (variable *var, int count)
 {
   size_t allocsize;
 
@@ -8025,7 +8025,7 @@ loc_exp_dep_alloc (variable var, int count)
    removing them from the back-links lists too.  */
 
 static void
-loc_exp_dep_clear (variable var)
+loc_exp_dep_clear (variable *var)
 {
   while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
     {
@@ -8043,10 +8043,10 @@ loc_exp_dep_clear (variable var)
    back-links in VARS.  */
 
 static void
-loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
+loc_exp_insert_dep (variable *var, rtx x, variable_table_type *vars)
 {
   decl_or_value dv;
-  variable xvar;
+  variable *xvar;
   loc_exp_dep *led;
 
   dv = dv_from_rtx (x);
@@ -8093,7 +8093,7 @@ loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
    true if we found any pending-recursion results.  */
 
 static bool
-loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
+loc_exp_dep_set (variable *var, rtx result, rtx *value, int count,
 		 variable_table_type *vars)
 {
   bool pending_recursion = false;
@@ -8123,14 +8123,14 @@ loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
    attempt to compute a current location.  */
 
 static void
-notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
+notify_dependents_of_resolved_value (variable *ivar, variable_table_type *vars)
 {
   loc_exp_dep *led, *next;
 
   for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
     {
       decl_or_value dv = led->dv;
-      variable var;
+      variable *var;
 
       next = led->next;
 
@@ -8215,7 +8215,8 @@ update_depth (expand_depth saved_depth, expand_depth best_depth)
    it is pending recursion resolution.  */
 
 static inline rtx
-vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
+vt_expand_var_loc_chain (variable *var, bitmap regs, void *data,
+			 bool *pendrecp)
 {
   struct expand_loc_callback_data *elcd
     = (struct expand_loc_callback_data *) data;
@@ -8359,7 +8360,7 @@ vt_expand_loc_callback (rtx x, bitmap regs,
   struct expand_loc_callback_data *elcd
     = (struct expand_loc_callback_data *) data;
   decl_or_value dv;
-  variable var;
+  variable *var;
   rtx result, subreg;
   bool pending_recursion = false;
   bool from_empty = false;
@@ -8534,7 +8535,7 @@ vt_expand_loc (rtx loc, variable_table_type *vars)
    in VARS, updating their CUR_LOCs in the process.  */
 
 static rtx
-vt_expand_1pvar (variable var, variable_table_type *vars)
+vt_expand_1pvar (variable *var, variable_table_type *vars)
 {
   struct expand_loc_callback_data data;
   rtx loc;
@@ -8560,9 +8561,9 @@ vt_expand_1pvar (variable var, variable_table_type *vars)
    before or after instruction INSN.  */
 
 int
-emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
+emit_note_insn_var_location (variable **varp, emit_note_data *data)
 {
-  variable var = *varp;
+  variable *var = *varp;
   rtx_insn *insn = data->insn;
   enum emit_note_where where = data->where;
   variable_table_type *vars = data->vars;
@@ -8791,10 +8792,10 @@ emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
    values) entries that aren't user variables.  */
 
 int
-var_track_values_to_stack (variable_def **slot,
+var_track_values_to_stack (variable **slot,
 			   vec<rtx, va_heap> *changed_values_stack)
 {
-  variable var = *slot;
+  variable *var = *slot;
 
   if (var->onepart == ONEPART_VALUE)
     changed_values_stack->safe_push (dv_as_value (var->dv));
@@ -8810,8 +8811,8 @@ static void
 remove_value_from_changed_variables (rtx val)
 {
   decl_or_value dv = dv_from_rtx (val);
-  variable_def **slot;
-  variable var;
+  variable **slot;
+  variable *var;
 
   slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
 						NO_INSERT);
@@ -8829,8 +8830,8 @@ static void
 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
 				    vec<rtx, va_heap> *changed_values_stack)
 {
-  variable_def **slot;
-  variable var;
+  variable **slot;
+  variable *var;
   loc_exp_dep *led;
   decl_or_value dv = dv_from_rtx (val);
 
@@ -8846,7 +8847,7 @@ notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
   while ((led = VAR_LOC_DEP_LST (var)))
     {
       decl_or_value ldv = led->dv;
-      variable ivar;
+      variable *ivar;
 
      /* Deactivate and remove the backlink, as it was “used up”.  It
 	 makes no sense to attempt to notify the same entity again:
@@ -8968,9 +8969,9 @@ emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
    same variable in hash table DATA or is not there at all.  */
 
 int
-emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
+emit_notes_for_differences_1 (variable **slot, variable_table_type *new_vars)
 {
-  variable old_var, new_var;
+  variable *old_var, *new_var;
 
   old_var = *slot;
   new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
@@ -8978,7 +8979,7 @@ emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars
   if (!new_var)
     {
       /* Variable has disappeared.  */
-      variable empty_var = NULL;
+      variable *empty_var = NULL;
 
       if (old_var->onepart == ONEPART_VALUE
 	  || old_var->onepart == ONEPART_DEXPR)
@@ -9040,9 +9041,9 @@ emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars
    table DATA.  */
 
 int
-emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
+emit_notes_for_differences_2 (variable **slot, variable_table_type *old_vars)
 {
-  variable old_var, new_var;
+  variable *old_var, *new_var;
 
   new_var = *slot;
   old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
@@ -9596,7 +9597,7 @@ vt_add_function_parameter (tree parm)
 	  && HARD_REGISTER_P (incoming)
 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
 	{
-	  parm_reg_t p;
+	  parm_reg p;
 	  p.incoming = incoming;
 	  incoming
 	    = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
@@ -9613,7 +9614,7 @@ vt_add_function_parameter (tree parm)
 	  for (i = 0; i < XVECLEN (incoming, 0); i++)
 	    {
 	      rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
-	      parm_reg_t p;
+	      parm_reg p;
 	      p.incoming = reg;
 	      reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
 					OUTGOING_REGNO (REGNO (reg)), 0);
@@ -9633,7 +9634,7 @@ vt_add_function_parameter (tree parm)
 	  rtx reg = XEXP (incoming, 0);
 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
 	    {
-	      parm_reg_t p;
+	      parm_reg p;
 	      p.incoming = reg;
 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
 	      p.outgoing = reg;
@@ -9856,7 +9857,7 @@ vt_initialize (void)
   basic_block bb;
   HOST_WIDE_INT fp_cfa_offset = -1;
 
-  alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
+  alloc_aux_for_blocks (sizeof (variable_tracking_info));
 
   empty_shared_hash = new shared_hash;
   empty_shared_hash->refcount = 1;
@@ -10216,7 +10217,7 @@ vt_finalize (void)
   empty_shared_hash->htab = NULL;
   delete changed_variables;
   changed_variables = NULL;
-  attrs_def_pool.release ();
+  attrs_pool.release ();
   var_pool.release ();
   location_chain_pool.release ();
   shared_hash_pool.release ();
-- 
2.4.0

