This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

Re: VTA merge - tracking


tracking (176K) - turn debug insns into var_location notes

Index: gcc/cselib.c
===================================================================
--- gcc/cselib.c.orig	2009-06-05 05:07:49.000000000 -0300
+++ gcc/cselib.c	2009-06-05 05:07:56.000000000 -0300
@@ -54,9 +54,8 @@ static void unchain_one_elt_loc_list (st
 static int discard_useless_locs (void **, void *);
 static int discard_useless_values (void **, void *);
 static void remove_useless_values (void);
-static rtx wrap_constant (enum machine_mode, rtx);
 static unsigned int cselib_hash_rtx (rtx, int);
-static cselib_val *new_cselib_val (unsigned int, enum machine_mode);
+static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
 static cselib_val *cselib_lookup_mem (rtx, int);
 static void cselib_invalidate_regno (unsigned int, enum machine_mode);
@@ -64,6 +63,15 @@ static void cselib_invalidate_mem (rtx);
 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
 static void cselib_record_sets (rtx);
 
+struct expand_value_data
+{
+  bitmap regs_active;
+  cselib_expand_callback callback;
+  void *callback_arg;
+};
+
+static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
+
 /* There are three ways in which cselib can look up an rtx:
    - for a REG, the reg_values table (which is indexed by regno) is used
    - for a MEM, we recursively look up its address and then follow the
@@ -134,6 +142,20 @@ static alloc_pool elt_loc_list_pool, elt
 /* If nonnull, cselib will call this function before freeing useless
    VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
 void (*cselib_discard_hook) (cselib_val *);
+
+/* If nonnull, cselib will call this function before recording sets or
+   even clobbering outputs of INSN.  All the recorded sets will be
+   represented in the array sets[n_sets].  The value returned by
+   cselib_get_next_unknown_value before processing this insn can be
+   used to tell whether values present in sets are introduced by this
+   instruction.  */
+void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
+				 int n_sets);
+
+#define PRESERVED_VALUE_P(RTX) \
+  (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
+#define LONG_TERM_PRESERVED_VALUE_P(RTX) \
+  (RTL_FLAG_CHECK1("LONG_TERM_PRESERVED_VALUE_P", (RTX), VALUE)->in_struct)
+
 
 
 /* Allocate a struct elt_list and fill in its two elements with the
@@ -199,12 +221,20 @@ unchain_one_value (cselib_val *v)
 }
 
 /* Remove all entries from the hash table.  Also used during
-   initialization.  If CLEAR_ALL isn't set, then only clear the entries
-   which are known to have been used.  */
+   initialization.  */
 
 void
 cselib_clear_table (void)
 {
+  cselib_reset_table_with_next_value (0);
+}
+
+/* Remove all entries from the hash table, arranging for the next
+   value to be numbered NUM.  */
+
+void
+cselib_reset_table_with_next_value (unsigned int num)
+{
   unsigned int i;
 
   for (i = 0; i < n_used_regs; i++)
@@ -214,15 +244,24 @@ cselib_clear_table (void)
 
   n_used_regs = 0;
 
+  /* ??? Preserve constants?  */
   htab_empty (cselib_hash_table);
 
   n_useless_values = 0;
 
-  next_unknown_value = 0;
+  next_unknown_value = num;
 
   first_containing_mem = &dummy_val;
 }
 
+/* Return the number of the next value that will be generated.  */
+
+unsigned int
+cselib_get_next_unknown_value (void)
+{
+  return next_unknown_value;
+}
+
 /* The equality test for our hash table.  The first argument ENTRY is a table
    element (i.e. a cselib_val), while the second arg X is an rtx.  We know
    that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
@@ -317,7 +356,7 @@ discard_useless_locs (void **x, void *in
 	p = &(*p)->next;
     }
 
-  if (had_locs && v->locs == 0)
+  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
     {
       n_useless_values++;
       values_became_useless = 1;
@@ -332,7 +371,7 @@ discard_useless_values (void **x, void *
 {
   cselib_val *v = (cselib_val *)*x;
 
-  if (v->locs == 0)
+  if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
     {
       if (cselib_discard_hook)
 	cselib_discard_hook (v);
@@ -378,6 +417,78 @@ remove_useless_values (void)
   gcc_assert (!n_useless_values);
 }
 
+/* Arrange for a value to not be removed from the hash table even if
+   it becomes useless.  */
+
+void
+cselib_preserve_value (cselib_val *v)
+{
+  PRESERVED_VALUE_P (v->val_rtx) = 1;
+}
+
+/* Test whether a value is preserved.  */
+
+bool
+cselib_preserved_value_p (cselib_val *v)
+{
+  return PRESERVED_VALUE_P (v->val_rtx);
+}
+
+/* Mark preserved values as preserved for the long term.  */
+
+static int
+cselib_preserve_definitely (void **slot, void *info ATTRIBUTE_UNUSED)
+{
+  cselib_val *v = (cselib_val *)*slot;
+
+  if (PRESERVED_VALUE_P (v->val_rtx)
+      && !LONG_TERM_PRESERVED_VALUE_P (v->val_rtx))
+    LONG_TERM_PRESERVED_VALUE_P (v->val_rtx) = true;
+
+  return 1;
+}
+
+/* Clear the preserve marks for values not preserved for the long
+   term.  */
+
+static int
+cselib_clear_preserve (void **slot, void *info ATTRIBUTE_UNUSED)
+{
+  cselib_val *v = (cselib_val *)*slot;
+
+  if (PRESERVED_VALUE_P (v->val_rtx)
+      && !LONG_TERM_PRESERVED_VALUE_P (v->val_rtx))
+    {
+      PRESERVED_VALUE_P (v->val_rtx) = false;
+      if (!v->locs)
+	n_useless_values++;
+    }
+
+  return 1;
+}
+
+/* Clean all non-constant expressions in the hash table, but retain
+   their values.  */
+
+void
+cselib_preserve_only_values (bool retain)
+{
+  int i;
+
+  htab_traverse (cselib_hash_table,
+		 retain ? cselib_preserve_definitely : cselib_clear_preserve,
+		 NULL);
+
+  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+    cselib_invalidate_regno (i, reg_raw_mode[i]);
+
+  cselib_invalidate_mem (callmem);
+
+  remove_useless_values ();
+
+  gcc_assert (first_containing_mem == &dummy_val);
+}
+
 /* Return the mode in which a register was last set.  If X is not a
    register, return its mode.  If the mode in which the register was
    set is not known, or the value was already clobbered, return
@@ -549,19 +660,6 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
   return 1;
 }
 
-/* We need to pass down the mode of constants through the hash table
-   functions.  For that purpose, wrap them in a CONST of the appropriate
-   mode.  */
-static rtx
-wrap_constant (enum machine_mode mode, rtx x)
-{
-  if (GET_CODE (x) != CONST_INT && GET_CODE (x) != CONST_FIXED
-      && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
-    return x;
-  gcc_assert (mode != VOIDmode);
-  return gen_rtx_CONST (mode, x);
-}
-
 /* Hash an rtx.  Return 0 if we couldn't hash the rtx.
    For registers and memory locations, we look up their cselib_val structure
    and return its VALUE element.
@@ -748,7 +846,7 @@ cselib_hash_rtx (rtx x, int create)
    value is MODE.  */
 
 static inline cselib_val *
-new_cselib_val (unsigned int value, enum machine_mode mode)
+new_cselib_val (unsigned int value, enum machine_mode mode, rtx x)
 {
   cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
 
@@ -768,6 +866,18 @@ new_cselib_val (unsigned int value, enum
   e->addr_list = 0;
   e->locs = 0;
   e->next_containing_mem = 0;
+
+  if (dump_file && flag_verbose_cselib)
+    {
+      fprintf (dump_file, "cselib value %u ", value);
+      if (flag_dump_noaddr || flag_dump_unnumbered)
+	fputs ("# ", dump_file);
+      else
+	fprintf (dump_file, "%p ", (void*)e);
+      print_rtl_single (dump_file, x);
+      fputc ('\n', dump_file);
+    }
+
   return e;
 }
 
@@ -827,7 +937,7 @@ cselib_lookup_mem (rtx x, int create)
   if (! create)
     return 0;
 
-  mem_elt = new_cselib_val (++next_unknown_value, mode);
+  mem_elt = new_cselib_val (++next_unknown_value, mode, x);
   add_mem_for_addr (addr, mem_elt, x);
   slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
 				   mem_elt->value, INSERT);
@@ -842,7 +952,8 @@ cselib_lookup_mem (rtx x, int create)
    expand to the same place.  */
 
 static rtx 
-expand_loc (struct elt_loc_list *p, bitmap regs_active, int max_depth)
+expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
+	    int max_depth)
 {
   rtx reg_result = NULL;
   unsigned int regno = UINT_MAX;
@@ -854,7 +965,7 @@ expand_loc (struct elt_loc_list *p, bitm
 	 the same reg.  */
       if ((REG_P (p->loc)) 
 	  && (REGNO (p->loc) < regno) 
-	  && !bitmap_bit_p (regs_active, REGNO (p->loc)))
+	  && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
 	{
 	  reg_result = p->loc;
 	  regno = REGNO (p->loc);
@@ -867,7 +978,7 @@ expand_loc (struct elt_loc_list *p, bitm
       else if (!REG_P (p->loc))
 	{
 	  rtx result, note;
-	  if (dump_file)
+	  if (dump_file && flag_verbose_cselib)
 	    {
 	      print_inline_rtx (dump_file, p->loc, 0);
 	      fprintf (dump_file, "\n");
@@ -878,7 +989,7 @@ expand_loc (struct elt_loc_list *p, bitm
 	      && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
 	      && XEXP (note, 0) == XEXP (p->loc, 1))
 	    return XEXP (p->loc, 1);
-	  result = cselib_expand_value_rtx (p->loc, regs_active, max_depth - 1);
+	  result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
 	  if (result)
 	    return result;
 	}
@@ -888,15 +999,15 @@ expand_loc (struct elt_loc_list *p, bitm
   if (regno != UINT_MAX)
     {
       rtx result;
-      if (dump_file)
+      if (dump_file && flag_verbose_cselib)
 	fprintf (dump_file, "r%d\n", regno);
 
-      result = cselib_expand_value_rtx (reg_result, regs_active, max_depth - 1);
+      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
       if (result)
 	return result;
     }
 
-  if (dump_file)
+  if (dump_file && flag_verbose_cselib)
     {
       if (reg_result)
 	{
@@ -910,6 +1021,23 @@ expand_loc (struct elt_loc_list *p, bitm
 }
 
 
+/* Wrap result in CONST:MODE if needed to preserve the mode.  */
+static rtx
+check_wrap_constant (enum machine_mode mode, rtx result)
+{
+  if (!result || GET_MODE (result) == mode)
+    return result;
+
+  if (dump_file && flag_verbose_cselib)
+    fprintf (dump_file, "  wrapping result in const to preserve mode %s\n",
+	     GET_MODE_NAME (mode));
+
+  result = wrap_constant (mode, result);
+  gcc_assert (GET_MODE (result) == mode);
+
+  return result;
+}
+
 /* Forward substitute and expand an expression out to its roots.
    This is the opposite of common subexpression.  Because local value
    numbering is such a weak optimization, the expanded expression is
@@ -931,6 +1059,39 @@ expand_loc (struct elt_loc_list *p, bitm
 rtx
 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
 {
+  struct expand_value_data evd;
+
+  evd.regs_active = regs_active;
+  evd.callback = NULL;
+  evd.callback_arg = NULL;
+
+  return check_wrap_constant (GET_MODE (orig),
+			      cselib_expand_value_rtx_1 (orig, &evd,
+							 max_depth));
+}
+
+/* Same as cselib_expand_value_rtx, but using a callback to try to
+   resolve VALUEs that expand to nothing.  */
+
+rtx
+cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
+			    cselib_expand_callback cb, void *data)
+{
+  struct expand_value_data evd;
+
+  evd.regs_active = regs_active;
+  evd.callback = cb;
+  evd.callback_arg = data;
+
+  return check_wrap_constant (GET_MODE (orig),
+			      cselib_expand_value_rtx_1 (orig, &evd,
+							 max_depth));
+}
+
+static rtx
+cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
+			   int max_depth)
+{
   rtx copy, scopy;
   int i, j;
   RTX_CODE code;
@@ -980,13 +1141,13 @@ cselib_expand_value_rtx (rtx orig, bitma
 		  || regno == HARD_FRAME_POINTER_REGNUM)
 		return orig;
 
-	      bitmap_set_bit (regs_active, regno);
+	      bitmap_set_bit (evd->regs_active, regno);
 
-	      if (dump_file)
+	      if (dump_file && flag_verbose_cselib)
 		fprintf (dump_file, "expanding: r%d into: ", regno);
 
-	      result = expand_loc (l->elt->locs, regs_active, max_depth);
-	      bitmap_clear_bit (regs_active, regno);
+	      result = expand_loc (l->elt->locs, evd, max_depth);
+	      bitmap_clear_bit (evd->regs_active, regno);
 
 	      if (result)
 		return result;
@@ -1017,8 +1178,8 @@ cselib_expand_value_rtx (rtx orig, bitma
 
     case SUBREG:
       {
-	rtx subreg = cselib_expand_value_rtx (SUBREG_REG (orig), regs_active,
-					      max_depth - 1);
+	rtx subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
+						max_depth - 1);
 	if (!subreg)
 	  return NULL;
 	scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
@@ -1027,18 +1188,39 @@ cselib_expand_value_rtx (rtx orig, bitma
 	if (scopy == NULL
 	    || (GET_CODE (scopy) == SUBREG
 		&& !REG_P (SUBREG_REG (scopy))
-		&& !MEM_P (SUBREG_REG (scopy))))
+		&& !MEM_P (SUBREG_REG (scopy))
+		&& (REG_P (SUBREG_REG (orig))
+		    || MEM_P (SUBREG_REG (orig)))))
 	  return shallow_copy_rtx (orig);
 	return scopy;
       }
 
     case VALUE:
-      if (dump_file)
-	fprintf (dump_file, "expanding value %s into: ",
-		 GET_MODE_NAME (GET_MODE (orig)));
+      {
+	rtx result;
+	if (dump_file && flag_verbose_cselib)
+	  {
+	    fputs ("\nexpanding ", dump_file);
+	    print_rtl_single (dump_file, orig);
+	    fputs (" into...", dump_file);
+	  }
 
-      return expand_loc (CSELIB_VAL_PTR (orig)->locs, regs_active, max_depth);
+	if (!evd->callback)
+	  result = NULL;
+	else
+	  {
+	    result = evd->callback (orig, evd->regs_active, max_depth,
+				    evd->callback_arg);
+	    if (result == orig)
+	      result = NULL;
+	    else if (result)
+	      result = cselib_expand_value_rtx_1 (result, evd, max_depth);
+	  }
 
+	if (!result)
+	  result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
+	return result;
+      }
     default:
       break;
     }
@@ -1057,7 +1239,8 @@ cselib_expand_value_rtx (rtx orig, bitma
       case 'e':
 	if (XEXP (orig, i) != NULL)
 	  {
-	    rtx result = cselib_expand_value_rtx (XEXP (orig, i), regs_active, max_depth - 1);
+	    rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
+						    max_depth - 1);
 	    if (!result)
 	      return NULL;
 	    XEXP (copy, i) = result;
@@ -1071,7 +1254,8 @@ cselib_expand_value_rtx (rtx orig, bitma
 	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
 	    for (j = 0; j < XVECLEN (copy, i); j++)
 	      {
-		rtx result = cselib_expand_value_rtx (XVECEXP (orig, i, j), regs_active, max_depth - 1);
+		rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
+							evd, max_depth - 1);
 		if (!result)
 		  return NULL;
 		XVECEXP (copy, i, j) = result;
@@ -1155,13 +1339,17 @@ cselib_expand_value_rtx (rtx orig, bitma
     {
       XEXP (copy, 0)
 	= gen_rtx_CONST (GET_MODE (XEXP (orig, 0)), XEXP (copy, 0));
-      if (dump_file)
+      if (dump_file && flag_verbose_cselib)
 	fprintf (dump_file, "  wrapping const_int result in const to preserve mode %s\n",
 		 GET_MODE_NAME (GET_MODE (XEXP (copy, 0))));
     }
   scopy = simplify_rtx (copy);
   if (scopy)
-    return scopy;
+    {
+      if (GET_MODE (copy) != GET_MODE (scopy))
+	scopy = wrap_constant (GET_MODE (copy), scopy);
+      return scopy;
+    }
   return copy;
 }
 
@@ -1199,7 +1387,7 @@ cselib_subst_to_values (rtx x)
 	{
 	  /* This happens for autoincrements.  Assign a value that doesn't
 	     match any other.  */
-	  e = new_cselib_val (++next_unknown_value, GET_MODE (x));
+	  e = new_cselib_val (++next_unknown_value, GET_MODE (x), x);
 	}
       return e->val_rtx;
 
@@ -1215,7 +1403,7 @@ cselib_subst_to_values (rtx x)
     case PRE_DEC:
     case POST_MODIFY:
     case PRE_MODIFY:
-      e = new_cselib_val (++next_unknown_value, GET_MODE (x));
+      e = new_cselib_val (++next_unknown_value, GET_MODE (x), x);
       return e->val_rtx;
 
     default:
@@ -1259,6 +1447,21 @@ cselib_subst_to_values (rtx x)
   return copy;
 }
 
+/* Log a lookup of X to the cselib table along with the result RET.  */
+
+static cselib_val *
+cselib_log_lookup (rtx x, cselib_val *ret)
+{
+  if (dump_file && flag_verbose_cselib)
+    {
+      fputs ("cselib lookup ", dump_file);
+      print_inline_rtx (dump_file, x, 2);
+      fprintf (dump_file, " => %u\n", ret ? ret->value : 0);
+    }
+
+  return ret;
+}
+
 /* Look up the rtl expression X in our tables and return the value it has.
    If CREATE is zero, we return NULL if we don't know the value.  Otherwise,
    we create a new one if possible, using mode MODE if X doesn't have a mode
@@ -1287,10 +1490,10 @@ cselib_lookup (rtx x, enum machine_mode 
 	l = l->next;
       for (; l; l = l->next)
 	if (mode == GET_MODE (l->elt->val_rtx))
-	  return l->elt;
+	  return cselib_log_lookup (x, l->elt);
 
       if (! create)
-	return 0;
+	return cselib_log_lookup (x, 0);
 
       if (i < FIRST_PSEUDO_REGISTER)
 	{
@@ -1300,7 +1503,7 @@ cselib_lookup (rtx x, enum machine_mode 
 	    max_value_regs = n;
 	}
 
-      e = new_cselib_val (++next_unknown_value, GET_MODE (x));
+      e = new_cselib_val (++next_unknown_value, GET_MODE (x), x);
       e->locs = new_elt_loc_list (e->locs, x);
       if (REG_VALUES (i) == 0)
 	{
@@ -1313,34 +1516,34 @@ cselib_lookup (rtx x, enum machine_mode 
       REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
       slot = htab_find_slot_with_hash (cselib_hash_table, x, e->value, INSERT);
       *slot = e;
-      return e;
+      return cselib_log_lookup (x, e);
     }
 
   if (MEM_P (x))
-    return cselib_lookup_mem (x, create);
+    return cselib_log_lookup (x, cselib_lookup_mem (x, create));
 
   hashval = cselib_hash_rtx (x, create);
   /* Can't even create if hashing is not possible.  */
   if (! hashval)
-    return 0;
+    return cselib_log_lookup (x, 0);
 
   slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
 				   hashval, create ? INSERT : NO_INSERT);
   if (slot == 0)
-    return 0;
+    return cselib_log_lookup (x, 0);
 
   e = (cselib_val *) *slot;
   if (e)
-    return e;
+    return cselib_log_lookup (x, e);
 
-  e = new_cselib_val (hashval, mode);
+  e = new_cselib_val (hashval, mode, x);
 
   /* We have to fill the slot before calling cselib_subst_to_values:
      the hash table is inconsistent until we do so, and
      cselib_subst_to_values will need to do lookups.  */
   *slot = (void *) e;
   e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
-  return e;
+  return cselib_log_lookup (x, e);
 }
 
 /* Invalidate any entries in reg_values that overlap REGNO.  This is called
@@ -1427,7 +1630,7 @@ cselib_invalidate_regno (unsigned int re
 		  break;
 		}
 	    }
-	  if (v->locs == 0)
+	  if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
 	    n_useless_values++;
 	}
     }
@@ -1510,7 +1713,7 @@ cselib_invalidate_mem (rtx mem_rtx)
 	  unchain_one_elt_loc_list (p);
 	}
 
-      if (had_locs && v->locs == 0)
+      if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
 	n_useless_values++;
 
       next = v->next_containing_mem;
@@ -1591,28 +1794,19 @@ cselib_record_set (rtx dest, cselib_val 
 	  REG_VALUES (dreg)->elt = src_elt;
 	}
 
-      if (src_elt->locs == 0)
+      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
 	n_useless_values--;
       src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
     }
   else if (MEM_P (dest) && dest_addr_elt != 0
 	   && cselib_record_memory)
     {
-      if (src_elt->locs == 0)
+      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
 	n_useless_values--;
       add_mem_for_addr (dest_addr_elt, src_elt, dest);
     }
 }
 
-/* Describe a single set that is part of an insn.  */
-struct set
-{
-  rtx src;
-  rtx dest;
-  cselib_val *src_elt;
-  cselib_val *dest_addr_elt;
-};
-
 /* There is no good way to determine how many elements there can be
    in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
@@ -1623,7 +1817,7 @@ cselib_record_sets (rtx insn)
 {
   int n_sets = 0;
   int i;
-  struct set sets[MAX_SETS];
+  struct cselib_set sets[MAX_SETS];
   rtx body = PATTERN (insn);
   rtx cond = 0;
 
@@ -1695,6 +1889,9 @@ cselib_record_sets (rtx insn)
 	}
     }
 
+  if (cselib_record_sets_hook)
+    cselib_record_sets_hook (insn, sets, n_sets);
+
   /* Invalidate all locations written by this insn.  Note that the elts we
      looked up in the previous loop aren't affected, just some of their
      locations may go away.  */
@@ -1751,7 +1948,7 @@ cselib_process_insn (rtx insn)
 	  && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
 	  && MEM_VOLATILE_P (PATTERN (insn))))
     {
-      cselib_clear_table ();
+      cselib_reset_table_with_next_value (next_unknown_value);
       return;
     }
 
@@ -1868,4 +2065,92 @@ cselib_finish (void)
   next_unknown_value = 0;
 }
 
+/* Dump the cselib_val *X to FILE *info.  */
+
+static int
+dump_cselib_val (void **x, void *info)
+{
+  cselib_val *v = (cselib_val *)*x;
+  FILE *out = (FILE *)info;
+  bool need_lf = true;
+
+  print_inline_rtx (out, v->val_rtx, 0);
+
+  if (v->locs)
+    {
+      struct elt_loc_list *l = v->locs;
+      if (need_lf)
+	{
+	  fputc ('\n', out);
+	  need_lf = false;
+	}
+      fputs (" locs:", out);
+      do
+	{
+	  fprintf (out, "\n  from insn %i ",
+		   INSN_UID (l->setting_insn));
+	  print_inline_rtx (out, l->loc, 4);
+	}
+      while ((l = l->next));
+      fputc ('\n', out);
+    }
+  else
+    {
+      fputs (" no locs", out);
+      need_lf = true;
+    }
+
+  if (v->addr_list)
+    {
+      struct elt_list *e = v->addr_list;
+      if (need_lf)
+	{
+	  fputc ('\n', out);
+	  need_lf = false;
+	}
+      fputs (" addr list:", out);
+      do
+	{
+	  fputs ("\n  ", out);
+	  print_inline_rtx (out, e->elt->val_rtx, 2);
+	}
+      while ((e = e->next));
+      fputc ('\n', out);
+    }
+  else
+    {
+      fputs (" no addrs", out);
+      need_lf = true;
+    }
+
+  if (v->next_containing_mem == &dummy_val)
+    fputs (" last mem\n", out);
+  else if (v->next_containing_mem)
+    {
+      fputs (" next mem ", out);
+      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
+      fputc ('\n', out);
+    }
+  else if (need_lf)
+    fputc ('\n', out);
+
+  return 1;
+}
+
+/* Dump to OUT everything in the CSELIB table.  */
+
+void
+dump_cselib_table (FILE *out)
+{
+  fprintf (out, "cselib hash table:\n");
+  htab_traverse (cselib_hash_table, dump_cselib_val, out);
+  if (first_containing_mem != &dummy_val)
+    {
+      fputs ("first mem ", out);
+      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
+      fputc ('\n', out);
+    }
+  fprintf (out, "last unknown value %i\n", next_unknown_value);
+}
+
 #include "gt-cselib.h"
Index: gcc/rtl.c
===================================================================
--- gcc/rtl.c.orig	2009-06-05 05:07:48.000000000 -0300
+++ gcc/rtl.c	2009-06-05 05:07:56.000000000 -0300
@@ -379,6 +379,7 @@ rtx_equal_p_cb (const_rtx x, const_rtx y
     case SYMBOL_REF:
       return XSTR (x, 0) == XSTR (y, 0);
 
+    case VALUE:
     case SCRATCH:
     case CONST_DOUBLE:
     case CONST_INT:
Index: gcc/var-tracking.c
===================================================================
--- gcc/var-tracking.c.orig	2009-06-05 05:07:49.000000000 -0300
+++ gcc/var-tracking.c	2009-06-05 05:07:56.000000000 -0300
@@ -106,6 +106,8 @@
 #include "expr.h"
 #include "timevar.h"
 #include "tree-pass.h"
+#include "cselib.h"
+#include "target.h"
 
 /* Type of micro operation.  */
 enum micro_operation_type
@@ -113,12 +115,30 @@ enum micro_operation_type
   MO_USE,	/* Use location (REG or MEM).  */
   MO_USE_NO_VAR,/* Use location which is not associated with a variable
 		   or the variable is not trackable.  */
+  MO_VAL_USE,	/* Use location which is associated with a value.  */
+  MO_VAL_LOC,   /* Use location which appears in a debug insn.  */
+  MO_VAL_SET,	/* Set location associated with a value.  */
   MO_SET,	/* Set location.  */
   MO_COPY,	/* Copy the same portion of a variable from one
 		   location to another.  */
   MO_CLOBBER,	/* Clobber location.  */
   MO_CALL,	/* Call insn.  */
   MO_ADJUST	/* Adjust stack pointer.  */
+
+};
+
+static const char * const ATTRIBUTE_UNUSED
+micro_operation_type_name[] = {
+  "MO_USE",
+  "MO_USE_NO_VAR",
+  "MO_VAL_USE",
+  "MO_VAL_LOC",
+  "MO_VAL_SET",
+  "MO_SET",
+  "MO_COPY",
+  "MO_CLOBBER",
+  "MO_CALL",
+  "MO_ADJUST"
 };
 
 /* Where shall the note be emitted?  BEFORE or AFTER the instruction.  */
@@ -135,9 +155,12 @@ typedef struct micro_operation_def
   enum micro_operation_type type;
 
   union {
-    /* Location.  For MO_SET and MO_COPY, this is the SET that performs
-       the assignment, if known, otherwise it is the target of the
-       assignment.  */
+    /* Location.  For MO_SET and MO_COPY, this is the SET that
+       performs the assignment, if known, otherwise it is the target
+       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
+       CONCAT of the VALUE and the LOC associated with it.  For
+       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
+       associated with it.  */
     rtx loc;
 
     /* Stack adjustment.  */
@@ -152,6 +175,14 @@ typedef struct micro_operation_def
   rtx insn;
 } micro_operation;
 
+/* A declaration of a variable, or an RTL value being handled like a
+   declaration.  */
+typedef struct decl_or_value
+{
+  /* An opaque pointer to the decl or the value.  */
+  void *ptr;
+} decl_or_value;
+
 /* Structure for passing some other parameters to function
    emit_note_insn_var_location.  */
 typedef struct emit_note_data_def
@@ -161,6 +192,9 @@ typedef struct emit_note_data_def
 
   /* Where the note will be emitted (before/after insn)?  */
   enum emit_note_where where;
+
+  /* The variables and values active at this point.  */
+  htab_t vars;
 } emit_note_data;
 
 /* Description of location of a part of a variable.  The content of a physical
@@ -176,7 +210,7 @@ typedef struct attrs_def
   rtx loc;
 
   /* The declaration corresponding to LOC.  */
-  tree decl;
+  decl_or_value dv;
 
   /* Offset from start of DECL.  */
   HOST_WIDE_INT offset;
@@ -209,8 +243,18 @@ typedef struct variable_tracking_info_de
   dataflow_set in;
   dataflow_set out;
 
+  /* The permanent-in dataflow set for this block.  This is used to
+     hold values for which we had to compute entry values.  ??? This
+     should probably be dynamically allocated, to avoid using more
+     memory in non-debug builds.  */
+  dataflow_set *permp;
+
   /* Has the block been visited in DFS?  */
   bool visited;
+
+  /* Has the block been flooded in VTA?  */
+  bool flooded;
+
 } *variable_tracking_info;
 
 /* Structure for chaining the locations.  */
@@ -219,7 +263,7 @@ typedef struct location_chain_def
   /* Next element in the chain.  */
   struct location_chain_def *next;
 
-  /* The location (REG or MEM).  */
+  /* The location (REG, MEM or VALUE).  */
   rtx loc;
 
   /* The "value" stored in this location.  */
@@ -248,8 +292,9 @@ typedef struct variable_part_def
 /* Structure describing where the variable is located.  */
 typedef struct variable_def
 {
-  /* The declaration of the variable.  */
-  tree decl;
+  /* The declaration of the variable, or an RTL value being handled
+     like a declaration.  */
+  decl_or_value dv;
 
   /* Reference count.  */
   int refcount;
@@ -258,7 +303,7 @@ typedef struct variable_def
   int n_var_parts;
 
   /* The variable parts.  */
-  variable_part var_part[MAX_VAR_PARTS];
+  variable_part var_part[1];
 } *variable;
 typedef const struct variable_def *const_variable;
 
@@ -274,9 +319,12 @@ typedef const struct variable_def *const
 /* Alloc pool for struct attrs_def.  */
 static alloc_pool attrs_pool;
 
-/* Alloc pool for struct variable_def.  */
+/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
 static alloc_pool var_pool;
 
+/* Alloc pool for struct variable_def with a single var_part entry.  */
+static alloc_pool valvar_pool;
+
 /* Alloc pool for struct location_chain_def.  */
 static alloc_pool loc_chain_pool;
 
@@ -286,6 +334,12 @@ static htab_t changed_variables;
 /* Shall notes be emitted?  */
 static bool emit_notes;
 
+/* Scratch register bitmap used by cselib_expand_value_rtx.  */
+static bitmap scratch_regs = NULL;
+
+/* Variable used to tell whether cselib_process_insn called our hook.  */
+static bool cselib_hook_called;
+
 /* Local function prototypes.  */
 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
 					  HOST_WIDE_INT *);
@@ -300,13 +354,13 @@ static void variable_htab_free (void *);
 
 static void init_attrs_list_set (attrs *);
 static void attrs_list_clear (attrs *);
-static attrs attrs_list_member (attrs, tree, HOST_WIDE_INT);
-static void attrs_list_insert (attrs *, tree, HOST_WIDE_INT, rtx);
+static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
+static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
 static void attrs_list_copy (attrs *, attrs);
 static void attrs_list_union (attrs *, attrs);
 
 static void vars_clear (htab_t);
-static variable unshare_variable (dataflow_set *set, variable var, 
+static variable unshare_variable (void **slot, variable var,
 				  enum var_init_status);
 static int vars_copy_1 (void **, void *);
 static void vars_copy (htab_t, htab_t);
@@ -327,7 +381,11 @@ static void dataflow_set_copy (dataflow_
 static int variable_union_info_cmp_pos (const void *, const void *);
 static int variable_union (void **, void *);
 static void dataflow_set_union (dataflow_set *, dataflow_set *);
+static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
+static bool canon_value_cmp (rtx, rtx);
+static int loc_cmp (rtx, rtx);
 static bool variable_part_different_p (variable_part *, variable_part *);
+static bool onepart_variable_different_p (variable, variable);
 static bool variable_different_p (variable, variable, bool);
 static int dataflow_set_different_1 (void **, void *);
 static int dataflow_set_different_2 (void **, void *);
@@ -335,7 +393,7 @@ static bool dataflow_set_different (data
 static void dataflow_set_destroy (dataflow_set *);
 
 static bool contains_symbol_ref (rtx);
-static bool track_expr_p (tree);
+static bool track_expr_p (tree, bool);
 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
 static int count_uses (rtx *, void *);
 static void count_uses_1 (rtx *, void *);
@@ -347,19 +405,28 @@ static bool compute_bb_dataflow (basic_b
 static void vt_find_locations (void);
 
 static void dump_attrs_list (attrs);
-static int dump_variable (void **, void *);
+static int dump_variable_slot (void **, void *);
+static void dump_variable (variable);
 static void dump_vars (htab_t);
 static void dump_dataflow_set (dataflow_set *);
 static void dump_dataflow_sets (void);
 
 static void variable_was_changed (variable, htab_t);
-static void set_variable_part (dataflow_set *, rtx, tree, HOST_WIDE_INT, 
-			       enum var_init_status, rtx);
-static void clobber_variable_part (dataflow_set *, rtx, tree, HOST_WIDE_INT, 
-				   rtx);
-static void delete_variable_part (dataflow_set *, rtx, tree, HOST_WIDE_INT);
+static void set_slot_part (dataflow_set *, rtx, void **,
+			   decl_or_value, HOST_WIDE_INT,
+			   enum var_init_status, rtx);
+static void set_variable_part (dataflow_set *, rtx,
+			       decl_or_value, HOST_WIDE_INT,
+			       enum var_init_status, rtx, enum insert_option);
+static void clobber_slot_part (dataflow_set *, rtx,
+			       void **, HOST_WIDE_INT, rtx);
+static void clobber_variable_part (dataflow_set *, rtx,
+				   decl_or_value, HOST_WIDE_INT, rtx);
+static void delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
+static void delete_variable_part (dataflow_set *, rtx,
+				  decl_or_value, HOST_WIDE_INT);
 static int emit_note_insn_var_location (void **, void *);
-static void emit_notes_for_changes (rtx, enum emit_note_where);
+static void emit_notes_for_changes (rtx, enum emit_note_where, htab_t vars);
 static int emit_notes_for_differences_1 (void **, void *);
 static int emit_notes_for_differences_2 (void **, void *);
 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
@@ -610,6 +677,120 @@ adjust_stack_reference (rtx mem, HOST_WI
   return replace_equiv_address_nv (mem, addr);
 }
 
+/* Return true if a decl_or_value DV holds a non-NULL DECL rather
+   than a VALUE.  NULL yields false.  */
+static inline bool
+dv_is_decl_p (decl_or_value dv)
+{
+  tree decl;
+
+  if (!dv.ptr)
+    return false;
+
+  decl = (tree)dv.ptr;
+
+  if (GET_CODE ((rtx)dv.ptr) == VALUE)
+    return false;
+
+  return true;
+}
+
+/* Return true if a decl_or_value is a VALUE rtl.  */
+static inline bool
+dv_is_value_p (decl_or_value dv)
+{
+  return dv.ptr && !dv_is_decl_p (dv);
+}
+
+/* Return the decl in the decl_or_value.  */
+static inline tree
+dv_as_decl (decl_or_value dv)
+{
+  gcc_assert (dv_is_decl_p (dv));
+  return (tree) dv.ptr;
+}
+
+/* Return the value in the decl_or_value.  */
+static inline rtx
+dv_as_value (decl_or_value dv)
+{
+  gcc_assert (dv_is_value_p (dv));
+  return (rtx)dv.ptr;
+}
+
+/* Return the opaque pointer in the decl_or_value.  */
+static inline void *
+dv_as_opaque (decl_or_value dv)
+{
+  return dv.ptr;
+}
+
+/* Return true if a decl_or_value must not have more than one variable
+   part.  */
+static inline bool
+dv_onepart_p (decl_or_value dv)
+{
+  tree decl;
+
+  if (!MAY_HAVE_DEBUG_INSNS)
+    return false;
+
+  if (dv_is_value_p (dv))
+    return true;
+
+  decl = dv_as_decl (dv);
+
+  if (!decl)
+    return true;
+
+  return var_debug_value_for_decl (decl);
+}
+
+/* Return the variable pool to be used for dv, depending on whether it
+   can have multiple parts or not.  */
+static inline alloc_pool
+dv_pool (decl_or_value dv)
+{
+  return dv_onepart_p (dv) ? valvar_pool : var_pool;
+}
+
+#define IS_DECL_CODE(C) ((C) == VAR_DECL || (C) == PARM_DECL \
+			 || (C) == RESULT_DECL || (C) == COMPONENT_REF)
+
+/* Check that VALUE won't ever look like a DECL.  */
+static char check_value_is_not_decl [(!IS_DECL_CODE ((enum tree_code)VALUE))
+				     ? 1 : -1] ATTRIBUTE_UNUSED;
+
+
+/* Build a decl_or_value out of a decl.  */
+static inline decl_or_value
+dv_from_decl (tree decl)
+{
+  decl_or_value dv;
+  gcc_assert (!decl || IS_DECL_CODE (TREE_CODE (decl)));
+  dv.ptr = decl;
+  return dv;
+}
+
+/* Build a decl_or_value out of a value.  */
+static inline decl_or_value
+dv_from_value (rtx value)
+{
+  decl_or_value dv;
+  gcc_assert (value);
+  dv.ptr = value;
+  return dv;
+}
+
+static hashval_t
+dv_htab_hash (decl_or_value dv)
+{
+  gcc_assert (dv.ptr);
+  if (dv_is_value_p (dv))
+    return -(hashval_t)(CSELIB_VAL_PTR (dv_as_value (dv))->value);
+  else
+    return (VARIABLE_HASH_VAL (dv_as_decl (dv)));
+}
+
 /* The hash function for variable_htab, computes the hash value
    from the declaration of variable X.  */
 
@@ -618,7 +799,7 @@ variable_htab_hash (const void *x)
 {
   const_variable const v = (const_variable) x;
 
-  return (VARIABLE_HASH_VAL (v->decl));
+  return dv_htab_hash (v->dv);
 }
 
 /* Compare the declaration of variable X with declaration Y.  */
@@ -627,9 +808,31 @@ static int
 variable_htab_eq (const void *x, const void *y)
 {
   const_variable const v = (const_variable) x;
-  const_tree const decl = (const_tree) y;
+  decl_or_value dv = *(decl_or_value const*)y;
+
+  if (dv_as_opaque (v->dv) == dv_as_opaque (dv))
+    return true;
+
+#if ENABLE_CHECKING
+  {
+    bool visv, dvisv;
+
+    visv = dv_is_value_p (v->dv);
+    dvisv = dv_is_value_p (dv);
+
+    if (visv != dvisv)
+      return false;
+
+    if (visv)
+      gcc_assert (CSELIB_VAL_PTR (dv_as_value (v->dv))
+		  != CSELIB_VAL_PTR (dv_as_value (dv)));
+    else
+      gcc_assert (VARIABLE_HASH_VAL (dv_as_decl (v->dv))
+		  != VARIABLE_HASH_VAL (dv_as_decl (dv)));
+  }
+#endif
 
-  return (VARIABLE_HASH_VAL (v->decl) == VARIABLE_HASH_VAL (decl));
+  return false;
 }
 
 /* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */
@@ -656,7 +859,7 @@ variable_htab_free (void *elem)
 	}
       var->var_part[i].loc_chain = NULL;
     }
-  pool_free (var_pool, var);
+  pool_free (dv_pool (var->dv), var);
 }
 
 /* Initialize the set (array) SET of attrs to empty lists.  */
@@ -688,10 +891,10 @@ attrs_list_clear (attrs *listp)
 /* Return true if the pair of DECL and OFFSET is the member of the LIST.  */
 
 static attrs
-attrs_list_member (attrs list, tree decl, HOST_WIDE_INT offset)
+attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
 {
   for (; list; list = list->next)
-    if (list->decl == decl && list->offset == offset)
+    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
       return list;
   return NULL;
 }
@@ -699,13 +902,14 @@ attrs_list_member (attrs list, tree decl
 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */
 
 static void
-attrs_list_insert (attrs *listp, tree decl, HOST_WIDE_INT offset, rtx loc)
+attrs_list_insert (attrs *listp, decl_or_value dv,
+		   HOST_WIDE_INT offset, rtx loc)
 {
   attrs list;
 
   list = (attrs) pool_alloc (attrs_pool);
   list->loc = loc;
-  list->decl = decl;
+  list->dv = dv;
   list->offset = offset;
   list->next = *listp;
   *listp = list;
@@ -723,7 +927,7 @@ attrs_list_copy (attrs *dstp, attrs src)
     {
       n = (attrs) pool_alloc (attrs_pool);
       n->loc = src->loc;
-      n->decl = src->decl;
+      n->dv = src->dv;
       n->offset = src->offset;
       n->next = *dstp;
       *dstp = n;
@@ -737,8 +941,28 @@ attrs_list_union (attrs *dstp, attrs src
 {
   for (; src; src = src->next)
     {
-      if (!attrs_list_member (*dstp, src->decl, src->offset))
-	attrs_list_insert (dstp, src->decl, src->offset, src->loc);
+      if (!attrs_list_member (*dstp, src->dv, src->offset))
+	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
+    }
+}
+
+/* Combine nodes that are not onepart nodes from SRC and SRC2 into
+   *DSTP.  */
+
+static void
+attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
+{
+  gcc_assert (!*dstp);
+  for (; src; src = src->next)
+    {
+      if (!dv_onepart_p (src->dv))
+	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
+    }
+  for (src = src2; src; src = src->next)
+    {
+      if (!dv_onepart_p (src->dv)
+	  && !attrs_list_member (*dstp, src->dv, src->offset))
+	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
     }
 }
 
@@ -753,15 +977,14 @@ vars_clear (htab_t vars)
 /* Return a copy of a variable VAR and insert it to dataflow set SET.  */
 
 static variable
-unshare_variable (dataflow_set *set, variable var, 
+unshare_variable (void **slot, variable var,
 		  enum var_init_status initialized)
 {
-  void **slot;
   variable new_var;
   int i;
 
-  new_var = (variable) pool_alloc (var_pool);
-  new_var->decl = var->decl;
+  new_var = (variable) pool_alloc (dv_pool (var->dv));
+  new_var->dv = var->dv;
   new_var->refcount = 1;
   var->refcount--;
   new_var->n_var_parts = var->n_var_parts;
@@ -801,9 +1024,6 @@ unshare_variable (dataflow_set *set, var
 	new_var->var_part[i].cur_loc = NULL;
     }
 
-  slot = htab_find_slot_with_hash (set->vars, new_var->decl,
-				   VARIABLE_HASH_VAL (new_var->decl),
-				   INSERT);
   *slot = new_var;
   return new_var;
 }
@@ -815,14 +1035,15 @@ static int
 vars_copy_1 (void **slot, void *data)
 {
   htab_t dst = (htab_t) data;
-  variable src, *dstp;
+  variable src;
+  void **dstp;
 
-  src = *(variable *) slot;
+  src = (variable) *slot;
   src->refcount++;
 
-  dstp = (variable *) htab_find_slot_with_hash (dst, src->decl,
-						VARIABLE_HASH_VAL (src->decl),
-						INSERT);
+  dstp = htab_find_slot_with_hash (dst, &src->dv,
+				   dv_htab_hash (src->dv),
+				   INSERT);
   *dstp = src;
 
   /* Continue traversing the hash table.  */
@@ -851,28 +1072,43 @@ var_debug_decl (tree decl)
   return decl;
 }
 
-/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  */
+/* Set the register LOC to contain DV, OFFSET.  */
 
 static void
-var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized, 
-	     rtx set_src)
+var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
+		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
+		  enum insert_option iopt)
 {
-  tree decl = REG_EXPR (loc);
-  HOST_WIDE_INT offset = REG_OFFSET (loc);
   attrs node;
+  bool decl_p = dv_is_decl_p (dv);
 
-  decl = var_debug_decl (decl);
+  if (decl_p)
+    dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
 
   for (node = set->regs[REGNO (loc)]; node; node = node->next)
-    if (node->decl == decl && node->offset == offset)
+    if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
+	&& node->offset == offset)
       break;
   if (!node)
-    attrs_list_insert (&set->regs[REGNO (loc)], decl, offset, loc);
-  set_variable_part (set, loc, decl, offset, initialized, set_src);
+    attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
+  set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
+}
+
+/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  */
+
+static void
+var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
+	     rtx set_src)
+{
+  tree decl = REG_EXPR (loc);
+  HOST_WIDE_INT offset = REG_OFFSET (loc);
+
+  var_reg_decl_set (set, loc, initialized,
+		    dv_from_decl (decl), offset, set_src, INSERT);
 }
 
 static enum var_init_status
-get_init_value (dataflow_set *set, rtx loc, tree decl)
+get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
 {
   void **slot;
   variable var;
@@ -882,11 +1118,11 @@ get_init_value (dataflow_set *set, rtx l
   if (! flag_var_tracking_uninit)
     return VAR_INIT_STATUS_INITIALIZED;
 
-  slot = htab_find_slot_with_hash (set->vars, decl, VARIABLE_HASH_VAL (decl),
-				   NO_INSERT);
+  slot = htab_find_slot_with_hash (set->vars, &dv,
+				   dv_htab_hash (dv), NO_INSERT);
   if (slot)
     {
-      var = * (variable *) slot;
+      var = (variable) *slot;
       for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
 	{
 	  location_chain nextp;
@@ -921,15 +1157,15 @@ var_reg_delete_and_set (dataflow_set *se
   decl = var_debug_decl (decl);
 
   if (initialized == VAR_INIT_STATUS_UNKNOWN)
-    initialized = get_init_value (set, loc, decl);
+    initialized = get_init_value (set, loc, dv_from_decl (decl));
 
   nextp = &set->regs[REGNO (loc)];
   for (node = *nextp; node; node = next)
     {
       next = node->next;
-      if (node->decl != decl || node->offset != offset)
+      if (dv_as_opaque (node->dv) != decl || node->offset != offset)
 	{
-	  delete_variable_part (set, node->loc, node->decl, node->offset);
+	  delete_variable_part (set, node->loc, node->dv, node->offset);
 	  pool_free (attrs_pool, node);
 	  *nextp = next;
 	}
@@ -940,7 +1176,7 @@ var_reg_delete_and_set (dataflow_set *se
 	}
     }
   if (modify)
-    clobber_variable_part (set, loc, decl, offset, set_src);
+    clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
   var_reg_set (set, loc, initialized, set_src);
 }
 
@@ -961,13 +1197,13 @@ var_reg_delete (dataflow_set *set, rtx l
 
       decl = var_debug_decl (decl);
 
-      clobber_variable_part (set, NULL, decl, offset, NULL);
+      clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
     }
 
   for (node = *reg; node; node = next)
     {
       next = node->next;
-      delete_variable_part (set, node->loc, node->decl, node->offset);
+      delete_variable_part (set, node->loc, node->dv, node->offset);
       pool_free (attrs_pool, node);
     }
   *reg = NULL;
@@ -984,12 +1220,25 @@ var_regno_delete (dataflow_set *set, int
   for (node = *reg; node; node = next)
     {
       next = node->next;
-      delete_variable_part (set, node->loc, node->decl, node->offset);
+      delete_variable_part (set, node->loc, node->dv, node->offset);
       pool_free (attrs_pool, node);
     }
   *reg = NULL;
 }
 
+/* Set the location of DV, OFFSET as the MEM LOC.  */
+
+static void
+var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
+		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
+		  enum insert_option iopt)
+{
+  if (dv_is_decl_p (dv))
+    dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
+
+  set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
+}
+
 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
    SET to LOC.
    Adjust the address first if it is stack pointer based.  */
@@ -1001,9 +1250,8 @@ var_mem_set (dataflow_set *set, rtx loc,
   tree decl = MEM_EXPR (loc);
   HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
 
-  decl = var_debug_decl (decl);
-
-  set_variable_part (set, loc, decl, offset, initialized, set_src);
+  var_mem_decl_set (set, loc, initialized,
+		    dv_from_decl (decl), offset, set_src, INSERT);
 }
 
 /* Delete and set the location part of variable MEM_EXPR (LOC) in
@@ -1023,10 +1271,10 @@ var_mem_delete_and_set (dataflow_set *se
   decl = var_debug_decl (decl);
 
   if (initialized == VAR_INIT_STATUS_UNKNOWN)
-    initialized = get_init_value (set, loc, decl);
+    initialized = get_init_value (set, loc, dv_from_decl (decl));
 
   if (modify)
-    clobber_variable_part (set, NULL, decl, offset, set_src);
+    clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
   var_mem_set (set, loc, initialized, set_src);
 }
 
@@ -1042,8 +1290,186 @@ var_mem_delete (dataflow_set *set, rtx l
 
   decl = var_debug_decl (decl);
   if (clobber)
-    clobber_variable_part (set, NULL, decl, offset, NULL);
-  delete_variable_part (set, loc, decl, offset);
+    clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
+  delete_variable_part (set, loc, dv_from_decl (decl), offset);
+}
+
+/* Map a value to a location it was just stored in.  */
+
+static void
+val_store (dataflow_set *set, rtx val, rtx loc, rtx insn)
+{
+  cselib_val *v = CSELIB_VAL_PTR (val);
+
+  gcc_assert (cselib_preserved_value_p (v));
+
+  if (dump_file)
+    {
+      fprintf (dump_file, "%i: ", INSN_UID (insn));
+      print_inline_rtx (dump_file, val, 0);
+      fprintf (dump_file, " stored in ");
+      print_inline_rtx (dump_file, loc, 0);
+      if (v->locs)
+	{
+	  struct elt_loc_list *l;
+	  for (l = v->locs; l; l = l->next)
+	    {
+	      fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
+	      print_inline_rtx (dump_file, l->loc, 0);
+	    }
+	}
+      fprintf (dump_file, "\n");
+    }
+
+  if (REG_P (loc))
+    {
+      var_regno_delete (set, REGNO (loc));
+      var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
+			dv_from_value (val), 0, NULL_RTX, INSERT);
+    }
+  else if (MEM_P (loc))
+    var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
+		      dv_from_value (val), 0, NULL_RTX, INSERT);
+  else
+    set_variable_part (set, loc, dv_from_value (val), 0,
+		       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
+}
+
+/* Reset the variable for DV in SET, detaching all its equivalences
+   and relinking them to the remaining canonical value, if any.  */
+
+static void
+val_reset (dataflow_set *set, decl_or_value dv)
+{
+  void **slot = htab_find_slot_with_hash (set->vars, &dv, dv_htab_hash (dv),
+					  NO_INSERT);
+  variable var;
+  location_chain node;
+  rtx cval;
+
+  if (!slot)
+    return;
+
+  var = (variable)*slot;
+  if (!var->n_var_parts)
+    return;
+
+  gcc_assert (var->n_var_parts == 1);
+
+  cval = NULL;
+  for (node = var->var_part[0].loc_chain; node; node = node->next)
+    if (GET_CODE (node->loc) == VALUE
+	&& canon_value_cmp (node->loc, cval))
+      cval = node->loc;
+
+  for (node = var->var_part[0].loc_chain; node; node = node->next)
+    if (GET_CODE (node->loc) == VALUE && cval != node->loc)
+      {
+	/* Redirect the equivalence link to the new canonical
+	   value, or simply remove it if it would point at
+	   itself.  */
+	if (cval)
+	  set_variable_part (set, cval, dv_from_value (node->loc),
+			     0, node->init, node->set_src, NO_INSERT);
+	delete_variable_part (set, dv_as_value (dv),
+			      dv_from_value (node->loc), 0);
+      }
+
+  if (cval)
+    {
+      decl_or_value cdv = dv_from_value (cval);
+
+      /* Keep the remaining values connected, accumulating links
+	 in the canonical value.  */
+      for (node = var->var_part[0].loc_chain; node; node = node->next)
+	{
+	  if (node->loc == cval)
+	    continue;
+	  else if (GET_CODE (node->loc) == REG)
+	    var_reg_decl_set (set, node->loc, node->init, cdv, 0,
+			      node->set_src, NO_INSERT);
+	  else if (GET_CODE (node->loc) == MEM)
+	    var_mem_decl_set (set, node->loc, node->init, cdv, 0,
+			      node->set_src, NO_INSERT);
+	  else
+	    set_variable_part (set, node->loc, cdv, 0,
+			       node->init, node->set_src, NO_INSERT);
+	}
+    }
+
+  /* We remove this last, to make sure that the canonical value is not
+     removed to the point of requiring reinsertion.  */
+  if (cval)
+    delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
+
+  clobber_variable_part (set, NULL, dv, 0, NULL);
+
+  /* ??? Should we make sure there aren't other available values or
+     variables whose values involve this one other than by
+     equivalence?  E.g., at the very least we should reset MEMs, those
+     shouldn't be too hard to find cselib-looking up the value as an
+     address, then locating the resulting value in our own hash
+     table.  */
+}
+
+/* Find the values in a given location and map the val to another
+   value, if it is unique, or add the location as one holding the
+   value.  */
+
+static void
+val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
+{
+  decl_or_value dv = dv_from_value (val);
+
+  if (dump_file && flag_verbose_cselib)
+    {
+      if (insn)
+	fprintf (dump_file, "%i: ", INSN_UID (insn));
+      else
+	fprintf (dump_file, "head: ");
+      print_inline_rtx (dump_file, val, 0);
+      fputs (" is at ", dump_file);
+      print_inline_rtx (dump_file, loc, 0);
+      fputc ('\n', dump_file);
+    }
+
+  val_reset (set, dv);
+
+  if (REG_P (loc))
+    {
+      attrs node, found = NULL;
+
+      for (node = set->regs[REGNO (loc)]; node; node = node->next)
+	if (dv_is_value_p (node->dv)
+	    && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
+	  {
+	    found = node;
+
+	    /* Map incoming equivalences.  ??? Wouldn't it be nice if
+	     we just started sharing the location lists?  Maybe a
+	     circular list ending at the value itself or some
+	     such.  */
+	    set_variable_part (set, dv_as_value (node->dv),
+			       dv_from_value (val), node->offset,
+			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
+	    set_variable_part (set, val, node->dv, node->offset,
+			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
+	  }
+
+      /* If we didn't find any equivalence, we need to remember that
+	 this value is held in the named register.  */
+      if (!found)
+	var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
+			  dv_from_value (val), 0, NULL_RTX, INSERT);
+    }
+  else if (MEM_P (loc))
+    /* ??? Merge equivalent MEMs.  */
+    var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
+		      dv_from_value (val), 0, NULL_RTX, INSERT);
+  else
+    /* ??? Merge equivalent expressions.  */
+    set_variable_part (set, loc, dv_from_value (val), 0,
+		       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
 }
 
 /* Initialize dataflow set SET to be empty. 
@@ -1129,14 +1555,15 @@ variable_union_info_cmp_pos (const void 
 static int
 variable_union (void **slot, void *data)
 {
-  variable src, dst, *dstp;
+  variable src, dst;
+  void **dstp;
   dataflow_set *set = (dataflow_set *) data;
   int i, j, k;
 
-  src = *(variable *) slot;
-  dstp = (variable *) htab_find_slot_with_hash (set->vars, src->decl,
-						VARIABLE_HASH_VAL (src->decl),
-						INSERT);
+  src = (variable) *slot;
+  dstp = htab_find_slot_with_hash (set->vars, &src->dv,
+				   dv_htab_hash (src->dv),
+				   INSERT);
   if (!*dstp)
     {
       src->refcount++;
@@ -1162,7 +1589,7 @@ variable_union (void **slot, void *data)
 	  if (! flag_var_tracking_uninit)
 	    status = VAR_INIT_STATUS_INITIALIZED;
 
-	  unshare_variable (set, src, status);
+	  unshare_variable (dstp, src, status);
 	}
       else
 	*dstp = src;
@@ -1171,10 +1598,67 @@ variable_union (void **slot, void *data)
       return 1;
     }
   else
-    dst = *dstp;
+    dst = (variable) *dstp;
 
   gcc_assert (src->n_var_parts);
 
+  /* We can combine one-part variables very efficiently, because their
+     entries are in canonical order.  */
+  if (dv_onepart_p (src->dv))
+    {
+      location_chain *nodep, dnode, snode;
+
+      gcc_assert (src->n_var_parts == 1);
+      gcc_assert (dst->n_var_parts == 1);
+
+      snode = src->var_part[0].loc_chain;
+      gcc_assert (snode);
+
+    restart_onepart_unshared:
+      nodep = &dst->var_part[0].loc_chain;
+      dnode = *nodep;
+      gcc_assert (dnode);
+
+      while (snode)
+	{
+	  int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
+
+	  if (r > 0)
+	    {
+	      location_chain nnode;
+
+	      if (dst->refcount != 1)
+		{
+		  dst = unshare_variable (dstp, dst,
+					  VAR_INIT_STATUS_INITIALIZED);
+		  goto restart_onepart_unshared;
+		}
+
+	      *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
+	      nnode->loc = snode->loc;
+	      nnode->init = snode->init;
+	      if (!snode->set_src || MEM_P (snode->set_src))
+		nnode->set_src = NULL;
+	      else
+		nnode->set_src = snode->set_src;
+	      nnode->next = dnode;
+	      dnode = nnode;
+	    }
+	  else if (r == 0)
+	    gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
+
+	  if (r >= 0)
+	    snode = snode->next;
+
+	  nodep = &dnode->next;
+	  dnode = *nodep;
+	}
+
+      dst->var_part[0].cur_loc = dst->var_part[0].loc_chain->loc;
+
+      return 1;
+    }
+
   /* Count the number of location parts, result is K.  */
   for (i = 0, j = 0, k = 0;
        i < src->n_var_parts && j < dst->n_var_parts; k++)
@@ -1194,7 +1678,7 @@ variable_union (void **slot, void *data)
 
   /* We track only variables whose size is <= MAX_VAR_PARTS bytes
      thus there are at most MAX_VAR_PARTS different offsets.  */
-  gcc_assert (k <= MAX_VAR_PARTS);
+  gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
 
   if (dst->refcount > 1 && dst->n_var_parts != k)
     {
@@ -1202,7 +1686,7 @@ variable_union (void **slot, void *data)
       
       if (! flag_var_tracking_uninit)
 	status = VAR_INIT_STATUS_INITIALIZED;
-      dst = unshare_variable (set, dst, status);
+      dst = unshare_variable (dstp, dst, status);
     }
 
   i = src->n_var_parts - 1;
@@ -1243,7 +1727,7 @@ variable_union (void **slot, void *data)
 		    }
 		}
 	      if (node || node2)
-		dst = unshare_variable (set, dst, VAR_INIT_STATUS_UNKNOWN);
+		dst = unshare_variable (dstp, dst, VAR_INIT_STATUS_UNKNOWN);
 	    }
 
 	  src_l = 0;
@@ -1315,7 +1799,7 @@ variable_union (void **slot, void *data)
 	  dst->var_part[k].loc_chain = vui[0].lc;
 	  dst->var_part[k].offset = dst->var_part[j].offset;
 
-	  free (vui);
+	  XDELETEVEC (vui);
 	  i--;
 	  j--;
 	}
@@ -1392,55 +1876,1662 @@ dataflow_set_union (dataflow_set *dst, d
   htab_traverse (src->vars, variable_union, dst);
 }
 
-/* Flag whether two dataflow sets being compared contain different data.  */
-static bool
-dataflow_set_different_value;
+/* Whether the value is currently being expanded.  */
+#define VALUE_RECURSED_INTO(x) \
+  (RTL_FLAG_CHECK1 ("VALUE_RECURSED_INTO", (x), VALUE)->used)
+
+/* Return a location list node whose loc is rtx_equal to LOC, in the
+   location list of a one-part variable or value VAR, or in that of
+   any values recursively mentioned in the location lists.  */
 
-static bool
-variable_part_different_p (variable_part *vp1, variable_part *vp2)
+static location_chain
+find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
 {
-  location_chain lc1, lc2;
+  location_chain node;
 
-  for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
-    {
-      for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
-	{
-	  if (REG_P (lc1->loc) && REG_P (lc2->loc))
-	    {
-	      if (REGNO (lc1->loc) == REGNO (lc2->loc))
-		break;
-	    }
-	  if (rtx_equal_p (lc1->loc, lc2->loc))
-	    break;
-	}
-      if (!lc2)
-	return true;
-    }
-  return false;
+  if (!var)
+    return NULL;
+
+  gcc_assert (dv_onepart_p (var->dv));
+
+  if (!var->n_var_parts)
+    return NULL;
+
+  gcc_assert (var->var_part[0].offset == 0);
+
+  for (node = var->var_part[0].loc_chain; node; node = node->next)
+    if (rtx_equal_p (loc, node->loc))
+      return node;
+    else if (GET_CODE (node->loc) == VALUE
+	     && !VALUE_RECURSED_INTO (node->loc))
+      {
+	decl_or_value dv = dv_from_value (node->loc);
+	void **slot = htab_find_slot_with_hash (vars, &dv, dv_htab_hash (dv),
+						NO_INSERT);
+
+	if (slot)
+	  {
+	    location_chain where;
+	    VALUE_RECURSED_INTO (node->loc) = true;
+	    if ((where = find_loc_in_1pdv (loc, (variable)*slot, vars)))
+	      {
+		VALUE_RECURSED_INTO (node->loc) = false;
+		return where;
+	      }
+	    VALUE_RECURSED_INTO (node->loc) = false;
+	  }
+      }
+
+  return NULL;
 }
 
-/* Return true if variables VAR1 and VAR2 are different.
-   If COMPARE_CURRENT_LOCATION is true compare also the cur_loc of each
-   variable part.  */
+/* Hash table iteration argument passed to variable_merge.  */
+struct dfset_merge
+{
+  /* The set in which the merge is to be inserted.  */
+  dataflow_set *dst;
+  /* The set that we're iterating in.  */
+  dataflow_set *cur;
+  /* The set that may contain the other dv we are to merge with.  */
+  dataflow_set *src;
+};
 
-static bool
-variable_different_p (variable var1, variable var2,
-		      bool compare_current_location)
+/* Insert LOC in *DNODE, if it's not there yet.  The list must be in
+   loc_cmp order, and it is maintained as such.  */
+
+static void
+insert_into_intersection (location_chain *nodep, rtx loc,
+			  enum var_init_status status)
 {
-  int i;
+  location_chain node;
+  int r;
 
-  if (var1 == var2)
-    return false;
+  for (node = *nodep; node; nodep = &node->next, node = *nodep)
+    if ((r = loc_cmp (node->loc, loc)) == 0)
+      {
+	node->init = MIN (node->init, status);
+	return;
+      }
+    else if (r > 0)
+      break;
 
-  if (var1->n_var_parts != var2->n_var_parts)
-    return true;
+  node = (location_chain) pool_alloc (loc_chain_pool);
 
-  for (i = 0; i < var1->n_var_parts; i++)
+  node->loc = loc;
+  node->set_src = NULL;
+  node->init = status;
+  node->next = *nodep;
+  *nodep = node;
+}
+
+/* Insert in DEST the intersection of the locations present in both
+   S1NODE and S2VAR, directly or indirectly.  S1NODE is from a
+   variable in DSM->cur, whereas S2VAR is from DSM->src.  dvar is in
+   DSM->dst.  */
+
+static void
+intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
+		      location_chain s1node, variable s2var)
+{
+  dataflow_set *s1set = dsm->cur;
+  dataflow_set *s2set = dsm->src;
+  location_chain found;
+
+  for (; s1node; s1node = s1node->next)
     {
-      if (var1->var_part[i].offset != var2->var_part[i].offset)
-	return true;
-      if (compare_current_location)
-	{
+      if (s1node->loc == val)
+	continue;
+
+      if ((found = find_loc_in_1pdv (s1node->loc, s2var, s2set->vars)))
+	{
+	  insert_into_intersection (dest, s1node->loc,
+				    MIN (s1node->init, found->init));
+	  continue;
+	}
+
+      if (GET_CODE (s1node->loc) == VALUE
+	  && !VALUE_RECURSED_INTO (s1node->loc))
+	{
+	  decl_or_value dv = dv_from_value (s1node->loc);
+	  void **slot = htab_find_slot_with_hash (s1set->vars,
+						  &dv, dv_htab_hash (dv),
+						  NO_INSERT);
+
+	  if (slot)
+	    {
+	      variable svar = (variable)*slot;
+
+	      if (svar->n_var_parts == 1)
+		{
+		  VALUE_RECURSED_INTO (s1node->loc) = true;
+		  intersect_loc_chains (val, dest, dsm,
+					svar->var_part[0].loc_chain,
+					s2var);
+		  VALUE_RECURSED_INTO (s1node->loc) = false;
+		}
+	    }
+	}
+
+      /* ??? if the location is equivalent to any location in src,
+	 searched recursively
+
+	   add to dst the values needed to represent the equivalence
+
+     telling whether locations S is equivalent to another dv's
+     location list:
+
+       for each location D in the list
+
+         if S and D satisfy rtx_equal_p, then it is present
+
+	 else if D is a value, recurse without cycles
+
+	 else if S and D have the same CODE and MODE
+
+	   for each operand oS and the corresponding oD
+
+	     if oS and oD are not equivalent, then S an D are not equivalent
+
+	     else if they are RTX vectors
+
+	       if any vector oS element is not equivalent to its respective oD,
+	       then S and D are not equivalent
+
+   */
+
+
+    }
+}
+
+/* Determine a total order between two distinct pointers.  Compare the
+   pointers as integral types if size_t is wide enough, otherwise
+   resort to bitwise memory compare.  The actual order does not
+   matter, we just need to be consistent, so endianness is
+   irrelevant.  */
+
+static int
+tie_break_pointers (const void *p1, const void *p2)
+{
+  gcc_assert (p1 != p2);
+
+  if (sizeof (size_t) >= sizeof (void*))
+    return (size_t)p1 < (size_t)p2 ? -1 : 1;
+  else
+    return memcmp (&p1, &p2, sizeof (p1));
+}
+
+/* Return true if TVAL is better than CVAL as a canonical value.  We
+   choose lowest-numbered VALUEs, using the RTX address as a
+   tie-breaker.  The idea is to arrange them into a star topology,
+   such that all of them are at most one step away from the canonical
+   value, and the canonical value has backlinks to all of them, in
+   addition to all the actual locations.  We don't enforce this
+   topology throughout the entire dataflow analysis, though.
+ */
+
+static bool
+canon_value_cmp (rtx tval, rtx cval)
+{
+  return !cval
+    || CSELIB_VAL_PTR (tval)->value < CSELIB_VAL_PTR (cval)->value
+    || (CSELIB_VAL_PTR (tval)->value == CSELIB_VAL_PTR (cval)->value
+	&& tie_break_pointers (tval, cval) < 0);
+}
+
+/* Return -1 if X should be before Y in a location list for a 1-part
+   variable, 1 if Y should be before X, and 0 if they're equivalent
+   and should not appear in the list.  */
+
+static int
+loc_cmp (rtx x, rtx y)
+{
+  int i, j, r;
+  RTX_CODE code = GET_CODE (x);
+  const char *fmt;
+
+  if (x == y)
+    return 0;
+
+  if (REG_P (x))
+    {
+      if (!REG_P (y))
+	return -1;
+      gcc_assert (GET_MODE (x) == GET_MODE (y));
+      if (REGNO (x) == REGNO (y))
+	return 0;
+      else if (REGNO (x) < REGNO (y))
+	return -1;
+      else
+	return 1;
+    }
+
+  if (REG_P (y))
+    return 1;
+
+  if (MEM_P (x))
+    {
+      if (!MEM_P (y))
+	return -1;
+      gcc_assert (GET_MODE (x) == GET_MODE (y));
+      return loc_cmp (XEXP (x, 0), XEXP (y, 0));
+    }
+
+  if (MEM_P (y))
+    return 1;
+
+  if (GET_CODE (x) == VALUE)
+    {
+      if (GET_CODE (y) != VALUE)
+	return -1;
+      gcc_assert (GET_MODE (x) == GET_MODE (y));
+      if (canon_value_cmp (x, y))
+	return -1;
+      else
+	return 1;
+    }
+
+  if (GET_CODE (y) == VALUE)
+    return 1;
+
+  if (GET_CODE (x) == GET_CODE (y))
+    /* Compare operands below.  */;
+  else if (GET_CODE (x) < GET_CODE (y))
+    return -1;
+  else
+    return 1;
+
+  gcc_assert (GET_MODE (x) == GET_MODE (y));
+
+  fmt = GET_RTX_FORMAT (code);
+  for (i = 0; i < GET_RTX_LENGTH (code); i++)
+    switch (fmt[i])
+      {
+      case 'w':
+	if (XWINT (x, i) == XWINT (y, i))
+	  break;
+	else if (XWINT (x, i) < XWINT (y, i))
+	  return -1;
+	else
+	  return 1;
+
+      case 'n':
+      case 'i':
+	if (XINT (x, i) == XINT (y, i))
+	  break;
+	else if (XINT (x, i) < XINT (y, i))
+	  return -1;
+	else
+	  return 1;
+
+      case 'V':
+      case 'E':
+	/* Compare the vector length first.  */
+	if (XVECLEN (x, i) == XVECLEN (y, i))
+	  /* Compare the vectors elements.  */;
+	else if (XVECLEN (x, i) < XVECLEN (y, i))
+	  return -1;
+	else
+	  return 1;
+
+	for (j = 0; j < XVECLEN (x, i); j++)
+	  if ((r = loc_cmp (XVECEXP (x, i, j),
+			    XVECEXP (y, i, j))))
+	    return r;
+	break;
+
+      case 'e':
+	if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
+	  return r;
+	break;
+
+      case 'S':
+      case 's':
+	if (XSTR (x, i) == XSTR (y, i))
+	  break;
+	if (!XSTR (x, i))
+	  return -1;
+	if (!XSTR (y, i))
+	  return 1;
+	if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
+	  break;
+	else if (r < 0)
+	  return -1;
+	else
+	  return 1;
+
+      case 'u':
+	/* These are just backpointers, so they don't matter.  */
+	break;
+
+      case '0':
+      case 't':
+	break;
+
+	/* It is believed that rtx's at this level will never
+	   contain anything but integers and other rtx's,
+	   except for within LABEL_REFs and SYMBOL_REFs.  */
+      default:
+	gcc_unreachable ();
+      }
+
+  return 0;
+}
+
+#if ENABLE_CHECKING
+/* Hash table traversal callback: verify that the location chain of
+   the one-part variable in *SLOT is sorted in strictly ascending
+   loc_cmp order.  Multi-part variables are skipped.  Always returns
+   1 so that htab_traverse visits every entry.  */
+
+static int
+canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
+{
+  variable var = (variable) *slot;
+  decl_or_value dv = var->dv;
+  location_chain node, next;
+
+  if (!dv_onepart_p (dv))
+    return 1;
+
+  gcc_assert (var->n_var_parts == 1);
+  node = var->var_part[0].loc_chain;
+  gcc_assert (node);
+
+  /* Each entry must compare strictly before its successor.  */
+  while ((next = node->next))
+    {
+      gcc_assert (loc_cmp (node->loc, next->loc) < 0);
+      node = next;
+    }
+
+  return 1;
+}
+#endif
+
+/* Mark with VALUE_RECURSED_INTO values that have neighbors that are
+   more likely to be chosen as canonical for an equivalence set.
+   Ensure less likely values can reach more likely neighbors, making
+   the connections bidirectional.  Intended as an htab_traverse
+   callback over SET->vars; always returns 1 to continue.  */
+
+static int
+canonicalize_values_mark (void **slot, void *data)
+{
+  dataflow_set *set = (dataflow_set *)data;
+  variable var = (variable) *slot;
+  decl_or_value dv = var->dv;
+  rtx val;
+  location_chain node;
+
+  if (!dv_is_value_p (dv))
+    return 1;
+
+  gcc_assert (var->n_var_parts == 1);
+
+  val = dv_as_value (dv);
+
+  for (node = var->var_part[0].loc_chain; node; node = node->next)
+    if (GET_CODE (node->loc) == VALUE)
+      {
+	if (canon_value_cmp (node->loc, val))
+	  /* The neighbor is more canonical than VAL, so mark VAL for
+	     the subsequent star-canonicalization pass.  */
+	  VALUE_RECURSED_INTO (val) = true;
+	else
+	  {
+	    /* VAL is more canonical: make sure the neighbor's own
+	       location chain links back to VAL, then mark the
+	       neighbor instead.  */
+	    decl_or_value odv = dv_from_value (node->loc);
+	    void **oslot = htab_find_slot_with_hash (set->vars, &odv,
+						     dv_htab_hash (odv),
+						     NO_INSERT);
+#if 0 && ENABLE_CHECKING
+	    variable ovar;
+	    location_chain onode;
+
+	    gcc_assert (oslot);
+	    ovar = (variable)*oslot;
+	    gcc_assert (ovar->n_var_parts == 1);
+	    for (onode = ovar->var_part[0].loc_chain; onode;
+		 onode = onode->next)
+	      if (onode->loc == val)
+		break;
+
+	    gcc_assert (onode);
+
+	    /* ??? Remove this in case the assertion above never fails.  */
+	    if (!onode)
+#endif
+	      set_slot_part (set, val, oslot, odv, 0, node->init, NULL_RTX);
+
+	    VALUE_RECURSED_INTO (node->loc) = true;
+	  }
+      }
+
+  return 1;
+}
+
+/* Remove redundant entries from equivalence lists in onepart
+   variables, canonicalizing equivalence sets into star shapes, with
+   the most canonical VALUE (per canon_value_cmp) at the center.
+   htab_traverse callback over SET->vars; relies on the marks set by
+   canonicalize_values_mark.  Always returns 1 to continue.  */
+
+static int
+canonicalize_values_star (void **slot, void *data)
+{
+  dataflow_set *set = (dataflow_set *)data;
+  variable var = (variable) *slot;
+  decl_or_value dv = var->dv;
+  location_chain node;
+  decl_or_value cdv;
+  rtx val, cval;
+  void **cslot;
+  bool has_value;
+  bool has_marks;
+
+  if (!dv_onepart_p (dv))
+    return 1;
+
+  gcc_assert (var->n_var_parts == 1);
+
+  if (dv_is_value_p (dv))
+    {
+      cval = dv_as_value (dv);
+      /* Only visit values that were marked for canonicalization.  */
+      if (!VALUE_RECURSED_INTO (cval))
+	return 1;
+      VALUE_RECURSED_INTO (cval) = false;
+    }
+  else
+    cval = NULL_RTX;
+
+ restart:
+  val = cval;
+  has_value = false;
+  has_marks = false;
+
+  gcc_assert (var->n_var_parts == 1);
+
+  /* Find the most canonical VALUE (CVAL) among this chain's
+     entries.  */
+  for (node = var->var_part[0].loc_chain; node; node = node->next)
+    if (GET_CODE (node->loc) == VALUE)
+      {
+	has_value = true;
+	if (VALUE_RECURSED_INTO (node->loc))
+	  has_marks = true;
+	if (canon_value_cmp (node->loc, cval))
+	  cval = node->loc;
+      }
+
+  if (!has_value)
+    return 1;
+
+  if (cval == val)
+    {
+      /* This entry is already the most canonical of its neighbors.  */
+      if (!has_marks || dv_is_decl_p (dv))
+	return 1;
+
+      /* Keep it marked so that we revisit it, either after visiting a
+	 child node, or after visiting a new parent that might be
+	 found out.  */
+      VALUE_RECURSED_INTO (val) = true;
+
+      for (node = var->var_part[0].loc_chain; node; node = node->next)
+	if (GET_CODE (node->loc) == VALUE
+	    && VALUE_RECURSED_INTO (node->loc))
+	  {
+	    cval = node->loc;
+	  restart_with_cval:
+	    VALUE_RECURSED_INTO (cval) = false;
+	    dv = dv_from_value (cval);
+	    slot = htab_find_slot_with_hash (set->vars, &dv, dv_htab_hash (dv),
+					     NO_INSERT);
+	    if (!slot)
+	      {
+		gcc_assert (dv_is_decl_p (var->dv));
+		/* The canonical value was reset and dropped.
+		   Remove it.  */
+		clobber_variable_part (set, NULL, var->dv, 0, NULL);
+		return 1;
+	      }
+	    var = (variable)*slot;
+	    gcc_assert (dv_is_value_p (var->dv));
+	    if (var->n_var_parts == 0)
+	      return 1;
+	    gcc_assert (var->n_var_parts == 1);
+	    goto restart;
+	  }
+
+      VALUE_RECURSED_INTO (val) = false;
+
+      return 1;
+    }
+
+  /* Push values to the canonical one.  */
+  cdv = dv_from_value (cval);
+  cslot = htab_find_slot_with_hash (set->vars, &cdv, dv_htab_hash (cdv),
+				    NO_INSERT);
+
+  for (node = var->var_part[0].loc_chain; node; node = node->next)
+    if (node->loc != cval)
+      {
+	set_slot_part (set, node->loc, cslot, cdv, 0,
+		       node->init, NULL_RTX);
+	if (GET_CODE (node->loc) == VALUE)
+	  {
+	    decl_or_value ndv = dv_from_value (node->loc);
+
+	    set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
+			       NO_INSERT);
+
+	    if (canon_value_cmp (node->loc, val))
+	      {
+		/* If it could have been a local minimum, it's not any more,
+		   since it's now neighbor to cval, so it may have to push
+		   to it.  Conversely, if it wouldn't have prevailed over
+		   val, then whatever mark it has is fine: if it was to
+		   push, it will now push to a more canonical node, but if
+		   it wasn't, then it has already pushed any values it might
+		   have to.  */
+		VALUE_RECURSED_INTO (node->loc) = true;
+		/* Make sure we visit node->loc by ensuring that cval is
+		   visited too.  */
+		VALUE_RECURSED_INTO (cval) = true;
+	      }
+	    else if (!VALUE_RECURSED_INTO (node->loc))
+	      /* If we have no need to "recurse" into this node, it's
+		 already "canonicalized", so drop the link to the old
+		 parent.  */
+	      clobber_variable_part (set, cval, ndv, 0, NULL);
+	  }
+	else if (GET_CODE (node->loc) == REG)
+	  {
+	    attrs list = set->regs[REGNO (node->loc)], *listp;
+
+	    /* Change an existing attribute referring to dv so that it
+	       refers to cdv, removing any duplicate this might
+	       introduce, and checking that no previous duplicates
+	       existed, all in a single pass.  */
+
+	    while (list)
+	      {
+		if (list->offset == 0
+		    && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
+			|| dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
+		  break;
+
+		list = list->next;
+	      }
+
+	    gcc_assert (list);
+	    if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
+	      {
+		/* Retarget the attribute at CDV and drop any later
+		   duplicate referring to CDV.  */
+		list->dv = cdv;
+		for (listp = &list->next; (list = *listp); listp = &list->next)
+		  {
+		    if (list->offset)
+		      continue;
+
+		    if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
+		      {
+			*listp = list->next;
+			pool_free (attrs_pool, list);
+			list = *listp;
+			break;
+		      }
+
+		    gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
+		  }
+	      }
+	    else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
+	      {
+		/* CDV came first; drop the later entry for DV.  */
+		for (listp = &list->next; (list = *listp); listp = &list->next)
+		  {
+		    if (list->offset)
+		      continue;
+
+		    if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
+		      {
+			*listp = list->next;
+			pool_free (attrs_pool, list);
+			list = *listp;
+			break;
+		      }
+
+		    gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
+		  }
+	      }
+	    else
+	      gcc_unreachable ();
+
+#if ENABLE_CHECKING
+	    while (list)
+	      {
+		if (list->offset == 0
+		    && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
+			|| dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
+		  gcc_unreachable ();
+
+		list = list->next;
+	      }
+#endif
+	  }
+      }
+
+  if (val)
+    {
+#if 0 && ENABLE_CHECKING
+      variable cvar = (variable)*cslot;
+
+      gcc_assert (cvar->n_var_parts == 1);
+      for (node = cvar->var_part[0].loc_chain; node; node = node->next)
+	if (node->loc == val)
+	  break;
+
+      gcc_assert (node);
+
+      /* ??? Remove this in case the assertion above never fails.  */
+      if (!node)
+#endif
+	set_slot_part (set, val, cslot, cdv, 0,
+		       VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
+    }
+
+  /* Reduce this entry's chain to the single canonical value.  */
+  clobber_slot_part (set, cval, slot, 0, NULL);
+
+  /* Variable may have been unshared.  */
+  var = (variable)*slot;
+  gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
+	      && var->var_part[0].loc_chain->next == NULL);
+
+  if (VALUE_RECURSED_INTO (cval))
+    goto restart_with_cval;
+
+  return 1;
+}
+
+/* Combine variable or value in *S1SLOT (in DSM->cur) with the
+   corresponding entry in DSM->src.  Multi-part variables are combined
+   with variable_union, whereas onepart dvs are combined with
+   intersection.  The merged result is stored in DSM->dst.  */
+
+static int
+variable_merge_over_cur (void **s1slot, void *data)
+{
+  struct dfset_merge *dsm = (struct dfset_merge *)data;
+  dataflow_set *dst = dsm->dst;
+  void **s2slot, **dstslot;
+  variable s1var = (variable) *s1slot;
+  variable s2var, dvar = NULL;
+  decl_or_value dv = s1var->dv;
+  bool onepart = dv_onepart_p (dv);
+  rtx val;
+  hashval_t dvhash;
+  location_chain node, *nodep;
+
+  /* If the incoming onepart variable has an empty location list, then
+     the intersection will be just as empty.  For other variables,
+     it's always union.  */
+  gcc_assert (s1var->n_var_parts);
+  gcc_assert (s1var->var_part[0].loc_chain);
+
+  if (!onepart)
+    return variable_union (s1slot, dst);
+
+  gcc_assert (s1var->n_var_parts == 1);
+  gcc_assert (s1var->var_part[0].offset == 0);
+
+  dvhash = dv_htab_hash (dv);
+  if (dv_is_value_p (dv))
+    val = dv_as_value (dv);
+  else
+    val = NULL;
+
+  /* No entry in the other input set means an empty intersection.  */
+  s2slot = htab_find_slot_with_hash (dsm->src->vars, &dv, dvhash, NO_INSERT);
+  if (!s2slot)
+    return 1;
+
+  s2var = (variable) *s2slot;
+
+  gcc_assert (s2var->var_part[0].loc_chain);
+  gcc_assert (s2var->n_var_parts == 1);
+  gcc_assert (s2var->var_part[0].offset == 0);
+
+  dstslot = htab_find_slot_with_hash (dst->vars, &dv, dvhash, NO_INSERT);
+
+  if (dstslot)
+    {
+      dvar = (variable)*dstslot;
+      gcc_assert (dvar->refcount == 1);
+      gcc_assert (dvar->n_var_parts == 1);
+      gcc_assert (dvar->var_part[0].offset == 0);
+      nodep = &dvar->var_part[0].loc_chain;
+    }
+  else
+    {
+      nodep = &node;
+      node = NULL;
+    }
+
+  if (!dstslot && !onepart_variable_different_p (s1var, s2var))
+    {
+      /* Both inputs agree, so share S1VAR instead of building the
+	 intersection explicitly.  */
+      dstslot = htab_find_slot_with_hash (dst->vars, &dv, dvhash,
+					  INSERT);
+      *dstslot = dvar = s1var;
+      dvar->refcount++;
+    }
+  else
+    {
+      intersect_loc_chains (val, nodep, dsm,
+			    s1var->var_part[0].loc_chain, s2var);
+
+      if (!dstslot)
+	{
+	  if (node)
+	    {
+	      dvar = (variable) pool_alloc (dv_pool (dv));
+	      dvar->dv = dv;
+	      dvar->refcount = 1;
+	      dvar->n_var_parts = 1;
+	      dvar->var_part[0].offset = 0;
+	      dvar->var_part[0].loc_chain = node;
+	      dvar->var_part[0].cur_loc = node->loc;
+
+	      dstslot = htab_find_slot_with_hash (dst->vars, &dv, dvhash,
+						  INSERT);
+	      gcc_assert (!*dstslot);
+	      *dstslot = dvar;
+	    }
+	  else
+	    return 1;
+	}
+    }
+
+  /* Register REG attributes in DST for locations in the intersected
+     chain, resolving conflicts with values already occupying the
+     register.  */
+  nodep = &dvar->var_part[0].loc_chain;
+  while ((node = *nodep))
+    {
+      location_chain *nextp = &node->next;
+
+      if (GET_CODE (node->loc) == REG)
+	{
+	  attrs list;
+
+	  for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
+	    if (GET_MODE (node->loc) == GET_MODE (list->loc)
+		&& dv_is_value_p (list->dv))
+	      break;
+
+	  if (!list)
+	    attrs_list_insert (&dst->regs[REGNO (node->loc)],
+			       dv, 0, node->loc);
+	  /* If this value became canonical for another value that had
+	     this register, we want to leave it alone.  */
+	  else if (dv_as_value (list->dv) != val)
+	    {
+	      set_slot_part (dst, dv_as_value (list->dv), dstslot, dv, 0,
+			     node->init, NULL_RTX);
+	      delete_slot_part (dst, node->loc, dstslot, 0);
+
+	      /* Since nextp points into the removed node, we can't
+		 use it.  The pointer to the next node moved to nodep.
+		 However, if the variable we're walking is unshared
+		 during our walk, we'll keep walking the location list
+		 of the previously-shared variable, in which case the
+		 node won't have been removed, and we'll want to skip
+		 it.  That's why we test *nodep here.  */
+	      if (*nodep != node)
+		nextp = nodep;
+	    }
+	}
+      else
+	/* Canonicalization puts registers first, so we don't have to
+	   walk it all.  */
+	break;
+      nodep = nextp;
+    }
+
+  /* The slot's variable may have been unshared or replaced above;
+     resynchronize DVAR with the slot's current contents.  */
+  if (dvar != (variable)*dstslot)
+    dvar = (variable)*dstslot;
+  nodep = &dvar->var_part[0].loc_chain;
+
+  if (val)
+    {
+      /* Mark all referenced nodes for canonicalization, and make sure
+	 we have mutual equivalence links.  */
+      VALUE_RECURSED_INTO (val) = true;
+      for (node = *nodep; node; node = node->next)
+	if (GET_CODE (node->loc) == VALUE)
+	  {
+	    VALUE_RECURSED_INTO (node->loc) = true;
+	    set_variable_part (dst, val, dv_from_value (node->loc), 0,
+			       node->init, NULL, INSERT);
+	  }
+
+      dstslot = htab_find_slot_with_hash (dst->vars, &dv, dvhash,
+					  NO_INSERT);
+      gcc_assert (*dstslot == dvar);
+      canonicalize_values_star (dstslot, dst);
+      gcc_assert (dstslot == htab_find_slot_with_hash (dst->vars, &dv,
+						       dvhash, NO_INSERT));
+      dvar = (variable)*dstslot;
+    }
+  else
+    {
+      bool has_value = false, has_other = false;
+
+      /* If we have one value and anything else, we're going to
+	 canonicalize this, so make sure all values have an entry in
+	 the table and are marked for canonicalization.  */
+      for (node = *nodep; node; node = node->next)
+	{
+	  if (GET_CODE (node->loc) == VALUE)
+	    {
+	      /* If this was marked during register canonicalization,
+		 we know we have to canonicalize values.  */
+	      if (has_value)
+		has_other = true;
+	      has_value = true;
+	      if (has_other)
+		break;
+	    }
+	  else
+	    {
+	      has_other = true;
+	      if (has_value)
+		break;
+	    }
+	}
+
+      if (has_value && has_other)
+	{
+	  for (node = *nodep; node; node = node->next)
+	    {
+	      if (GET_CODE (node->loc) == VALUE)
+		{
+		  decl_or_value dv = dv_from_value (node->loc);
+		  void **slot = htab_find_slot_with_hash (dst->vars, &dv,
+							  dv_htab_hash (dv),
+							  INSERT);
+		  if (!*slot)
+		    {
+		      variable var = (variable) pool_alloc (dv_pool (dv));
+		      var->dv = dv;
+		      var->refcount = 1;
+		      var->n_var_parts = 1;
+		      var->var_part[0].offset = 0;
+		      var->var_part[0].loc_chain = NULL;
+		      var->var_part[0].cur_loc = NULL;
+		      *slot = var;
+		    }
+
+		  VALUE_RECURSED_INTO (node->loc) = true;
+		}
+	    }
+
+	  dstslot = htab_find_slot_with_hash (dst->vars, &dv, dvhash,
+					      NO_INSERT);
+	  gcc_assert (*dstslot == dvar);
+	  canonicalize_values_star (dstslot, dst);
+	  gcc_assert (dstslot == htab_find_slot_with_hash (dst->vars, &dv,
+							   dvhash,
+							   NO_INSERT));
+	  dvar = (variable)*dstslot;
+	}
+    }
+
+  /* If the result ended up identical to either input, share that
+     input's variable instead of keeping a private copy.  */
+  if (!onepart_variable_different_p (dvar, s1var))
+    {
+      variable_htab_free (dvar);
+      *dstslot = dvar = s1var;
+      dvar->refcount++;
+    }
+  else if (s2var != s1var && !onepart_variable_different_p (dvar, s2var))
+    {
+      variable_htab_free (dvar);
+      *dstslot = dvar = s2var;
+      dvar->refcount++;
+    }
+  else if (dvar->refcount == 1)
+    dvar->var_part[0].cur_loc = dvar->var_part[0].loc_chain->loc;
+
+  return 1;
+}
+
+/* Combine variable in *S2SLOT (in DSM->src) with the corresponding
+   entry in DSM->dst.  Only multi-part variables are combined, using
+   variable_union.  onepart dvs were already combined with
+   intersection in variable_merge_over_cur().  */
+
+static int
+variable_merge_over_src (void **s2slot, void *data)
+{
+  struct dfset_merge *dsm = (struct dfset_merge *)data;
+  dataflow_set *dst = dsm->dst;
+  variable s2var = (variable) *s2slot;
+  decl_or_value dv = s2var->dv;
+  bool onepart = dv_onepart_p (dv);
+
+  if (!onepart)
+    return variable_union (s2slot, dst);
+
+  return 1;
+}
+
+/* Combine dataflow set information from SRC into DST, using DST's
+   previous contents as the second merge input.  DST is
+   re-initialized; multi-part variables are merged by union, one-part
+   ones by intersection (see variable_merge_over_cur).  */
+
+static void
+dataflow_set_merge (dataflow_set *dst, dataflow_set *src)
+{
+  /* Snapshot DST's current contents before re-initializing it.  */
+  dataflow_set src2 = *dst;
+  struct dfset_merge dsm;
+  int i;
+
+  dataflow_set_init (dst, MAX (htab_elements (src->vars),
+			       htab_elements (src2.vars)));
+
+  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+    attrs_list_mpdv_union (&dst->regs[i], src->regs[i], src2.regs[i]);
+
+  dsm.dst = dst;
+  dsm.src = &src2;
+  dsm.cur = src;
+
+  htab_traverse (dsm.cur->vars, variable_merge_over_cur, &dsm);
+  htab_traverse (dsm.src->vars, variable_merge_over_src, &dsm);
+
+  dataflow_set_destroy (&src2);
+}
+
+/* Mark register equivalences: for each hard register in SET, pick
+   the most canonical VALUE known to live in it (per mode), record
+   mutual equivalences between that VALUE and the register's other
+   one-part attributes, then re-canonicalize the affected sets.  */
+
+static void
+dataflow_set_equiv_regs (dataflow_set *set)
+{
+  int i;
+  attrs list, *listp;
+
+  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+    {
+      rtx canon[NUM_MACHINE_MODES];
+
+      memset (canon, 0, sizeof (canon));
+
+      /* First pass: find, per machine mode, the most canonical VALUE
+	 attribute of register I.  */
+      for (list = set->regs[i]; list; list = list->next)
+	if (list->offset == 0 && dv_is_value_p (list->dv))
+	  {
+	    rtx val = dv_as_value (list->dv);
+	    rtx *cvalp = &canon[(int)GET_MODE (val)];
+	    rtx cval = *cvalp;
+
+	    if (canon_value_cmp (val, cval))
+	      *cvalp = val;
+	  }
+
+      /* Second pass: record mutual equivalences between the canonical
+	 value and every other one-part attribute of the register,
+	 marking the values involved for canonicalization.  */
+      for (list = set->regs[i]; list; list = list->next)
+	if (list->offset == 0 && dv_onepart_p (list->dv))
+	  {
+	    rtx cval = canon[(int)GET_MODE (list->loc)];
+
+	    if (!cval)
+	      continue;
+
+	    if (dv_is_value_p (list->dv))
+	      {
+		rtx val = dv_as_value (list->dv);
+
+		if (val == cval)
+		  continue;
+
+		VALUE_RECURSED_INTO (val) = true;
+		set_variable_part (set, val, dv_from_value (cval), 0,
+				   VAR_INIT_STATUS_INITIALIZED,
+				   NULL, NO_INSERT);
+	      }
+
+	    VALUE_RECURSED_INTO (cval) = true;
+	    set_variable_part (set, cval, list->dv, 0,
+			       VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
+	  }
+
+      /* Third pass: canonicalize the marked equivalence sets.  If the
+	 current attribute was removed from the list, *LISTP no longer
+	 points at LIST; LIST is set to NULL so the update expression
+	 re-examines *LISTP instead of advancing past it.  */
+      for (listp = &set->regs[i]; (list = *listp);
+	   listp = list ? &list->next : listp)
+	if (list->offset == 0 && dv_onepart_p (list->dv))
+	  {
+	    rtx cval = canon[(int)GET_MODE (list->loc)];
+	    void **slot;
+
+	    if (!cval)
+	      continue;
+
+	    if (dv_is_value_p (list->dv))
+	      {
+		rtx val = dv_as_value (list->dv);
+		if (!VALUE_RECURSED_INTO (val))
+		  continue;
+	      }
+
+	    slot = htab_find_slot_with_hash (set->vars, &list->dv,
+					     dv_htab_hash (list->dv),
+					     NO_INSERT);
+	    canonicalize_values_star (slot, set);
+	    if (*listp != list)
+	      list = NULL;
+	  }
+    }
+}
+
+/* Remove any redundant values in the location list of VAR, which must
+   be unshared and 1-part.  VALUE_RECURSED_INTO is borrowed as a
+   "seen" flag during the first walk and cleared in the second.  */
+
+static void
+remove_duplicate_values (variable var)
+{
+  location_chain node, *nodep;
+
+  gcc_assert (dv_onepart_p (var->dv));
+  gcc_assert (var->n_var_parts == 1);
+  gcc_assert (var->refcount == 1);
+
+  /* First walk: drop any VALUE already seen earlier in the chain.  */
+  for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
+    {
+      if (GET_CODE (node->loc) == VALUE)
+	{
+	  if (VALUE_RECURSED_INTO (node->loc))
+	    {
+	      /* Remove duplicate value node.  */
+	      *nodep = node->next;
+	      pool_free (loc_chain_pool, node);
+	      continue;
+	    }
+	  else
+	    VALUE_RECURSED_INTO (node->loc) = true;
+	}
+      nodep = &node->next;
+    }
+
+  /* Second walk: clear the marks again.  */
+  for (node = var->var_part[0].loc_chain; node; node = node->next)
+    if (GET_CODE (node->loc) == VALUE)
+      {
+	gcc_assert (VALUE_RECURSED_INTO (node->loc));
+	VALUE_RECURSED_INTO (node->loc) = false;
+      }
+}
+
+
+/* Hash table iteration argument passed to variable_post_merge.  */
+struct dfset_post_merge
+{
+  /* The new input set for the current block.  */
+  dataflow_set *set;
+  /* Pointer to the cell holding the permanent input set for the
+     current block; *PERMP may be NULL and is allocated on demand by
+     the traversal callbacks.  */
+  dataflow_set **permp;
+};
+
+/* Create values for incoming expressions associated with one-part
+   variables that don't have value numbers for them.  htab_traverse
+   callback over INFO->set->vars; always returns 1 to continue.  */
+
+static int
+variable_post_merge_new_vals (void **slot, void *info)
+{
+  struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
+  dataflow_set *set = dfpm->set;
+  variable var = (variable)*slot;
+  location_chain node;
+
+  if (!dv_onepart_p (var->dv) || !var->n_var_parts)
+    return 1;
+
+  gcc_assert (var->n_var_parts == 1);
+
+  if (dv_is_decl_p (var->dv))
+    {
+      bool check_dupes = false;
+
+    restart:
+      for (node = var->var_part[0].loc_chain; node; node = node->next)
+	{
+	  if (GET_CODE (node->loc) == VALUE)
+	    gcc_assert (!VALUE_RECURSED_INTO (node->loc));
+	  else if (GET_CODE (node->loc) == REG)
+	    {
+	      attrs att, *attp, *curp = NULL;
+
+	      /* We're going to modify the chain, so the variable must
+		 be private to us; unshare and rescan if needed.  */
+	      if (var->refcount != 1)
+		{
+		  var = unshare_variable (slot, var,
+					  VAR_INIT_STATUS_INITIALIZED);
+		  goto restart;
+		}
+
+	      /* Look for a VALUE already bound to this register in
+		 the matching mode; remember where this decl's own
+		 attribute is (CURP) along the way.  */
+	      for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
+		   attp = &att->next)
+		if (att->offset == 0
+		    && GET_MODE (att->loc) == GET_MODE (node->loc))
+		  {
+		    if (dv_is_value_p (att->dv))
+		      {
+			rtx cval = dv_as_value (att->dv);
+			node->loc = cval;
+			check_dupes = true;
+			break;
+		      }
+		    else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
+		      curp = attp;
+		  }
+
+	      if (!curp)
+		{
+		  curp = attp;
+		  while (*curp)
+		    if ((*curp)->offset == 0
+			&& GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
+			&& dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
+		      break;
+		    else
+		      curp = &(*curp)->next;
+		  gcc_assert (*curp);
+		}
+
+	      if (!att)
+		{
+		  decl_or_value cdv;
+		  rtx cval;
+
+		  if (!*dfpm->permp)
+		    {
+		      *dfpm->permp = XNEW (dataflow_set);
+		      dataflow_set_init (*dfpm->permp, 7);
+		    }
+
+		  /* Reuse a value previously created for this
+		     register/mode in the permanent set, if any.  */
+		  for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
+		       att; att = att->next)
+		    if (GET_MODE (att->loc) == GET_MODE (node->loc))
+		      {
+			gcc_assert (att->offset == 0);
+			gcc_assert (dv_is_value_p (att->dv));
+			val_reset (set, att->dv);
+			break;
+		      }
+
+		  if (att)
+		    {
+		      cdv = att->dv;
+		      cval = dv_as_value (cdv);
+		    }
+		  else
+		    {
+		      /* Create a unique value to hold this register,
+			 that ought to be found and reused in
+			 subsequent rounds.  */
+		      cselib_val *v;
+		      gcc_assert (!cselib_lookup (node->loc,
+						  GET_MODE (node->loc), 0));
+		      v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
+		      cselib_preserve_value (v);
+		      cselib_invalidate_rtx (node->loc);
+		      cval = v->val_rtx;
+		      cdv = dv_from_value (cval);
+		      if (dump_file)
+			fprintf (dump_file,
+				 "Created new value %i for reg %i\n",
+				 v->value, REGNO (node->loc));
+		    }
+
+		  var_reg_decl_set (*dfpm->permp, node->loc,
+				    VAR_INIT_STATUS_INITIALIZED,
+				    cdv, 0, NULL, INSERT);
+
+		  node->loc = cval;
+		  check_dupes = true;
+		}
+
+	      /* Remove attribute referring to the decl, which now
+		 uses the value for the register, already existing or
+		 to be added when we bring perm in.  */
+	      att = *curp;
+	      *curp = att->next;
+	      pool_free (attrs_pool, att);
+	    }
+#if 0 /* Don't push constants to values.  If you remove this, adjust
+	 the corresponding comment containing 'push constants to
+	 values' below.  */
+	  else if (GET_CODE (node->loc) == CONST_INT
+		   || GET_CODE (node->loc) == CONST_FIXED
+		   || GET_CODE (node->loc) == CONST_DOUBLE
+		   || GET_CODE (node->loc) == SYMBOL_REF)
+	    {
+	      decl_or_value cdv;
+	      rtx cval;
+	      cselib_val *v;
+	      void **oslot;
+
+	      if (var->refcount != 1)
+		{
+		  var = unshare_variable (slot, var,
+					  VAR_INIT_STATUS_INITIALIZED);
+		  goto restart;
+		}
+
+	      v = cselib_lookup (node->loc,
+				 TYPE_MODE (TREE_TYPE (dv_as_decl (var->dv))),
+				 1);
+
+	      if (dump_file)
+		{
+		  fprintf (dump_file, "%s new value %i for ",
+			   cselib_preserved_value_p (v)
+			   ? "Reused" : "Created", v->value);
+		  print_rtl_single (dump_file, node->loc);
+		  fputc ('\n', dump_file);
+		}
+
+	      cselib_preserve_value (v);
+	      cval = v->val_rtx;
+	      cdv = dv_from_value (cval);
+
+	      oslot = htab_find_slot_with_hash
+		(set->vars, &cdv, dv_htab_hash (cdv), NO_INSERT);
+
+	      if (oslot)
+		set_slot_part (set, node->loc, oslot, cdv, 0,
+			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
+	      else
+		{
+		  if (!*dfpm->permp)
+		    {
+		      *dfpm->permp = XNEW (dataflow_set);
+		      dataflow_set_init (*dfpm->permp, 7);
+		    }
+
+		  set_variable_part (*dfpm->permp, node->loc, cdv, 0,
+				     VAR_INIT_STATUS_INITIALIZED, NULL,
+				     NO_INSERT);
+		}
+	      node->loc = cval;
+	      check_dupes = true;
+	    }
+#endif
+	}
+
+      if (check_dupes)
+	remove_duplicate_values (var);
+    }
+
+  return 1;
+}
+
+/* Reset values in the permanent set that are not associated with the
+   chosen expression.  htab_traverse callback over the permanent
+   set's variables; each entry is a VALUE with a single REG location.
+   Always returns 1 to continue.  */
+
+static int
+variable_post_merge_perm_vals (void **pslot, void *info)
+{
+  struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
+  dataflow_set *set = dfpm->set;
+  variable pvar = (variable)*pslot;
+  location_chain pnode;
+  void **slot;
+  decl_or_value dv;
+  attrs att;
+
+  gcc_assert (dv_is_value_p (pvar->dv));
+  gcc_assert (pvar->n_var_parts == 1);
+  pnode = pvar->var_part[0].loc_chain;
+  gcc_assert (pnode);
+  gcc_assert (!pnode->next);
+  gcc_assert (REG_P (pnode->loc));
+
+  dv = pvar->dv;
+
+  slot = htab_find_slot_with_hash (set->vars, &dv, dv_htab_hash (dv),
+				   NO_INSERT);
+
+  if (slot)
+    {
+      /* If the register still reaches this value, leave it alone;
+	 otherwise drop the value's stale associations.  */
+      if (find_loc_in_1pdv (pnode->loc, (variable)*slot, set->vars))
+	return 1;
+      val_reset (set, dv);
+    }
+
+  for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
+    if (att->offset == 0
+	&& GET_MODE (att->loc) == GET_MODE (pnode->loc)
+	&& dv_is_value_p (att->dv))
+      break;
+
+  /* If there is a value associated with this register already, create
+     an equivalence.  */
+  if (att && dv_as_value (att->dv) != dv_as_value (dv))
+    {
+      rtx cval = dv_as_value (att->dv);
+      set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
+      set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
+			 NULL, INSERT);
+    }
+  else if (!att)
+    {
+      /* No value occupies the register yet; bring the permanent
+	 value's binding into SET.  */
+      attrs_list_insert (&set->regs[REGNO (pnode->loc)],
+			 dv, 0, pnode->loc);
+      variable_union (pslot, set);
+    }
+
+  return 1;
+}
+
+/* After a dataflow merge, create values for incoming expressions
+   that lack them, bring in equivalences from the permanent set
+   *PERMP (if any), and re-canonicalize equivalence sets in SET.  */
+
+static void
+dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
+{
+  struct dfset_post_merge dfpm;
+
+  dfpm.set = set;
+  dfpm.permp = permp;
+
+  htab_traverse (set->vars, variable_post_merge_new_vals, &dfpm);
+  if (*permp)
+    htab_traverse ((*permp)->vars, variable_post_merge_perm_vals, &dfpm);
+  htab_traverse (set->vars, canonicalize_values_star, set);
+}
+
+/* Return a node whose loc is a MEM that refers to EXPR in the
+   location list of a one-part variable or value VAL, or in that of
+   any values recursively mentioned in the location lists.  Returns
+   NULL when no such MEM is found.  VALUE_RECURSED_INTO is used as a
+   visited flag to guard against cycles in the value graph.  */
+
+static location_chain
+find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
+{
+  location_chain node;
+  decl_or_value dv;
+  void **slot;
+  variable var;
+  location_chain where = NULL;
+
+  if (!val)
+    return NULL;
+
+  gcc_assert (GET_CODE (val) == VALUE);
+
+  gcc_assert (!VALUE_RECURSED_INTO (val));
+
+  dv = dv_from_value (val);
+  slot = htab_find_slot_with_hash (vars, &dv, dv_htab_hash (dv), NO_INSERT);
+
+  if (!slot)
+    return NULL;
+
+  var = (variable)*slot;
+
+  gcc_assert (var);
+  gcc_assert (dv_onepart_p (var->dv));
+
+  if (!var->n_var_parts)
+    return NULL;
+
+  gcc_assert (var->var_part[0].offset == 0);
+
+  /* Mark VAL so recursive calls below do not revisit it.  */
+  VALUE_RECURSED_INTO (val) = true;
+
+  for (node = var->var_part[0].loc_chain; node; node = node->next)
+    if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
+	&& MEM_OFFSET (node->loc) == 0)
+      {
+	where = node;
+	break;
+      }
+    else if (GET_CODE (node->loc) == VALUE
+	     && !VALUE_RECURSED_INTO (node->loc)
+	     && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
+      break;
+
+  VALUE_RECURSED_INTO (val) = false;
+
+  return where;
+}
+
+/* Remove all MEMs from the location list of a hash table entry for a
+   one-part variable, except those whose MEM attributes map back to
+   the variable itself, directly or within a VALUE.
+
+   htab_traverse callback; DATA is the variable hash table itself.
+   Always returns 1 to continue the traversal.
+
+   ??? We could also preserve MEMs that reference stack slots that are
+   annotated as not addressable.  This is arguably even more reliable
+   than the current heuristic.  */
+
+static int
+dataflow_set_preserve_mem_locs (void **slot, void *data)
+{
+  htab_t vars = (htab_t) data;
+  variable var = (variable) *slot;
+
+  if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
+    {
+      tree decl = dv_as_decl (var->dv);
+      location_chain loc, *locp;
+
+      if (!var->n_var_parts)
+	return 1;
+
+      gcc_assert (var->n_var_parts == 1);
+
+      if (var->refcount > 1)
+	{
+	  /* Scan first so we only unshare when a change is needed.  */
+	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
+	    {
+	      /* We want to remove a MEM that doesn't refer to DECL.  */
+	      if (GET_CODE (loc->loc) == MEM
+		  && (MEM_EXPR (loc->loc) != decl
+		      || MEM_OFFSET (loc->loc)))
+		break;
+	      /* We want to move here a MEM that does refer to DECL.  */
+	      else if (GET_CODE (loc->loc) == VALUE
+		       && find_mem_expr_in_1pdv (decl, loc->loc, vars))
+		break;
+	    }
+
+	  if (!loc)
+	    return 1;
+
+	  var = unshare_variable (slot, var, VAR_INIT_STATUS_UNKNOWN);
+	  gcc_assert (var->n_var_parts == 1);
+	}
+
+      for (locp = &var->var_part[0].loc_chain, loc = *locp;
+	   loc; loc = *locp)
+	{
+	  if (GET_CODE (loc->loc) == VALUE)
+	    {
+	      location_chain mem_node = find_mem_expr_in_1pdv (decl, loc->loc,
+							       vars);
+
+	      /* ??? This picks up only one out of multiple MEMs that
+		 refer to the same variable.  Do we ever need to be
+		 concerned about dealing with more than one, or, given
+		 that they should all map to the same variable
+		 location, their addresses will have been merged and
+		 they will be regarded as equivalent?  */
+	      if (mem_node)
+		{
+		  loc->loc = mem_node->loc;
+		  loc->set_src = mem_node->set_src;
+		  loc->init = MIN (loc->init, mem_node->init);
+		}
+	    }
+
+	  /* Keep non-MEM locations, and MEMs that refer to DECL at
+	     offset zero; delete every other MEM.  */
+	  if (GET_CODE (loc->loc) != MEM
+	      || (MEM_EXPR (loc->loc) == decl
+		  && MEM_OFFSET (loc->loc) == 0))
+	    {
+	      locp = &loc->next;
+	      continue;
+	    }
+
+	  *locp = loc->next;
+	  pool_free (loc_chain_pool, loc);
+	}
+
+      if (!var->var_part[0].loc_chain)
+	{
+	  var->n_var_parts--;
+	  variable_was_changed (var, vars);
+	}
+    }
+
+  return 1;
+}
+
+/* Remove all MEMs from the location list of a hash table entry for a
+   value.  htab_traverse callback; DATA is the variable hash table.
+   Always returns 1 to continue the traversal.  */
+
+static int
+dataflow_set_remove_mem_locs (void **slot, void *data)
+{
+  htab_t vars = (htab_t) data;
+  variable var = (variable) *slot;
+
+  if (dv_is_value_p (var->dv))
+    {
+      location_chain loc, *locp;
+      bool changed = false;
+
+      gcc_assert (var->n_var_parts == 1);
+
+      if (var->refcount > 1)
+	{
+	  /* Scan first so we only unshare when there is a MEM to
+	     remove.  */
+	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
+	    if (GET_CODE (loc->loc) == MEM)
+	      break;
+
+	  if (!loc)
+	    return 1;
+
+	  var = unshare_variable (slot, var, VAR_INIT_STATUS_UNKNOWN);
+	  gcc_assert (var->n_var_parts == 1);
+	}
+
+      for (locp = &var->var_part[0].loc_chain, loc = *locp;
+	   loc; loc = *locp)
+	{
+	  if (GET_CODE (loc->loc) != MEM)
+	    {
+	      locp = &loc->next;
+	      continue;
+	    }
+
+	  *locp = loc->next;
+	  /* If we have deleted the location which was last emitted
+	     we have to emit new location so add the variable to set
+	     of changed variables.  */
+	  if (var->var_part[0].cur_loc
+	      && rtx_equal_p (loc->loc, var->var_part[0].cur_loc))
+	    changed = true;
+	  pool_free (loc_chain_pool, loc);
+	}
+
+      if (!var->var_part[0].loc_chain)
+	{
+	  var->n_var_parts--;
+	  gcc_assert (changed);
+	}
+      if (changed)
+	{
+	  if (var->n_var_parts && var->var_part[0].loc_chain)
+	    var->var_part[0].cur_loc = var->var_part[0].loc_chain->loc;
+	  variable_was_changed (var, vars);
+	}
+    }
+
+  return 1;
+}
+
+/* Remove all variable-location information about call-clobbered
+   registers, as well as associations between MEMs and VALUEs (a call
+   may overwrite memory the values were tracking).  */
+
+static void
+dataflow_set_clear_at_call (dataflow_set *set)
+{
+  int r;
+
+  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
+    if (TEST_HARD_REG_BIT (call_used_reg_set, r))
+      var_regno_delete (set, r);
+
+  if (MAY_HAVE_DEBUG_INSNS)
+    {
+      /* Preserve MEMs that map back to their own decl first, then
+	 drop all remaining MEM locations from values.  */
+      htab_traverse (set->vars, dataflow_set_preserve_mem_locs, set->vars);
+      htab_traverse (set->vars, dataflow_set_remove_mem_locs, set->vars);
+    }
+}
+
+/* Flag whether two dataflow sets being compared contain different
+   data; set by the dataflow_set comparison callbacks below.  */
+static bool
+dataflow_set_different_value;
+
+/* Return true if some location in VP1's chain has no matching
+   location in VP2's chain.  The check is one-way; callers compare in
+   both directions.  Two REGs match when their REGNOs are equal;
+   other locations must satisfy rtx_equal_p.  */
+
+static bool
+variable_part_different_p (variable_part *vp1, variable_part *vp2)
+{
+  location_chain lc1, lc2;
+
+  for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
+    {
+      for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
+	{
+	  if (REG_P (lc1->loc) && REG_P (lc2->loc))
+	    {
+	      if (REGNO (lc1->loc) == REGNO (lc2->loc))
+		break;
+	    }
+	  if (rtx_equal_p (lc1->loc, lc2->loc))
+	    break;
+	}
+      /* LC1 was not found anywhere in VP2's chain.  */
+      if (!lc2)
+	return true;
+    }
+  return false;
+}
+
+/* Return true if one-part variables VAR1 and VAR2 are different.
+   They must be in canonical order, which permits a single
+   element-wise walk of both chains using loc_cmp.  */
+
+static bool
+onepart_variable_different_p (variable var1, variable var2)
+{
+  location_chain lc1, lc2;
+
+  if (var1 == var2)
+    return false;
+
+  gcc_assert (var1->n_var_parts == 1);
+  gcc_assert (var2->n_var_parts == 1);
+
+  lc1 = var1->var_part[0].loc_chain;
+  lc2 = var2->var_part[0].loc_chain;
+
+  gcc_assert (lc1);
+  gcc_assert (lc2);
+
+  while (lc1 && lc2)
+    {
+      if (loc_cmp (lc1->loc, lc2->loc))
+	return true;
+      lc1 = lc1->next;
+      lc2 = lc2->next;
+    }
+
+  /* Different only if one chain is longer than the other.  */
+  return lc1 != lc2;
+}
+
+/* Return true if variables VAR1 and VAR2 are different.
+   If COMPARE_CURRENT_LOCATION is true compare also the cur_loc of each
+   variable part.  */
+
+static bool
+variable_different_p (variable var1, variable var2,
+		      bool compare_current_location)
+{
+  int i;
+
+  if (var1 == var2)
+    return false;
+
+  if (var1->n_var_parts != var2->n_var_parts)
+    return true;
+
+  for (i = 0; i < var1->n_var_parts; i++)
+    {
+      if (var1->var_part[i].offset != var2->var_part[i].offset)
+	return true;
+      if (compare_current_location)
+	{
 	  if (!((REG_P (var1->var_part[i].cur_loc)
 		 && REG_P (var2->var_part[i].cur_loc)
 		 && (REGNO (var1->var_part[i].cur_loc)
@@ -1449,6 +3540,13 @@ variable_different_p (variable var1, var
 				var2->var_part[i].cur_loc)))
 	    return true;
 	}
+      /* One-part values have locations in a canonical order.  */
+      if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
+	{
+	  gcc_assert (var1->n_var_parts == 1);
+	  gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
+	  return onepart_variable_different_p (var1, var2);
+	}
       if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
 	return true;
       if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
@@ -1466,13 +3564,19 @@ dataflow_set_different_1 (void **slot, v
   htab_t htab = (htab_t) data;
   variable var1, var2;
 
-  var1 = *(variable *) slot;
-  var2 = (variable) htab_find_with_hash (htab, var1->decl,
-			      VARIABLE_HASH_VAL (var1->decl));
+  var1 = (variable) *slot;
+  var2 = (variable) htab_find_with_hash (htab, &var1->dv,
+					 dv_htab_hash (var1->dv));
   if (!var2)
     {
       dataflow_set_different_value = true;
 
+      if (dump_file && flag_verbose_cselib)
+	{
+	  fprintf (dump_file, "dataflow difference found: removal of:\n");
+	  dump_variable (var1);
+	}
+
       /* Stop traversing the hash table.  */
       return 0;
     }
@@ -1481,6 +3585,13 @@ dataflow_set_different_1 (void **slot, v
     {
       dataflow_set_different_value = true;
 
+      if (dump_file && flag_verbose_cselib)
+	{
+	  fprintf (dump_file, "dataflow difference found: old and new follow:\n");
+	  dump_variable (var1);
+	  dump_variable (var2);
+	}
+
       /* Stop traversing the hash table.  */
       return 0;
     }
@@ -1498,13 +3609,19 @@ dataflow_set_different_2 (void **slot, v
   htab_t htab = (htab_t) data;
   variable var1, var2;
 
-  var1 = *(variable *) slot;
-  var2 = (variable) htab_find_with_hash (htab, var1->decl,
-			      VARIABLE_HASH_VAL (var1->decl));
+  var1 = (variable) *slot;
+  var2 = (variable) htab_find_with_hash (htab, &var1->dv,
+					 dv_htab_hash (var1->dv));
   if (!var2)
     {
       dataflow_set_different_value = true;
 
+      if (dump_file && flag_verbose_cselib)
+	{
+	  fprintf (dump_file, "dataflow difference found: addition of:\n");
+	  dump_variable (var1);
+	}
+
       /* Stop traversing the hash table.  */
       return 0;
     }
@@ -1588,7 +3705,7 @@ contains_symbol_ref (rtx x)
 /* Shall EXPR be tracked?  */
 
 static bool
-track_expr_p (tree expr)
+track_expr_p (tree expr, bool need_rtl)
 {
   rtx decl_rtl;
   tree realdecl;
@@ -1603,7 +3720,7 @@ track_expr_p (tree expr)
 
   /* ... and a RTL assigned to it.  */
   decl_rtl = DECL_RTL_IF_SET (expr);
-  if (!decl_rtl)
+  if (!decl_rtl && need_rtl)
     return 0;
   
   /* If this expression is really a debug alias of some other declaration, we 
@@ -1637,13 +3754,13 @@ track_expr_p (tree expr)
      extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
      char **_dl_argv;
   */
-  if (MEM_P (decl_rtl)
+  if (decl_rtl && MEM_P (decl_rtl)
       && contains_symbol_ref (XEXP (decl_rtl, 0)))
     return 0;
 
   /* If RTX is a memory it should not be very large (because it would be
      an array or struct).  */
-  if (MEM_P (decl_rtl))
+  if (decl_rtl && MEM_P (decl_rtl))
     {
       /* Do not track structures and arrays.  */
       if (GET_MODE (decl_rtl) == BLKmode
@@ -1707,7 +3824,7 @@ track_loc_p (rtx loc, tree expr, HOST_WI
 {
   enum machine_mode mode;
 
-  if (expr == NULL || !track_expr_p (expr))
+  if (expr == NULL || !track_expr_p (expr, true))
     return false;
 
   /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
@@ -1778,82 +3895,447 @@ var_lowpart (enum machine_mode mode, rtx
   return gen_rtx_REG_offset (loc, mode, regno, offset);
 }
 
-/* Count uses (register and memory references) LOC which will be tracked.
-   INSN is instruction which the LOC is part of.  */
+/* Carry information about uses and stores while walking rtx.  */
 
-static int
-count_uses (rtx *loc, void *insn)
+struct count_use_info
+{
+  /* The insn where the RTX is.  */
+  rtx insn;
+
+  /* The basic block where insn is.  */
+  basic_block bb;
+
+  /* The array of n_sets sets in the insn, as determined by cselib.  */
+  struct cselib_set *sets;
+  int n_sets;
+
+  /* True if we're counting stores, false otherwise.  */
+  bool store_p;
+};
+
+/* Find a VALUE corresponding to X.   */
+
+static inline cselib_val *
+find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
+{
+  int i;
+
+  if (cui->sets)
+    {
+      /* This is called after uses are set up and before stores are
+	 processed by cselib, so it's safe to look up srcs, but not
+	 dsts.  So we look up expressions that appear in srcs or in
+	 dest expressions, but we search the sets array for dests of
+	 stores.  */
+      if (cui->store_p)
+	{
+	  for (i = 0; i < cui->n_sets; i++)
+	    if (cui->sets[i].dest == x)
+	      return cui->sets[i].src_elt;
+	}
+      else
+	return cselib_lookup (x, mode, 0);
+    }
+
+  return NULL;
+}
+
+/* Replace all registers and addresses in an expression with VALUE
+   expressions that map back to them, unless the expression is a
+   register.  If no mapping is or can be performed, returns NULL.  */
+
+static rtx
+replace_expr_with_values (rtx loc)
+{
+  if (REG_P (loc))
+    return NULL;
+  else if (MEM_P (loc))
+    {
+      cselib_val *addr = cselib_lookup (XEXP (loc, 0), Pmode, 0);
+      if (addr)
+	return replace_equiv_address_nv (loc, addr->val_rtx);
+      else
+	return NULL;
+    }
+  else
+    return cselib_subst_to_values (loc);
+}
+
+/* Determine what kind of micro operation to choose for a USE.  Return
+   MO_CLOBBER if no micro operation is to be generated.  */
+
+static enum micro_operation_type
+use_type (rtx *loc, struct count_use_info *cui, enum machine_mode *modep)
 {
-  basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
+  tree expr;
+  cselib_val *val;
+
+  if (cui && cui->sets)
+    {
+      if (GET_CODE (*loc) == VAR_LOCATION)
+	{
+	  if (track_expr_p (PAT_VAR_LOCATION_DECL (*loc), false))
+	    {
+	      rtx ploc = PAT_VAR_LOCATION_LOC (*loc);
+	      cselib_val *val = cselib_lookup (ploc, GET_MODE (*loc), 1);
+
+	      /* ??? flag_float_store and volatile mems are never
+		 given values, but we could in theory use them for
+		 locations.  */
+	      gcc_assert (val || 1);
+	      return MO_VAL_LOC;
+	    }
+	  else
+	    return MO_CLOBBER;
+	}
+
+      if ((REG_P (*loc) || MEM_P (*loc))
+	  && (val = find_use_val (*loc, GET_MODE (*loc), cui)))
+	{
+	  if (modep)
+	    *modep = GET_MODE (*loc);
+	  if (cui->store_p)
+	    {
+	      if (REG_P (*loc)
+		  || cselib_lookup (XEXP (*loc, 0), GET_MODE (*loc), 0))
+		return MO_VAL_SET;
+	    }
+	  else if (!cselib_preserved_value_p (val))
+	    return MO_VAL_USE;
+	}
+    }
 
   if (REG_P (*loc))
     {
       gcc_assert (REGNO (*loc) < FIRST_PSEUDO_REGISTER);
-      VTI (bb)->n_mos++;
+
+      expr = REG_EXPR (*loc);
+
+      if (!expr)
+	return MO_USE_NO_VAR;
+      else if (var_debug_value_for_decl (var_debug_decl (expr)))
+	return MO_CLOBBER;
+      else if (track_loc_p (*loc, expr, REG_OFFSET (*loc),
+			    false, modep, NULL))
+	return MO_USE;
+      else
+	return MO_USE_NO_VAR;
+    }
+  else if (MEM_P (*loc))
+    {
+      expr = MEM_EXPR (*loc);
+
+      if (!expr)
+	return MO_CLOBBER;
+      else if (var_debug_value_for_decl (var_debug_decl (expr)))
+	return MO_CLOBBER;
+      else if (track_loc_p (*loc, expr, INT_MEM_OFFSET (*loc),
+			    false, modep, NULL))
+	return MO_USE;
+      else
+	return MO_CLOBBER;
     }
-  else if (MEM_P (*loc)
-	   && track_loc_p (*loc, MEM_EXPR (*loc), INT_MEM_OFFSET (*loc),
-			   false, NULL, NULL))
+
+  return MO_CLOBBER;
+}
+
+/* Log to OUT information about micro-operation MOPT involving X in
+   INSN of BB.  */
+
+static inline void
+log_op_type (rtx x, basic_block bb, rtx insn,
+	     enum micro_operation_type mopt, FILE *out)
+{
+  fprintf (out, "bb %i op %i insn %i %s ",
+	   bb->index, VTI (bb)->n_mos - 1,
+	   INSN_UID (insn), micro_operation_type_name[mopt]);
+  print_inline_rtx (out, x, 2);
+  fputc ('\n', out);
+}
+
+/* Count uses (register and memory references) LOC which will be tracked.
+   INSN is instruction which the LOC is part of.  */
+
+static int
+count_uses (rtx *loc, void *cuip)
+{
+  struct count_use_info *cui = (struct count_use_info *) cuip;
+  enum micro_operation_type mopt = use_type (loc, cui, NULL);
+
+  if (mopt != MO_CLOBBER)
     {
-      VTI (bb)->n_mos++;
+      cselib_val *val;
+      enum machine_mode mode = GET_MODE (*loc);
+
+      VTI (cui->bb)->n_mos++;
+
+      if (dump_file && flag_verbose_cselib)
+	log_op_type (*loc, cui->bb, cui->insn, mopt, dump_file);
+
+      switch (mopt)
+	{
+	case MO_VAL_LOC:
+	  loc = &PAT_VAR_LOCATION_LOC (*loc);
+	  if (VAR_LOC_UNKNOWN_P (*loc))
+	    break;
+	  /* Fall through.  */
+
+	case MO_VAL_USE:
+	case MO_VAL_SET:
+	  if (MEM_P (*loc)
+	      && !REG_P (XEXP (*loc, 0)) && !MEM_P (XEXP (*loc, 0)))
+	    {
+	      val = cselib_lookup (XEXP (*loc, 0), Pmode, false);
+
+	      if (val && !cselib_preserved_value_p (val))
+		{
+		  VTI (cui->bb)->n_mos++;
+		  cselib_preserve_value (val);
+		}
+	    }
+
+	  val = find_use_val (*loc, mode, cui);
+	  if (val)
+	    cselib_preserve_value (val);
+	  else
+	    gcc_assert (mopt == MO_VAL_LOC);
+
+	  break;
+
+	default:
+	  break;
+	}
     }
 
   return 0;
 }
 
-/* Helper function for finding all uses of REG/MEM in X in insn INSN.  */
+/* Helper function for finding all uses of REG/MEM in X in CUI's
+   insn.  */
 
 static void
-count_uses_1 (rtx *x, void *insn)
+count_uses_1 (rtx *x, void *cui)
 {
-  for_each_rtx (x, count_uses, insn);
+  for_each_rtx (x, count_uses, cui);
 }
 
-/* Count stores (register and memory references) LOC which will be tracked.
-   INSN is instruction which the LOC is part of.  */
+/* Count stores (register and memory references) LOC which will be
+   tracked.  CUI is a count_use_info object containing the instruction
+   which the LOC is part of.  */
 
 static void
-count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *insn)
+count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *cui)
 {
-  count_uses (&loc, insn);
+  count_uses (&loc, cui);
 }
 
+/* Callback for cselib_record_sets_hook, that counts how many micro
+   operations it takes for uses and stores in an insn after
+   cselib_record_sets has analyzed the sets in an insn, but before it
+   modifies the stored values in the internal tables, unless
+   cselib_record_sets doesn't call it directly (perhaps because we're
+   not doing cselib in the first place, in which case sets and n_sets
+   will be 0).  */
+
+static void
+count_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
+{
+  basic_block bb = BLOCK_FOR_INSN (insn);
+  struct count_use_info cui;
+
+  cselib_hook_called = true;
+
+  cui.insn = insn;
+  cui.bb = bb;
+  cui.sets = sets;
+  cui.n_sets = n_sets;
+
+  cui.store_p = false;
+  note_uses (&PATTERN (insn), count_uses_1, &cui);
+  cui.store_p = true;
+  note_stores (PATTERN (insn), count_stores, &cui);
+}
+
+/* Tell whether the CONCAT used to hold a VALUE and its location
+   needs value resolution, i.e., an attempt of mapping the location
+   back to other incoming values.  */
+#define VAL_NEEDS_RESOLUTION(x) \
+  (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
+/* Whether the location in the CONCAT is a tracked expression, that
+   should also be handled like a MO_USE.  */
+#define VAL_HOLDS_TRACK_EXPR(x) \
+  (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
+/* Whether the location in the CONCAT should be handled like a MO_COPY
+   as well.  */
+#define VAL_EXPR_IS_COPIED(x) \
+  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
+/* Whether the location in the CONCAT should be handled like a
+   MO_CLOBBER as well.  */
+#define VAL_EXPR_IS_CLOBBERED(x) \
+  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
+
 /* Add uses (register and memory references) LOC which will be tracked
    to VTI (bb)->mos.  INSN is instruction which the LOC is part of.  */
 
 static int
-add_uses (rtx *loc, void *insn)
+add_uses (rtx *loc, void *data)
 {
-  enum machine_mode mode;
+  enum machine_mode mode = VOIDmode;
+  struct count_use_info *cui = (struct count_use_info *)data;
+  enum micro_operation_type type = use_type (loc, cui, &mode);
 
-  if (REG_P (*loc))
+  if (type != MO_CLOBBER)
     {
-      basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
+      basic_block bb = cui->bb;
       micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
 
-      if (track_loc_p (*loc, REG_EXPR (*loc), REG_OFFSET (*loc),
-		       false, &mode, NULL))
+      mo->type = type;
+      mo->u.loc = type == MO_USE ? var_lowpart (mode, *loc) : *loc;
+      mo->insn = cui->insn;
+
+      if (type == MO_VAL_LOC)
 	{
-	  mo->type = MO_USE;
-	  mo->u.loc = var_lowpart (mode, *loc);
+	  rtx oloc = *loc;
+	  rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
+	  cselib_val *val;
+
+	  gcc_assert (cui->sets);
+
+	  if (MEM_P (vloc)
+	      && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
+	    {
+	      rtx mloc = vloc;
+	      cselib_val *val = cselib_lookup (XEXP (mloc, 0), Pmode, 0);
+
+	      if (val && !cselib_preserved_value_p (val))
+		{
+		  micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
+		  mon->type = mo->type;
+		  mon->u.loc = mo->u.loc;
+		  mon->insn = mo->insn;
+		  cselib_preserve_value (val);
+		  mo->type = MO_VAL_USE;
+		  mloc = cselib_subst_to_values (XEXP (mloc, 0));
+		  mo->u.loc = gen_rtx_CONCAT (Pmode, val->val_rtx, mloc);
+		  if (dump_file && flag_verbose_cselib)
+		    log_op_type (mo->u.loc, cui->bb, cui->insn,
+				 mo->type, dump_file);
+		  mo = mon;
+		}
+	    }
+
+	  if (!VAR_LOC_UNKNOWN_P (vloc)
+	      && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
+	    {
+	      enum machine_mode mode2;
+	      enum micro_operation_type type2;
+	      rtx nloc = replace_expr_with_values (vloc);
+
+	      if (nloc)
+		{
+		  oloc = shallow_copy_rtx (oloc);
+		  PAT_VAR_LOCATION_LOC (oloc) = nloc;
+		}
+
+	      oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
+
+	      type2 = use_type (&vloc, 0, &mode2);
+
+	      gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
+			  || type2 == MO_CLOBBER);
+
+	      if (type2 == MO_CLOBBER
+		  && !cselib_preserved_value_p (val))
+		{
+		  VAL_NEEDS_RESOLUTION (oloc) = 1;
+		  cselib_preserve_value (val);
+		}
+	    }
+	  else if (!VAR_LOC_UNKNOWN_P (vloc))
+	    {
+	      oloc = shallow_copy_rtx (oloc);
+	      PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
+	    }
+
+	  mo->u.loc = oloc;
 	}
-      else
+      else if (type == MO_VAL_USE)
 	{
-	  mo->type = MO_USE_NO_VAR;
-	  mo->u.loc = *loc;
+	  enum machine_mode mode2 = VOIDmode;
+	  enum micro_operation_type type2;
+	  cselib_val *val = find_use_val (*loc, GET_MODE (*loc), cui);
+	  rtx vloc, oloc = *loc, nloc;
+
+	  gcc_assert (cui->sets);
+
+	  if (MEM_P (oloc)
+	      && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
+	    {
+	      rtx mloc = oloc;
+	      cselib_val *val = cselib_lookup (XEXP (mloc, 0), Pmode, 0);
+
+	      if (val && !cselib_preserved_value_p (val))
+		{
+		  micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
+		  mon->type = mo->type;
+		  mon->u.loc = mo->u.loc;
+		  mon->insn = mo->insn;
+		  cselib_preserve_value (val);
+		  mo->type = MO_VAL_USE;
+		  mloc = cselib_subst_to_values (XEXP (mloc, 0));
+		  mo->u.loc = gen_rtx_CONCAT (Pmode, val->val_rtx, mloc);
+		  mo->insn = cui->insn;
+		  if (dump_file && flag_verbose_cselib)
+		    log_op_type (mo->u.loc, cui->bb, cui->insn,
+				 mo->type, dump_file);
+		  mo = mon;
+		}
+	    }
+
+	  type2 = use_type (loc, 0, &mode2);
+
+	  gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
+		      || type2 == MO_CLOBBER);
+
+	  if (type2 == MO_USE)
+	    vloc = var_lowpart (mode2, *loc);
+	  else
+	    vloc = oloc;
+
+	  /* The loc of a MO_VAL_USE may have two forms:
+
+	     (concat val src): val is at src, a value-based
+	     representation.
+
+	     (concat (concat val use) src): same as above, with use as
+	     the MO_USE tracked value, if it differs from src.
+
+	  */
+
+	  nloc = replace_expr_with_values (*loc);
+	  if (!nloc)
+	    nloc = oloc;
+
+	  if (vloc != nloc)
+	    oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
+	  else
+	    oloc = val->val_rtx;
+
+	  mo->u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
+
+	  if (type2 == MO_USE)
+	    VAL_HOLDS_TRACK_EXPR (mo->u.loc) = 1;
+	  if (!cselib_preserved_value_p (val))
+	    {
+	      VAL_NEEDS_RESOLUTION (mo->u.loc) = 1;
+	      cselib_preserve_value (val);
+	    }
 	}
-      mo->insn = (rtx) insn;
-    }
-  else if (MEM_P (*loc)
-	   && track_loc_p (*loc, MEM_EXPR (*loc), INT_MEM_OFFSET (*loc),
-			   false, &mode, NULL))
-    {
-      basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
-      micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+      else
+	gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
 
-      mo->type = MO_USE;
-      mo->u.loc = var_lowpart (mode, *loc);
-      mo->insn = (rtx) insn;
+      if (dump_file && flag_verbose_cselib)
+	log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
     }
 
   return 0;
@@ -1862,39 +4344,49 @@ add_uses (rtx *loc, void *insn)
 /* Helper function for finding all uses of REG/MEM in X in insn INSN.  */
 
 static void
-add_uses_1 (rtx *x, void *insn)
+add_uses_1 (rtx *x, void *cui)
 {
-  for_each_rtx (x, add_uses, insn);
+  for_each_rtx (x, add_uses, cui);
 }
 
 /* Add stores (register and memory references) LOC which will be tracked
-   to VTI (bb)->mos. EXPR is the RTL expression containing the store.
-   INSN is instruction which the LOC is part of.  */
+   to VTI (bb)->mos.  EXPR is the RTL expression containing the store.
+   CUIP->insn is instruction which the LOC is part of.  */
 
 static void
-add_stores (rtx loc, const_rtx expr, void *insn)
+add_stores (rtx loc, const_rtx expr, void *cuip)
 {
-  enum machine_mode mode;
+  enum machine_mode mode = VOIDmode, mode2;
+  struct count_use_info *cui = (struct count_use_info *)cuip;
+  basic_block bb = cui->bb;
+  micro_operation *mo;
+  rtx oloc = loc, nloc, src = NULL;
+  enum micro_operation_type type = use_type (&loc, cui, &mode);
+  bool track_p = false;
+  cselib_val *v;
+  bool resolve, preserve;
+
+  if (type == MO_CLOBBER)
+    return;
+
+  mode2 = mode;
 
   if (REG_P (loc))
     {
-      basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
-      micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+      mo = VTI (bb)->mos + VTI (bb)->n_mos++;
 
-      if (GET_CODE (expr) == CLOBBER
-	  || !track_loc_p (loc, REG_EXPR (loc), REG_OFFSET (loc),
-			   true, &mode, NULL))
+      if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
+	  || !(track_p = use_type (&loc, NULL, &mode2) == MO_USE)
+	  || GET_CODE (expr) == CLOBBER)
 	{
 	  mo->type = MO_CLOBBER;
 	  mo->u.loc = loc;
 	}
       else
 	{
-	  rtx src = NULL;
-
 	  if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
-	    src = var_lowpart (mode, SET_SRC (expr));
-	  loc = var_lowpart (mode, loc);
+	    src = var_lowpart (mode2, SET_SRC (expr));
+	  loc = var_lowpart (mode2, loc);
 
 	  if (src == NULL)
 	    {
@@ -1912,27 +4404,44 @@ add_stores (rtx loc, const_rtx expr, voi
 	      mo->u.loc = CONST_CAST_RTX (expr);
 	    }
 	}
-      mo->insn = (rtx) insn;
+      mo->insn = cui->insn;
     }
   else if (MEM_P (loc)
-	   && track_loc_p (loc, MEM_EXPR (loc), INT_MEM_OFFSET (loc),
-			   false, &mode, NULL))
+	   && ((track_p = use_type (&loc, NULL, &mode2) == MO_USE)
+	       || cui->sets))
     {
-      basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
-      micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+      mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+
+      if (MEM_P (loc) && type == MO_VAL_SET
+	  && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
+	{
+	  rtx mloc = loc;
+	  cselib_val *val = cselib_lookup (XEXP (mloc, 0), Pmode, 0);
+
+	  if (val && !cselib_preserved_value_p (val))
+	    {
+	      cselib_preserve_value (val);
+	      mo->type = MO_VAL_USE;
+	      mloc = cselib_subst_to_values (XEXP (mloc, 0));
+	      mo->u.loc = gen_rtx_CONCAT (Pmode, val->val_rtx, mloc);
+	      mo->insn = cui->insn;
+	      if (dump_file && flag_verbose_cselib)
+		log_op_type (mo->u.loc, cui->bb, cui->insn,
+			     mo->type, dump_file);
+	      mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+	    }
+	}
 
-      if (GET_CODE (expr) == CLOBBER)
+      if (GET_CODE (expr) == CLOBBER || !track_p)
 	{
 	  mo->type = MO_CLOBBER;
-	  mo->u.loc = var_lowpart (mode, loc);
+	  mo->u.loc = track_p ? var_lowpart (mode2, loc) : loc;
 	}
       else
 	{
-	  rtx src = NULL;
-
 	  if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
-	    src = var_lowpart (mode, SET_SRC (expr));
-	  loc = var_lowpart (mode, loc);
+	    src = var_lowpart (mode2, SET_SRC (expr));
+	  loc = var_lowpart (mode2, loc);
 
 	  if (src == NULL)
 	    {
@@ -1952,7 +4461,170 @@ add_stores (rtx loc, const_rtx expr, voi
 	      mo->u.loc = CONST_CAST_RTX (expr);
 	    }
 	}
-      mo->insn = (rtx) insn;
+      mo->insn = cui->insn;
+    }
+  else
+    return;
+
+  if (type != MO_VAL_SET)
+    goto log_and_return;
+
+  v = find_use_val (oloc, mode, cui);
+
+  resolve = preserve = !cselib_preserved_value_p (v);
+
+  nloc = replace_expr_with_values (oloc);
+  if (nloc)
+    oloc = nloc;
+
+  if (resolve && GET_CODE (mo->u.loc) == SET)
+    {
+      nloc = replace_expr_with_values (SET_SRC (mo->u.loc));
+
+      if (nloc)
+	oloc = gen_rtx_SET (GET_MODE (mo->u.loc), oloc, nloc);
+      else
+	{
+	  if (oloc == SET_DEST (mo->u.loc))
+	    /* No point in duplicating.  */
+	    oloc = mo->u.loc;
+	  if (!REG_P (SET_SRC (mo->u.loc)))
+	    resolve = false;
+	}
+    }
+  else if (!resolve)
+    {
+      if (GET_CODE (mo->u.loc) == SET
+	  && oloc == SET_DEST (mo->u.loc))
+	/* No point in duplicating.  */
+	oloc = mo->u.loc;
+    }
+  else
+    resolve = false;
+
+  loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
+
+  if (mo->u.loc != oloc)
+    loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, mo->u.loc);
+
+  /* The loc of a MO_VAL_SET may have various forms:
+
+     (concat val dst): dst now holds val
+
+     (concat val (set dst src)): dst now holds val, copied from src
+
+     (concat (concat val dstv) dst): dst now holds val; dstv is dst
+     after replacing mems and non-top-level regs with values.
+
+     (concat (concat val dstv) (set dst src)): dst now holds val,
+     copied from src.  dstv is a value-based representation of dst, if
+     it differs from dst.  If resolution is needed, src is a REG.
+
+     (concat (concat val (set dstv srcv)) (set dst src)): src
+     copied to dst, holding val.  dstv and srcv are value-based
+     representations of dst and src, respectively.
+
+  */
+
+  mo->u.loc = loc;
+
+  if (track_p)
+    VAL_HOLDS_TRACK_EXPR (loc) = 1;
+  if (preserve)
+    {
+      VAL_NEEDS_RESOLUTION (loc) = resolve;
+      cselib_preserve_value (v);
+    }
+  if (mo->type == MO_CLOBBER)
+    VAL_EXPR_IS_CLOBBERED (loc) = 1;
+  if (mo->type == MO_COPY)
+    VAL_EXPR_IS_COPIED (loc) = 1;
+
+  mo->type = MO_VAL_SET;
+
+ log_and_return:
+  if (dump_file && flag_verbose_cselib)
+    log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
+}
+
+/* Callback for cselib_record_sets_hook, that records as micro
+   operations uses and stores in an insn after cselib_record_sets has
+   analyzed the sets in an insn, but before it modifies the stored
+   values in the internal tables, unless cselib_record_sets doesn't
+   call it directly (perhaps because we're not doing cselib in the
+   first place, in which case sets and n_sets will be 0).  */
+
+static void
+add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
+{
+  basic_block bb = BLOCK_FOR_INSN (insn);
+  int n1, n2;
+  struct count_use_info cui;
+
+  cselib_hook_called = true;
+
+  cui.insn = insn;
+  cui.bb = bb;
+  cui.sets = sets;
+  cui.n_sets = n_sets;
+
+  n1 = VTI (bb)->n_mos;
+  cui.store_p = false;
+  note_uses (&PATTERN (insn), add_uses_1, &cui);
+  n2 = VTI (bb)->n_mos - 1;
+
+  /* Order the MO_USEs to be before MO_USE_NO_VARs,
+     MO_VAL_LOC and MO_VAL_USE.  */
+  while (n1 < n2)
+    {
+      while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE)
+	n1++;
+      while (n1 < n2 && VTI (bb)->mos[n2].type != MO_USE)
+	n2--;
+      if (n1 < n2)
+	{
+	  micro_operation sw;
+
+	  sw = VTI (bb)->mos[n1];
+	  VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
+	  VTI (bb)->mos[n2] = sw;
+	}
+    }
+
+  if (CALL_P (insn))
+    {
+      micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+
+      mo->type = MO_CALL;
+      mo->insn = insn;
+
+      if (dump_file && flag_verbose_cselib)
+	log_op_type (PATTERN (insn), bb, insn, mo->type, dump_file);
+    }
+
+  n1 = VTI (bb)->n_mos;
+  /* This will record NEXT_INSN (insn), such that we can
+     insert notes before it without worrying about any
+     notes that MO_USEs might emit after the insn.  */
+  cui.store_p = true;
+  note_stores (PATTERN (insn), add_stores, &cui);
+  n2 = VTI (bb)->n_mos - 1;
+
+  /* Order the MO_CLOBBERs to be before MO_SETs.  */
+  while (n1 < n2)
+    {
+      while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
+	n1++;
+      while (n1 < n2 && VTI (bb)->mos[n2].type != MO_CLOBBER)
+	n2--;
+      if (n1 < n2)
+	{
+	  micro_operation sw;
+
+	  sw = VTI (bb)->mos[n1];
+	  VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
+	  VTI (bb)->mos[n2] = sw;
+	}
     }
 }
 
@@ -1971,7 +4643,7 @@ find_src_status (dataflow_set *in, rtx s
     decl = var_debug_decl (MEM_EXPR (src));
 
   if (src && decl)
-    status = get_init_value (in, src, decl);
+    status = get_init_value (in, src, dv_from_decl (decl));
 
   return status;
 }
@@ -1998,12 +4670,14 @@ find_src_set_src (dataflow_set *set, rtx
 
   if (src && decl)
     {
-      slot = htab_find_slot_with_hash (set->vars, decl, 
+      decl_or_value dv = dv_from_decl (decl);
+
+      slot = htab_find_slot_with_hash (set->vars, &dv,
 				       VARIABLE_HASH_VAL (decl), NO_INSERT);
 
       if (slot)
 	{
-	  var = *(variable *) slot;
+	  var = (variable) *slot;
 	  found = false;
 	  for (i = 0; i < var->n_var_parts && !found; i++)
 	    for (nextp = var->var_part[i].loc_chain; nextp && !found; 
@@ -2025,7 +4699,7 @@ find_src_set_src (dataflow_set *set, rtx
 static bool
 compute_bb_dataflow (basic_block bb)
 {
-  int i, n, r;
+  int i, n;
   bool changed;
   dataflow_set old_out;
   dataflow_set *in = &VTI (bb)->in;
@@ -2038,12 +4712,12 @@ compute_bb_dataflow (basic_block bb)
   n = VTI (bb)->n_mos;
   for (i = 0; i < n; i++)
     {
+      rtx insn = VTI (bb)->mos[i].insn;
+
       switch (VTI (bb)->mos[i].type)
 	{
 	  case MO_CALL:
-	    for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
-	      if (TEST_HARD_REG_BIT (call_used_reg_set, r))
-		var_regno_delete (out, r);
+	    dataflow_set_clear_at_call (out);
 	    break;
 
 	  case MO_USE:
@@ -2061,6 +4735,150 @@ compute_bb_dataflow (basic_block bb)
 	    }
 	    break;
 
+	  case MO_VAL_LOC:
+	    {
+	      rtx loc = VTI (bb)->mos[i].u.loc;
+	      rtx val, vloc;
+	      tree var;
+
+	      if (GET_CODE (loc) == CONCAT)
+		{
+		  val = XEXP (loc, 0);
+		  vloc = XEXP (loc, 1);
+		}
+	      else
+		{
+		  val = NULL_RTX;
+		  vloc = loc;
+		}
+
+	      var = PAT_VAR_LOCATION_DECL (vloc);
+
+	      clobber_variable_part (out, NULL_RTX,
+				     dv_from_decl (var), 0, NULL_RTX);
+	      if (val)
+		{
+		  if (VAL_NEEDS_RESOLUTION (loc))
+		    val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
+		  set_variable_part (out, val, dv_from_decl (var), 0,
+				     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
+				     INSERT);
+		}
+	    }
+	    break;
+
+	  case MO_VAL_USE:
+	    {
+	      rtx loc = VTI (bb)->mos[i].u.loc;
+	      rtx val, vloc, uloc;
+
+	      vloc = uloc = XEXP (loc, 1);
+	      val = XEXP (loc, 0);
+
+	      if (GET_CODE (val) == CONCAT)
+		{
+		  uloc = XEXP (val, 1);
+		  val = XEXP (val, 0);
+		}
+
+	      if (VAL_NEEDS_RESOLUTION (loc))
+		val_resolve (out, val, vloc, insn);
+
+	      if (VAL_HOLDS_TRACK_EXPR (loc))
+		{
+		  enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
+
+		  if (! flag_var_tracking_uninit)
+		    status = VAR_INIT_STATUS_INITIALIZED;
+
+		  if (GET_CODE (uloc) == REG)
+		    var_reg_set (out, uloc, status, NULL);
+		  else if (GET_CODE (uloc) == MEM)
+		    var_mem_set (out, uloc, status, NULL);
+		}
+	    }
+	    break;
+
+	  case MO_VAL_SET:
+	    {
+	      rtx loc = VTI (bb)->mos[i].u.loc;
+	      rtx val, vloc, uloc;
+
+	      vloc = uloc = XEXP (loc, 1);
+	      val = XEXP (loc, 0);
+
+	      if (GET_CODE (val) == CONCAT)
+		{
+		  vloc = XEXP (val, 1);
+		  val = XEXP (val, 0);
+		}
+
+	      if (GET_CODE (vloc) == SET)
+		{
+		  rtx vsrc = SET_SRC (vloc);
+
+		  gcc_assert (val != vsrc);
+		  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
+
+		  vloc = SET_DEST (vloc);
+
+		  if (VAL_NEEDS_RESOLUTION (loc))
+		    val_resolve (out, val, vsrc, insn);
+		}
+	      else if (VAL_NEEDS_RESOLUTION (loc))
+		{
+		  gcc_assert (GET_CODE (uloc) == SET
+			      && GET_CODE (SET_SRC (uloc)) == REG);
+		  val_resolve (out, val, SET_SRC (uloc), insn);
+		}
+
+	      if (VAL_HOLDS_TRACK_EXPR (loc))
+		{
+		  if (VAL_EXPR_IS_CLOBBERED (loc))
+		    {
+		      if (REG_P (uloc))
+			var_reg_delete (out, uloc, true);
+		      else if (MEM_P (uloc))
+			var_mem_delete (out, uloc, true);
+		    }
+		  else
+		    {
+		      bool copied_p = VAL_EXPR_IS_COPIED (loc);
+		      rtx set_src = NULL;
+		      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
+
+		      if (GET_CODE (uloc) == SET)
+			{
+			  set_src = SET_SRC (uloc);
+			  uloc = SET_DEST (uloc);
+			}
+
+		      if (copied_p)
+			{
+			  if (flag_var_tracking_uninit)
+			    status = find_src_status (in, set_src);
+
+			  if (status == VAR_INIT_STATUS_UNKNOWN)
+			    status = find_src_status (out, set_src);
+
+			  set_src = find_src_set_src (in, set_src);
+			}
+
+		      if (REG_P (uloc))
+			var_reg_delete_and_set (out, uloc, !copied_p,
+						status, set_src);
+		      else if (MEM_P (uloc))
+			var_mem_delete_and_set (out, uloc, !copied_p,
+						status, set_src);
+		    }
+		}
+	      else if (REG_P (uloc))
+		var_regno_delete (out, REGNO (uloc));
+
+	      val_store (out, val, vloc, insn);
+	    }
+	    break;
+
 	  case MO_SET:
 	    {
 	      rtx loc = VTI (bb)->mos[i].u.loc;
@@ -2138,6 +4956,15 @@ compute_bb_dataflow (basic_block bb)
 	}
     }
 
+  if (MAY_HAVE_DEBUG_INSNS)
+    {
+      dataflow_set_equiv_regs (out);
+      htab_traverse (out->vars, canonicalize_values_mark, out);
+      htab_traverse (out->vars, canonicalize_values_star, out);
+#if ENABLE_CHECKING
+      htab_traverse (out->vars, canonicalize_loc_order_check, out);
+#endif
+    }
   changed = dataflow_set_different (&old_out, out);
   dataflow_set_destroy (&old_out);
   return changed;
@@ -2155,6 +4982,7 @@ vt_find_locations (void)
   int *bb_order;
   int *rc_order;
   int i;
+  int htabsz = 0;
 
   /* Compute reverse completion order of depth first search of the CFG
      so that the data-flow runs faster.  */
@@ -2195,17 +5023,102 @@ vt_find_locations (void)
 	    {
 	      bool changed;
 	      edge_iterator ei;
+	      int oldinsz, oldoutsz;
 
 	      SET_BIT (visited, bb->index);
 
-	      /* Calculate the IN set as union of predecessor OUT sets.  */
-	      dataflow_set_clear (&VTI (bb)->in);
-	      FOR_EACH_EDGE (e, ei, bb->preds)
+	      if (VTI (bb)->in.vars)
+		{
+		  htabsz -= VTI (bb)->in.vars->size + VTI (bb)->out.vars->size;
+		  oldinsz = VTI (bb)->in.vars->n_elements;
+		  oldoutsz = VTI (bb)->out.vars->n_elements;
+		}
+	      else
+		oldinsz = oldoutsz = 0;
+
+	      if (MAY_HAVE_DEBUG_INSNS)
+		{
+		  dataflow_set *in = &VTI (bb)->in;
+		  dataflow_set oin, *oldinp = NULL;
+		  bool first = true, adjust = false;
+
+		  /* Calculate the IN set as the intersection of
+		     predecessor OUT sets.  */
+
+		  if (flag_verbose_cselib)
+		    {
+		      oldinp = &oin;
+		      *oldinp = *in;
+		      dataflow_set_init (in,
+					 in->vars ?
+					 htab_elements (in->vars)
+					 : 3);
+		    }
+		  else
+		    dataflow_set_clear (in);
+
+		  FOR_EACH_EDGE (e, ei, bb->preds)
+		    if (!VTI (e->src)->flooded)
+		      gcc_assert (bb_order[bb->index]
+				  <= bb_order[e->src->index]);
+		    else if (first)
+		      {
+			dataflow_set_copy (in, &VTI (e->src)->out);
+			first = false;
+		      }
+		    else
+		      {
+			dataflow_set_merge (in, &VTI (e->src)->out);
+			adjust = true;
+		      }
+
+		  if (adjust)
+		    {
+		      dataflow_post_merge_adjust (in, &VTI (bb)->permp);
+#if ENABLE_CHECKING
+		      /* Merge and merge_adjust should keep entries in
+			 canonical order.  */
+		      htab_traverse (in->vars,
+				     canonicalize_loc_order_check,
+				     in);
+#endif
+		    }
+
+		  VTI (bb)->flooded = true;
+
+		  if (oldinp && dump_file && flag_verbose_cselib
+		      && dataflow_set_different (oldinp, in))
+		    {
+		      fprintf (dump_file,
+			       "BB %i IN differences above, from", bb->index);
+		      FOR_EACH_EDGE (e, ei, bb->preds)
+			fprintf (dump_file, " %i", e->src->index);
+		      fputc ('\n', dump_file);
+
+#if 0
+		      FOR_EACH_EDGE (e, ei, bb->preds)
+			{
+			  fprintf (dump_file, "\nBB %i OUT -> %i:\n",
+				   e->src->index, bb->index);
+			  dump_dataflow_set (&VTI (e->src)->out);
+			}
+#endif
+		    }
+
+		  if (oldinp)
+		    dataflow_set_destroy (oldinp);
+		}
+	      else
 		{
-		  dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
+		  /* Calculate the IN set as union of predecessor OUT sets.  */
+		  dataflow_set_clear (&VTI (bb)->in);
+		  FOR_EACH_EDGE (e, ei, bb->preds)
+		    dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
 		}
 
 	      changed = compute_bb_dataflow (bb);
+	      htabsz += VTI (bb)->in.vars->size + VTI (bb)->out.vars->size;
+
 	      if (changed)
 		{
 		  FOR_EACH_EDGE (e, ei, bb->succs)
@@ -2213,9 +5126,6 @@ vt_find_locations (void)
 		      if (e->dest == EXIT_BLOCK_PTR)
 			continue;
 
-		      if (e->dest == bb)
-			continue;
-
 		      if (TEST_BIT (visited, e->dest->index))
 			{
 			  if (!TEST_BIT (in_pending, e->dest->index))
@@ -2236,10 +5146,30 @@ vt_find_locations (void)
 			}
 		    }
 		}
+
+	      if (dump_file)
+		fprintf (dump_file,
+			 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
+			 bb->index,
+			 (int)VTI (bb)->in.vars->n_elements, oldinsz,
+			 (int)VTI (bb)->out.vars->n_elements, oldoutsz,
+			 (int)worklist->nodes, (int)pending->nodes, htabsz);
+
+	      if (dump_file && flag_verbose_cselib)
+		{
+		  fprintf (dump_file, "BB %i IN:\n", bb->index);
+		  dump_dataflow_set (&VTI (bb)->in);
+		  fprintf (dump_file, "BB %i OUT:\n", bb->index);
+		  dump_dataflow_set (&VTI (bb)->out);
+		}
 	    }
 	}
     }
 
+  if (MAY_HAVE_DEBUG_INSNS)
+    FOR_EACH_BB (bb)
+      gcc_assert (VTI (bb)->flooded);
+
   free (bb_order);
   fibheap_delete (worklist);
   fibheap_delete (pending);
@@ -2255,7 +5185,10 @@ dump_attrs_list (attrs list)
 {
   for (; list; list = list->next)
     {
-      print_mem_expr (dump_file, list->decl);
+      if (dv_is_decl_p (list->dv))
+	print_mem_expr (dump_file, dv_as_decl (list->dv));
+      else
+	print_rtl_single (dump_file, dv_as_value (list->dv));
       fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
     }
   fprintf (dump_file, "\n");
@@ -2264,18 +5197,43 @@ dump_attrs_list (attrs list)
 /* Print the information about variable *SLOT to dump file.  */
 
 static int
-dump_variable (void **slot, void *data ATTRIBUTE_UNUSED)
+dump_variable_slot (void **slot, void *data ATTRIBUTE_UNUSED)
+{
+  variable var = (variable) *slot;
+
+  dump_variable (var);
+
+  /* Continue traversing the hash table.  */
+  return 1;
+}
+
+/* Print the information about variable VAR to dump file.  */
+
+static void
+dump_variable (variable var)
 {
-  variable var = *(variable *) slot;
   int i;
   location_chain node;
 
-  fprintf (dump_file, "  name: %s",
-	   IDENTIFIER_POINTER (DECL_NAME (var->decl)));
-  if (dump_flags & TDF_UID)
-    fprintf (dump_file, " D.%u\n", DECL_UID (var->decl));
+  if (dv_is_decl_p (var->dv))
+    {
+      const_tree decl = dv_as_decl (var->dv);
+
+      if (DECL_NAME (decl))
+	fprintf (dump_file, "  name: %s",
+		 IDENTIFIER_POINTER (DECL_NAME (decl)));
+      else
+	fprintf (dump_file, "  name: D.%u", DECL_UID (decl));
+      if (dump_flags & TDF_UID)
+	fprintf (dump_file, " D.%u\n", DECL_UID (decl));
+      else
+	fprintf (dump_file, "\n");
+    }
   else
-    fprintf (dump_file, "\n");
+    {
+      fputc (' ', dump_file);
+      print_rtl_single (dump_file, dv_as_value (var->dv));
+    }
 
   for (i = 0; i < var->n_var_parts; i++)
     {
@@ -2289,9 +5247,6 @@ dump_variable (void **slot, void *data A
 	  print_rtl_single (dump_file, node->loc);
 	}
     }
-
-  /* Continue traversing the hash table.  */
-  return 1;
 }
 
 /* Print the information about variables from hash table VARS to dump file.  */
@@ -2302,7 +5257,7 @@ dump_vars (htab_t vars)
   if (htab_elements (vars) > 0)
     {
       fprintf (dump_file, "Variables:\n");
-      htab_traverse (vars, dump_variable, NULL);
+      htab_traverse (vars, dump_variable_slot, NULL);
     }
 }
 
@@ -2350,27 +5305,28 @@ dump_dataflow_sets (void)
 static void
 variable_was_changed (variable var, htab_t htab)
 {
-  hashval_t hash = VARIABLE_HASH_VAL (var->decl);
+  hashval_t hash = dv_htab_hash (var->dv);
 
   if (emit_notes)
     {
-      variable *slot;
+      void **slot;
 
-      slot = (variable *) htab_find_slot_with_hash (changed_variables,
-						    var->decl, hash, INSERT);
+      slot = htab_find_slot_with_hash (changed_variables,
+				       &var->dv,
+				       hash, INSERT);
 
       if (htab && var->n_var_parts == 0)
 	{
 	  variable empty_var;
 	  void **old;
 
-	  empty_var = (variable) pool_alloc (var_pool);
-	  empty_var->decl = var->decl;
-	  empty_var->refcount = 1;
+	  empty_var = (variable) pool_alloc (dv_pool (var->dv));
+	  empty_var->dv = var->dv;
+	  empty_var->refcount = 0;
 	  empty_var->n_var_parts = 0;
 	  *slot = empty_var;
 
-	  old = htab_find_slot_with_hash (htab, var->decl, hash,
+	  old = htab_find_slot_with_hash (htab, &var->dv, hash,
 					  NO_INSERT);
 	  if (old)
 	    htab_clear_slot (htab, old);
@@ -2385,8 +5341,8 @@ variable_was_changed (variable var, htab
       gcc_assert (htab);
       if (var->n_var_parts == 0)
 	{
-	  void **slot = htab_find_slot_with_hash (htab, var->decl, hash,
-						  NO_INSERT);
+	  void **slot = htab_find_slot_with_hash (htab, &var->dv,
+						  hash, NO_INSERT);
 	  if (slot)
 	    htab_clear_slot (htab, slot);
 	}
@@ -2426,27 +5382,27 @@ find_variable_location_part (variable va
   return -1;
 }
 
-/* Set the part of variable's location in the dataflow set SET.  The variable
-   part is specified by variable's declaration DECL and offset OFFSET and the
-   part's location by LOC.  */
-
 static void
-set_variable_part (dataflow_set *set, rtx loc, tree decl, HOST_WIDE_INT offset,
-		   enum var_init_status initialized, rtx set_src)
+set_slot_part (dataflow_set *set, rtx loc, void **slot,
+	       decl_or_value dv, HOST_WIDE_INT offset,
+	       enum var_init_status initialized, rtx set_src)
 {
   int pos;
   location_chain node, next;
   location_chain *nextp;
   variable var;
-  void **slot;
+  bool onepart = dv_onepart_p (dv);
   
-  slot = htab_find_slot_with_hash (set->vars, decl,
-				   VARIABLE_HASH_VAL (decl), INSERT);
-  if (!*slot)
+  gcc_assert (offset == 0 || !onepart);
+  gcc_assert (loc != dv_as_opaque (dv));
+
+  var = (variable) *slot;
+
+  if (!var)
     {
       /* Create new variable information.  */
-      var = (variable) pool_alloc (var_pool);
-      var->decl = decl;
+      var = (variable) pool_alloc (dv_pool (dv));
+      var->dv = dv;
       var->refcount = 1;
       var->n_var_parts = 1;
       var->var_part[0].offset = offset;
@@ -2454,12 +5410,40 @@ set_variable_part (dataflow_set *set, rt
       var->var_part[0].cur_loc = NULL;
       *slot = var;
       pos = 0;
+      nextp = &var->var_part[0].loc_chain;
+    }
+  else if (onepart)
+    {
+      int r = -1, c = 0;
+
+      gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
+
+      pos = 0;
+
+      for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
+	   nextp = &node->next)
+	if ((r = loc_cmp (node->loc, loc)) >= 0)
+	  break;
+	else
+	  c++;
+
+      if (r == 0)
+	return;
+
+      if (var->refcount > 1)
+	{
+	  var = unshare_variable (slot, var, initialized);
+	  for (nextp = &var->var_part[0].loc_chain; c;
+	       nextp = &(*nextp)->next)
+	    c--;
+	  gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
+	}
     }
   else
     {
       int inspos = 0;
 
-      var = (variable) *slot;
+      gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
 
       pos = find_variable_location_part (var, offset, &inspos);
 
@@ -2486,7 +5470,7 @@ set_variable_part (dataflow_set *set, rt
 	    {
 	      /* We have to make a copy of a shared variable.  */
 	      if (var->refcount > 1)
-		var = unshare_variable (set, var, initialized);
+		var = unshare_variable (slot, var, initialized);
 	    }
 	}
       else
@@ -2495,11 +5479,12 @@ set_variable_part (dataflow_set *set, rt
 
 	  /* We have to make a copy of the shared variable.  */
 	  if (var->refcount > 1)
-	    var = unshare_variable (set, var, initialized);
+	    var = unshare_variable (slot, var, initialized);
 
 	  /* We track only variables whose size is <= MAX_VAR_PARTS bytes
 	     thus there are at most MAX_VAR_PARTS different offsets.  */
-	  gcc_assert (var->n_var_parts < MAX_VAR_PARTS);
+	  gcc_assert (var->n_var_parts < MAX_VAR_PARTS
+		      && (!var->n_var_parts || !dv_onepart_p (var->dv)));
 
 	  /* We have to move the elements of array starting at index
 	     inspos to the next position.  */
@@ -2511,29 +5496,31 @@ set_variable_part (dataflow_set *set, rt
 	  var->var_part[pos].loc_chain = NULL;
 	  var->var_part[pos].cur_loc = NULL;
 	}
-    }
 
-  /* Delete the location from the list.  */
-  nextp = &var->var_part[pos].loc_chain;
-  for (node = var->var_part[pos].loc_chain; node; node = next)
-    {
-      next = node->next;
-      if ((REG_P (node->loc) && REG_P (loc)
-	   && REGNO (node->loc) == REGNO (loc))
-	  || rtx_equal_p (node->loc, loc))
+      /* Delete the location from the list.  */
+      nextp = &var->var_part[pos].loc_chain;
+      for (node = var->var_part[pos].loc_chain; node; node = next)
 	{
-	  /* Save these values, to assign to the new node, before
-	     deleting this one.  */
-	  if (node->init > initialized)
-	    initialized = node->init;
-	  if (node->set_src != NULL && set_src == NULL)
-	    set_src = node->set_src;
-	  pool_free (loc_chain_pool, node);
-	  *nextp = next;
-	  break;
+	  next = node->next;
+	  if ((REG_P (node->loc) && REG_P (loc)
+	       && REGNO (node->loc) == REGNO (loc))
+	      || rtx_equal_p (node->loc, loc))
+	    {
+	      /* Save these values, to assign to the new node, before
+		 deleting this one.  */
+	      if (node->init > initialized)
+		initialized = node->init;
+	      if (node->set_src != NULL && set_src == NULL)
+		set_src = node->set_src;
+	      pool_free (loc_chain_pool, node);
+	      *nextp = next;
+	      break;
+	    }
+	  else
+	    nextp = &node->next;
 	}
-      else
-	nextp = &node->next;
+
+      nextp = &var->var_part[pos].loc_chain;
     }
 
   /* Add the location to the beginning.  */
@@ -2541,8 +5528,8 @@ set_variable_part (dataflow_set *set, rt
   node->loc = loc;
   node->init = initialized;
   node->set_src = set_src;
-  node->next = var->var_part[pos].loc_chain;
-  var->var_part[pos].loc_chain = node;
+  node->next = *nextp;
+  *nextp = node;
 
   /* If no location was emitted do so.  */
   if (var->var_part[pos].cur_loc == NULL)
@@ -2552,162 +5539,267 @@ set_variable_part (dataflow_set *set, rt
     }
 }
 
+/* Set the part of variable's location in the dataflow set SET.  The
+   variable part is specified by variable's declaration in DV and
+   offset OFFSET and the part's location by LOC.  IOPT should be
+   NO_INSERT if the variable is known to be in SET already and the
+   variable hash table must not be resized, and INSERT otherwise.  */
+
+static void
+set_variable_part (dataflow_set *set, rtx loc,
+		   decl_or_value dv, HOST_WIDE_INT offset,
+		   enum var_init_status initialized, rtx set_src,
+		   enum insert_option iopt)
+{
+  void **slot = htab_find_slot_with_hash (set->vars, &dv,
+					  dv_htab_hash (dv), iopt);
+  set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
+}
+
 /* Remove all recorded register locations for the given variable part
    from dataflow set SET, except for those that are identical to loc.
-   The variable part is specified by variable's declaration DECL and
-   offset OFFSET.  */
+   The variable part is specified by variable's declaration or value
+   DV and offset OFFSET.  */
 
 static void
-clobber_variable_part (dataflow_set *set, rtx loc, tree decl,
-		       HOST_WIDE_INT offset, rtx set_src)
+clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
+		   HOST_WIDE_INT offset, rtx set_src)
 {
-  void **slot;
+  variable var = (variable) *slot;
+  int pos = find_variable_location_part (var, offset, NULL);
 
-  if (! decl || ! DECL_P (decl))
-    return;
-
-  slot = htab_find_slot_with_hash (set->vars, decl, VARIABLE_HASH_VAL (decl),
-				   NO_INSERT);
-  if (slot)
+  if (pos >= 0)
     {
-      variable var = (variable) *slot;
-      int pos = find_variable_location_part (var, offset, NULL);
+      location_chain node, next;
 
-      if (pos >= 0)
+      /* Remove the register locations from the dataflow set.  */
+      next = var->var_part[pos].loc_chain;
+      for (node = next; node; node = next)
 	{
-	  location_chain node, next;
+	  next = node->next;
+	  if (node->loc != loc
+	      && (!flag_var_tracking_uninit
+		  || !set_src
+		  || MEM_P (set_src)
+		  || !rtx_equal_p (set_src, node->set_src)))
+	    {
+	      if (REG_P (node->loc))
+		{
+		  attrs anode, anext;
+		  attrs *anextp;
 
-	  /* Remove the register locations from the dataflow set.  */
-	  next = var->var_part[pos].loc_chain;
-	  for (node = next; node; node = next)
-	    {
-	      next = node->next;
-	      if (node->loc != loc 
-		  && (!flag_var_tracking_uninit
-		      || !set_src 
-		      || MEM_P (set_src)
-		      || !rtx_equal_p (set_src, node->set_src)))
-		{
-		  if (REG_P (node->loc))
-		    {
-		      attrs anode, anext;
-		      attrs *anextp;
-
-		      /* Remove the variable part from the register's
-			 list, but preserve any other variable parts
-			 that might be regarded as live in that same
-			 register.  */
-		      anextp = &set->regs[REGNO (node->loc)];
-		      for (anode = *anextp; anode; anode = anext)
+		  /* Remove the variable part from the register's
+		     list, but preserve any other variable parts
+		     that might be regarded as live in that same
+		     register.  */
+		  anextp = &set->regs[REGNO (node->loc)];
+		  for (anode = *anextp; anode; anode = anext)
+		    {
+		      anext = anode->next;
+		      if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
+			  && anode->offset == offset)
 			{
-			  anext = anode->next;
-			  if (anode->decl == decl
-			      && anode->offset == offset)
-			    {
-			      pool_free (attrs_pool, anode);
-			      *anextp = anext;
-			    }
-			  else
-			    anextp = &anode->next;
+			  pool_free (attrs_pool, anode);
+			  *anextp = anext;
 			}
+		      else
+			anextp = &anode->next;
 		    }
-
-		  delete_variable_part (set, node->loc, decl, offset);
 		}
+
+	      delete_slot_part (set, node->loc, slot, offset);
 	    }
 	}
     }
 }
 
-/* Delete the part of variable's location from dataflow set SET.  The variable
-   part is specified by variable's declaration DECL and offset OFFSET and the
-   part's location by LOC.  */
+/* Remove all recorded register locations for the given variable part
+   from dataflow set SET, except for those that are identical to loc.
+   The variable part is specified by variable's declaration or value
+   DV and offset OFFSET.  */
 
 static void
-delete_variable_part (dataflow_set *set, rtx loc, tree decl,
-		      HOST_WIDE_INT offset)
+clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
+		       HOST_WIDE_INT offset, rtx set_src)
 {
   void **slot;
-    
-  slot = htab_find_slot_with_hash (set->vars, decl, VARIABLE_HASH_VAL (decl),
-				   NO_INSERT);
-  if (slot)
+
+  if (!dv_as_opaque (dv)
+      || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
+    return;
+
+  slot = htab_find_slot_with_hash (set->vars, &dv,
+				   dv_htab_hash (dv), NO_INSERT);
+  if (!slot)
+    return;
+
+  clobber_slot_part (set, loc, slot, offset, set_src);
+}
+
+/* Delete the part of variable's location from dataflow set SET.  The
+   variable part is specified by its SET->vars slot SLOT and offset
+   OFFSET and the part's location by LOC.  */
+
+static void
+delete_slot_part (dataflow_set *set, rtx loc, void **slot, HOST_WIDE_INT offset)
+{
+  variable var = (variable) *slot;
+  int pos = find_variable_location_part (var, offset, NULL);
+
+  if (pos >= 0)
     {
-      variable var = (variable) *slot;
-      int pos = find_variable_location_part (var, offset, NULL);
+      location_chain node, next;
+      location_chain *nextp;
+      bool changed;
 
-      if (pos >= 0)
+      if (var->refcount > 1)
 	{
-	  location_chain node, next;
-	  location_chain *nextp;
-	  bool changed;
-
-	  if (var->refcount > 1)
-	    {
-	      /* If the variable contains the location part we have to
-		 make a copy of the variable.  */
-	      for (node = var->var_part[pos].loc_chain; node;
-		   node = node->next)
-		{
-		  if ((REG_P (node->loc) && REG_P (loc)
-		       && REGNO (node->loc) == REGNO (loc))
-		      || rtx_equal_p (node->loc, loc))
-		    {
-		      enum var_init_status status = VAR_INIT_STATUS_UNKNOWN;
-		      if (! flag_var_tracking_uninit)
-			status = VAR_INIT_STATUS_INITIALIZED;
-		      var = unshare_variable (set, var, status);
-		      break;
-		    }
-		}
-	    }
-
-	  /* Delete the location part.  */
-	  nextp = &var->var_part[pos].loc_chain;
-	  for (node = *nextp; node; node = next)
+	  /* If the variable contains the location part we have to
+	     make a copy of the variable.  */
+	  for (node = var->var_part[pos].loc_chain; node;
+	       node = node->next)
 	    {
-	      next = node->next;
 	      if ((REG_P (node->loc) && REG_P (loc)
 		   && REGNO (node->loc) == REGNO (loc))
 		  || rtx_equal_p (node->loc, loc))
 		{
-		  pool_free (loc_chain_pool, node);
-		  *nextp = next;
+		  enum var_init_status status = VAR_INIT_STATUS_UNKNOWN;
+		  if (! flag_var_tracking_uninit)
+		    status = VAR_INIT_STATUS_INITIALIZED;
+		  var = unshare_variable (slot, var, status);
 		  break;
 		}
-	      else
-		nextp = &node->next;
 	    }
+	}
 
-	  /* If we have deleted the location which was last emitted
-	     we have to emit new location so add the variable to set
-	     of changed variables.  */
-	  if (var->var_part[pos].cur_loc
-	      && ((REG_P (loc)
-		   && REG_P (var->var_part[pos].cur_loc)
-		   && REGNO (loc) == REGNO (var->var_part[pos].cur_loc))
-		  || rtx_equal_p (loc, var->var_part[pos].cur_loc)))
-	    {
-	      changed = true;
-	      if (var->var_part[pos].loc_chain)
-		var->var_part[pos].cur_loc = var->var_part[pos].loc_chain->loc;
+      /* Delete the location part.  */
+      nextp = &var->var_part[pos].loc_chain;
+      for (node = *nextp; node; node = next)
+	{
+	  next = node->next;
+	  if ((REG_P (node->loc) && REG_P (loc)
+	       && REGNO (node->loc) == REGNO (loc))
+	      || rtx_equal_p (node->loc, loc))
+	    {
+	      pool_free (loc_chain_pool, node);
+	      *nextp = next;
+	      break;
 	    }
 	  else
-	    changed = false;
+	    nextp = &node->next;
+	}
+
+      /* If we have deleted the location which was last emitted
+	 we have to emit new location so add the variable to set
+	 of changed variables.  */
+      if (var->var_part[pos].cur_loc
+	  && ((REG_P (loc)
+	       && REG_P (var->var_part[pos].cur_loc)
+	       && REGNO (loc) == REGNO (var->var_part[pos].cur_loc))
+	      || rtx_equal_p (loc, var->var_part[pos].cur_loc)))
+	{
+	  changed = true;
+	  if (var->var_part[pos].loc_chain)
+	    var->var_part[pos].cur_loc = var->var_part[pos].loc_chain->loc;
+	}
+      else
+	changed = false;
 
-	  if (var->var_part[pos].loc_chain == NULL)
+      if (var->var_part[pos].loc_chain == NULL)
+	{
+	  gcc_assert (changed);
+	  var->n_var_parts--;
+	  while (pos < var->n_var_parts)
 	    {
-	      var->n_var_parts--;
-	      while (pos < var->n_var_parts)
-		{
-		  var->var_part[pos] = var->var_part[pos + 1];
-		  pos++;
-		}
+	      var->var_part[pos] = var->var_part[pos + 1];
+	      pos++;
 	    }
-	  if (changed)
-	    variable_was_changed (var, set->vars);
 	}
+      if (changed)
+	variable_was_changed (var, set->vars);
+    }
+}
+
+/* Delete the part of variable's location from dataflow set SET.  The
+   variable part is specified by variable's declaration or value DV
+   and offset OFFSET and the part's location by LOC.  */
+
+static void
+delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
+		      HOST_WIDE_INT offset)
+{
+  void **slot;
+
+  slot = htab_find_slot_with_hash (set->vars, &dv,
+				   dv_htab_hash (dv), NO_INSERT);
+  if (!slot)
+    return;
+
+  delete_slot_part (set, loc, slot, offset);
+}
+
+/* Callback for cselib_expand_value, that looks for expressions
+   holding the value in the var-tracking hash tables.  */
+
+static rtx
+vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
+{
+  htab_t vars = (htab_t)data;
+  decl_or_value dv;
+  void **slot;
+  variable var;
+  location_chain loc;
+  rtx result;
+
+  gcc_assert (GET_CODE (x) == VALUE);
+
+  if (VALUE_RECURSED_INTO (x))
+    return NULL;
+
+  dv = dv_from_value (x);
+  slot = htab_find_slot_with_hash (vars, &dv, dv_htab_hash (dv), NO_INSERT);
+
+  if (!slot)
+    return NULL;
+
+  var = (variable)*slot;
+
+  if (var->n_var_parts == 0)
+    return NULL;
+
+  gcc_assert (var->n_var_parts == 1);
+
+  VALUE_RECURSED_INTO (x) = true;
+  result = NULL;
+
+  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
+    {
+      result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
+					   vt_expand_loc_callback, vars);
+      if (result)
+	break;
     }
+
+  VALUE_RECURSED_INTO (x) = false;
+  return result;
+}
+
+/* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
+   tables.  */
+
+static rtx
+vt_expand_loc (rtx loc, htab_t vars)
+{
+  if (!MAY_HAVE_DEBUG_INSNS)
+    return loc;
+
+  loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
+				    vt_expand_loc_callback, vars);
+
+  if (loc && MEM_P (loc))
+    loc = targetm.delegitimize_address (loc);
+
+  return loc;
 }
 
 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP.  DATA contains
@@ -2717,9 +5809,10 @@ delete_variable_part (dataflow_set *set,
 static int
 emit_note_insn_var_location (void **varp, void *data)
 {
-  variable var = *(variable *) varp;
+  variable var = (variable) *varp;
   rtx insn = ((emit_note_data *)data)->insn;
   enum emit_note_where where = ((emit_note_data *)data)->where;
+  htab_t vars = ((emit_note_data *)data)->vars;
   rtx note;
   int i, j, n_var_parts;
   bool complete;
@@ -2728,8 +5821,14 @@ emit_note_insn_var_location (void **varp
   tree type_size_unit;
   HOST_WIDE_INT offsets[MAX_VAR_PARTS];
   rtx loc[MAX_VAR_PARTS];
+  tree decl;
 
-  gcc_assert (var->decl);
+  if (dv_is_value_p (var->dv))
+    goto clear;
+
+  decl = dv_as_decl (var->dv);
+
+  gcc_assert (decl);
 
   if (! flag_var_tracking_uninit)
     initialized = VAR_INIT_STATUS_INITIALIZED;
@@ -2740,6 +5839,7 @@ emit_note_insn_var_location (void **varp
   for (i = 0; i < var->n_var_parts; i++)
     {
       enum machine_mode mode, wider_mode;
+      rtx loc2;
 
       if (last_limit < var->var_part[i].offset)
 	{
@@ -2749,7 +5849,13 @@ emit_note_insn_var_location (void **varp
       else if (last_limit > var->var_part[i].offset)
 	continue;
       offsets[n_var_parts] = var->var_part[i].offset;
-      loc[n_var_parts] = var->var_part[i].loc_chain->loc;
+      loc2 = vt_expand_loc (var->var_part[i].loc_chain->loc, vars);
+      if (!loc2)
+	{
+	  complete = false;
+	  continue;
+	}
+      loc[n_var_parts] = loc2;
       mode = GET_MODE (loc[n_var_parts]);
       initialized = var->var_part[i].loc_chain->init;
       last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
@@ -2761,13 +5867,12 @@ emit_note_insn_var_location (void **varp
 	  break;
       if (j < var->n_var_parts
 	  && wider_mode != VOIDmode
-	  && GET_CODE (loc[n_var_parts])
-	     == GET_CODE (var->var_part[j].loc_chain->loc)
-	  && mode == GET_MODE (var->var_part[j].loc_chain->loc)
+	  && (loc2 = vt_expand_loc (var->var_part[j].loc_chain->loc, vars))
+	  && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2)
+	  && mode == GET_MODE (loc2)
 	  && last_limit == var->var_part[j].offset)
 	{
 	  rtx new_loc = NULL;
-	  rtx loc2 = var->var_part[j].loc_chain->loc;
 
 	  if (REG_P (loc[n_var_parts])
 	      && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
@@ -2821,7 +5926,7 @@ emit_note_insn_var_location (void **varp
 	}
       ++n_var_parts;
     }
-  type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (var->decl));
+  type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
   if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
     complete = false;
 
@@ -2835,7 +5940,7 @@ emit_note_insn_var_location (void **varp
 
   if (!complete)
     {
-      NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, var->decl,
+      NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
 						       NULL_RTX, (int) initialized);
     }
   else if (n_var_parts == 1)
@@ -2843,7 +5948,7 @@ emit_note_insn_var_location (void **varp
       rtx expr_list
 	= gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
 
-      NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, var->decl,
+      NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
 						       expr_list, 
 						       (int) initialized);
     }
@@ -2857,36 +5962,121 @@ emit_note_insn_var_location (void **varp
 
       parallel = gen_rtx_PARALLEL (VOIDmode,
 				   gen_rtvec_v (n_var_parts, loc));
-      NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, var->decl,
+      NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
 						       parallel, 
 						       (int) initialized);
     }
 
+ clear:
   htab_clear_slot (changed_variables, varp);
 
   /* When there are no location parts the variable has been already
      removed from hash table and a new empty variable was created.
      Free the empty variable.  */
-  if (var->n_var_parts == 0)
+  if (var->n_var_parts == 0 && var->refcount == 0)
     {
-      pool_free (var_pool, var);
+      pool_free (dv_pool (var->dv), var);
     }
 
   /* Continue traversing the hash table.  */
   return 1;
 }
 
+/* If *LOC is a VALUE present in changed_variables, set the bool DATA
+   points to and stop searching.  */
+
+static int
+check_changed_value (rtx *loc, void *data)
+{
+  rtx x = *loc;
+  bool *changedp = (bool *)data;
+  decl_or_value dv;
+
+  if (GET_CODE (x) != VALUE)
+    return 0;
+
+  dv = dv_from_value (x);
+  if (!htab_find_slot_with_hash (changed_variables, &dv, dv_htab_hash (dv),
+				 NO_INSERT))
+    return 0;
+
+  *changedp = true;
+  return 1;
+}
+
+/* Mark a variable or a value that refers to values that have
+   changed.  */
+
+static int
+check_changed_var (void **slot, void *data)
+{
+  variable var = (variable) *slot;
+  bool *changedp = (bool *)data;
+  location_chain loc;
+  bool changed = false;
+
+  if (var->n_var_parts != 1 || !dv_onepart_p (var->dv))
+    return 1;
+
+  if (htab_find_slot_with_hash (changed_variables, &var->dv,
+				dv_htab_hash (var->dv), NO_INSERT))
+    return 1;
+
+  if (!dv_is_value_p (var->dv)
+      && !var_debug_value_for_decl (dv_as_decl (var->dv)))
+    return 1;
+
+  for (loc = var->var_part[0].loc_chain; loc && !changed; loc = loc->next)
+    for_each_rtx (&loc->loc, check_changed_value, &changed);
+
+  /* ??? Is this really necessary?  Maybe the local table is redundant
+     with the cselib table.  */
+  if (!changed && dv_is_value_p (var->dv))
+    {
+      struct elt_loc_list *l;
+
+      for (l = CSELIB_VAL_PTR (dv_as_value (var->dv))->locs;
+	   l && !changed; l = l->next)
+	for_each_rtx (&l->loc, check_changed_value, &changed);
+    }
+
+  if (changed)
+    {
+      variable_was_changed (var, NULL);
+      *changedp = true;
+    }
+
+  return 1;
+}
+
 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
    CHANGED_VARIABLES and delete this chain.  WHERE specifies whether the notes
    shall be emitted before of after instruction INSN.  */
 
 static void
-emit_notes_for_changes (rtx insn, enum emit_note_where where)
+emit_notes_for_changes (rtx insn, enum emit_note_where where, htab_t vars)
 {
   emit_note_data data;
 
+  if (MAY_HAVE_DEBUG_INSNS)
+    {
+      bool more_changed;
+
+      /* This is very inefficient.  Back-links from values to other
+	 values referencing them would make things far more efficient,
+	 but it's not clear that the additional memory use is worth
+	 it.  */
+      do
+	{
+	  more_changed = false;
+	  htab_traverse (vars, check_changed_var, &more_changed);
+	}
+      while (more_changed);
+    }
+
   data.insn = insn;
   data.where = where;
+  data.vars = vars;
   htab_traverse (changed_variables, emit_note_insn_var_location, &data);
 }
 
@@ -2899,17 +6089,17 @@ emit_notes_for_differences_1 (void **slo
   htab_t new_vars = (htab_t) data;
   variable old_var, new_var;
 
-  old_var = *(variable *) slot;
-  new_var = (variable) htab_find_with_hash (new_vars, old_var->decl,
-				 VARIABLE_HASH_VAL (old_var->decl));
+  old_var = (variable) *slot;
+  new_var = (variable) htab_find_with_hash (new_vars, &old_var->dv,
+					    dv_htab_hash (old_var->dv));
 
   if (!new_var)
     {
       /* Variable has disappeared.  */
       variable empty_var;
 
-      empty_var = (variable) pool_alloc (var_pool);
-      empty_var->decl = old_var->decl;
+      empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
+      empty_var->dv = old_var->dv;
       empty_var->refcount = 1;
       empty_var->n_var_parts = 0;
       variable_was_changed (empty_var, NULL);
@@ -2932,9 +6122,9 @@ emit_notes_for_differences_2 (void **slo
   htab_t old_vars = (htab_t) data;
   variable old_var, new_var;
 
-  new_var = *(variable *) slot;
-  old_var = (variable) htab_find_with_hash (old_vars, new_var->decl,
-				 VARIABLE_HASH_VAL (new_var->decl));
+  new_var = (variable) *slot;
+  old_var = (variable) htab_find_with_hash (old_vars, &new_var->dv,
+					    dv_htab_hash (new_var->dv));
   if (!old_var)
     {
       /* Variable has appeared.  */
@@ -2954,7 +6144,7 @@ emit_notes_for_differences (rtx insn, da
 {
   htab_traverse (old_set->vars, emit_notes_for_differences_1, new_set->vars);
   htab_traverse (new_set->vars, emit_notes_for_differences_2, old_set->vars);
-  emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN);
+  emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
 }
 
 /* Emit the notes for changes of location parts in the basic block BB.  */
@@ -2975,23 +6165,15 @@ emit_notes_in_bb (basic_block bb)
       switch (VTI (bb)->mos[i].type)
 	{
 	  case MO_CALL:
-	    {
-	      int r;
-
-	      for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
-		if (TEST_HARD_REG_BIT (call_used_reg_set, r))
-		  {
-		    var_regno_delete (&set, r);
-		  }
-	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN);
-	    }
+	    dataflow_set_clear_at_call (&set);
+	    emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set.vars);
 	    break;
 
 	  case MO_USE:
 	    {
 	      rtx loc = VTI (bb)->mos[i].u.loc;
-      
 	      enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
+
 	      if (! flag_var_tracking_uninit)
 		status = VAR_INIT_STATUS_INITIALIZED;
 	      if (GET_CODE (loc) == REG)
@@ -2999,7 +6181,154 @@ emit_notes_in_bb (basic_block bb)
 	      else
 		var_mem_set (&set, loc, status, NULL);
 
-	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN);
+	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set.vars);
+	    }
+	    break;
+
+	  case MO_VAL_LOC:
+	    {
+	      rtx loc = VTI (bb)->mos[i].u.loc;
+	      rtx val, vloc;
+	      tree var;
+
+	      if (GET_CODE (loc) == CONCAT)
+		{
+		  val = XEXP (loc, 0);
+		  vloc = XEXP (loc, 1);
+		}
+	      else
+		{
+		  val = NULL_RTX;
+		  vloc = loc;
+		}
+
+	      var = PAT_VAR_LOCATION_DECL (vloc);
+
+	      clobber_variable_part (&set, NULL_RTX,
+				     dv_from_decl (var), 0, NULL_RTX);
+	      if (val)
+		{
+		  if (VAL_NEEDS_RESOLUTION (loc))
+		    val_resolve (&set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
+		  set_variable_part (&set, val, dv_from_decl (var), 0,
+				     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
+				     INSERT);
+		}
+
+	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set.vars);
+	    }
+	    break;
+
+	  case MO_VAL_USE:
+	    {
+	      rtx loc = VTI (bb)->mos[i].u.loc;
+	      rtx val, vloc, uloc;
+
+	      vloc = uloc = XEXP (loc, 1);
+	      val = XEXP (loc, 0);
+
+	      if (GET_CODE (val) == CONCAT)
+		{
+		  uloc = XEXP (val, 1);
+		  val = XEXP (val, 0);
+		}
+
+	      if (VAL_NEEDS_RESOLUTION (loc))
+		val_resolve (&set, val, vloc, insn);
+
+	      if (VAL_HOLDS_TRACK_EXPR (loc))
+		{
+		  enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
+
+		  if (! flag_var_tracking_uninit)
+		    status = VAR_INIT_STATUS_INITIALIZED;
+
+		  if (GET_CODE (uloc) == REG)
+		    var_reg_set (&set, uloc, status, NULL);
+		  else if (GET_CODE (uloc) == MEM)
+		    var_mem_set (&set, uloc, status, NULL);
+		}
+
+	      emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set.vars);
+	    }
+	    break;
+
+	  case MO_VAL_SET:
+	    {
+	      rtx loc = VTI (bb)->mos[i].u.loc;
+	      rtx val, vloc, uloc;
+
+	      vloc = uloc = XEXP (loc, 1);
+	      val = XEXP (loc, 0);
+
+	      if (GET_CODE (val) == CONCAT)
+		{
+		  vloc = XEXP (val, 1);
+		  val = XEXP (val, 0);
+		}
+
+	      if (GET_CODE (vloc) == SET)
+		{
+		  rtx vsrc = SET_SRC (vloc);
+
+		  gcc_assert (val != vsrc);
+		  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
+
+		  vloc = SET_DEST (vloc);
+
+		  if (VAL_NEEDS_RESOLUTION (loc))
+		    val_resolve (&set, val, vsrc, insn);
+		}
+	      else if (VAL_NEEDS_RESOLUTION (loc))
+		{
+		  gcc_assert (GET_CODE (uloc) == SET
+			      && GET_CODE (SET_SRC (uloc)) == REG);
+		  val_resolve (&set, val, SET_SRC (uloc), insn);
+		}
+
+	      if (VAL_HOLDS_TRACK_EXPR (loc))
+		{
+		  if (VAL_EXPR_IS_CLOBBERED (loc))
+		    {
+		      if (REG_P (uloc))
+			var_reg_delete (&set, uloc, true);
+		      else if (MEM_P (uloc))
+			var_mem_delete (&set, uloc, true);
+		    }
+		  else
+		    {
+		      bool copied_p = VAL_EXPR_IS_COPIED (loc);
+		      rtx set_src = NULL;
+		      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
+
+		      if (GET_CODE (uloc) == SET)
+			{
+			  set_src = SET_SRC (uloc);
+			  uloc = SET_DEST (uloc);
+			}
+
+		      if (copied_p)
+			{
+			  status = find_src_status (&set, set_src);
+
+			  set_src = find_src_set_src (&set, set_src);
+			}
+
+		      if (REG_P (uloc))
+			var_reg_delete_and_set (&set, uloc, !copied_p,
+						status, set_src);
+		      else if (MEM_P (uloc))
+			var_mem_delete_and_set (&set, uloc, !copied_p,
+						status, set_src);
+		    }
+		}
+	      else if (REG_P (uloc))
+		var_regno_delete (&set, REGNO (uloc));
+
+	      val_store (&set, val, vloc, insn);
+
+	      emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
+				      set.vars);
 	    }
 	    break;
 
@@ -3021,7 +6350,8 @@ emit_notes_in_bb (basic_block bb)
 		var_mem_delete_and_set (&set, loc, true, VAR_INIT_STATUS_INITIALIZED, 
 					set_src);
 
-	      emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN);
+	      emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
+				      set.vars);
 	    }
 	    break;
 
@@ -3045,7 +6375,8 @@ emit_notes_in_bb (basic_block bb)
 	      else
 		var_mem_delete_and_set (&set, loc, false, src_status, set_src);
 
-	      emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN);
+	      emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
+				      set.vars);
 	    }
 	    break;
 
@@ -3058,7 +6389,7 @@ emit_notes_in_bb (basic_block bb)
 	      else
 		var_mem_delete (&set, loc, false);
 
-	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN);
+	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set.vars);
 	    }
 	    break;
 
@@ -3071,7 +6402,8 @@ emit_notes_in_bb (basic_block bb)
 	      else
 		var_mem_delete (&set, loc, true);
 
-	      emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN);
+	      emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
+				      set.vars);
 	    }
 	    break;
 
@@ -3159,6 +6491,7 @@ vt_add_function_parameters (void)
       enum machine_mode mode;
       HOST_WIDE_INT offset;
       dataflow_set *out;
+      decl_or_value dv;
 
       if (TREE_CODE (parm) != PARM_DECL)
 	continue;
@@ -3199,22 +6532,52 @@ vt_add_function_parameters (void)
 
       out = &VTI (ENTRY_BLOCK_PTR)->out;
 
+      dv = dv_from_decl (parm);
+
+      if (var_debug_value_for_decl (parm)
+	  /* We can't deal with these right now, because this kind of
+	     variable is single-part.  ??? We could handle parallels
+	     that describe multiple locations for the same single
+	     value, but ATM we don't.  */
+	  && GET_CODE (incoming) != PARALLEL)
+	{
+	  cselib_val *val = cselib_lookup (var_lowpart (mode, incoming),
+					   mode, true);
+
+	  /* ??? Float-typed values in memory are not handled by
+	     cselib.  */
+	  if (val)
+	    {
+	      cselib_preserve_value (val);
+	      set_variable_part (out, val->val_rtx, dv, offset,
+				 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
+	      dv = dv_from_value (val->val_rtx);
+	    }
+	}
+
       if (REG_P (incoming))
 	{
 	  incoming = var_lowpart (mode, incoming);
 	  gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
-	  attrs_list_insert (&out->regs[REGNO (incoming)],
-			     parm, offset, incoming);
-	  set_variable_part (out, incoming, parm, offset, VAR_INIT_STATUS_INITIALIZED, 
-			     NULL);
+	  attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
+			     incoming);
+	  set_variable_part (out, incoming, dv, offset,
+			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
 	}
       else if (MEM_P (incoming))
 	{
 	  incoming = var_lowpart (mode, incoming);
-	  set_variable_part (out, incoming, parm, offset,
-			     VAR_INIT_STATUS_INITIALIZED, NULL);
+	  set_variable_part (out, incoming, dv, offset,
+			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
 	}
     }
+
+  if (MAY_HAVE_DEBUG_INSNS)
+    {
+      cselib_preserve_only_values (true);
+      cselib_reset_table_with_next_value (cselib_get_next_unknown_value ());
+    }
+
 }
 
 /* Allocate and initialize the data structures for variable tracking
@@ -3227,10 +6590,34 @@ vt_initialize (void)
 
   alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
 
+  if (MAY_HAVE_DEBUG_INSNS)
+    {
+      cselib_init (true);
+      scratch_regs = BITMAP_ALLOC (NULL);
+      valvar_pool = create_alloc_pool ("small variable_def pool",
+				       sizeof (struct variable_def), 256);
+    }
+  else
+    {
+      scratch_regs = NULL;
+      valvar_pool = NULL;
+    }
+
   FOR_EACH_BB (bb)
     {
       rtx insn;
       HOST_WIDE_INT pre, post = 0;
+      int count;
+      unsigned int next_value_before = cselib_get_next_unknown_value ();
+      unsigned int next_value_after = next_value_before;
+
+      if (MAY_HAVE_DEBUG_INSNS)
+	{
+	  cselib_record_sets_hook = count_with_sets;
+	  if (dump_file && flag_verbose_cselib)
+	    fprintf (dump_file, "first value: %i\n",
+		     cselib_get_next_unknown_value ());
+	}
 
       /* Count the number of micro operations.  */
       VTI (bb)->n_mos = 0;
@@ -3243,17 +6630,55 @@ vt_initialize (void)
 		{
 		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
 		  if (pre)
-		    VTI (bb)->n_mos++;
+		    {
+		      VTI (bb)->n_mos++;
+		      if (dump_file && flag_verbose_cselib)
+			log_op_type (GEN_INT (pre), bb, insn,
+				     MO_ADJUST, dump_file);
+		    }
 		  if (post)
-		    VTI (bb)->n_mos++;
+		    {
+		      VTI (bb)->n_mos++;
+		      if (dump_file && flag_verbose_cselib)
+			log_op_type (GEN_INT (post), bb, insn,
+				     MO_ADJUST, dump_file);
+		    }
+		}
+	      cselib_hook_called = false;
+	      if (MAY_HAVE_DEBUG_INSNS)
+		{
+		  cselib_process_insn (insn);
+		  if (dump_file && flag_verbose_cselib)
+		    {
+		      print_rtl_single (dump_file, insn);
+		      dump_cselib_table (dump_file);
+		    }
 		}
-	      note_uses (&PATTERN (insn), count_uses_1, insn);
-	      note_stores (PATTERN (insn), count_stores, insn);
+	      if (!cselib_hook_called)
+		count_with_sets (insn, 0, 0);
 	      if (CALL_P (insn))
-		VTI (bb)->n_mos++;
+		{
+		  VTI (bb)->n_mos++;
+		  if (dump_file && flag_verbose_cselib)
+		    log_op_type (PATTERN (insn), bb, insn,
+				 MO_CALL, dump_file);
+		}
 	    }
 	}
 
+      count = VTI (bb)->n_mos;
+
+      if (MAY_HAVE_DEBUG_INSNS)
+	{
+	  cselib_preserve_only_values (false);
+	  next_value_after = cselib_get_next_unknown_value ();
+	  cselib_reset_table_with_next_value (next_value_before);
+	  cselib_record_sets_hook = add_with_sets;
+	  if (dump_file && flag_verbose_cselib)
+	    fprintf (dump_file, "first value: %i\n",
+		     cselib_get_next_unknown_value ());
+	}
+
       /* Add the micro-operations to the array.  */
       VTI (bb)->mos = XNEWVEC (micro_operation, VTI (bb)->n_mos);
       VTI (bb)->n_mos = 0;
@@ -3262,8 +6687,6 @@ vt_initialize (void)
 	{
 	  if (INSN_P (insn))
 	    {
-	      int n1, n2;
-
 	      if (!frame_pointer_needed)
 		{
 		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
@@ -3274,62 +6697,25 @@ vt_initialize (void)
 		      mo->type = MO_ADJUST;
 		      mo->u.adjust = pre;
 		      mo->insn = insn;
-		    }
-		}
 
-	      n1 = VTI (bb)->n_mos;
-	      note_uses (&PATTERN (insn), add_uses_1, insn);
-	      n2 = VTI (bb)->n_mos - 1;
-
-	      /* Order the MO_USEs to be before MO_USE_NO_VARs.  */
-	      while (n1 < n2)
-		{
-		  while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE)
-		    n1++;
-		  while (n1 < n2 && VTI (bb)->mos[n2].type == MO_USE_NO_VAR)
-		    n2--;
-		  if (n1 < n2)
-		    {
-		      micro_operation sw;
-
-		      sw = VTI (bb)->mos[n1];
-		      VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
-		      VTI (bb)->mos[n2] = sw;
+		      if (dump_file && flag_verbose_cselib)
+			log_op_type (PATTERN (insn), bb, insn,
+				     MO_ADJUST, dump_file);
 		    }
 		}
 
-	      if (CALL_P (insn))
+	      cselib_hook_called = false;
+	      if (MAY_HAVE_DEBUG_INSNS)
 		{
-		  micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
-
-		  mo->type = MO_CALL;
-		  mo->insn = insn;
-		}
-
-	      n1 = VTI (bb)->n_mos;
-	      /* This will record NEXT_INSN (insn), such that we can
-		 insert notes before it without worrying about any
-		 notes that MO_USEs might emit after the insn.  */
-	      note_stores (PATTERN (insn), add_stores, insn);
-	      n2 = VTI (bb)->n_mos - 1;
-
-	      /* Order the MO_CLOBBERs to be before MO_SETs.  */
-	      while (n1 < n2)
-		{
-		  while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
-		    n1++;
-		  while (n1 < n2 && (VTI (bb)->mos[n2].type == MO_SET
-				     || VTI (bb)->mos[n2].type == MO_COPY))
-		    n2--;
-		  if (n1 < n2)
-		    {
-		      micro_operation sw;
-
-		      sw = VTI (bb)->mos[n1];
-		      VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
-		      VTI (bb)->mos[n2] = sw;
+		  cselib_process_insn (insn);
+		  if (dump_file && flag_verbose_cselib)
+		    {
+		      print_rtl_single (dump_file, insn);
+		      dump_cselib_table (dump_file);
 		    }
 		}
+	      if (!cselib_hook_called)
+		add_with_sets (insn, 0, 0);
 
 	      if (!frame_pointer_needed && post)
 		{
@@ -3338,23 +6724,41 @@ vt_initialize (void)
 		  mo->type = MO_ADJUST;
 		  mo->u.adjust = post;
 		  mo->insn = insn;
+
+		  if (dump_file && flag_verbose_cselib)
+		    log_op_type (PATTERN (insn), bb, insn,
+				 MO_ADJUST, dump_file);
 		}
 	    }
 	}
+      gcc_assert (count == VTI (bb)->n_mos);
+      if (MAY_HAVE_DEBUG_INSNS)
+	{
+	  cselib_preserve_only_values (true);
+	  gcc_assert (next_value_after == cselib_get_next_unknown_value ());
+	  cselib_reset_table_with_next_value (next_value_after);
+	  cselib_record_sets_hook = NULL;
+	}
     }
 
   /* Init the IN and OUT sets.  */
   FOR_ALL_BB (bb)
     {
       VTI (bb)->visited = false;
+      VTI (bb)->flooded = false;
       dataflow_set_init (&VTI (bb)->in, 7);
       dataflow_set_init (&VTI (bb)->out, 7);
+      VTI (bb)->permp = NULL;
     }
 
+  VTI (ENTRY_BLOCK_PTR)->flooded = true;
+
   attrs_pool = create_alloc_pool ("attrs_def pool",
 				  sizeof (struct attrs_def), 1024);
   var_pool = create_alloc_pool ("variable_def pool",
-				sizeof (struct variable_def), 64);
+				sizeof (struct variable_def)
+				+ (MAX_VAR_PARTS - 1)
+				* sizeof (((variable)NULL)->var_part[0]), 64);
   loc_chain_pool = create_alloc_pool ("location_chain_def pool",
 				      sizeof (struct location_chain_def),
 				      1024);
@@ -3363,6 +6767,38 @@ vt_initialize (void)
   vt_add_function_parameters ();
 }
 
+/* Get rid of all debug insns from the insn stream.  */
+
+static void
+delete_debug_insns (void)
+{
+  basic_block bb;
+  rtx insn, next;
+
+  if (!MAY_HAVE_DEBUG_INSNS)
+    return;
+
+  FOR_EACH_BB (bb)
+    {
+      FOR_BB_INSNS_SAFE (bb, insn, next)
+	if (DEBUG_INSN_P (insn))
+	  delete_insn (insn);
+    }
+}
+
+/* Run a fast, BB-local only version of var tracking, to take care of
+   information that we don't do global analysis on, such that not all
+   information is lost.  If SKIPPED holds, we're skipping the global
+   pass entirely, so we should try to use information it would have
+   handled as well.  */
+
+static void
+vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
+{
+  /* ??? Just skip it all for now.  */
+  delete_debug_insns ();
+}
+
 /* Free the data structures needed for variable tracking.  */
 
 static void
@@ -3379,12 +6815,25 @@ vt_finalize (void)
     {
       dataflow_set_destroy (&VTI (bb)->in);
       dataflow_set_destroy (&VTI (bb)->out);
+      if (VTI (bb)->permp)
+	{
+	  dataflow_set_destroy (VTI (bb)->permp);
+	  XDELETE (VTI (bb)->permp);
+	}
     }
   free_aux_for_blocks ();
   free_alloc_pool (attrs_pool);
   free_alloc_pool (var_pool);
   free_alloc_pool (loc_chain_pool);
   htab_delete (changed_variables);
+
+  if (MAY_HAVE_DEBUG_INSNS)
+    {
+      free_alloc_pool (valvar_pool);
+      cselib_finish ();
+      BITMAP_FREE (scratch_regs);
+      scratch_regs = NULL;
+    }
 }
 
 /* The entry point to variable tracking pass.  */
@@ -3393,7 +6842,10 @@ unsigned int
 variable_tracking_main (void)
 {
   if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
-    return 0;
+    {
+      vt_debug_insns_local (true);
+      return 0;
+    }
 
   mark_dfs_back_edges ();
   vt_initialize ();
@@ -3402,6 +6854,7 @@ variable_tracking_main (void)
       if (!vt_stack_adjustments ())
 	{
 	  vt_finalize ();
+	  vt_debug_insns_local (true);
 	  return 0;
 	}
     }
@@ -3416,6 +6869,7 @@ variable_tracking_main (void)
     }
 
   vt_finalize ();
+  vt_debug_insns_local (false);
   return 0;
 }
 
Index: gcc/Makefile.in
===================================================================
--- gcc/Makefile.in.orig	2009-06-05 05:07:48.000000000 -0300
+++ gcc/Makefile.in	2009-06-05 05:07:56.000000000 -0300
@@ -2533,7 +2533,7 @@ rtl.o : rtl.c $(CONFIG_H) $(SYSTEM_H) co
 
 print-rtl.o : print-rtl.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
     $(RTL_H) $(TREE_H) hard-reg-set.h $(BASIC_BLOCK_H) $(FLAGS_H) \
-    $(BCONFIG_H) $(REAL_H)
+    $(BCONFIG_H) $(REAL_H) cselib.h
 rtlanal.o : rtlanal.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TOPLEV_H) \
    $(RTL_H) hard-reg-set.h $(TM_P_H) insn-config.h $(RECOG_H) $(REAL_H) \
    $(FLAGS_H) $(REGS_H) output.h $(TARGET_H) $(FUNCTION_H) $(TREE_H) \
@@ -2809,7 +2809,7 @@ regstat.o : regstat.c $(CONFIG_H) $(SYST
 var-tracking.o : var-tracking.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(RTL_H) $(TREE_H) hard-reg-set.h insn-config.h reload.h $(FLAGS_H) \
    $(BASIC_BLOCK_H) output.h sbitmap.h alloc-pool.h $(FIBHEAP_H) $(HASHTAB_H) \
-   $(REGS_H) $(EXPR_H) $(TIMEVAR_H) $(TREE_PASS_H)
+   $(REGS_H) $(EXPR_H) $(TIMEVAR_H) $(TREE_PASS_H) cselib.h $(TARGET_H)
 profile.o : profile.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
    $(TREE_H) $(FLAGS_H) output.h $(REGS_H) $(EXPR_H) $(FUNCTION_H) \
    $(TOPLEV_H) $(COVERAGE_H) $(TREE_FLOW_H) value-prof.h cfghooks.h \
-- 
Alexandre Oliva, freedom fighter    http://FSFLA.org/~lxoliva/
You must be the change you wish to see in the world. -- Gandhi
Be Free! -- http://FSFLA.org/   FSF Latin America board member
Free Software Evangelist      Red Hat Brazil Compiler Engineer

Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]