This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]

Value Range Propagation Patch -- Part 2 of 2


This patch implements a value range propagation pass.
This patch passes make bootstrap and make check on FreeBSD-3.4 x86,
IBM AIX 4.3, Solaris 2.5.1 SPARC, and Solaris 2.5.1 x86.

ChangeLog:

Sun Apr  2 01:42:03 EST 2000  John Wehle  (john@feith.com)

	* vrp.c: New file.

Enjoy!

-- John Wehle
------------------8<------------------------8<------------------------
*** /dev/null	Mon Apr  3 13:11:12 2000
--- gcc/vrp.c	Sun Apr  2 15:54:33 2000
***************
*** 0 ****
--- 1,3424 ----
+ /* Value Range Propagation for GNU compiler.
+    Copyright (C) 2000 Free Software Foundation, Inc.
+ 
+ This file is part of GNU CC.
+ 
+ GNU CC is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2, or (at your option)
+ any later version.
+ 
+ GNU CC is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ GNU General Public License for more details.
+ 
+ You should have received a copy of the GNU General Public License
+ along with GNU CC; see the file COPYING.  If not, write to
+ the Free Software Foundation, 59 Temple Place - Suite 330,
+ Boston, MA 02111-1307, USA.  */
+ 
+ #include "config.h"
+ #include "system.h"
+ #include "toplev.h"
+ 
+ #include "rtl.h"
+ #include "tree.h"
+ #include "tm_p.h"
+ #include "regs.h"
+ #include "flags.h"
+ #include "real.h"
+ #include "insn-config.h"
+ #include "recog.h"
+ #include "basic-block.h"
+ #include "output.h"
+ #include "function.h"
+ #include "expr.h" 
+ 
+ /*
+  * Todo:
+  *
+  *   More comments.
+  *   Support not, sign_extract, xor, and zero_extract.
+  *   Compute both the min and max for registers set within a loop.
+  *   Simplify record_compare_value_range.
+  *   Try harder to replace a sign_extend with a zero_extend.
+  *   Propagate computed jumps to all labels without handling as abnormal.
+  */
+ 
+ /* -dV dump file.  */
+ static FILE *vr_file;
+ 
+ /* Note whether or not we should run jump optimization after
+    value range propagation.  We want to do this anytime a
+    jump has been simplified.  */
+ static int run_jump_opt_after_vr;
+ 
+ struct value_range {
+   union tree_node min;
+   union tree_node max;
+ };
+ 
+ struct reg_value_range {
+   int regno;
+   struct value_range vr;
+   struct reg_value_range *next;
+ };
+ 
+ /* The maximum register number in the function.  */
+ static int max_vr_regno;
+ 
+ /* The current insn being processed.  */
+ static rtx vr_insn;
+ 
+ static struct {
+   struct reg_value_range *head;
+   struct reg_value_range *tail;
+   int nupdates;
+ } vr_reg_updates;
+ 
+ /* Table of register value ranges for the current block.  */
+ static struct value_range *reg_vr_table;
+ 
+ /* For each block, a list where each element is the value
+    range a register is known to have at the beginning of
+    the block.  A register will never have more than one
+    entry for a given block.  */
+ static struct reg_value_range **bb_reg_vr_table;
+ 
+ /* The bb_reg_vr_table from the previous iteration.  */
+ static struct reg_value_range **previous_bb_reg_vr_table;
+ 
+ /* For each block, a bitmap of registers set in the block.
+    This is used to determine what registers are modified
+    by a loop.  */
+ static sbitmap *bb_reg_set;
+ 
+ /* For each block, a bitmap of registers whose value range
+    can't be known at the beginning of the block (i.e. the
+    block is a loop header and the register is modified
+    within the loop).  */
+ static sbitmap *bb_reg_vr_unknown;
+ 
+ /* For each block, non-zero if the block has been reached.  */
+ static int *bb_reached;
+ 
+ static void alloc_value_range_mem	PARAMS ((void));
+ static void free_value_range_mem	PARAMS ((void));
+ 
+ static rtx get_jump_table_offset	PARAMS ((rtx, rtx *));
+ 
+ static int convert_value_range		PARAMS ((struct value_range *, int));
+ static int compare_value_range		PARAMS ((enum rtx_code,
+ 						 struct value_range,
+ 						 struct value_range));
+ static int contiguous_ones_value_range	PARAMS ((struct value_range *));
+ static void merge_value_ranges		PARAMS ((struct value_range *,
+ 						 struct value_range *,
+ 						 struct value_range *));
+ static int merge_value_range_subreg	PARAMS ((struct value_range,
+ 						 struct value_range,
+ 						 struct value_range *,
+ 						 rtx));
+ static void negate_value_range		PARAMS ((struct value_range *));
+ static void truncate_value_range	PARAMS ((struct value_range *,
+ 						 enum machine_mode));
+ static void compute_value_range		PARAMS ((struct value_range *,
+ 						 enum machine_mode,
+ 						 rtx));
+ 
+ static void record_value_range_store	PARAMS ((rtx, rtx, void *));
+ static void record_compare_value_range	PARAMS ((enum rtx_code,
+ 						 rtx,
+ 						 struct value_range,
+ 						 struct value_range));
+ static void record_bb_value_range	PARAMS ((int));
+ static int record_value_range_condjump	PARAMS ((rtx, int));
+ static int record_value_range_jump	PARAMS ((rtx, int));
+ 
+ static void record_regs_set_by_loop	PARAMS ((int, int, int *));
+ static void compute_value_range_bb_order PARAMS ((int *, int *, sbitmap *));
+ 
+ /* Allocate memory for the various tracking tables.  Sizes are taken
+    from the current function: one slot per basic block and one slot
+    per register up to MAX_VR_REGNO.
+ 
+    NOTE(review): the malloc results are not checked for failure here —
+    presumably the pass relies on the compiler's out-of-memory policy;
+    confirm.  Also note that bb_reached and reg_vr_table are left
+    uninitialized by this function — presumably the caller initializes
+    them before use; confirm.  */
+ 
+ static void
+ alloc_value_range_mem ()
+ {
+ 
+   /* Per-block lists of known register value ranges, for the current
+      and the previous propagation iteration; both start out empty.  */
+   bb_reg_vr_table
+     = (struct reg_value_range **)malloc (n_basic_blocks
+ 					 * sizeof(struct reg_value_range *));
+   previous_bb_reg_vr_table
+     = (struct reg_value_range **)malloc (n_basic_blocks
+ 					 * sizeof(struct reg_value_range *));
+   bzero (bb_reg_vr_table, n_basic_blocks * sizeof(struct reg_value_range *));
+   bzero (previous_bb_reg_vr_table, n_basic_blocks
+ 				   * sizeof(struct reg_value_range *));
+ 
+   /* Per-block register bitmaps (registers set in the block, and
+      registers whose range is unknowable at block entry).  */
+   bb_reg_set
+     = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, max_vr_regno);
+   bb_reg_vr_unknown
+     = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, max_vr_regno);
+ 
+   bb_reached = (int *)malloc (n_basic_blocks * sizeof (int));
+ 
+   reg_vr_table = (struct value_range *)malloc (max_vr_regno
+ 					       * sizeof(struct value_range));
+ }
+ 
+ /* Free memory allocated by alloc_value_range_mem, including every
+    reg_value_range list element hanging off the per-block tables.  */
+ 
+ static void
+ free_value_range_mem ()
+ {
+   int i;
+   struct reg_value_range *next;
+   struct reg_value_range *rvrp;
+ 
+   /* Walk each block's list, saving the next pointer before freeing
+      the current element.  */
+   for (i = 0; i < n_basic_blocks; i++)
+     for (rvrp = *(bb_reg_vr_table + i); rvrp; rvrp = next)
+       {
+ 	next = rvrp->next;
+ 	free (rvrp);
+       }
+ 
+   free (bb_reg_vr_table);
+ 
+   for (i = 0; i < n_basic_blocks; i++)
+     for (rvrp = *(previous_bb_reg_vr_table + i); rvrp; rvrp = next)
+       {
+ 	next = rvrp->next;
+ 	free (rvrp);
+       }
+ 
+   free (previous_bb_reg_vr_table);
+ 
+   /* NOTE(review): the sbitmap vectors are released with plain free —
+      this presumably works because sbitmap_vector_alloc returns a
+      single contiguous allocation; confirm against sbitmap.c (a
+      dedicated sbitmap_vector_free would be clearer if available).  */
+   free (bb_reg_set);
+   free (bb_reg_vr_unknown);
+   free (bb_reached);
+   free (reg_vr_table);
+ }
+ 
+ /* Given a tablejump insn INSN, return the index.  If the index cannot be
+    determined, then return NULL_RTX.
+ 
+    If EARLIEST is non-zero, it is a pointer to a place where the earliest
+    insn used in locating the index was found.  */
+ 
+ static rtx
+ get_jump_table_offset (insn, earliest)
+      rtx insn;
+      rtx *earliest;
+ {
+   rtx label;
+   rtx table;
+   rtx set;
+   rtx x;
+   rtx old_x;
+   int i;
+   int j;
+   rtx current_insn;
+   rtx current_x;
+ 
+   /* Verify that INSN is a single-set tablejump: its JUMP_LABEL must
+      be immediately followed by the dispatch table itself (an ADDR_VEC
+      or ADDR_DIFF_VEC).  */
+   if (GET_CODE (insn) != JUMP_INSN
+       || ! (label = JUMP_LABEL (insn))
+       || ! (table = NEXT_INSN (label))
+       || GET_CODE (table) != JUMP_INSN
+       || (GET_CODE (PATTERN (table)) != ADDR_VEC
+ 	  && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
+       || ! (set = single_set (insn)))
+     return NULL_RTX;
+ 
+   x = SET_SRC (set);
+ 
+   /* A casesi-style jump wraps the target in an IF_THEN_ELSE whose
+      else arm is the default label; the computed target is the then
+      arm.  */
+   if (GET_CODE (x) == IF_THEN_ELSE
+       && GET_CODE (XEXP (x, 2)) == LABEL_REF)
+     x = XEXP (x, 1);
+ 
+   /* Chase a register back to the expression which last set it.  This
+      same idiom is repeated after each step below.  */
+   for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
+        old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
+     ;
+ 
+   /* For a relative (ADDR_DIFF_VEC) table the entry is combined with
+      pc, the table label, or (for PIC) the PIC base register; strip
+      that term off to expose the table-entry value.  */
+   if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
+       && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
+     {
+       for (i = 0; i < 2; i++)
+ 	if (XEXP (x, i) == pc_rtx
+ 	    || (GET_CODE (XEXP (x, i)) == LABEL_REF
+ 		&& XEXP (XEXP (x, i), 0) == label)
+ #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
+ 	    || (GET_CODE (XEXP (x, i)) == REG
+ 		&& REGNO (XEXP (x, i)) == PIC_OFFSET_TABLE_REGNUM
+ 		&& flag_pic)
+ #endif
+ 	   )
+ 	  break;
+ 
+       if (i >= 2)
+ 	return NULL_RTX;
+ 
+       x = XEXP (x, 1 - i);
+ 
+       for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
+ 	   old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
+ 	;
+     }
+ 
+   /* The loaded table entry may have been widened before use.  */
+   if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
+     {
+       x = XEXP (x, 0);
+ 
+       for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
+ 	   old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
+ 	;
+     }
+ 
+   /* X should now be the load of the table entry; its address should
+      be the table base plus the (scaled) index.  */
+   if (GET_CODE (x) != MEM)
+     return NULL_RTX;
+ 
+   x = XEXP (x, 0);
+ 
+   for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
+        old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
+     ;
+ 
+   if (GET_CODE (x) != PLUS)
+     return NULL_RTX;
+ 
+   current_insn = insn;
+   current_x = x;
+ 
+   /* Determine which operand of the PLUS refers to the table label
+      (directly, via a CONST, or via a HIGH/LO_SUM pair); the remaining
+      operand is the index expression we want.  */
+   for (i = 0; i < 2; i++)
+     {
+       x = XEXP (current_x, i);
+       insn = current_insn;
+ 
+       for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
+ 	   old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
+ 	;
+ 
+       if (GET_CODE (x) == LO_SUM)
+ 	{
+ 	  for (j = 0; j < 2; j++)
+ 	    if ((GET_CODE (XEXP (x, j)) == CONST
+ 		 || GET_CODE (XEXP (x, j)) == LABEL_REF)
+ 		&& reg_mentioned_p (label, XEXP (x, j)))
+ 	      break;
+ 
+ 	  if (j >= 2)
+ 	    continue;
+ 
+ 	  x = XEXP (x, 1 - j);
+ 
+ 	  for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
+ 	       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
+ 	    ;
+ 
+ 	  if (GET_CODE (x) != HIGH)
+ 	    continue;
+ 
+ 	  x = XEXP (x, 0);
+ 	}
+ 
+       if ((GET_CODE (x) == CONST || GET_CODE (x) == LABEL_REF)
+ 	  && reg_mentioned_p (label, x))
+ 	break;
+     }
+ 
+   if (i >= 2)
+     return NULL_RTX;
+ 
+   x = XEXP (current_x, 1 - i);
+ 
+ #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
+   /* Under PIC the address may additionally include the PIC base
+      register; strip it off as well.  */
+   if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
+     for (i = 0; i < 2; i++)
+       if (GET_CODE (XEXP (x, i)) == REG
+ 	  && REGNO (XEXP (x, i)) == PIC_OFFSET_TABLE_REGNUM
+ 	  && flag_pic)
+ 	{
+ 	  x = XEXP (x, 1 - i);
+ 	  break;
+ 	}
+ #endif
+ 
+   if (earliest)
+     *earliest = current_insn;
+ 
+   return x;
+ }
+ 
+ /* IF UNSIGNEDP is non-zero then convert value range VR
+    to an unsigned value range else convert it to a signed
+    value range.  Return non-zero if the conversion is
+    successful.  */
+ 
+ static int
+ convert_value_range (vr, unsignedp)
+      struct value_range *vr;
+      int unsignedp;
+ {
+ 
+   if (! TREE_CONSTANT (&vr->min))
+     return 0;
+ 
+   /* Already of the requested signedness: just clear any overflow
+      flags and succeed.  */
+   if ((TREE_UNSIGNED (TREE_TYPE (&vr->min)) && unsignedp)
+       || (! TREE_UNSIGNED (TREE_TYPE (&vr->min)) && ! unsignedp))
+     {
+       TREE_CONSTANT_OVERFLOW (&vr->min)
+ 	= TREE_CONSTANT_OVERFLOW (&vr->max) = 0;
+ 
+       return 1;
+     }
+ 
+   /* Reinterpreting the bounds with the opposite signedness is valid
+      only when the range does not straddle the sign boundary (msb of
+      min clear while msb of max is set, for an unsigned range, or min
+      negative while max is non-negative, for a signed one); otherwise
+      the converted min would exceed the converted max.  */
+   if ((TREE_UNSIGNED (TREE_TYPE (&vr->min))
+        && (tree_int_cst_msb (&vr->min) || ! tree_int_cst_msb (&vr->max)))
+       || (! TREE_UNSIGNED (TREE_TYPE (&vr->min))
+ 	  && (! tree_int_cst_msb (&vr->min) || tree_int_cst_msb (&vr->max))))
+     {
+       TREE_TYPE (&vr->min)
+ 	= TREE_TYPE (&vr->max)
+ 	= signed_or_unsigned_type (unsignedp, TREE_TYPE (&vr->min));
+ 
+       (void)force_fit_type (&vr->min, 0);
+       (void)force_fit_type (&vr->max, 0);
+ 
+       TREE_CONSTANT_OVERFLOW (&vr->min)
+ 	= TREE_CONSTANT_OVERFLOW (&vr->max) = 0;
+ 
+       return 1;
+     }
+ 
+   return 0;
+ }
+ 
+ /* Return non-zero if the comparison code CODE is always
+    true when comparing value ranges OP0 and OP1.  A zero
+    return only means the comparison is not provably always
+    true, not that it is always false.  */
+ 
+ static int
+ compare_value_range (code, op0, op1)
+      enum rtx_code code;
+      struct value_range op0;
+      struct value_range op1;
+ {
+   tree type;
+ 
+   if (! TREE_CONSTANT (&op0.min)
+       || ! INTEGRAL_TYPE_P (TREE_TYPE (&op0.min))
+       || ! TREE_CONSTANT (&op1.min)
+       || ! INTEGRAL_TYPE_P (TREE_TYPE (&op1.min)))
+     return 0;
+ 
+   /* Both operands are known constant at this point, so this always
+      selects op0's type.  */
+   type = TREE_CONSTANT (&op0.min) ? TREE_TYPE (&op0.min)
+ 				   : TREE_TYPE (&op1.min);
+ 
+   /* First give both ranges a signedness appropriate for CODE, giving
+      up when a range cannot be reinterpreted because it straddles the
+      sign boundary.  The operands are by-value copies, so these
+      in-place adjustments do not affect the caller.  */
+   switch (code)
+     {
+     case EQ:
+     case NE:
+       if (TREE_UNSIGNED (TREE_TYPE (&op0.min))
+ 	  != TREE_UNSIGNED (TREE_TYPE (&op1.min)))
+ 	{
+ 	  if (convert_value_range (&op0, TREE_UNSIGNED (TREE_TYPE (&op1.min)))
+ 	      || convert_value_range (&op1,
+ 				      TREE_UNSIGNED (TREE_TYPE (&op0.min))))
+ 	    break;
+ 	  return 0;
+ 	}
+       break;
+ 
+     case LT:
+     case LE:
+     case GT:
+     case GE:
+       /* Signed comparison: both ranges must be viewed as signed.  */
+       if (TREE_UNSIGNED (TREE_TYPE (&op0.min)))
+ 	{
+ 	  if (! tree_int_cst_msb (&op0.min) && tree_int_cst_msb (&op0.max))
+ 	    return 0;
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = signed_type (type);
+ 	  (void)force_fit_type (&op0.min, 0);
+ 	  (void)force_fit_type (&op0.max, 0);
+ 	}
+ 
+       if (TREE_UNSIGNED (TREE_TYPE (&op1.min)))
+ 	{
+ 	  if (! tree_int_cst_msb (&op1.min) && tree_int_cst_msb (&op1.max))
+ 	    return 0;
+ 	  TREE_TYPE (&op1.min)
+ 	    = TREE_TYPE (&op1.max) = signed_type (type);
+ 	  (void)force_fit_type (&op1.min, 0);
+ 	  (void)force_fit_type (&op1.max, 0);
+ 	}
+       break;
+ 
+     case LTU:
+     case LEU:
+     case GTU:
+     case GEU:
+       /* Unsigned comparison: both ranges must be viewed as unsigned.  */
+       if (! TREE_UNSIGNED (TREE_TYPE (&op0.min)))
+ 	{
+ 	  if (tree_int_cst_msb (&op0.min) && ! tree_int_cst_msb (&op0.max))
+ 	    return 0;
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = unsigned_type (type);
+ 	  (void)force_fit_type (&op0.min, 0);
+ 	  (void)force_fit_type (&op0.max, 0);
+ 	}
+ 
+       if (! TREE_UNSIGNED (TREE_TYPE (&op1.min)))
+ 	{
+ 	  if (tree_int_cst_msb (&op1.min) && ! tree_int_cst_msb (&op1.max))
+ 	    return 0;
+ 	  TREE_TYPE (&op1.min)
+ 	    = TREE_TYPE (&op1.max) = unsigned_type (type);
+ 	  (void)force_fit_type (&op1.min, 0);
+ 	  (void)force_fit_type (&op1.max, 0);
+ 	}
+       break;
+ 
+     default:
+       break;
+     }
+ 
+   /* Now decide whether CODE must hold for every pair of values drawn
+      from the two ranges.  */
+   switch (code)
+     {
+     case EQ:
+       /* Always equal only when both ranges are the same single value.  */
+       return tree_int_cst_equal (&op0.min, &op0.max)
+ 	     && tree_int_cst_equal (&op1.min, &op1.max)
+ 	     && tree_int_cst_equal (&op0.min, &op1.min);
+ 
+     case NE:
+       /* Always unequal when the two ranges are disjoint.  */
+       return TREE_UNSIGNED (TREE_TYPE (&op0.min))
+ 	     ? (INT_CST_LT_UNSIGNED (&op0.max, &op1.min)
+ 		|| INT_CST_LT_UNSIGNED (&op1.max, &op0.min))
+ 	     : (INT_CST_LT (&op0.max, &op1.min)
+ 		|| INT_CST_LT (&op1.max, &op0.min));
+ 
+     case LT:
+       return INT_CST_LT (&op0.max, &op1.min);
+ 
+     case LE:
+       return INT_CST_LT (&op0.max, &op1.min)
+ 	     || tree_int_cst_equal (&op0.max, &op1.min);
+ 
+     case GT:
+       return INT_CST_LT (&op1.max, &op0.min);
+ 
+     case GE:
+       return INT_CST_LT (&op1.max, &op0.min)
+ 	     || tree_int_cst_equal (&op1.max, &op0.min);
+ 
+     case LTU:
+       return INT_CST_LT_UNSIGNED (&op0.max, &op1.min);
+ 
+     case LEU:
+       return INT_CST_LT_UNSIGNED (&op0.max, &op1.min)
+ 	     || tree_int_cst_equal (&op0.max, &op1.min);
+ 
+     case GTU:
+       return INT_CST_LT_UNSIGNED (&op1.max, &op0.min);
+ 
+     case GEU:
+       return INT_CST_LT_UNSIGNED (&op1.max, &op0.min)
+ 	     || tree_int_cst_equal (&op1.max, &op0.min);
+ 
+     default:
+       break;
+     }
+ 
+   return 0;
+ }
+ 
+ /* Return non-zero if the value range VR is a constant where
+    all the bits which are set are contiguous (i.e. 0xff or 0x78).  */
+ 
+ static int
+ contiguous_ones_value_range (vr)
+     struct value_range *vr;
+ {
+   unsigned HOST_WIDE_INT bits;
+   HOST_WIDE_INT minh;
+   HOST_WIDE_INT minl;
+   HOST_WIDE_INT nlsb;
+   int ones_are_contiguous;
+ 
+   /* Only a single known value (min == max) can qualify.  */
+   if (! tree_int_cst_equal (&vr->min, &vr->max))
+     return 0;
+ 
+   /* Count the low zero bits.  If the low word is zero the first set
+      bit, if any, must be in the high word.  */
+   bits = (unsigned HOST_WIDE_INT)TREE_INT_CST_HIGH (&vr->min);
+   nlsb = HOST_BITS_PER_WIDE_INT;
+   if (TREE_INT_CST_LOW (&vr->min))
+     {
+       bits = (unsigned HOST_WIDE_INT)TREE_INT_CST_LOW (&vr->min);
+       nlsb = 0;
+     }
+ 
+   while (bits)
+     {
+       if (bits & 1)
+ 	break;
+       bits >>= 1;
+       nlsb++;
+     }
+ 
+   /* The constant was zero: no set bits at all.  */
+   if (! bits)
+     return 0;
+ 
+   /* Shift the lowest set bit down to bit 0 and add one; the set bits
+      were contiguous iff the result is a power of two (at most one bit
+      set in the double-word result).  */
+   rshift_double (TREE_INT_CST_LOW (&vr->min), TREE_INT_CST_HIGH (&vr->min),
+ 		 nlsb, 2 * HOST_BITS_PER_WIDE_INT, &minl, &minh, 0);
+   (void)add_double (minl, minh, 1, 0, &minl, &minh);
+   ones_are_contiguous = (minh == 0 && (minl & (minl - 1)) == 0)
+ 			|| (minl == 0 && (minh & (minh - 1)) == 0);
+ 
+   return ones_are_contiguous;
+ }
+ 
+ /* Create a new value range NR which contains value ranges R0 and R1
+    (i.e. their union, widened to a single contiguous range).  */
+ 
+ static void
+ merge_value_ranges (r0, r1, nr)
+      struct value_range *r0;
+      struct value_range *r1;
+      struct value_range *nr;
+ {
+   struct value_range cr0;
+   struct value_range cr1;
+ 
+   /* If either input range is unknown, so is the union.  */
+   if (! TREE_CONSTANT (&r0->min) || ! TREE_CONSTANT (&r1->min))
+     {
+       TREE_CONSTANT (&nr->min)
+ 	= TREE_CONSTANT (&nr->max) = 0;
+       return;
+     }
+ 
+   /* Work on copies so the callers' ranges are left untouched.  */
+   cr0 = *r0;
+   cr1 = *r1;
+ 
+   /* Bring both ranges to a common signedness; give up if neither can
+      be converted to the other's.  */
+   if (TREE_UNSIGNED (TREE_TYPE (&cr0.min))
+       != TREE_UNSIGNED (TREE_TYPE (&cr1.min)))
+     {
+       if (convert_value_range (&cr0, TREE_UNSIGNED (TREE_TYPE (&cr1.min)))
+ 	  || convert_value_range (&cr1, TREE_UNSIGNED (TREE_TYPE (&cr0.min))))
+ 	;
+       else
+ 	{
+ 	  TREE_CONSTANT (&nr->min)
+ 	    = TREE_CONSTANT (&nr->max) = 0;
+ 	  return;
+ 	}
+     }
+ 
+   *nr = cr0;
+ 
+   /* Widen NR as needed so it also covers CR1.  */
+   if (TREE_UNSIGNED (TREE_TYPE (&nr->min)))
+     {
+       if (INT_CST_LT_UNSIGNED (&cr1.min, &nr->min))
+ 	nr->min = cr1.min;
+       if (INT_CST_LT_UNSIGNED (&nr->max, &cr1.max))
+ 	nr->max = cr1.max;
+     }
+   else
+     {
+       if (INT_CST_LT (&cr1.min, &nr->min))
+ 	nr->min = cr1.min;
+       if (INT_CST_LT (&nr->max, &cr1.max))
+ 	nr->max = cr1.max;
+     }
+ 
+   /* A range covering the entire type carries no information, so
+      represent it as unknown.  */
+   if (tree_int_cst_equal (&nr->min, TYPE_MIN_VALUE (TREE_TYPE (&nr->min)))
+       && tree_int_cst_equal (&nr->max, TYPE_MAX_VALUE (TREE_TYPE (&nr->max))))
+     {
+       TREE_CONSTANT (&nr->min)
+ 	= TREE_CONSTANT (&nr->max) = 0;
+       return;
+     }
+ }
+ 
+ /* Create a new value range NR which contains value range REG_VR
+    and the subreg value range SUBREG_VR.  The subreg is described
+    by X.  Return non-zero on success; return 0 (leaving NR alone)
+    when the splice cannot be performed exactly.  */
+ 
+ static int
+ merge_value_range_subreg (reg_vr, subreg_vr, nr, x)
+      struct value_range reg_vr;
+      struct value_range subreg_vr;
+      struct value_range *nr;
+      rtx x;
+ {
+   enum machine_mode inner_mode;
+   enum machine_mode mode;
+   HOST_WIDE_INT maxh;
+   HOST_WIDE_INT maxl;
+   HOST_WIDE_INT minh;
+   HOST_WIDE_INT minl;
+   HOST_WIDE_INT nlsb;
+ 
+   if ((! TREE_CONSTANT (&reg_vr.min) && ! TREE_CONSTANT (&subreg_vr.min))
+       || (GET_CODE (x) != STRICT_LOW_PART && GET_CODE (x) != SUBREG))
+     return 0;
+ 
+   if (GET_CODE (x) == STRICT_LOW_PART)
+     x = XEXP (x, 0);
+ 
+   inner_mode = GET_MODE (SUBREG_REG (x));
+   mode = GET_MODE (x);
+ 
+   /* Only handle integer modes where the subreg is no wider than the
+      register it is part of.  */
+   if (GET_MODE_CLASS (inner_mode) != MODE_INT
+       || GET_MODE_CLASS (mode) != MODE_INT
+       || GET_MODE_BITSIZE (inner_mode) < GET_MODE_BITSIZE (mode))
+     return 0;
+ 
+   /* If exactly one side is unknown, substitute the full range of an
+      appropriately-signed type so the splice below can proceed.  */
+   if (! TREE_CONSTANT (&reg_vr.min))
+     {
+       TREE_TYPE (&reg_vr.min)
+ 	= TREE_TYPE (&reg_vr.max)
+ 	= type_for_mode (inner_mode, (! tree_int_cst_msb (&subreg_vr.min)
+ 				      || tree_int_cst_msb (&subreg_vr.max)));
+       reg_vr.min = *TYPE_MIN_VALUE (TREE_TYPE (&reg_vr.min));
+       reg_vr.max = *TYPE_MAX_VALUE (TREE_TYPE (&reg_vr.min));
+     }
+   else if (! TREE_CONSTANT (&subreg_vr.min))
+     {
+       TREE_TYPE (&subreg_vr.min)
+ 	= TREE_TYPE (&subreg_vr.max)
+ 	= type_for_mode (mode, (! tree_int_cst_msb (&reg_vr.min)
+ 				|| tree_int_cst_msb (&reg_vr.max)));
+       subreg_vr.min = *TYPE_MIN_VALUE (TREE_TYPE (&subreg_vr.min));
+       subreg_vr.max = *TYPE_MAX_VALUE (TREE_TYPE (&subreg_vr.min));
+     }
+ 
+   if (TYPE_MODE (TREE_TYPE (&reg_vr.min)) != inner_mode)
+     abort ();
+ 
+   /* NLSB is the bit offset of the stored subreg within the inner
+      register (non-zero only for a word of a multiword register).  */
+   nlsb = 0;
+   if (GET_MODE_BITSIZE (inner_mode) > BITS_PER_WORD
+       && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
+     nlsb = WORDS_BIG_ENDIAN
+ 	   ? GET_MODE_BITSIZE (inner_mode)
+ 	      - (SUBREG_WORD (x) + 1) * GET_MODE_BITSIZE (mode)
+ 	   : SUBREG_WORD (x) * GET_MODE_BITSIZE (mode);
+ 
+   /* The register bits above the stored subreg must agree between min
+      and max (or be an all-ones/all-zeros pair when the subreg range
+      crosses zero) for the splice to be exact.  */
+   if ((nlsb + GET_MODE_BITSIZE (mode)) < GET_MODE_BITSIZE (inner_mode))
+     {
+       rshift_double (TREE_INT_CST_LOW (&reg_vr.min),
+ 		     TREE_INT_CST_HIGH (&reg_vr.min),
+ 		     nlsb + GET_MODE_BITSIZE (mode),
+ 		     2 * HOST_BITS_PER_WIDE_INT,
+ 		     &minl, &minh, 1);
+       rshift_double (TREE_INT_CST_LOW (&reg_vr.max),
+ 		     TREE_INT_CST_HIGH (&reg_vr.max),
+ 		     nlsb + GET_MODE_BITSIZE (mode),
+ 		     2 * HOST_BITS_PER_WIDE_INT,
+ 		     &maxl, &maxh, 1);
+ 
+       if (tree_int_cst_msb (&subreg_vr.min)
+ 	  && ! tree_int_cst_msb (&subreg_vr.max))
+ 	{
+ 	  if (minh != -1 || minl != -1 || maxh != 0 || maxl != 0)
+ 	    return 0;
+ 	}
+       else if (minh != maxh || minl != maxl)
+ 	return 0;
+     }
+ 
+   /* Similarly, when the subreg does not start at bit 0, check the bit
+      just below it: if it differs between min and max while the subreg
+      range is a single value, the result would not be contiguous.  */
+   if (nlsb)
+     {
+       rshift_double (TREE_INT_CST_LOW (&reg_vr.min),
+ 		     TREE_INT_CST_HIGH (&reg_vr.min),
+ 		     nlsb - 1, 2 * HOST_BITS_PER_WIDE_INT,
+ 		     &minl, &minh, 1);
+       rshift_double (TREE_INT_CST_LOW (&reg_vr.max),
+ 		     TREE_INT_CST_HIGH (&reg_vr.max),
+ 		     nlsb - 1, 2 * HOST_BITS_PER_WIDE_INT,
+ 		     &maxl, &maxh, 1);
+ 
+       if ((minl & 1) && ! (maxl & 1)
+ 	  && tree_int_cst_equal (&subreg_vr.min, &subreg_vr.max))
+ 	return 0;
+     }
+ 
+   /* Treat the subreg bounds as unsigned bit-patterns for the splice.  */
+   if (! TREE_UNSIGNED (TREE_TYPE (&subreg_vr.min)))
+     {
+       TREE_TYPE (&subreg_vr.min)
+ 	= TREE_TYPE (&subreg_vr.max)
+ 	= unsigned_type (TREE_TYPE (&subreg_vr.min));
+       (void)force_fit_type (&subreg_vr.min, 0);
+       (void)force_fit_type (&subreg_vr.max, 0);
+     }
+ 
+   *nr = reg_vr;
+ 
+   /* Splice the subreg bits into the register range: rotate the subreg
+      field down to bit 0, clear it with a shift pair, OR in the new
+      bits, then rotate back into place.  */
+   rrotate_double (TREE_INT_CST_LOW (&reg_vr.min),
+ 		  TREE_INT_CST_HIGH (&reg_vr.min),
+ 		  nlsb, 2 * HOST_BITS_PER_WIDE_INT,
+ 		  &minl, &minh);
+   rrotate_double (TREE_INT_CST_LOW (&reg_vr.max),
+ 		  TREE_INT_CST_HIGH (&reg_vr.max),
+ 		  nlsb, 2 * HOST_BITS_PER_WIDE_INT,
+ 		  &maxl, &maxh);
+   rshift_double (minl, minh,
+ 		 GET_MODE_BITSIZE (mode), 2 * HOST_BITS_PER_WIDE_INT,
+ 		 &minl, &minh, 0);
+   rshift_double (maxl, maxh,
+ 		 GET_MODE_BITSIZE (mode), 2 * HOST_BITS_PER_WIDE_INT,
+ 		 &maxl, &maxh, 0);
+   lshift_double (minl, minh,
+ 		 GET_MODE_BITSIZE (mode), 2 * HOST_BITS_PER_WIDE_INT,
+ 		 &minl, &minh, 0);
+   lshift_double (maxl, maxh,
+ 		 GET_MODE_BITSIZE (mode), 2 * HOST_BITS_PER_WIDE_INT,
+ 		 &maxl, &maxh, 0);
+ 
+   minl |= TREE_INT_CST_LOW (&subreg_vr.min);
+   minh |= TREE_INT_CST_HIGH (&subreg_vr.min);
+   maxl |= TREE_INT_CST_LOW (&subreg_vr.max);
+   maxh |= TREE_INT_CST_HIGH (&subreg_vr.max);
+ 
+   lrotate_double (minl, minh,
+ 		  nlsb, 2 * HOST_BITS_PER_WIDE_INT,
+ 		  &TREE_INT_CST_LOW (&nr->min), &TREE_INT_CST_HIGH (&nr->min));
+   lrotate_double (maxl, maxh,
+ 		  nlsb, 2 * HOST_BITS_PER_WIDE_INT,
+ 		  &TREE_INT_CST_LOW (&nr->max), &TREE_INT_CST_HIGH (&nr->max));
+ 
+   (void)force_fit_type (&nr->min, 0);
+   (void)force_fit_type (&nr->max, 0);
+ 
+   return 1;
+ }
+ 
+ /* Negate (subtract from zero) value range VR in place.  If the range
+    cannot be negated exactly it is marked as unknown.  */
+ 
+ static void
+ negate_value_range (vr)
+      struct value_range *vr;
+ {
+   struct value_range op;
+ 
+   if (! TREE_CONSTANT (&vr->min))
+     return;
+ 
+   /* TYPE_MIN_VALUE has no negation within the same type, so flip the
+      signedness of the range to keep the negated values representable;
+      give up when the reinterpreted range straddles the sign boundary
+      (unless max itself is the type minimum).  */
+   if (tree_int_cst_equal (&vr->min, TYPE_MIN_VALUE (TREE_TYPE (&vr->min))))
+     {
+       int unsignedp = ! TREE_UNSIGNED (TREE_TYPE (&vr->min));
+ 
+       TREE_TYPE (&vr->min)
+ 	= TREE_TYPE (&vr->max)
+ 	= signed_or_unsigned_type (unsignedp, TREE_TYPE (&vr->min));
+       (void)force_fit_type (&vr->max, 0);
+       if (((! unsignedp && tree_int_cst_msb (&vr->max))
+ 	   || (unsignedp && ! tree_int_cst_msb (&vr->max)))
+ 	  && ! tree_int_cst_equal (&vr->max,
+ 				   TYPE_MIN_VALUE (TREE_TYPE (&vr->min))))
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  return;
+ 	}
+     }
+ 
+   op = *vr;
+ 
+   /* Negation reverses the ordering of the endpoints: the new min is
+      the negated old max and vice versa.  */
+   neg_double (TREE_INT_CST_LOW (&op.max), TREE_INT_CST_HIGH (&op.max),
+ 	      &TREE_INT_CST_LOW (&vr->min), &TREE_INT_CST_HIGH (&vr->min));
+   neg_double (TREE_INT_CST_LOW (&op.min), TREE_INT_CST_HIGH (&op.min),
+ 	      &TREE_INT_CST_LOW (&vr->max), &TREE_INT_CST_HIGH (&vr->max));
+ 
+   (void)force_fit_type (&vr->min, 0);
+   (void)force_fit_type (&vr->max, 0);
+ 
+   return;
+ }
+ 
+ /* Truncate value range VR according to mode MODE, in place.  The
+    range is marked as unknown when the truncation is not exact.  */
+ 
+ static void
+ truncate_value_range (vr, mode)
+      struct value_range *vr;
+      enum machine_mode mode;
+ {
+   HOST_WIDE_INT maxh;
+   HOST_WIDE_INT maxl;
+   HOST_WIDE_INT minh;
+   HOST_WIDE_INT minl;
+ 
+   if (! TREE_CONSTANT (&vr->min))
+     return;
+ 
+   /* Extract the bits that truncation will discard (arithmetic shift,
+      so a value that fits the narrow mode leaves all-zeros or
+      all-ones here).  */
+   rshift_double (TREE_INT_CST_LOW (&vr->min),
+ 		 TREE_INT_CST_HIGH (&vr->min),
+ 		 GET_MODE_BITSIZE (mode), 2 * HOST_BITS_PER_WIDE_INT,
+ 		 &minl, &minh, 1);
+   rshift_double (TREE_INT_CST_LOW (&vr->max),
+ 		 TREE_INT_CST_HIGH (&vr->max),
+ 		 GET_MODE_BITSIZE (mode), 2 * HOST_BITS_PER_WIDE_INT,
+ 		 &maxl, &maxh, 1);
+ 
+   TREE_TYPE (&vr->min)
+     = TREE_TYPE (&vr->max)
+     = type_for_mode (mode, TREE_UNSIGNED (TREE_TYPE (&vr->min)));
+ 
+   /* The truncation is exact only when min and max lose identical
+      discarded bits, or when they lose -1 and 0 respectively while the
+      truncated endpoints still bracket the sign boundary; otherwise
+      the truncated range would wrap, so mark it unknown.  */
+   if ((minh != maxh || minl != maxl)
+       && (minh != -1 || minl != -1 || maxh != 0 || maxl != 0
+ 	  || ! tree_int_cst_msb (&vr->min) || tree_int_cst_msb (&vr->max)))
+     {
+       TREE_CONSTANT (&vr->min)
+ 	= TREE_CONSTANT (&vr->max) = 0;
+       return;
+     }
+ 
+   /* The truncation may have produced a value range where
+      the most significant bit of vr->min isn't set and
+      the most significant bit of vr->max is set.  This
+      can only be represented by an unsigned value range.  */
+   if (! TREE_UNSIGNED (TREE_TYPE (&vr->min))
+       && ! tree_int_cst_msb (&vr->min) && tree_int_cst_msb (&vr->max))
+     {
+       TREE_TYPE (&vr->min)
+ 	= TREE_TYPE (&vr->max) = type_for_mode (mode, 1);
+     }
+ 
+   (void)force_fit_type (&vr->min, 0);
+   (void)force_fit_type (&vr->max, 0);
+ }
+ 
+ /* Compute value range VR which is described by X and has
+    mode MODE.  If X is a SET, IF_THEN_ELSE, or store condition
+    then VR_INSN must point to the INSN which contains X.
+ 
+    Side effects:
+ 
+      1) SETs are simplified (when possible).  I.e.
+ 
+ 	  (set (reg:SI 46) (and:SI (reg:SI 45) (const_int 255)))
+ 
+ 	becomes:
+ 
+ 	  (set (reg:SI 46) (reg:SI 45))
+ 
+         if it is known that (reg:SI 45) is between 0 and 255.
+ 
+ 	If CC0 is part of the set source then the earlier
+ 	instruction which set it will be deleted.
+ 
+      2) A REG_EQUAL note is added when it is known that the
+ 	set source is constant.  */
+ 
+ static void
+ compute_value_range (vr, mode, x)
+      struct value_range *vr;
+      enum machine_mode mode;
+      rtx x;
+ {
+   tree type;
+   rtx set;
+   rtx cond;
+   rtx earliest;
+   enum machine_mode comparison_arg_mode;
+   struct value_range *mask;
+   struct value_range *op;
+   struct value_range original_vr;
+   struct value_range op0;
+   struct value_range op1;
+   struct value_range op2;
+   struct value_range product;
+   struct value_range sum;
+   tree op0_type;
+   enum machine_mode inner_mode;
+   unsigned HOST_WIDE_INT bits;
+   HOST_WIDE_INT maxh;
+   HOST_WIDE_INT maxl;
+   HOST_WIDE_INT minh;
+   HOST_WIDE_INT minl;
+   HOST_WIDE_INT nlsb;
+   enum rtx_code code;
+   int arith;
+   int cond_always_false;
+   int cond_always_true;
+   int cond_has_no_effect;
+   int ones_are_contiguous;
+   int max_signed_overflow;
+   int min_signed_overflow;
+   struct reg_value_range original_reg_vr[2];
+   struct value_range cond_op0;
+   struct value_range cond_op1;
+   struct value_range implied_op1;
+   struct value_range implied_op2;
+   int i;
+   int nupdates;
+ 
+   bzero (&original_vr, sizeof(original_vr));
+ 
+   set = NULL_RTX;
+   if (GET_CODE (x) == SET)
+     {
+       set = x;
+       x = SET_DEST (set);
+ 
+       while (GET_CODE (x) == SUBREG
+ 	     || GET_CODE (x) == STRICT_LOW_PART)
+ 	x = XEXP (x, 0);
+ 
+       if (GET_CODE (x) != REG
+ 	  || REGNO (x) < FIRST_PSEUDO_REGISTER)
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  return;
+ 	}
+ 
+       compute_value_range (&original_vr, GET_MODE (x), x);
+ 
+       x = SET_SRC (set);
+     }
+ 
+   if (mode == VOIDmode)
+     {
+       mode = GET_MODE (x);
+       if (mode == VOIDmode)
+ 	abort ();
+     }
+ 
+   if (GET_MODE_CLASS (mode) != MODE_INT)
+     {
+       TREE_CONSTANT (&vr->min)
+ 	= TREE_CONSTANT (&vr->max) = 0;
+       return;
+     }
+ 
+   type = type_for_mode (mode, 0);
+ 
+   TREE_SET_CODE (&vr->min, INTEGER_CST);
+   TREE_SET_CODE (&vr->max, INTEGER_CST);
+ 
+   switch (GET_CODE (x))
+     {
+     case CONST_DOUBLE:
+       if (GET_MODE (x) == VOIDmode)
+ 	{
+ 	  TREE_TYPE (&vr->min)
+ 	    = TREE_TYPE (&vr->max) = type;
+ 	  TREE_INT_CST_LOW (&vr->min)
+ 	    = TREE_INT_CST_LOW (&vr->max) = CONST_DOUBLE_LOW (x);
+ 	  TREE_INT_CST_HIGH (&vr->min)
+ 	    = TREE_INT_CST_HIGH (&vr->max) = CONST_DOUBLE_HIGH (x);
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 1;
+ 	}
+       else
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	}
+       break;
+ 
+     case CONST_INT:
+       TREE_TYPE (&vr->min)
+ 	= TREE_TYPE (&vr->max) = type;
+       TREE_INT_CST_LOW (&vr->min)
+ 	= TREE_INT_CST_LOW (&vr->max) = INTVAL (x);
+       TREE_INT_CST_HIGH (&vr->min)
+ 	= TREE_INT_CST_HIGH (&vr->max) = INTVAL (x) >= 0 ? 0 : -1;
+       TREE_CONSTANT (&vr->min)
+ 	= TREE_CONSTANT (&vr->max) = 1;
+       break;
+ 
+     case REG:
+       if (REGNO (x) < FIRST_PSEUDO_REGISTER)
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+       *vr = *(reg_vr_table + REGNO (x));
+       break;
+ 
+     case SIGN_EXTEND:
+       bzero (&op0, sizeof(op0));
+ 
+       compute_value_range (&op0, VOIDmode, XEXP (x, 0));
+ 
+       if (! TREE_CONSTANT (&op0.min))
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+       if (TREE_UNSIGNED (TREE_TYPE (&op0.min)))
+ 	{
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = signed_type (TREE_TYPE (&op0.min));
+ 	  (void)force_fit_type (&op0.min, 0);
+ 	  (void)force_fit_type (&op0.max, 0);
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = unsigned_type (type);
+ 	}
+       else
+ 	{
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = signed_type (type);
+ 	}
+       *vr = op0;
+ 
+       if (set
+ 	  && ! tree_int_cst_msb (&vr->min) && ! tree_int_cst_msb (&vr->max))
+ 	{
+ 	  rtx temp = shallow_copy_rtx (x);
+ 
+ 	  PUT_CODE (temp, ZERO_EXTEND);
+ 	  if (rtx_cost (temp, SET) <= rtx_cost (x, SET))
+ 	    validate_change (vr_insn, &SET_SRC (set), temp, 0);
+ 
+ 	  if (vr_file)
+ 	    fprintf (vr_file, "  insn %d sign_extend msb is always zero.\n",
+ 		     INSN_UID (vr_insn));
+ 	}
+       break;
+ 
+     case ZERO_EXTEND:
+       bzero (&op0, sizeof(op0));
+ 
+       compute_value_range (&op0, VOIDmode, XEXP (x, 0));
+ 
+       if (! TREE_CONSTANT (&op0.min)
+ 	  || (! TREE_UNSIGNED (TREE_TYPE (&op0.min))
+ 	      && tree_int_cst_msb (&op0.min)
+ 	      && ! tree_int_cst_msb (&op0.max)))
+ 	{
+ 	  op0_type = type_for_mode (GET_MODE (XEXP (x, 0)), 1);
+ 	  op0.min = *TYPE_MIN_VALUE (op0_type);
+ 	  op0.max = *TYPE_MAX_VALUE (op0_type);
+ 	}
+       if (! TREE_UNSIGNED (TREE_TYPE (&op0.min)))
+ 	{
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = unsigned_type (TREE_TYPE (&op0.min));
+ 	  (void)force_fit_type (&op0.min, 0);
+ 	  (void)force_fit_type (&op0.max, 0);
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = signed_type (type);
+ 	}
+       else
+ 	{
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = unsigned_type (type);
+ 	}
+       *vr = op0;
+       break;
+ 
+     case SUBREG:
+     case TRUNCATE:
+       bzero (&op0, sizeof(op0));
+ 
+       compute_value_range (&op0, VOIDmode, XEXP (x, 0));
+ 
+       if (! TREE_CONSTANT (&op0.min))
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+ 
+       if (GET_CODE (x) == SUBREG)
+ 	{
+ 	  inner_mode = GET_MODE (SUBREG_REG (x));
+ 
+ 	  if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
+ 	    {
+ 	      TREE_TYPE (&op0.min)
+ 		= TREE_TYPE (&op0.max)
+ 		= signed_or_unsigned_type (TREE_UNSIGNED (TREE_TYPE (&op0.min)),
+ 					   type);
+ 	      *vr = op0;
+ 	      break;
+ 	    }
+ 
+ 	  nlsb = 0;
+ 	  if (GET_MODE_BITSIZE (inner_mode) > BITS_PER_WORD
+ 	      && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
+ 	    nlsb = WORDS_BIG_ENDIAN
+ 		   ? GET_MODE_BITSIZE (inner_mode)
+ 		     - (SUBREG_WORD (x) + 1) * GET_MODE_BITSIZE (mode)
+ 		   : SUBREG_WORD (x) * GET_MODE_BITSIZE (mode);
+ 
+ 	  rshift_double (TREE_INT_CST_LOW (&op0.min),
+ 			 TREE_INT_CST_HIGH (&op0.min),
+ 			 nlsb, 2 * HOST_BITS_PER_WIDE_INT,
+ 			 &TREE_INT_CST_LOW (&op0.min),
+ 			 &TREE_INT_CST_HIGH (&op0.min), 0);
+ 	  rshift_double (TREE_INT_CST_LOW (&op0.max),
+ 			 TREE_INT_CST_HIGH (&op0.max),
+ 			 nlsb, 2 * HOST_BITS_PER_WIDE_INT,
+ 			 &TREE_INT_CST_LOW (&op0.max),
+ 			 &TREE_INT_CST_HIGH (&op0.max), 0);
+ 	}
+ 
+       truncate_value_range (&op0, mode);
+ 
+       *vr = op0;
+       break;
+ 
+     case NEG:
+       bzero (&op0, sizeof(op0));
+ 
+       compute_value_range (&op0, mode, XEXP (x, 0));
+ 
+       negate_value_range (&op0);
+ 
+       *vr = op0;
+       break;
+ 
+     case ASHIFT:
+       bzero (&op0, sizeof(op0));
+       bzero (&op1, sizeof(op1));
+ 
+       compute_value_range (&op0, mode, XEXP (x, 0));
+ 
+       if (TREE_CONSTANT (&op0.min)
+ 	  && integer_zerop (&op0.min) && integer_zerop (&op0.max))
+ 	{
+ 	  *vr = op0;
+ 	  break;
+ 	}
+ 
+       if (GET_MODE (XEXP (x, 1)) != VOIDmode)
+ 	compute_value_range (&op1, VOIDmode, XEXP (x, 1));
+       else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+ 	{
+ 	  TREE_SET_CODE (&op1.min, INTEGER_CST);
+ 	  TREE_SET_CODE (&op1.max, INTEGER_CST);
+ 	  TREE_TYPE (&op1.min)
+ 	    = TREE_TYPE (&op1.max) = integer_type_node;
+ 	  TREE_INT_CST_LOW (&op1.min)
+ 	    = TREE_INT_CST_LOW (&op1.max) = INTVAL (XEXP (x, 1));
+ 	  TREE_INT_CST_HIGH (&op1.min)
+ 	    = TREE_INT_CST_HIGH (&op1.max) = INTVAL (XEXP (x, 1)) >= 0 ? 0 : -1;
+ 	  TREE_CONSTANT (&op1.min)
+ 	    = TREE_CONSTANT (&op1.max) = 1;
+ 	}
+       else
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+ 
+       if (! TREE_CONSTANT (&op1.min)
+ 	  || (HOST_WIDE_INT)TREE_INT_CST_LOW (&op1.min) < 0
+ 	  || TREE_INT_CST_HIGH (&op1.min)
+ 	  || ! tree_int_cst_equal (&op1.min, &op1.max)
+ 	  || ! TREE_CONSTANT (&op0.min))
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+ 
+       rshift_double (TREE_INT_CST_LOW (&op0.min),
+ 		     TREE_INT_CST_HIGH (&op0.min),
+ 		     GET_MODE_BITSIZE (mode) - TREE_INT_CST_LOW (&op1.min),
+ 		     2 * HOST_BITS_PER_WIDE_INT,
+ 		     &minl, &minh, 0);
+       rshift_double (TREE_INT_CST_LOW (&op0.max),
+ 		     TREE_INT_CST_HIGH (&op0.max),
+ 		     GET_MODE_BITSIZE (mode) - TREE_INT_CST_LOW (&op1.min),
+ 		     2 * HOST_BITS_PER_WIDE_INT,
+ 		     &maxl, &maxh, 0);
+       if (minh != maxh || minl != maxl)
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+       lshift_double (TREE_INT_CST_LOW (&op0.min),
+ 		     TREE_INT_CST_HIGH (&op0.min),
+ 		     TREE_INT_CST_LOW (&op1.min), 2 * HOST_BITS_PER_WIDE_INT,
+ 		     &TREE_INT_CST_LOW (&op0.min),
+ 		     &TREE_INT_CST_HIGH (&op0.min), 1);
+       lshift_double (TREE_INT_CST_LOW (&op0.max),
+ 		     TREE_INT_CST_HIGH (&op0.max),
+ 		     TREE_INT_CST_LOW (&op1.min), 2 * HOST_BITS_PER_WIDE_INT,
+ 		     &TREE_INT_CST_LOW (&op0.max),
+ 		     &TREE_INT_CST_HIGH (&op0.max), 1);
+ 
+       /* The left shift may have produced a value range where
+ 	 the most significant bit of op0.min isn't set and
+ 	 the most significant bit of op0.max is set.  This
+ 	 can only be represented by an unsigned value range.  */
+       if (! TREE_UNSIGNED (TREE_TYPE (&op0.min))
+ 	  && ! tree_int_cst_msb (&op0.min)
+ 	  && tree_int_cst_msb (&op0.max))
+ 	{
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = unsigned_type (type);
+ 	}
+ 
+       *vr = op0;
+       break;
+ 
+     case ASHIFTRT:
+     case LSHIFTRT:
+       arith = GET_CODE (x) == ASHIFTRT;
+ 
+       bzero (&op0, sizeof(op0));
+       bzero (&op1, sizeof(op1));
+ 
+       compute_value_range (&op0, mode, XEXP (x, 0));
+ 
+       if (TREE_CONSTANT (&op0.min)
+ 	  && integer_zerop (&op0.min) && integer_zerop (&op0.max))
+ 	{
+ 	  *vr = op0;
+ 	  break;
+ 	}
+ 
+       if (GET_MODE (XEXP (x, 1)) != VOIDmode)
+ 	compute_value_range (&op1, VOIDmode, XEXP (x, 1));
+       else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+ 	{
+ 	  TREE_SET_CODE (&op1.min, INTEGER_CST);
+ 	  TREE_SET_CODE (&op1.max, INTEGER_CST);
+ 	  TREE_TYPE (&op1.min)
+ 	    = TREE_TYPE (&op1.max) = integer_type_node;
+ 	  TREE_INT_CST_LOW (&op1.min)
+ 	    = TREE_INT_CST_LOW (&op1.max) = INTVAL (XEXP (x, 1));
+ 	  TREE_INT_CST_HIGH (&op1.min)
+ 	    = TREE_INT_CST_HIGH (&op1.max) = INTVAL (XEXP (x, 1)) >= 0 ? 0 : -1;
+ 	  TREE_CONSTANT (&op1.min)
+ 	    = TREE_CONSTANT (&op1.max) = 1;
+ 	}
+       else
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+ 
+       if (! TREE_CONSTANT (&op1.min)
+ 	  || (HOST_WIDE_INT)TREE_INT_CST_LOW (&op1.min) < 0
+ 	  || TREE_INT_CST_HIGH (&op1.min)
+ 	  || ! tree_int_cst_equal (&op1.min, &op1.max)
+ 	  || (! TREE_CONSTANT (&op0.min) && ! TREE_INT_CST_LOW (&op1.min)))
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+ 
+       if (! TREE_CONSTANT (&op0.min)
+ 	  || (! arith
+ 	      && ! TREE_UNSIGNED (TREE_TYPE (&op0.min))
+ 	      && tree_int_cst_msb (&op0.min)
+ 	      && ! tree_int_cst_msb (&op0.max)
+ 	      && TREE_INT_CST_LOW (&op1.min)))
+ 	{
+ 	  op0_type = signed_or_unsigned_type (! arith, type);
+ 	  op0.min = *TYPE_MIN_VALUE (op0_type);
+ 	  op0.max = *TYPE_MAX_VALUE (op0_type);
+ 	}
+       else if ((arith && TREE_UNSIGNED (TREE_TYPE (&op0.min)))
+ 	       || (! arith && ! TREE_UNSIGNED (TREE_TYPE (&op0.min))))
+ 	{
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max)
+ 	    = signed_or_unsigned_type (! arith, TREE_TYPE (&op0.min));
+ 	  (void)force_fit_type (&op0.min, 0);
+ 	  (void)force_fit_type (&op0.max, 0);
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = signed_or_unsigned_type (arith, type);
+ 	}
+       rshift_double (TREE_INT_CST_LOW (&op0.min),
+ 		     TREE_INT_CST_HIGH (&op0.min),
+ 		     TREE_INT_CST_LOW (&op1.min), 2 * HOST_BITS_PER_WIDE_INT,
+ 		     &TREE_INT_CST_LOW (&op0.min),
+ 		     &TREE_INT_CST_HIGH (&op0.min), arith);
+       rshift_double (TREE_INT_CST_LOW (&op0.max),
+ 		     TREE_INT_CST_HIGH (&op0.max),
+ 		     TREE_INT_CST_LOW (&op1.min), 2 * HOST_BITS_PER_WIDE_INT,
+ 		     &TREE_INT_CST_LOW (&op0.max),
+ 		     &TREE_INT_CST_HIGH (&op0.max), arith);
+       *vr = op0;
+       break;
+ 
+     case AND:
+       bzero (&op0, sizeof(op0));
+       bzero (&op1, sizeof(op1));
+ 
+       compute_value_range (&op0, mode, XEXP (x, 0));
+       compute_value_range (&op1, mode, XEXP (x, 1));
+ 
+       if ((! TREE_CONSTANT (&op0.min)
+ 	   || ! tree_int_cst_equal (&op0.min, &op0.max))
+ 	  && (! TREE_CONSTANT (&op1.min)
+ 	      || ! tree_int_cst_equal (&op1.min, &op1.max)))
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+ 
+       if (TREE_CONSTANT (&op0.min)
+ 	   && integer_zerop (&op0.min) && integer_zerop (&op0.max))
+ 	{
+ 	  *vr = op0;
+ 	  break;
+ 	}
+       else if (TREE_CONSTANT (&op1.min)
+ 	       && integer_zerop (&op1.min) && integer_zerop (&op1.max))
+ 	{
+ 	  *vr = op1;
+ 	  break;
+ 	}
+ 
+       if (! TREE_CONSTANT (&op0.min))
+ 	{
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = type_for_mode (mode, 1);
+ 	  op0.min = *TYPE_MIN_VALUE (TREE_TYPE (&op0.min));
+ 	  op0.max = *TYPE_MAX_VALUE (TREE_TYPE (&op0.min));
+ 	}
+       else if (! TREE_CONSTANT (&op1.min))
+ 	{
+ 	  TREE_TYPE (&op1.min)
+ 	    = TREE_TYPE (&op1.max) = type_for_mode (mode, 1);
+ 	  op1.min = *TYPE_MIN_VALUE (TREE_TYPE (&op1.min));
+ 	  op1.max = *TYPE_MAX_VALUE (TREE_TYPE (&op1.min));
+ 	}
+ 
+       op = &op0;
+       mask = &op1;
+       i = 0;
+       if (! tree_int_cst_equal (&op1.min, &op1.max)
+ 	  || (tree_int_cst_equal (&op0.min, &op0.max)
+ 	      && convert_value_range (&op0, 1)
+ 	      && convert_value_range (&op1, 1)
+ 	      && INT_CST_LT_UNSIGNED (&op1.min, &op0.min)))
+ 	{
+ 	  op = &op1;
+ 	  mask = &op0;
+ 	  i = 1;
+ 	}
+ 
+       ones_are_contiguous = contiguous_ones_value_range (mask);
+ 
+       (void)convert_value_range (mask, 1);
+ 
+       bits = (unsigned HOST_WIDE_INT)TREE_INT_CST_LOW (&mask->min);
+       nlsb = 0;
+       if (TREE_INT_CST_HIGH (&mask->min))
+ 	{
+ 	  bits = (unsigned HOST_WIDE_INT)TREE_INT_CST_HIGH (&mask->min);
+ 	  nlsb = HOST_BITS_PER_WIDE_INT;
+ 	}
+ 
+       while (bits)
+ 	{
+ 	  bits >>= 1;
+ 	  nlsb++;
+ 	}
+ 
+       rshift_double (TREE_INT_CST_LOW (&op->min), TREE_INT_CST_HIGH (&op->min),
+ 		     nlsb, 2 * HOST_BITS_PER_WIDE_INT, &minl, &minh, 0);
+       rshift_double (TREE_INT_CST_LOW (&op->max), TREE_INT_CST_HIGH (&op->max),
+ 		     nlsb, 2 * HOST_BITS_PER_WIDE_INT, &maxl, &maxh, 0);
+       if (minh != maxh || minl != maxl)
+ 	{
+ 	  vr->min = *TYPE_MIN_VALUE (TREE_TYPE (&mask->min));
+ 	  vr->max = mask->max;
+ 	}
+       else if (ones_are_contiguous || tree_int_cst_equal (&op->min, &op->max))
+ 	{
+ 	  TREE_TYPE (&vr->min)
+ 	    = TREE_TYPE (&vr->max) = unsigned_type (type);
+ 	  TREE_INT_CST_LOW (&vr->min)
+ 	    = TREE_INT_CST_LOW (&op0.min) & TREE_INT_CST_LOW (&op1.min);
+ 	  TREE_INT_CST_HIGH (&vr->min)
+ 	    = TREE_INT_CST_HIGH (&op0.min) & TREE_INT_CST_HIGH (&op1.min);
+ 	  TREE_INT_CST_LOW (&vr->max)
+ 	    = TREE_INT_CST_LOW (&op0.max) & TREE_INT_CST_LOW (&op1.max);
+ 	  TREE_INT_CST_HIGH (&vr->max)
+ 	    = TREE_INT_CST_HIGH (&op0.max) & TREE_INT_CST_HIGH (&op1.max);
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 1;
+ 
+ 	  if (set
+ 	      && tree_int_cst_equal (&vr->min, &op->min)
+ 	      && tree_int_cst_equal (&vr->max, &op->max)
+ 	      && ((ones_are_contiguous && (TREE_INT_CST_LOW (&mask->min) & 1))
+ 		  || tree_int_cst_equal (&op->min, &op->max)))
+ 	    {
+ 	      validate_change (vr_insn, &SET_SRC (set), XEXP (x, i), 0);
+ 
+ 	      if (vr_file)
+ 		fprintf (vr_file, "  insn %d and has no effect.\n",
+ 			 INSN_UID (vr_insn));
+ 	    }
+ 	}
+       else
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+       break;
+ 
+     case PLUS:
+     case MINUS:
+       bzero (&op0, sizeof(op0));
+       bzero (&op1, sizeof(op1));
+ 
+       compute_value_range (&op0, mode, XEXP (x, 0));
+       compute_value_range (&op1, mode, XEXP (x, 1));
+ 
+       if (GET_CODE (x) == MINUS)
+ 	negate_value_range (&op1);
+ 
+       if (! TREE_CONSTANT (&op0.min)
+ 	  || ! TREE_CONSTANT (&op1.min)
+ 	  || GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+ 
+       if (TREE_UNSIGNED (TREE_TYPE (&op0.min))
+ 	  != TREE_UNSIGNED (TREE_TYPE (&op1.min)))
+ 	{
+ 	  if (convert_value_range (&op0, TREE_UNSIGNED (TREE_TYPE (&op1.min)))
+ 	      || convert_value_range (&op1,
+ 				      TREE_UNSIGNED (TREE_TYPE (&op0.min))))
+ 	    ;
+ 	  else
+ 	    {
+ 	      TREE_CONSTANT (&vr->min)
+ 		= TREE_CONSTANT (&vr->max) = 0;
+ 	      break;
+ 	    }
+ 	}
+ 
+       TREE_TYPE (&vr->min)
+ 	= TREE_TYPE (&vr->max)
+ 	= signed_or_unsigned_type (TREE_UNSIGNED (TREE_TYPE (&op0.min)), type);
+       min_signed_overflow = add_double (TREE_INT_CST_LOW (&op0.min),
+ 					TREE_INT_CST_HIGH (&op0.min),
+ 					TREE_INT_CST_LOW (&op1.min),
+ 					TREE_INT_CST_HIGH (&op1.min),
+ 					&TREE_INT_CST_LOW (&vr->min),
+ 					&TREE_INT_CST_HIGH (&vr->min));
+       max_signed_overflow = add_double (TREE_INT_CST_LOW (&op0.max),
+ 					TREE_INT_CST_HIGH (&op0.max),
+ 					TREE_INT_CST_LOW (&op1.max),
+ 					TREE_INT_CST_HIGH (&op1.max),
+ 					&TREE_INT_CST_LOW (&vr->max),
+ 					&TREE_INT_CST_HIGH (&vr->max));
+       TREE_CONSTANT (&vr->min)
+ 	= TREE_CONSTANT (&vr->max) = 1;
+       sum = *vr;
+       min_signed_overflow = force_fit_type (&vr->min, min_signed_overflow);
+       max_signed_overflow = force_fit_type (&vr->max, max_signed_overflow);
+       if (tree_int_cst_equal (&op0.min, &op0.max)
+ 	  || tree_int_cst_equal (&op1.min, &op1.max))
+ 	{
+ 	  if ((TREE_UNSIGNED (TREE_TYPE (&vr->min))
+ 	       && tree_int_cst_equal (&vr->min, &sum.min)
+ 		  != tree_int_cst_equal (&vr->max, &sum.max))
+ 	      || (! TREE_UNSIGNED (TREE_TYPE (&vr->min))
+ 	       && (min_signed_overflow != max_signed_overflow)))
+ 	    {
+ 	      TREE_CONSTANT (&vr->min)
+ 		= TREE_CONSTANT (&vr->max) = 0;
+ 	    }
+ 	}
+       else
+ 	{
+ 	  if ((TREE_UNSIGNED (TREE_TYPE (&vr->min))
+ 	       && (! tree_int_cst_equal (&vr->min, &sum.min)
+ 		   || ! tree_int_cst_equal (&vr->max, &sum.max)))
+ 	      || (! TREE_UNSIGNED (TREE_TYPE (&vr->min))
+ 	       && (min_signed_overflow || max_signed_overflow)))
+ 	    {
+ 	      TREE_CONSTANT (&vr->min)
+ 		= TREE_CONSTANT (&vr->max) = 0;
+ 	    }
+ 	}
+       break;
+ 
+     case MULT:
+       bzero (&op0, sizeof(op0));
+       bzero (&op1, sizeof(op1));
+ 
+       compute_value_range (&op0, mode, XEXP (x, 0));
+       compute_value_range (&op1, mode, XEXP (x, 1));
+ 
+       if ((TREE_CONSTANT (&op0.min)
+ 	   && integer_zerop (&op0.min) && integer_zerop (&op0.max))
+ 	  || (TREE_CONSTANT (&op1.min)
+ 	      && integer_onep (&op1.min) && integer_onep (&op1.max)))
+ 	{
+ 	  *vr = op0;
+ 	  break;
+ 	}
+       else if ((TREE_CONSTANT (&op0.min)
+ 		&& integer_onep (&op0.min) && integer_onep (&op0.max))
+ 	       || (TREE_CONSTANT (&op1.min)
+ 		   && integer_zerop (&op1.min) && integer_zerop (&op1.max)))
+ 	{
+ 	  *vr = op1;
+ 	  break;
+ 	}
+ 
+       /* For simplicity only the multiplication of unsigned
+ 	 value ranges is currently supported.  The mode
+ 	 bitsize must not exceed HOST_BITS_PER_WIDE_INT
+ 	 in order to properly detect overflow.  */
+       if (! TREE_CONSTANT (&op0.min)
+ 	  || ! TREE_CONSTANT (&op1.min)
+ 	  || ! convert_value_range (&op0, 1)
+ 	  || ! convert_value_range (&op1, 1)
+ 	  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+ 
+       TREE_TYPE (&vr->min)
+ 	= TREE_TYPE (&vr->max) = unsigned_type (type);
+       mul_double (TREE_INT_CST_LOW (&op0.min),
+ 		  TREE_INT_CST_HIGH (&op0.min),
+ 		  TREE_INT_CST_LOW (&op1.min),
+ 		  TREE_INT_CST_HIGH (&op1.min),
+ 		  &TREE_INT_CST_LOW (&vr->min),
+ 		  &TREE_INT_CST_HIGH (&vr->min));
+       mul_double (TREE_INT_CST_LOW (&op0.max),
+ 		  TREE_INT_CST_HIGH (&op0.max),
+ 		  TREE_INT_CST_LOW (&op1.max),
+ 		  TREE_INT_CST_HIGH (&op1.max),
+ 		  &TREE_INT_CST_LOW (&vr->max),
+ 		  &TREE_INT_CST_HIGH (&vr->max));
+       TREE_CONSTANT (&vr->min)
+ 	= TREE_CONSTANT (&vr->max) = 1;
+       product = *vr;
+       (void)force_fit_type (&vr->min, 0);
+       (void)force_fit_type (&vr->max, 0);
+       if (! tree_int_cst_equal (&vr->min, &product.min)
+ 	  || ! tree_int_cst_equal (&vr->max, &product.max))
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	}
+       break;
+ 
+     case DIV:
+     case UDIV:
+       bzero (&op0, sizeof(op0));
+       bzero (&op1, sizeof(op1));
+ 
+       compute_value_range (&op0, mode, XEXP (x, 0));
+       compute_value_range (&op1, mode, XEXP (x, 1));
+ 
+       if (TREE_CONSTANT (&op1.min)
+ 	  && integer_onep (&op1.min) && integer_onep (&op1.max))
+ 	{
+ 	  *vr = op0;
+ 	  break;
+ 	}
+ 
+       /* For simplicity only the division of unsigned
+ 	 value ranges is currently supported.  */
+       if (! TREE_CONSTANT (&op0.min)
+ 	  || ! TREE_CONSTANT (&op1.min)
+ 	  || ! convert_value_range (&op0, 1)
+ 	  || ! convert_value_range (&op1, 1)
+ 	  || (GET_CODE (x) == DIV
+ 	      && (tree_int_cst_msb (&op0.max) || tree_int_cst_msb (&op1.max)))
+ 	  || integer_zerop (&op1.min))
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+ 
+       TREE_TYPE (&vr->min)
+ 	= TREE_TYPE (&vr->max) = unsigned_type (type);
+       div_and_round_double (TRUNC_DIV_EXPR, 1,
+ 			    TREE_INT_CST_LOW (&op0.min),
+ 			    TREE_INT_CST_HIGH (&op0.min),
+ 			    TREE_INT_CST_LOW (&op1.max),
+ 			    TREE_INT_CST_HIGH (&op1.max),
+ 			    &TREE_INT_CST_LOW (&vr->min),
+ 			    &TREE_INT_CST_HIGH (&vr->min),
+ 			    &minl, &minh);
+       div_and_round_double (TRUNC_DIV_EXPR, 1,
+ 			    TREE_INT_CST_LOW (&op0.max),
+ 			    TREE_INT_CST_HIGH (&op0.max),
+ 			    TREE_INT_CST_LOW (&op1.min),
+ 			    TREE_INT_CST_HIGH (&op1.min),
+ 			    &TREE_INT_CST_LOW (&vr->max),
+ 			    &TREE_INT_CST_HIGH (&vr->max),
+ 			    &maxl, &maxh);
+       TREE_CONSTANT (&vr->min)
+ 	= TREE_CONSTANT (&vr->max) = 1;
+       break;
+ 
+     case MOD:
+     case UMOD:
+       bzero (&op0, sizeof(op0));
+       bzero (&op1, sizeof(op1));
+ 
+       compute_value_range (&op0, mode, XEXP (x, 0));
+       compute_value_range (&op1, mode, XEXP (x, 1));
+ 
+       if (TREE_CONSTANT (&op1.min)
+ 	  && integer_onep (&op1.min) && integer_onep (&op1.max))
+ 	{
+ 	  vr->min = vr->max = *TYPE_MIN_VALUE (unsigned_type (type));
+ 	  break;
+ 	}
+ 
+       if (! TREE_CONSTANT (&op0.min))
+ 	{
+ 	  op0_type = unsigned_type (type);
+ 	  op0.min = *TYPE_MIN_VALUE (op0_type);
+ 	  op0.max = *TYPE_MAX_VALUE (op0_type);
+ 	}
+ 
+       /* For simplicity only the remainder of unsigned
+ 	 value ranges is currently supported.  */
+       if (! TREE_CONSTANT (&op1.min)
+ 	  || ! convert_value_range (&op0, 1)
+ 	  || ! convert_value_range (&op1, 1)
+ 	  || (GET_CODE (x) == MOD
+ 	      && (tree_int_cst_msb (&op0.max) || tree_int_cst_msb (&op1.max)))
+ 	  || integer_zerop (&op1.min))
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	  break;
+ 	}
+ 
+       TREE_TYPE (&vr->min)
+ 	= TREE_TYPE (&vr->max) = unsigned_type (type);
+       div_and_round_double (TRUNC_DIV_EXPR, 1,
+ 			    TREE_INT_CST_LOW (&op0.min),
+ 			    TREE_INT_CST_HIGH (&op0.min),
+ 			    TREE_INT_CST_LOW (&op1.min),
+ 			    TREE_INT_CST_HIGH (&op1.min),
+ 			    &minl, &minh,
+ 			    &TREE_INT_CST_LOW (&vr->min),
+ 			    &TREE_INT_CST_HIGH (&vr->min));
+       div_and_round_double (TRUNC_DIV_EXPR, 1,
+ 			    TREE_INT_CST_LOW (&op0.max),
+ 			    TREE_INT_CST_HIGH (&op0.max),
+ 			    TREE_INT_CST_LOW (&op1.min),
+ 			    TREE_INT_CST_HIGH (&op1.min),
+ 			    &maxl, &maxh,
+ 			    &TREE_INT_CST_LOW (&vr->max),
+ 			    &TREE_INT_CST_HIGH (&vr->max));
+       if (minh != maxh || minl != maxl
+ 	  || ! tree_int_cst_equal (&op1.min, &op1.max))
+ 	{
+ 	  TREE_INT_CST_LOW (&vr->min)
+ 	    = TREE_INT_CST_HIGH (&vr->min) = 0;
+ 	  add_double (TREE_INT_CST_LOW (&op1.max),
+ 		      TREE_INT_CST_HIGH (&op1.max),
+ 		      -1, -1,
+ 		      &TREE_INT_CST_LOW (&vr->max),
+ 		      &TREE_INT_CST_HIGH (&vr->max));
+ 	}
+       TREE_CONSTANT (&vr->min)
+ 	= TREE_CONSTANT (&vr->max) = 1;
+       break;
+ 
+     case IF_THEN_ELSE:
+       cond = canonicalize_condition (vr_insn, XEXP (x, 0), 0, &earliest);
+       cond_always_true = 0;
+       cond_always_false = 0;
+ 
+       bzero (&op1, sizeof(op1));
+       bzero (&op2, sizeof(op2));
+ 
+       compute_value_range (&op1, mode, XEXP (x, 1));
+       compute_value_range (&op2, mode, XEXP (x, 2));
+ 
+       if (cond
+ 	  && GET_MODE_CLASS (GET_MODE (XEXP (cond,0))) == MODE_INT
+ 	  && ! modified_between_p (cond, PREV_INSN (earliest), vr_insn))
+ 	{
+ 	  bzero (&cond_op0, sizeof(cond_op0));
+ 	  bzero (&cond_op1, sizeof(cond_op1));
+ 
+ 	  code = GET_CODE (cond);
+ 	  comparison_arg_mode = GET_MODE (XEXP (cond,0));
+ 	  compute_value_range (&cond_op0, comparison_arg_mode, XEXP (cond, 0));
+ 	  compute_value_range (&cond_op1, comparison_arg_mode, XEXP (cond, 1));
+ 
+ 	  cond_always_true = compare_value_range (code, cond_op0, cond_op1);
+ 	  cond_always_false = compare_value_range (reverse_condition (code),
+ 						   cond_op0, cond_op1);
+ 
+ 	  if (cond_always_true || cond_always_false)
+ 	    {
+ 	      *vr = cond_always_true ? op1 : op2;
+ 
+ 	      if (set)
+ 		{
+ 		  validate_change (vr_insn, &SET_SRC (set),
+ 				   cond_always_true ? XEXP (x, 1)
+ 						    : XEXP (x, 2),
+ 				   0);
+ 
+ 		  if (vr_file)
+ 		    fprintf (vr_file,
+ 			     "  insn %d if_then_else condition is %s true.\n",
+ 			     INSN_UID (vr_insn),
+ 			     cond_always_true ? "always" : "never");
+ 		}
+ 	    }
+ 	  else
+ 	    {
+ 	      nupdates = 0;
+ 	      for (i = 0; i < 2; i++)
+ 		if (GET_CODE (XEXP (cond, i)) == REG)
+ 		  {
+ 		    original_reg_vr[nupdates].regno = REGNO (XEXP (cond, i));
+ 		    original_reg_vr[nupdates++].vr
+ 		      = *(reg_vr_table + REGNO (XEXP (cond, i)));
+ 		  }
+ 		else if (GET_CODE (XEXP (cond, i)) == SUBREG
+ 			 && GET_CODE (SUBREG_REG (XEXP (cond, i))) == REG)
+ 		  {
+ 		    original_reg_vr[nupdates].regno
+ 		      = REGNO (SUBREG_REG (XEXP (cond, i)));
+ 		    original_reg_vr[nupdates++].vr
+ 		      = *(reg_vr_table + REGNO (SUBREG_REG (XEXP (cond, i))));
+ 		  }
+ 
+ 	      bzero (&implied_op1, sizeof(implied_op1));
+ 	      bzero (&implied_op2, sizeof(implied_op2));
+ 
+ 	      record_compare_value_range (code, cond, cond_op0, cond_op1);
+ 	      compute_value_range (&implied_op1, mode, XEXP (x, 1));
+ 
+ 	      for (i = 0; i < nupdates; i++)
+ 		*(reg_vr_table + original_reg_vr[i].regno)
+ 		  = original_reg_vr[i].vr;
+ 
+ 	      record_compare_value_range (reverse_condition (code),
+ 					  cond, cond_op0, cond_op1);
+ 	      compute_value_range (&implied_op2, mode, XEXP (x, 2));
+ 
+ 	      for (i = 0; i < nupdates; i++)
+ 		*(reg_vr_table + original_reg_vr[i].regno)
+ 		  = original_reg_vr[i].vr;
+ 
+ 	      merge_value_ranges (&implied_op1, &implied_op2, vr);
+ 	    }
+ 	}
+       else
+ 	merge_value_ranges (&op1, &op2, vr);
+ 
+       if (set
+ 	  && ! cond_always_true && ! cond_always_false
+ 	  && TREE_CONSTANT (&vr->min)
+ 	  && tree_int_cst_equal (&vr->min, &vr->max)
+ 	  && (tree_int_cst_equal (&op1.min, &op1.max)
+ 	      || tree_int_cst_equal (&op2.min, &op2.max)))
+ 	{
+ 	  validate_change (vr_insn, &SET_SRC (set),
+ 			   (tree_int_cst_equal (&op1.min, &op1.max)
+ 			    && ! CONSTANT_P (XEXP (x, 2)))
+ 			   ? XEXP (x, 1) : XEXP (x, 2), 0);
+ 
+ 	  if (vr_file)
+ 	    fprintf (vr_file,
+ 		     "  insn %d if_then_else has no effect.\n",
+ 		     INSN_UID (vr_insn));
+ 	}
+       break;
+ 
+     case EQ:
+     case NE:
+     case LE:
+     case GE:
+     case LT:
+     case GT:
+     case LEU:
+     case GEU:
+     case LTU:
+     case GTU:
+       cond = canonicalize_condition (vr_insn, x, 0, &earliest);
+       cond_always_true = 0;
+       cond_always_false = 0;
+       cond_has_no_effect = 0;
+ 
+       if (cond
+ 	  && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT
+ 	  && ! modified_between_p (cond, PREV_INSN (earliest), vr_insn))
+ 	{
+ 	  bzero (&op0, sizeof(op0));
+ 	  bzero (&op1, sizeof(op1));
+ 
+ 	  code = GET_CODE (cond);
+ 	  comparison_arg_mode = GET_MODE (XEXP (cond, 0));
+ 	  compute_value_range (&op0, comparison_arg_mode, XEXP (cond, 0));
+ 	  compute_value_range (&op1, comparison_arg_mode, XEXP (cond, 1));
+ 
+ 	  cond_always_true = compare_value_range (code, op0, op1);
+ 	  cond_always_false
+ 	    = compare_value_range (reverse_condition (code), op0, op1);
+ 
+ #if STORE_FLAG_VALUE == -1 || STORE_FLAG_VALUE == 1
+ 	  cond_has_no_effect
+ 	    = comparison_arg_mode == mode
+ 	      && TREE_CONSTANT (&op0.min)
+ 	      && TREE_CONSTANT (&op1.min)
+ 	      && tree_int_cst_equal (&op1.min, &op1.max)
+ #if STORE_FLAG_VALUE == -1
+ 	      && (((code == NE || code == GTU || code == LT)
+ 		   && integer_zerop (&op1.min))
+ 		  || (code == EQ && integer_all_onesp (&op1.min)))
+ 	      && ! TREE_UNSIGNED (&op0.min)
+ 	      && integer_all_onesp (&op0.min)
+ 	      && integer_zerop (&op0.max)
+ #else
+ 	      && (((code == NE || code == GTU || code == GT)
+ 		   && integer_zerop (&op1.min))
+ 		  || (code == EQ && integer_onep (&op1.min)))
+ 	      && integer_zerop (&op0.min)
+ 	      && integer_onep (&op0.max)
+ #endif
+ 	      ;
+ #endif
+ 	}
+ 
+       TREE_TYPE (&vr->min)
+ 	= TREE_TYPE (&vr->max) = signed_type (type);
+       TREE_CONSTANT (&vr->min)
+ 	= TREE_CONSTANT (&vr->max)
+ #if STORE_FLAG_VALUE == -1 || STORE_FLAG_VALUE == 1
+ 	= 1;
+ #else
+ 	= 0;
+ #endif
+ #if STORE_FLAG_VALUE == -1
+       TREE_INT_CST_LOW (&vr->min) = ! cond_always_false ? -1 : 0;
+       TREE_INT_CST_HIGH (&vr->min) = ! cond_always_false ? -1 : 0;
+       TREE_INT_CST_LOW (&vr->max) = ! cond_always_true ? 0 : -1;
+       TREE_INT_CST_HIGH (&vr->max) = ! cond_always_true ? 0 : -1;
+ #else
+       TREE_INT_CST_LOW (&vr->min) = ! cond_always_true ? 0 : 1;
+       TREE_INT_CST_HIGH (&vr->min) = ! cond_always_true ? 0 : 1;
+       TREE_INT_CST_LOW (&vr->max) = ! cond_always_false ? 1 : 0;
+       TREE_INT_CST_HIGH (&vr->max) = 0;
+ #endif
+ 
+       if (set
+ 	  && (cond_always_true || cond_always_false || cond_has_no_effect))
+ 	{
+ 	  validate_change (vr_insn, &SET_SRC (set),
+ 			   cond_has_no_effect
+ 			   ? XEXP (cond, 0)
+ 			   : cond_always_true
+ 			     ? const_true_rtx : const0_rtx, 0);
+ 
+ 	  if (vr_file)
+ 	    {
+ 	      if (cond_has_no_effect)
+ 		fprintf (vr_file,
+ 			 "  insn %d store condition has no effect.\n",
+ 			 INSN_UID (vr_insn));
+ 	      else
+ 		fprintf (vr_file,
+ 			 "  insn %d store condition is %s true.\n",
+ 			 INSN_UID (vr_insn),
+ 			 cond_always_true ? "always" : "never");
+ 	    }
+ 	}
+       break;
+ 
+     default:
+       TREE_CONSTANT (&vr->min)
+ 	= TREE_CONSTANT (&vr->max) = 0;
+       break;
+     }
+ 
+   if (TREE_CONSTANT (&vr->min))
+     {
+       (void)force_fit_type (&vr->min, 0);
+       (void)force_fit_type (&vr->max, 0);
+     }
+ 
+   if (set
+       && (GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
+ 	  || GET_CODE (SET_DEST (set)) == SUBREG))
+     {
+       if (GET_CODE (SET_DEST (set)) == SUBREG
+ 	  && GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
+ 	     > GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
+ 	{
+ 	  truncate_value_range (vr, GET_MODE (SUBREG_REG (SET_DEST (set))));
+ 	}
+       else if (! merge_value_range_subreg (original_vr, *vr, vr,
+ 					   SET_DEST (set)))
+ 	{
+ 	  TREE_CONSTANT (&vr->min)
+ 	    = TREE_CONSTANT (&vr->max) = 0;
+ 	}
+     }
+ 
+   TREE_CONSTANT_OVERFLOW (&vr->min)
+     = TREE_CONSTANT_OVERFLOW (&vr->max) = 0;
+ 
+   if (set
+       && TREE_CONSTANT (&vr->min)
+       && tree_int_cst_equal (&vr->min, &vr->max)
+       && ! rtx_equal_p (SET_SRC (set), SET_DEST (set)))
+     {
+       if (TREE_CONSTANT (&original_vr.min)
+ 	  && tree_int_cst_equal (&original_vr.min, &original_vr.max)
+ 	  && tree_int_cst_equal (&original_vr.min, &vr->min))
+ 	{
+ 	  validate_change (vr_insn, &SET_SRC (set), SET_DEST (set), 0);
+ 
+ 	  if (vr_file)
+ 	    fprintf (vr_file, "  insn %d set has no effect.\n",
+ 		     INSN_UID (vr_insn));
+ 	}
+       else if (single_set (vr_insn)
+ 	       && GET_CODE (SET_DEST (set)) == REG)
+ 	{
+ 	  rtx note = find_reg_note (vr_insn, REG_EQUAL, NULL_RTX);
+ 	  rtx src_const = immed_double_const (TREE_INT_CST_LOW (&vr->min),
+ 					      TREE_INT_CST_HIGH (&vr->min),
+ 					      GET_MODE (SET_DEST (set)));
+ 
+ 	  if (note)
+ 	    XEXP (note, 0) = src_const;
+ 	  else
+ 	    REG_NOTES (vr_insn) = gen_rtx_EXPR_LIST (REG_EQUAL, src_const,
+ 						     REG_NOTES (vr_insn));
+ 	}
+     }
+ }
+ 
+ /* Called from value_range_prop via note_stores to handle
+    one SET or CLOBBER in an insn.
+ 
+    DEST is the location being stored to and SETTER is the SET or
+    CLOBBER rtx performing the store.  The computed value range for
+    the destination register is recorded either directly in
+    reg_vr_table or, for PARALLEL insns, queued on vr_reg_updates so
+    all stores in the insn see the pre-insn ranges.  */
+ 
+ static void
+ record_value_range_store (dest, setter, data)
+      rtx dest;
+      rtx setter;
+      void *data ATTRIBUTE_UNUSED;
+ {
+   struct value_range *vr;
+   enum machine_mode mode;
+ 
+   /* Look through a SUBREG to the underlying register.  */
+   if (GET_CODE (dest) == SUBREG)
+     dest = SUBREG_REG (dest);
+ 
+   /* Only pseudo registers are tracked; ignore stores to memory
+      and to hard registers.  */
+   if (GET_CODE (dest) != REG
+       || REGNO (dest) < FIRST_PSEUDO_REGISTER)
+     return;
+ 
+   vr = reg_vr_table + REGNO (dest);
+ 
+   if (GET_CODE (PATTERN (vr_insn)) == PARALLEL)
+     {
+       struct reg_value_range *update;
+ 
+       /* A PARALLEL performs all of its stores simultaneously, so
+ 	 defer this update by appending it to the vr_reg_updates
+ 	 list instead of modifying reg_vr_table now; the caller
+ 	 applies the queued updates after the whole insn is
+ 	 processed.  */
+       /* NOTE(review): malloc result is not checked for NULL before
+ 	 bzero dereferences it -- presumably relies on the compiler
+ 	 aborting on OOM elsewhere; confirm or use xmalloc.  */
+       update = (struct reg_value_range *)
+ 	       malloc (sizeof (struct reg_value_range));
+       bzero (update, sizeof (struct reg_value_range));
+ 
+       /* Append to the tail of the singly linked update list,
+ 	 initializing head/tail when the list is empty.  */
+       if (vr_reg_updates.tail)
+ 	{
+ 	  vr_reg_updates.tail->next = update;
+ 	  vr_reg_updates.tail = update;
+ 	}
+       else
+ 	{
+ 	  vr_reg_updates.head = update;
+ 	  vr_reg_updates.tail = update;
+ 	}
+       vr_reg_updates.nupdates++;
+ 
+       update->regno = REGNO (dest);
+ 
+       /* Compute the range into the queued record, not the table.  */
+       vr = &update->vr;
+     }
+ 
+   /* Determine the mode of the stored value: for a SET use the mode
+      of its destination, looking inside STRICT_LOW_PART to the
+      register actually written; otherwise (CLOBBER) fall back to the
+      mode of DEST itself.  */
+   mode = GET_MODE (dest);
+   if (GET_CODE (setter) == SET)
+     {
+       mode = GET_MODE (SET_DEST (setter));
+       if (GET_CODE (SET_DEST (setter)) == STRICT_LOW_PART)
+ 	mode = GET_MODE (XEXP (SET_DEST (setter), 0));
+     }
+ 
+   compute_value_range (vr, mode, setter);
+ }
+ 
+ /* Given a comparison code CODE which is known to always be
+    true when comparing value ranges OP0 and OP1 update the
+    corresponding recorded value ranges (which are described
+    by condition COND).  For example:
+ 
+      CODE = lt
+      COND = (lt (reg:SI 11) (reg:SI 12))
+      OP0 = 2 .. 14
+      OP1 = 3 .. 11
+ 
+    implies that:
+ 
+      (reg:SI 11) = 2 .. 10
+      (reg:SI 12) = 3 .. 11
+ 
+    since comparison code CODE would never be true if (reg:SI 11)
+    was greater than 10.  */
+ 
+ static void
+ record_compare_value_range (code, cond, op0, op1)
+      enum rtx_code code;
+      rtx cond;
+      struct value_range op0;
+      struct value_range op1;
+ {
+   int i;
+   struct value_range nop[2];
+   tree type;
+ 
+   if ((! TREE_CONSTANT (&op0.min)
+        && ! TREE_CONSTANT (&op1.min))
+       || (TREE_CONSTANT (&op0.min)
+ 	  && ! INTEGRAL_TYPE_P (TREE_TYPE (&op0.min)))
+       || (TREE_CONSTANT (&op1.min)
+ 	  && ! INTEGRAL_TYPE_P (TREE_TYPE (&op1.min))))
+     return;
+ 
+   type = TREE_CONSTANT (&op0.min) ? TREE_TYPE (&op0.min)
+ 				   : TREE_TYPE (&op1.min);
+ 
+   switch (code)
+     {
+     case EQ:
+     case NE:
+       if (TREE_CONSTANT (&op0.min) && TREE_CONSTANT (&op1.min)
+ 	  && TREE_UNSIGNED (TREE_TYPE (&op0.min))
+ 	     != TREE_UNSIGNED (TREE_TYPE (&op1.min)))
+ 	{
+ 	  if (convert_value_range (&op0, TREE_UNSIGNED (TREE_TYPE (&op1.min)))
+ 	      || convert_value_range (&op1,
+ 				      TREE_UNSIGNED (TREE_TYPE (&op0.min))))
+ 	    break;
+ 	  return;
+ 	}
+       break;
+ 
+     case LT:
+     case LE:
+     case GT:
+     case GE:
+       if (TREE_CONSTANT (&op0.min) && TREE_UNSIGNED (TREE_TYPE (&op0.min)))
+ 	{
+ 	  if (! tree_int_cst_msb (&op0.min) && tree_int_cst_msb (&op0.max))
+ 	    return;
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = signed_type (type);
+ 	  (void)force_fit_type (&op0.min, 0);
+ 	  (void)force_fit_type (&op0.max, 0);
+ 	}
+ 
+       if (TREE_CONSTANT (&op1.min) && TREE_UNSIGNED (TREE_TYPE (&op1.min)))
+ 	{
+ 	  if (! tree_int_cst_msb (&op1.min) && tree_int_cst_msb (&op1.max))
+ 	    return;
+ 	  TREE_TYPE (&op1.min)
+ 	    = TREE_TYPE (&op1.max) = signed_type (type);
+ 	  (void)force_fit_type (&op1.min, 0);
+ 	  (void)force_fit_type (&op1.max, 0);
+ 	}
+       break;
+ 
+     case LTU:
+     case LEU:
+     case GTU:
+     case GEU:
+       if (TREE_CONSTANT (&op0.min) && ! TREE_UNSIGNED (TREE_TYPE (&op0.min)))
+ 	{
+ 	  if (tree_int_cst_msb (&op0.min) && ! tree_int_cst_msb (&op0.max))
+ 	    return;
+ 	  TREE_TYPE (&op0.min)
+ 	    = TREE_TYPE (&op0.max) = unsigned_type (type);
+ 	  (void)force_fit_type (&op0.min, 0);
+ 	  (void)force_fit_type (&op0.max, 0);
+ 	}
+ 
+       if (TREE_CONSTANT (&op1.min) && ! TREE_UNSIGNED (TREE_TYPE (&op1.min)))
+ 	{
+ 	  if (tree_int_cst_msb (&op1.min) && ! tree_int_cst_msb (&op1.max))
+ 	    return;
+ 	  TREE_TYPE (&op1.min)
+ 	    = TREE_TYPE (&op1.max) = unsigned_type (type);
+ 	  (void)force_fit_type (&op1.min, 0);
+ 	  (void)force_fit_type (&op1.max, 0);
+ 	}
+       break;
+ 
+     default:
+       return;
+     }
+ 
+   nop[0] = op0;
+   nop[1] = op1;
+ 
+   if (TREE_CONSTANT (&op1.min))
+     switch (code)
+       {
+       case EQ:
+ 	if (TREE_CONSTANT (&op0.min))
+ 	  if (TREE_UNSIGNED (TREE_TYPE (&op0.min)))
+ 	    {
+ 	      if (INT_CST_LT_UNSIGNED (&op0.min, &op1.min)
+ 		  && ! INT_CST_LT_UNSIGNED (&op0.max, &op1.min))
+ 		nop[0].min = op1.min;
+ 	      if (INT_CST_LT_UNSIGNED (&op1.max, &op0.max)
+ 		&& ! INT_CST_LT_UNSIGNED (&op1.max, &op0.min))
+ 		nop[0].max = op1.max;
+ 	    }
+ 	  else
+ 	    {
+ 	      if (INT_CST_LT (&op0.min, &op1.min)
+ 		  && ! INT_CST_LT (&op0.max, &op1.min))
+ 		nop[0].min = op1.min;
+ 	      if (INT_CST_LT (&op1.max, &op0.max)
+ 		&& ! INT_CST_LT (&op1.max, &op0.min))
+ 		nop[0].max = op1.max;
+ 	    }
+ 	else
+ 	  nop[0] = op1;
+ 	break;
+ 
+       case NE:
+ 	if (tree_int_cst_equal (&op1.min, &op1.max))
+ 	  {
+ 	    if (TREE_CONSTANT (&op0.min))
+ 	      {
+ 		if (! tree_int_cst_equal (&op0.min, &op0.max))
+ 		  {
+ 		    if (tree_int_cst_equal (&op0.min, &op1.min))
+ 		      {
+ 			(void)add_double (TREE_INT_CST_LOW (&nop[0].min),
+ 					  TREE_INT_CST_HIGH (&nop[0].min),
+ 					  1, 0,
+ 					  &TREE_INT_CST_LOW (&nop[0].min),
+ 					  &TREE_INT_CST_HIGH (&nop[0].min));
+ 		      }
+ 		    else if (tree_int_cst_equal (&op0.max, &op1.min))
+ 		      {
+ 			(void)add_double (TREE_INT_CST_LOW (&nop[0].max),
+ 					  TREE_INT_CST_HIGH (&nop[0].max),
+ 					  -1, -1,
+ 					  &TREE_INT_CST_LOW (&nop[0].max),
+ 					  &TREE_INT_CST_HIGH (&nop[0].max));
+ 		      }
+ 		  }
+ 	      }
+ 	    else if (tree_int_cst_equal (&op1.min, TYPE_MIN_VALUE (type)))
+ 	      {
+ 		nop[0].min = *TYPE_MIN_VALUE (type);
+ 		(void)add_double (TREE_INT_CST_LOW (&nop[0].min),
+ 				  TREE_INT_CST_HIGH (&nop[0].min), 1, 0,
+ 				  &TREE_INT_CST_LOW (&nop[0].min),
+ 				  &TREE_INT_CST_HIGH (&nop[0].min));
+ 		nop[0].max = *TYPE_MAX_VALUE (type);
+ 	      }
+ 	    else if (tree_int_cst_equal (&op1.min, TYPE_MAX_VALUE (type)))
+ 	      {
+ 		nop[0].min = *TYPE_MIN_VALUE (type);
+ 		nop[0].max = *TYPE_MAX_VALUE (type);
+ 		(void)add_double (TREE_INT_CST_LOW (&nop[0].max),
+ 				  TREE_INT_CST_HIGH (&nop[0].max), -1, -1,
+ 				  &TREE_INT_CST_LOW (&nop[0].max),
+ 				  &TREE_INT_CST_HIGH (&nop[0].max));
+ 	      }
+ 	  }
+ 	break;
+ 
+       case LT:
+ 	if ((! TREE_CONSTANT (&op0.min)
+ 	     && ! tree_int_cst_equal (&op1.max,
+ 				      TYPE_MIN_VALUE (signed_type (type))))
+ 	    || (! INT_CST_LT (&op0.max, &op1.max)
+ 		&& INT_CST_LT (&op0.min, &op1.max)))
+ 	  {
+ 	    if (! TREE_CONSTANT (&op0.min))
+ 	      nop[0].min = *TYPE_MIN_VALUE (signed_type (type));
+ 	    nop[0].max = op1.max;
+ 	    (void)add_double (TREE_INT_CST_LOW (&nop[0].max),
+ 			      TREE_INT_CST_HIGH (&nop[0].max), -1, -1,
+ 			      &TREE_INT_CST_LOW (&nop[0].max),
+ 			      &TREE_INT_CST_HIGH (&nop[0].max));
+ 	  }
+ 	break;
+ 
+       case LE:
+ 	if (! TREE_CONSTANT (&op0.min)
+ 	    || (INT_CST_LT (&op1.max, &op0.max)
+ 		&& ! INT_CST_LT (&op1.max, &op0.min)))
+ 	  {
+ 	    if (! TREE_CONSTANT (&op0.min))
+ 	      nop[0].min = *TYPE_MIN_VALUE (signed_type (type));
+ 	    nop[0].max = op1.max;
+ 	  }
+ 	break;
+ 
+       case GT:
+ 	if ((! TREE_CONSTANT (&op0.min)
+ 	     && ! tree_int_cst_equal (&op1.min,
+ 				      TYPE_MAX_VALUE (signed_type (type))))
+ 	    || (! INT_CST_LT (&op1.min, &op0.min)
+ 		&& INT_CST_LT (&op1.min, &op0.max)))
+ 	  {
+ 	    nop[0].min = op1.min;
+ 	    (void)add_double (TREE_INT_CST_LOW (&nop[0].min),
+ 			      TREE_INT_CST_HIGH (&nop[0].min), 1, 0,
+ 			      &TREE_INT_CST_LOW (&nop[0].min),
+ 			      &TREE_INT_CST_HIGH (&nop[0].min));
+ 	    if (! TREE_CONSTANT (&op0.min))
+ 	      nop[0].max = *TYPE_MAX_VALUE (signed_type (type));
+ 	  }
+ 	break;
+ 
+       case GE:
+ 	if (! TREE_CONSTANT (&op0.min)
+ 	    || (INT_CST_LT (&op0.min, &op1.min)
+ 		&& ! INT_CST_LT (&op0.max, &op1.min)))
+ 	  {
+ 	    nop[0].min = op1.min;
+ 	    if (! TREE_CONSTANT (&op0.min))
+ 	      nop[0].max = *TYPE_MAX_VALUE (signed_type (type));
+ 	  }
+ 	break;
+ 
+       case LTU:
+ 	if ((! TREE_CONSTANT (&op0.min)
+ 	     && ! tree_int_cst_equal (&op1.max,
+ 				      TYPE_MIN_VALUE (unsigned_type (type))))
+ 	    || (! INT_CST_LT_UNSIGNED (&op0.max, &op1.max)
+ 		&& INT_CST_LT_UNSIGNED (&op0.min, &op1.max)))
+ 	  {
+ 	    if (! TREE_CONSTANT (&op0.min))
+ 	      nop[0].min = *TYPE_MIN_VALUE (unsigned_type (type));
+ 	    nop[0].max = op1.max;
+ 	    (void)add_double (TREE_INT_CST_LOW (&nop[0].max),
+ 			      TREE_INT_CST_HIGH (&nop[0].max), -1, -1,
+ 			      &TREE_INT_CST_LOW (&nop[0].max),
+ 			      &TREE_INT_CST_HIGH (&nop[0].max));
+ 	  }
+ 	break;
+ 
+       case LEU:
+ 	if (! TREE_CONSTANT (&op0.min)
+ 	    || (INT_CST_LT_UNSIGNED (&op1.max, &op0.max)
+ 		&& ! INT_CST_LT_UNSIGNED (&op1.max, &op0.min)))
+ 	  {
+ 	    if (! TREE_CONSTANT (&op0.min))
+ 	      nop[0].min = *TYPE_MIN_VALUE (unsigned_type (type));
+ 	    nop[0].max = op1.max;
+ 	  }
+ 	break;
+ 
+       case GTU:
+ 	if ((! TREE_CONSTANT (&op0.min)
+ 	     && ! tree_int_cst_equal (&op1.min,
+ 				      TYPE_MAX_VALUE (unsigned_type (type))))
+ 	    || (! INT_CST_LT_UNSIGNED (&op1.min, &op0.min)
+ 		&& INT_CST_LT_UNSIGNED (&op1.min, &op0.max)))
+ 	  {
+ 	    nop[0].min = op1.min;
+ 	    (void)add_double (TREE_INT_CST_LOW (&nop[0].min),
+ 			      TREE_INT_CST_HIGH (&nop[0].min), 1, 0,
+ 			      &TREE_INT_CST_LOW (&nop[0].min),
+ 			      &TREE_INT_CST_HIGH (&nop[0].min));
+ 	    if (! TREE_CONSTANT (&op0.min))
+ 	      nop[0].max = *TYPE_MAX_VALUE (unsigned_type (type));
+ 	  }
+ 	break;
+ 
+       case GEU:
+ 	if (! TREE_CONSTANT (&op0.min)
+ 	    || (INT_CST_LT_UNSIGNED (&op0.min, &op1.min)
+ 		&& ! INT_CST_LT_UNSIGNED (&op0.max, &op1.min)))
+ 	  {
+ 	    nop[0].min = op1.min;
+ 	    if (! TREE_CONSTANT (&op0.min))
+ 	      nop[0].max = *TYPE_MAX_VALUE (unsigned_type (type));
+ 	  }
+ 	break;
+ 
+       default:
+ 	break;
+       }
+ 
+   if (TREE_CONSTANT (&op0.min))
+     switch (code)
+       {
+       case EQ:
+ 	if (TREE_CONSTANT (&op1.min))
+ 	  if (TREE_UNSIGNED (TREE_TYPE (&op1.min)))
+ 	    {
+ 	      if (INT_CST_LT_UNSIGNED (&op1.min, &op0.min)
+ 		  && ! INT_CST_LT_UNSIGNED (&op1.max, &op0.min))
+ 		nop[1].min = op0.min;
+ 	      if (INT_CST_LT_UNSIGNED (&op0.max, &op1.max)
+ 		&& ! INT_CST_LT_UNSIGNED (&op0.max, &op1.min))
+ 		nop[1].max = op0.max;
+ 	    }
+ 	  else
+ 	    {
+ 	      if (INT_CST_LT (&op1.min, &op0.min)
+ 		  && ! INT_CST_LT (&op1.max, &op0.min))
+ 		nop[1].min = op0.min;
+ 	      if (INT_CST_LT (&op0.max, &op1.max)
+ 		&& ! INT_CST_LT (&op0.max, &op1.min))
+ 		nop[1].max = op0.max;
+ 	    }
+ 	else
+ 	  nop[1] = op0;
+ 	break;
+ 
+       case NE:
+ 	if (tree_int_cst_equal (&op0.min, &op0.max))
+ 	  {
+ 	    if (TREE_CONSTANT (&op1.min))
+ 	      {
+ 		if (! tree_int_cst_equal (&op1.min, &op1.max))
+ 		  {
+ 		    if (tree_int_cst_equal (&op1.min, &op0.min))
+ 		      {
+ 			(void)add_double (TREE_INT_CST_LOW (&nop[1].min),
+ 					  TREE_INT_CST_HIGH (&nop[1].min),
+ 					  1, 0,
+ 					  &TREE_INT_CST_LOW (&nop[1].min),
+ 					  &TREE_INT_CST_HIGH (&nop[1].min));
+ 		      }
+ 		    else if (tree_int_cst_equal (&op1.max, &op0.min))
+ 		      {
+ 			(void)add_double (TREE_INT_CST_LOW (&nop[1].max),
+ 					  TREE_INT_CST_HIGH (&nop[1].max),
+ 					  -1, -1,
+ 					  &TREE_INT_CST_LOW (&nop[1].max),
+ 					  &TREE_INT_CST_HIGH (&nop[1].max));
+ 		      }
+ 		  }
+ 	      }
+ 	    else if (tree_int_cst_equal (&op0.min, TYPE_MIN_VALUE (type)))
+ 	      {
+ 		nop[1].min = *TYPE_MIN_VALUE (type);
+ 		(void)add_double (TREE_INT_CST_LOW (&nop[1].min),
+ 				  TREE_INT_CST_HIGH (&nop[1].min), 1, 0,
+ 				  &TREE_INT_CST_LOW (&nop[1].min),
+ 				  &TREE_INT_CST_HIGH (&nop[1].min));
+ 		nop[1].max = *TYPE_MAX_VALUE (type);
+ 	      }
+ 	    else if (tree_int_cst_equal (&op0.min, TYPE_MAX_VALUE (type)))
+ 	      {
+ 		nop[1].min = *TYPE_MIN_VALUE (type);
+ 		nop[1].max = *TYPE_MAX_VALUE (type);
+ 		(void)add_double (TREE_INT_CST_LOW (&nop[1].max),
+ 				  TREE_INT_CST_HIGH (&nop[1].max), -1, -1,
+ 				  &TREE_INT_CST_LOW (&nop[1].max),
+ 				  &TREE_INT_CST_HIGH (&nop[1].max));
+ 	      }
+ 	  }
+ 	break;
+ 
+       case LT:
+ 	if ((! TREE_CONSTANT (&op1.min)
+ 	     && ! tree_int_cst_equal (&op0.min,
+ 				      TYPE_MAX_VALUE (signed_type (type))))
+ 	    || (! INT_CST_LT (&op0.min, &op1.min)
+ 		&& INT_CST_LT (&op0.min, &op1.max)))
+ 	  {
+ 	    nop[1].min = op0.min;
+ 	    (void)add_double (TREE_INT_CST_LOW (&nop[1].min),
+ 			      TREE_INT_CST_HIGH (&nop[1].min), 1, 0,
+ 			      &TREE_INT_CST_LOW (&nop[1].min),
+ 			      &TREE_INT_CST_HIGH (&nop[1].min));
+ 	    if (! TREE_CONSTANT (&op1.min))
+ 	      nop[1].max = *TYPE_MAX_VALUE (signed_type (type));
+ 	  }
+ 	break;
+ 
+       case LE:
+ 	if (! TREE_CONSTANT (&op1.min)
+ 	    || (INT_CST_LT (&op1.min, &op0.min)
+ 		&& ! INT_CST_LT (&op1.max, &op0.min)))
+ 	  {
+ 	    nop[1].min = op0.min;
+ 	    if (! TREE_CONSTANT (&op1.min))
+ 	      nop[1].max = *TYPE_MAX_VALUE (signed_type (type));
+ 	  }
+ 	break;
+ 
+       case GT:
+ 	if ((! TREE_CONSTANT (&op1.min)
+ 	     && ! tree_int_cst_equal (&op0.max,
+ 				      TYPE_MIN_VALUE (signed_type (type))))
+ 	    || (! INT_CST_LT (&op1.max, &op0.max)
+ 		&& INT_CST_LT (&op1.min, &op0.max)))
+ 	  {
+ 	    if (! TREE_CONSTANT (&op1.min))
+ 	      nop[1].min = *TYPE_MIN_VALUE (signed_type (type));
+ 	    nop[1].max = op0.max;
+ 	    (void)add_double (TREE_INT_CST_LOW (&nop[1].max),
+ 			      TREE_INT_CST_HIGH (&nop[1].max), -1, -1,
+ 			      &TREE_INT_CST_LOW (&nop[1].max),
+ 			      &TREE_INT_CST_HIGH (&nop[1].max));
+ 	  }
+ 	break;
+ 
+       case GE:
+ 	if (! TREE_CONSTANT (&op1.min)
+ 	    || (INT_CST_LT (&op0.max, &op1.max)
+ 		&& ! INT_CST_LT (&op0.max, &op1.min)))
+ 	  {
+ 	    if (! TREE_CONSTANT (&op1.min))
+ 	      nop[1].min = *TYPE_MIN_VALUE (signed_type (type));
+ 	    nop[1].max = op0.max;
+ 	  }
+ 	break;
+ 
+       case LTU:
+ 	if ((! TREE_CONSTANT (&op1.min)
+ 	     && ! tree_int_cst_equal (&op0.min,
+ 				      TYPE_MAX_VALUE (unsigned_type (type))))
+ 	    || (! INT_CST_LT_UNSIGNED (&op0.min, &op1.min)
+ 		&& INT_CST_LT_UNSIGNED (&op0.min, &op1.max)))
+ 	  {
+ 	    nop[1].min = op0.min;
+ 	    (void)add_double (TREE_INT_CST_LOW (&nop[1].min),
+ 			      TREE_INT_CST_HIGH (&nop[1].min), 1, 0,
+ 			      &TREE_INT_CST_LOW (&nop[1].min),
+ 			      &TREE_INT_CST_HIGH (&nop[1].min));
+ 	    if (! TREE_CONSTANT (&op1.min))
+ 	      nop[1].max = *TYPE_MAX_VALUE (unsigned_type (type));
+ 	  }
+ 	break;
+ 
+       case LEU:
+ 	if (! TREE_CONSTANT (&op1.min)
+ 	    || (INT_CST_LT_UNSIGNED (&op1.min, &op0.min)
+ 		&& ! INT_CST_LT_UNSIGNED (&op1.max, &op0.min)))
+ 	  {
+ 	    nop[1].min = op0.min;
+ 	    if (! TREE_CONSTANT (&op1.min))
+ 	      nop[1].max = *TYPE_MAX_VALUE (unsigned_type (type));
+ 	  }
+ 	break;
+ 
+       case GTU:
+ 	if ((! TREE_CONSTANT (&op1.min)
+ 	     && ! tree_int_cst_equal (&op0.max,
+ 				      TYPE_MIN_VALUE (unsigned_type (type))))
+ 	    || (! INT_CST_LT_UNSIGNED (&op1.max, &op0.max)
+ 		&& INT_CST_LT_UNSIGNED (&op1.min, &op0.max)))
+ 	  {
+ 	    if (! TREE_CONSTANT (&op1.min))
+ 	      nop[1].min = *TYPE_MIN_VALUE (unsigned_type (type));
+ 	    nop[1].max = op0.max;
+ 	    (void)add_double (TREE_INT_CST_LOW (&nop[1].max),
+ 			      TREE_INT_CST_HIGH (&nop[1].max), -1, -1,
+ 			      &TREE_INT_CST_LOW (&nop[1].max),
+ 			      &TREE_INT_CST_HIGH (&nop[1].max));
+ 	  }
+ 	break;
+ 
+       case GEU:
+ 	if (! TREE_CONSTANT (&op1.min)
+ 	    || (INT_CST_LT_UNSIGNED (&op0.max, &op1.max)
+ 		&& ! INT_CST_LT_UNSIGNED (&op0.max, &op1.min)))
+ 	  {
+ 	    if (! TREE_CONSTANT (&op1.min))
+ 	      nop[1].min = *TYPE_MIN_VALUE (unsigned_type (type));
+ 	    nop[1].max = op0.max;
+ 	  }
+ 	break;
+ 
+       default:
+ 	break;
+       }
+ 
+   if (GET_CODE (XEXP (cond, 0)) == SUBREG
+       && GET_CODE (SUBREG_REG (XEXP (cond, 0))) == REG
+       && GET_CODE (XEXP (cond, 1)) == SUBREG
+       && GET_CODE (SUBREG_REG (XEXP (cond, 1))) == REG
+       && REGNO (SUBREG_REG (XEXP (cond, 0)))
+ 	 == REGNO (SUBREG_REG (XEXP (cond, 1))))
+     return;
+ 
+   for (i = 0; i < 2; i++)
+     {
+       if (GET_CODE (XEXP (cond, i)) == REG
+ 	  && REGNO (XEXP (cond, i)) >= FIRST_PSEUDO_REGISTER)
+ 	{
+ 	  *(reg_vr_table + REGNO (XEXP (cond, i))) = nop[i];
+ 	}
+       else if (GET_CODE (XEXP (cond, i)) == SUBREG
+ 	     && GET_CODE (SUBREG_REG (XEXP (cond, i))) == REG
+ 	     && REGNO (SUBREG_REG (XEXP (cond, i))) >= FIRST_PSEUDO_REGISTER)
+ 	{
+ 	  struct value_range *vr;
+ 
+ 	  vr = (reg_vr_table + REGNO (SUBREG_REG (XEXP (cond, i))));
+ 
+ 	  (void)merge_value_range_subreg (*vr, nop[i], vr, XEXP (cond, i));
+ 	}
+     }
+ }
+ 
+ /* Merge the current register value ranges into the
+    register value ranges known at the beginning of
+    basic block BB.  */
+ 
+ static void
+ record_bb_value_range (bb)
+      int bb;
+ {
+   int i;
+   struct value_range *p;
+   struct reg_value_range *next;
+   struct reg_value_range *original_reg_vr;
+   struct reg_value_range *previous;
+   struct reg_value_range *rvrp;
+   int nupdates;
+ 
+   /* Ignore targets outside the CFG.  */
+   if (bb < 0 || bb >= n_basic_blocks)
+     return;
+ 
+   nupdates = 0;
+   original_reg_vr = NULL;
+ 
+   /* Temporarily apply the pending updates from VR_REG_UPDATES to
+      REG_VR_TABLE, saving the old ranges in ORIGINAL_REG_VR so they
+      can be restored at the bottom of this function.  */
+   if (vr_reg_updates.nupdates)
+     {
+       original_reg_vr = (struct reg_value_range *)
+ 			alloca(vr_reg_updates.nupdates
+ 			       * sizeof (struct reg_value_range));
+ 
+       next = vr_reg_updates.head;
+       while (next)
+ 	{
+ 	  original_reg_vr[nupdates].regno = next->regno;
+ 	  original_reg_vr[nupdates++].vr = *(reg_vr_table + next->regno);
+ 	  *(reg_vr_table + next->regno) = next->vr;
+ 	  next = next->next;
+ 	}
+     }
+ 
+   if (! bb_reached[bb])
+     {
+       /* First time BB is reached: snapshot every register whose range
+ 	 is currently known (min is a constant) into a fresh list hung
+ 	 off BB_REG_VR_TABLE[BB].  */
+       rvrp = NULL;
+       for (i = 0, p = reg_vr_table; i < max_vr_regno; i++, p++)
+ 	{
+ 	  if (! TREE_CONSTANT (&p->min))
+ 	    continue;
+ 	  next = (struct reg_value_range *)
+ 		 malloc (sizeof (struct reg_value_range));
+ 	  next->regno = i;
+ 	  next->vr = *p;
+ 	  next->next = NULL;
+ 	  if (! rvrp)
+ 	    *(bb_reg_vr_table + bb) = next;
+ 	  else
+ 	    rvrp->next = next;
+ 	  rvrp = next;
+ 	}
+       bb_reached[bb] = 1;
+     }
+   else
+     /* BB already reached via another edge: merge the current ranges
+ 	into the recorded ones, unlinking and freeing any entry whose
+ 	merged range is no longer a known constant range.  */
+     for (previous = NULL, rvrp = *(bb_reg_vr_table + bb);
+ 	 rvrp; previous = rvrp, rvrp = next)
+       {
+ 	next = rvrp->next;
+ 
+ 	merge_value_ranges (reg_vr_table + rvrp->regno, &rvrp->vr, &rvrp->vr);
+ 
+ 	if (! TREE_CONSTANT (&rvrp->vr.min))
+ 	  {
+ 	    if (! previous)
+ 	      *(bb_reg_vr_table + bb) = next;
+ 	    else
+ 	      previous->next = next;
+ 	    free (rvrp);
+ 	    /* Keep PREVIOUS pointing at the node before NEXT.  */
+ 	    rvrp = previous;
+ 	    continue;
+ 	  }
+       }
+ 
+   /* Undo the temporary updates applied above, in reverse order.  */
+   while (nupdates--)
+     *(reg_vr_table + original_reg_vr[nupdates].regno)
+       = original_reg_vr[nupdates].vr;
+ }
+ 
+ /* Propagate the register value ranges which are known
+    to the start of the basic block which is the target
+    of the conditional jump insn JUMP.  Return non-zero
+    if the jump is always taken.
+ 
+    Side effects:
+ 
+      1) The register value ranges will be updated based
+ 	on information extracted from the condition.
+ 
+      2) If the condition is constant and ALTER_JUMPS is
+ 	non-zero then the jump will be simplified.  */
+ 
+ 
+ static int
+ record_value_range_condjump (jump, alter_jumps)
+      rtx jump;
+      int alter_jumps;
+ {
+   rtx label;
+   int succ_bb;
+   rtx cond;
+   rtx earliest;
+   enum machine_mode comparison_arg_mode;
+   struct value_range op0;
+   struct value_range op1;
+   enum rtx_code code;
+   int cond_always_true;
+   int cond_always_false;
+   int i;
+   int nupdates;
+   struct reg_value_range original_reg_vr[2];
+   rtx set;
+ 
+   label = JUMP_LABEL (jump);
+   succ_bb = BLOCK_NUM (label);
+   cond = get_condition (jump, &earliest);
+ 
+   /* Only analyze integer comparisons whose operands are unchanged
+      between the comparison and the jump.  */
+   if (cond
+       && GET_MODE_CLASS (GET_MODE (XEXP (cond,0))) == MODE_INT
+       && ! modified_between_p (cond, PREV_INSN (earliest), jump))
+     {
+       bzero (&op0, sizeof(op0));
+       bzero (&op1, sizeof(op1));
+ 
+       code = GET_CODE (cond);
+       comparison_arg_mode = GET_MODE (XEXP (cond, 0));
+       compute_value_range (&op0, comparison_arg_mode, XEXP (cond, 0));
+       compute_value_range (&op1, comparison_arg_mode, XEXP (cond, 1));
+ 
+       /* Decide whether the known ranges already force the condition
+ 	 one way or the other.  */
+       cond_always_true = compare_value_range (code, op0, op1);
+       cond_always_false
+ 	= compare_value_range (reverse_condition (code), op0, op1);
+ 
+       /* Save the current ranges of any registers appearing in the
+ 	 condition so both branch directions can be processed from
+ 	 the same starting state.  */
+       nupdates = 0;
+       for (i = 0; i < 2; i++)
+ 	if (GET_CODE (XEXP (cond, i)) == REG)
+ 	  {
+ 	    original_reg_vr[nupdates].regno = REGNO (XEXP (cond, i));
+ 	    original_reg_vr[nupdates++].vr
+ 	      = *(reg_vr_table + REGNO (XEXP (cond, i)));
+ 	  }
+ 	else if (GET_CODE (XEXP (cond, i)) == SUBREG
+ 		 && GET_CODE (SUBREG_REG (XEXP (cond, i))) == REG)
+ 	  {
+ 	    original_reg_vr[nupdates].regno
+ 	      = REGNO (SUBREG_REG (XEXP (cond, i)));
+ 	    original_reg_vr[nupdates++].vr
+ 	      = *(reg_vr_table + REGNO (SUBREG_REG (XEXP (cond, i))));
+ 	  }
+ 
+       /* Taken direction: narrow the ranges using the condition and
+ 	 propagate them to the jump target block.  */
+       if (! cond_always_false)
+ 	{
+ 	  record_compare_value_range (code, cond, op0, op1);
+ 	  record_bb_value_range (succ_bb);
+ 	}
+ 
+       /* Restore the saved ranges before handling the fall-through.  */
+       for (i = 0; i < nupdates; i++)
+ 	*(reg_vr_table + original_reg_vr[i].regno) = original_reg_vr[i].vr;
+ 
+       /* Fall-through direction: narrow using the reversed condition.
+ 	 These updated ranges are left in place for the caller.  */
+       if (! cond_always_true)
+ 	record_compare_value_range (reverse_condition (code), cond, op0, op1);
+ 
+       /* If the branch outcome is known, optionally rewrite the jump
+ 	 into an unconditional jump or a no-op.  */
+       if ((cond_always_true || cond_always_false) && alter_jumps)
+ 	{
+ 	  if ((set = single_set (jump))
+ 	      && SET_DEST (set) == pc_rtx
+ 	      && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
+ 	    {
+ 	      INSN_CODE (jump) = -1;
+ 	      if (cond_always_true)
+ 		{
+ 		  SET_SRC (set) = condjump_label (jump);
+ 		  emit_barrier_after (jump);
+ 		}
+ 	      else
+ 		{
+ 		  SET_SRC (set) = pc_rtx;
+ 		  JUMP_LABEL (jump) = NULL_RTX;
+ 		  --LABEL_NUSES (label);
+ 		}
+ 
+ 	      run_jump_opt_after_vr = 1;
+ 	    }
+ 
+ 	  if (vr_file)
+ 	    fprintf (vr_file, "  insn %d %s jumps.\n", INSN_UID (jump),
+ 		     cond_always_true ? "always" : "never");
+ 	}
+ 
+       if (cond_always_true)
+ 	return 1;
+     }
+   else
+     /* Condition not analyzable; conservatively propagate the current
+        ranges to the target block.  */
+     record_bb_value_range (succ_bb);
+ 
+   return 0;
+ }
+ 
+ /* Propagate the register value ranges which are known
+    to the start of the basic block which is the target
+    of the jump insn JUMP.  Return non-zero if the jump
+    is always taken.
+ 
+    Side effects:
+ 
+      1) If the jump is a tablejump where the index is
+ 	constant and ALTER_JUMPS is non-zero then the
+ 	jump will be simplified.
+ 
+      2) If the jump is a tablejump where the index is
+ 	always less than the number of entries in the
+ 	table and ALTER_JUMPS is non-zero then the table
+ 	will be shortened.
+ 
+     Note:
+ 
+       1) Computed jumps are ignored by this routine.  They
+ 	 produce an abnormal edge in the CFG which is handled
+ 	 by value_range_prop.  */
+ 
+ static int
+ record_value_range_jump (jump, alter_jumps)
+      rtx jump;
+      int alter_jumps;
+ {
+   rtx label;
+   rtx table;
+   rtx set;
+   int succ_bb;
+ 
+   /* Tablejump: the label is followed by an ADDR_VEC/ADDR_DIFF_VEC
+      dispatch table.  */
+   if ((label = JUMP_LABEL (jump))
+       && (table = NEXT_INSN (label))
+       && GET_CODE (table) == JUMP_INSN
+       && (GET_CODE (PATTERN (table)) == ADDR_VEC
+ 	  || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
+     {
+       rtvec vec;
+       int always_in_bounds;
+       int start;
+       int end;
+       rtx earliest;
+       rtx idx;
+       struct value_range op;
+       int i;
+ 
+       /* The label vector is operand 0 of an ADDR_VEC but operand 1
+ 	 of an ADDR_DIFF_VEC.  */
+       if (GET_CODE (PATTERN (table)) == ADDR_VEC)
+ 	vec = XVEC (PATTERN (table), 0);
+       else
+ 	vec = XVEC (PATTERN (table), 1);
+ 
+       always_in_bounds = 0;
+ 
+       /* START/END bracket the table entries that can be reached.  */
+       start = 0;
+       end = GET_NUM_ELEM (vec) - 1;
+ 
+       /* Try to narrow START/END using the range of the table index,
+ 	 provided the index is unmodified up to the jump.  The byte
+ 	 offset is converted to an entry index by dividing by the
+ 	 table's element size.  */
+       if ((idx = get_jump_table_offset (jump, &earliest))
+ 	  && ! modified_between_p (idx, PREV_INSN (earliest), jump))
+ 	{
+ 	  compute_value_range (&op, VOIDmode, idx);
+ 
+ 	  if (TREE_CONSTANT (&op.min)
+ 	      && ! TREE_INT_CST_HIGH (&op.min)
+ 	      && (int)TREE_INT_CST_LOW (&op.min) >= start
+ 	      && (TREE_INT_CST_LOW (&op.min)
+ 		  / GET_MODE_SIZE (GET_MODE (PATTERN (table)))) < INT_MAX)
+ 	    start = TREE_INT_CST_LOW (&op.min)
+ 		    / GET_MODE_SIZE (GET_MODE (PATTERN (table)));
+ 
+ 	  /* NOTE(review): this guard tests TREE_CONSTANT on op.min but
+ 	     then reads op.max -- presumably both bounds of a value_range
+ 	     are constant together; confirm against compute_value_range.  */
+ 	  if (TREE_CONSTANT (&op.min)
+ 	      && ! TREE_INT_CST_HIGH (&op.max)
+ 	      && (HOST_WIDE_INT)TREE_INT_CST_LOW (&op.max) >= 0
+ 	      && (HOST_WIDE_INT)(TREE_INT_CST_LOW (&op.max)
+ 		  / GET_MODE_SIZE (GET_MODE (PATTERN (table)))) <= end)
+ 	    {
+ 	      always_in_bounds = 1;
+ 	      end = TREE_INT_CST_LOW (&op.max)
+ 		    / GET_MODE_SIZE (GET_MODE (PATTERN (table)));
+ 	    }
+ 	}
+ 
+       /* If the index may be out of bounds, the out-of-bounds label of
+ 	 the bounds check is also a possible successor.  */
+       if (! always_in_bounds
+ 	  && (set = single_set (jump))
+ 	  && SET_DEST (set) == pc_rtx
+ 	  && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE
+ 	  && GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF)
+ 	{
+ 	  rtx out_of_bounds_label;
+ 
+ 	  out_of_bounds_label = XEXP (XEXP (SET_SRC (set), 2), 0);
+ 	  succ_bb = BLOCK_NUM (out_of_bounds_label);
+ 	  record_bb_value_range (succ_bb);
+ 	}
+ 
+       /* Exactly one reachable entry: replace the tablejump by a
+ 	 simple jump and delete the table.  */
+       if (start == end && always_in_bounds && alter_jumps)
+ 	{
+ 	  rtx target;
+ 
+ 	  target = XEXP (RTVEC_ELT (vec, start), 0);
+ 	  PATTERN (jump) = gen_jump (target);
+ 	  JUMP_LABEL (jump) = target;
+ 	  LABEL_NUSES (target)++;
+ 	  INSN_CODE (jump) = -1;
+ 	  if (GET_CODE (NEXT_INSN (jump)) != BARRIER)
+ 	    emit_barrier_after (jump);
+ 
+ 	  /* Release the table's label references and empty it.  */
+ 	  for (i = 0; i < GET_NUM_ELEM (vec); i++)
+ 	    --LABEL_NUSES (XEXP (RTVEC_ELT (vec, i), 0));
+ 	  PUT_NUM_ELEM (vec, 0);
+ 
+ 	  if (GET_CODE (NEXT_INSN (table)) == BARRIER)
+ 	    flow_delete_insn (NEXT_INSN (table));
+ 
+ 	  /* Turn the table insn into a deleted note.  */
+ 	  PUT_CODE (table, NOTE);
+ 	  NOTE_LINE_NUMBER (table) = NOTE_INSN_DELETED;
+ 	  NOTE_SOURCE_FILE (table) = 0;
+ 
+ 	  run_jump_opt_after_vr = 1;
+ 
+ 	  if (vr_file)
+ 	    fprintf (vr_file, "  insn %d only one jump table entry used.\n",
+ 		     INSN_UID (jump));
+ 
+ 	  succ_bb = BLOCK_NUM (target);
+ 	  record_bb_value_range (succ_bb);
+ 	  return 1;
+ 	}
+       /* Upper entries unreachable: shorten the table.  */
+       else if (end < (GET_NUM_ELEM (vec) - 1) && alter_jumps)
+ 	{
+ 	  for (i = end + 1; i < GET_NUM_ELEM (vec); i++)
+ 	    --LABEL_NUSES (XEXP (RTVEC_ELT (vec, i), 0));
+ 	  PUT_NUM_ELEM (vec, end + 1);
+ 
+ 	  run_jump_opt_after_vr = 1;
+ 
+ 	  if (vr_file)
+ 	    fprintf (vr_file,
+ 		     "  insn %d only first %d jump table entries used.\n",
+ 		     INSN_UID (jump), end + 1);
+ 	}
+ 
+       /* Propagate the current ranges to every reachable table target.  */
+       for (i = start; i <= end; i++)
+ 	{
+ 	  label = XEXP (RTVEC_ELT (vec, i), 0);
+ 	  succ_bb = BLOCK_NUM (label);
+ 	  record_bb_value_range (succ_bb);
+ 	}
+ 
+ #ifdef CASE_DROPS_THROUGH
+       return 0;
+ #endif
+     }
+   else if (computed_jump_p (jump))
+     ;
+   else if (returnjump_p (jump))
+     ;
+   else if (simplejump_p (jump))
+     {
+       label = JUMP_LABEL (jump);
+ 
+       /* Avoid crashing when jumping to a label which is not in
+ 	 a basic block.  I.e.  gcc.c-torture/noncompile/930622-1.c.  */
+       if (! BLOCK_FOR_INSN (label))
+ 	return 0;
+ 
+       succ_bb = BLOCK_NUM (label);
+       record_bb_value_range (succ_bb);
+     }
+   else if (condjump_p (jump))
+     return record_value_range_condjump (jump, alter_jumps);
+   else if ((set = single_set (jump))
+ 	   && SET_DEST (set) == pc_rtx
+ 	   && SET_SRC (set) == pc_rtx)
+     return 0;
+   else
+     abort ();
+ 
+   return 1;
+ }
+ 
+ /* Called from compute_value_range_bb_order via note_stores to
+    handle one SET or CLOBBER in an insn.  DATA is really the
+    basic block in which the SET (or CLOBBER) is occurring.  */
+ 
+ static void
+ record_regs_set_in_block (dest, setter, data)
+      rtx dest;
+      rtx setter ATTRIBUTE_UNUSED;
+      void *data;
+ {
+   int bb = (int)data;
+   rtx reg = dest;
+ 
+   /* Look through a SUBREG to the underlying register.  */
+   if (GET_CODE (reg) == SUBREG)
+     reg = SUBREG_REG (reg);
+ 
+   /* Only register destinations are of interest.  */
+   if (GET_CODE (reg) != REG)
+     return;
+ 
+   SET_BIT (bb_reg_set[bb], REGNO (reg));
+ }
+ 
+ /* Record which registers are modified within the
+    loop which has basic block BB as the loop header.
+ 
+    PRED is the current basic block being examined.
+ 
+    VISITED is an array used to keep track of which
+    basic blocks have been examined.  */
+ 
+ static void
+ record_regs_set_by_loop (bb, pred, visited)
+      int bb;
+      int pred;
+      int *visited;
+ {
+   int i;
+   edge e;
+ 
+   /* Valid block indices are 0 .. n_basic_blocks - 1.  The previous
+      test used `>', which let PRED == n_basic_blocks index one past
+      the end of VISITED, BB_REG_SET and the basic block array.  */
+   if (pred < 0 || pred >= n_basic_blocks)
+     return;
+ 
+   visited[pred] = 1;
+ 
+   /* Walk backwards through the loop body, stopping once the loop
+      header BB itself is reached.  */
+   if (pred != bb)
+     for (e = BASIC_BLOCK (pred)->pred; e; e = e->pred_next)
+       if (! visited[e->src->index])
+ 	record_regs_set_by_loop (bb, e->src->index, visited);
+ 
+   /* Any register set inside PRED has an unknown range at the loop
+      header BB.  */
+   for (i = 0; i < max_vr_regno; i++)
+     if (TEST_BIT (bb_reg_set[pred], i))
+       SET_BIT (bb_reg_vr_unknown[bb], i);
+ }
+ 
+ /* Compute the optimal order to process the basic blocks.
+    The optimal order is when the predecessors for a basic
+    block are all processed before that basic block.
+ 
+    ORDER and ABNORMAL are destination arrays for recording
+    the order and whether a basic block is reached by an
+    abnormal edge.
+ 
+    DOM is a sbitmap containing dominator information.
+ 
+    Side effects:
+ 
+      1) Registers set in a basic block are recorded.
+ 
+      2) Registers set in a loop are recorded as being unknown
+ 	at the start of the basic block which is the loop header.
+ 
+      3) All registers are recorded as being unknown at the
+ 	start of a basic block when it isn't possible for
+ 	a predecessor (that isn't dominated by the basic
+ 	block) to be processed first.  */
+ 
+ static void
+ compute_value_range_bb_order (order, abnormal, dom)
+      int *order;
+      int *abnormal;
+      sbitmap *dom;
+ {
+   int *loop_visited;
+   int *visited;
+   int abnormal_edge;
+   int bb;
+   int i;
+   int j;
+   int k;
+   int n_succ;
+   int prev_i;
+   rtx insn;
+ 
+   /* Record, per basic block, which registers it sets.  */
+   sbitmap_vector_zero (bb_reg_set, n_basic_blocks);
+ 
+   for (bb = 0; bb < n_basic_blocks; bb++)
+     for (insn = BLOCK_HEAD (bb);
+ 	 insn && insn != NEXT_INSN (BLOCK_END (bb));
+ 	 insn = NEXT_INSN (insn))
+       if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
+ 	note_stores (PATTERN (insn), record_regs_set_in_block, (void *)bb);
+ 
+   visited = (int *)alloca(n_basic_blocks * sizeof(int));
+   bzero (visited, n_basic_blocks * sizeof(int));
+ 
+   sbitmap_vector_zero (bb_reg_vr_unknown, n_basic_blocks);
+ 
+   loop_visited = (int *)alloca(n_basic_blocks * sizeof(int));
+ 
+   i = 0;
+ 
+   /* The entry block is always processed first.  */
+   order[i++] = 0;
+   visited[0] = 1;
+ 
+   /* Block 0 is skipped by both loops below (they start at j = 1), so
+      its ABNORMAL entry was previously left uninitialized and read by
+      the caller.  Compute it here from block 0's incoming edges.  */
+   {
+     edge e;
+ 
+     abnormal_edge = 0;
+     for (e = BASIC_BLOCK (0)->pred; e; e = e->pred_next)
+       abnormal_edge |= e->flags & EDGE_ABNORMAL;
+     abnormal[0] = abnormal_edge;
+   }
+ 
+   while (i < n_basic_blocks)
+     {
+       prev_i = i;
+ 
+       /* Emit every block whose (non-dominated) predecessors have all
+ 	 been emitted.  Loop-back edges from dominated predecessors are
+ 	 handled by marking the registers they set as unknown.  */
+       for (j = 1; j < n_basic_blocks; j++)
+ 	{
+ 	  edge e;
+ 
+ 	  if (visited[j])
+ 	    continue;
+ 
+ 	  for (e = BASIC_BLOCK (j)->pred; e; e = e->pred_next)
+ 	    {
+ 	      if (! visited[e->src->index]
+ 		  && ! TEST_BIT (dom[e->src->index], j))
+ 		break;
+ 	    }
+ 
+ 	  if (! e)
+ 	    {
+ 	      abnormal_edge = 0;
+ 	      bzero (loop_visited, n_basic_blocks * sizeof(int));
+ 	      for (e = BASIC_BLOCK (j)->pred; e; e = e->pred_next)
+ 		{
+ 		  abnormal_edge |= e->flags & EDGE_ABNORMAL;
+ 		  if (TEST_BIT (dom[e->src->index], j))
+ 		    record_regs_set_by_loop (j, e->src->index, loop_visited);
+ 		}
+ 	      abnormal[j] = abnormal_edge;
+ 	      order[i++] = j;
+ 	      visited[j] = 1;
+ 	    }
+ 	}
+ 
+       /* No progress: pick a reachable unvisited block with the fewest
+ 	 successors, emit it, and give up on its incoming ranges by
+ 	 marking every register unknown at its start.  */
+       if (i == prev_i)
+ 	{
+ 	  edge e;
+ 
+ 	  bb = 0;
+ 	  n_succ = 0;
+ 
+ 	  for (j = 1; j < n_basic_blocks; j++)
+ 	    {
+ 	      if (visited[j])
+ 		continue;
+ 
+ 	      /* Require at least one already-visited predecessor.  */
+ 	      for (e = BASIC_BLOCK (j)->pred; e; e = e->pred_next)
+ 		if (visited[e->src->index])
+ 		  break;
+ 
+ 	      if (! e)
+ 		continue;
+ 
+ 	      /* Count J's successors.  */
+ 	      for (e = BASIC_BLOCK (j)->succ, k = 0; e; e = e->succ_next, k++)
+ 		;
+ 
+ 	      if (k < n_succ || ! bb)
+ 		{
+ 		  bb = j;
+ 		  n_succ = k;
+ 		}
+ 	    }
+ 
+ 	  if (! bb)
+ 	    abort ();
+ 
+ 	  abnormal_edge = 0;
+ 	  for (e = BASIC_BLOCK (bb)->pred; e; e = e->pred_next)
+ 	    abnormal_edge |= e->flags & EDGE_ABNORMAL;
+ 
+ 	  abnormal[bb] = abnormal_edge;
+ 	  order[i++] = bb;
+ 	  visited[bb] = 1;
+ 	  sbitmap_ones (bb_reg_vr_unknown[bb]);
+ 	}
+     }
+ }
+ 
+ /* Entry point for value range propagation.
+ 
+    F is the first instruction in the function.
+ 
+    If ALTER_JUMPS is non-zero then jumps will be
+    simplified.
+ 
+    FILE is the file to use for debugging information
+    or zero if such output is not desired.
+ 
+    Returns 1 if jump_optimize should be redone due
+    to simplifications in jump instructions.  */
+ 
+ int
+ value_range_prop (f, alter_jumps, file)
+      rtx f;
+      int alter_jumps;
+      FILE *file;
+ {
+   int *abnormal;
+   int *ignore;
+   int *order;
+   int *visited;
+   int bb;
+   int changed;
+   int i;
+   int j;
+   sbitmap *dom;
+   struct reg_value_range **previous_reg_vr;
+   struct reg_value_range **reg_vr;
+   struct reg_value_range *next;
+   struct reg_value_range *prvrp;
+   struct reg_value_range *rvrp;
+ 
+   vr_file = file;
+ 
+   max_vr_regno = max_reg_num ();
+ 
+   find_basic_blocks (f, max_vr_regno, file);
+   cleanup_cfg (f);
+ 
+   abnormal = (int *)alloca(n_basic_blocks * sizeof(int));
+   ignore = (int *)alloca(n_basic_blocks * sizeof(int));
+   order = (int *)alloca(n_basic_blocks * sizeof(int));
+   visited = (int *)alloca(n_basic_blocks * sizeof(int));
+ 
+   alloc_value_range_mem ();
+ 
+   dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
+ 
+   compute_bb_for_insn (get_max_uid ());
+   compute_flow_dominators (dom, NULL);
+   compute_value_range_bb_order (order, abnormal, dom);
+ 
+   bzero (ignore, n_basic_blocks * sizeof(int));
+ 
+   previous_reg_vr
+     = (struct reg_value_range **)alloca (max_vr_regno
+ 					 * sizeof(struct reg_value_range *));
+   reg_vr
+     = (struct reg_value_range **)alloca (max_vr_regno
+ 					 * sizeof(struct reg_value_range *));
+ 
+   if (vr_file)
+     fprintf (vr_file, "Value Range data\n\n");
+ 
+   run_jump_opt_after_vr = 0;
+ 
+   for ( ; ; )
+     {
+       bzero (bb_reached, n_basic_blocks * sizeof (int));
+       bzero (visited, n_basic_blocks * sizeof (int));
+ 
+       bb_reached[0] = 1;
+ 
+       changed = 0;
+ 
+       for (i = 0; i < n_basic_blocks; i++)
+ 	{
+ 	  edge e;
+ 	  rtx insn;
+ 	  int always_jumps;
+ 
+ 	  bb = order[i];
+ 
+ 	  if (ignore[bb])
+ 	    continue;
+ 
+ 	  if (! bb_reached[bb] && ! abnormal[bb])
+ 	    {
+ 	      for (e = BASIC_BLOCK (bb)->pred; e; e = e->pred_next)
+ 		{
+ 		  if (! visited[e->src->index]
+ 		      && ! TEST_BIT (dom[e->src->index], bb)
+ 		      && ! ignore[e->src->index])
+ 		    break;
+ 		}
+ 
+ 	      if (! e)
+ 		{
+ 		  changed = 1;
+ 		  ignore[bb] = 1;
+ 		  continue;
+ 		}
+ 	    }
+ 
+ 	  visited[bb] = 1;
+ 
+ 	  bzero (reg_vr_table, max_vr_regno * sizeof(struct value_range));
+ 	  if (! abnormal[bb])
+ 	    {
+ 	      for (rvrp = *(bb_reg_vr_table + bb); rvrp; rvrp = rvrp->next)
+ 		if (! TEST_BIT (bb_reg_vr_unknown[bb], rvrp->regno))
+ 		  *(reg_vr_table + rvrp->regno) = rvrp->vr;
+ 
+ 	      for (prvrp = *(previous_bb_reg_vr_table + bb);
+ 		   prvrp; prvrp = prvrp->next)
+ 		if (TEST_BIT (bb_reg_vr_unknown[bb], prvrp->regno))
+ 		  *(reg_vr_table + prvrp->regno) = prvrp->vr;
+ 	    }
+ 
+ 	  always_jumps = 0;
+ 
+ 	  for (insn = BLOCK_HEAD (bb);
+ 	       insn && insn != NEXT_INSN (BLOCK_END (bb)) && ! always_jumps;
+ 	       insn = NEXT_INSN (insn))
+ 	    {
+ 	      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
+ 		continue;
+ 
+ 	      vr_insn = insn;
+ 
+ 	      note_stores (PATTERN (insn), record_value_range_store, NULL_PTR);
+ 
+ 	      if (GET_CODE (insn) == JUMP_INSN)
+ 		always_jumps = record_value_range_jump (insn, alter_jumps);
+ 
+ 	      next = vr_reg_updates.head;
+ 	      while (next)
+ 		{
+ 		  *(reg_vr_table + next->regno) = next->vr;
+ 		  next = vr_reg_updates.head->next;
+ 		  free (vr_reg_updates.head);
+ 		  vr_reg_updates.head = next;
+ 		}
+ 
+ 	      vr_reg_updates.tail = NULL;
+ 	      vr_reg_updates.nupdates = 0;
+ 	    }
+ 
+ 	  if (! always_jumps)
+ 	    for (e = BASIC_BLOCK (bb)->succ; e; e = e->succ_next)
+ 	      if (e->flags & EDGE_FALLTHRU)
+ 		{
+ 		  record_bb_value_range (e->dest->index);
+ 		  break;
+ 		}
+ 	}
+ 
+       for (i = 0; i < n_basic_blocks; i++)
+ 	{
+ 	  if (! bb_reached[i] && ! abnormal[i])
+ 	    {
+ 	      changed |= ! ignore[i];
+ 	      ignore[i] = 1;
+ 	      continue;
+ 	    }
+ 
+ 	  bzero (previous_reg_vr, max_vr_regno
+ 				  * sizeof (struct reg_value_range *));
+ 	  for (prvrp = *(previous_bb_reg_vr_table + i);
+ 	       prvrp; prvrp = prvrp->next)
+ 	    *(previous_reg_vr + prvrp->regno) = prvrp;
+ 
+ 	  bzero (reg_vr, max_vr_regno * sizeof (struct reg_value_range *));
+ 	  for (rvrp = *(bb_reg_vr_table + i); rvrp; rvrp = rvrp->next)
+ 	    *(reg_vr + rvrp->regno) = rvrp;
+ 
+ 	  for (j = 0; j < max_vr_regno; j++)
+ 	    if (TEST_BIT (bb_reg_vr_unknown[i], j) && *(previous_reg_vr + j))
+ 	      (*(reg_vr + j))->vr = (*(previous_reg_vr + j))->vr;
+ 
+ 	  for (j = 0; ! changed && j < max_vr_regno; j++)
+ 	    {
+ 	      rvrp = *(reg_vr + j);
+ 	      prvrp = *(previous_reg_vr + j);
+ 
+ 	      changed |= (rvrp && ! prvrp) || (! rvrp && prvrp)
+ 			 || (rvrp && prvrp
+ 			     && (! tree_int_cst_equal (&rvrp->vr.min,
+ 						       &prvrp->vr.min)
+ 				 || ! tree_int_cst_equal (&rvrp->vr.max,
+ 							  &prvrp->vr.max)));
+ 	    }
+ 	}
+ 
+       if (! changed)
+ 	break;
+ 
+       for (i = 0; i < n_basic_blocks; i++)
+ 	{
+ 	  for (prvrp = *(previous_bb_reg_vr_table + i); prvrp; prvrp = next)
+ 	    {
+ 	      next = prvrp->next;
+ 	      free (prvrp);
+ 	    }
+ 	  *(previous_bb_reg_vr_table + i) = *(bb_reg_vr_table + i);
+ 	  *(bb_reg_vr_table + i) = NULL;
+ 	}
+     }
+ 
+   if (vr_file)
+     {
+       fprintf (vr_file, "\n");
+       j = fprintf (vr_file, "BB propagation order");
+       for (i = 0; i < n_basic_blocks; i++)
+ 	{
+ 	  j += fprintf (vr_file, " %d", order[i]);
+ 	  if (j > 70)
+ 	    {
+ 	      fprintf (vr_file, "\n");
+ 	      j = 0;
+ 	    }
+ 	}
+       if (j)
+ 	fprintf (vr_file, "\n");
+ 
+       j = fprintf (vr_file, "BB never reached");
+       for (bb = 0; bb < n_basic_blocks; bb++)
+ 	if (ignore[bb])
+ 	  {
+ 	    j += fprintf (vr_file, " %d", bb);
+ 	    if (j > 70)
+ 	      {
+ 		fprintf (vr_file, "\n");
+ 		j = 0;
+ 	      }
+ 	  }
+       if (j)
+ 	fprintf (vr_file, "\n");
+ 
+       fprintf (vr_file, "\n");
+       for (bb = 0; bb < n_basic_blocks; bb++)
+ 	{
+ 	  if (ignore[bb])
+ 	    continue;
+ 
+ 	  fprintf (vr_file, "BB %d\n", bb);
+ 	  for (rvrp = *(bb_reg_vr_table + bb); rvrp; rvrp = rvrp->next)
+ 	    {
+ 	      TREE_CONSTANT_OVERFLOW (&rvrp->vr.min)
+ 		= TREE_CONSTANT_OVERFLOW (&rvrp->vr.max) = 0;
+ 
+ 	      fprintf (vr_file, "    reg %d\n", rvrp->regno);
+ 	      fprintf (vr_file, "        ");
+ 	      print_node_brief (vr_file, "min", &rvrp->vr.min, 0);
+ 	      fprintf (vr_file, "        ");
+ 	      print_node_brief (vr_file, "max", &rvrp->vr.max, 0);
+ 	      fprintf (vr_file, "\n");
+ 	    }
+ 	}
+       fprintf (vr_file, "\n");
+     }
+ 
+   free (dom);
+ 
+   free_value_range_mem ();
+ 
+   return run_jump_opt_after_vr;
+ }
-------------------------------------------------------------------------
|   Feith Systems  |   Voice: 1-215-646-8000  |  Email: john@feith.com  |
|    John Wehle    |     Fax: 1-215-540-5495  |                         |
-------------------------------------------------------------------------


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]