
[PATCH] Do not refer directly to gen_lowpart_xxx


This patch is a prerequisite for a series of patches that move some algebraic simplifications from combine and CSE to simplify-rtx.c. Most of the changes are mechanical search-and-replace substitutions.

Bootstrapped on i686-pc-linux-gnu, all languages except Ada.

Paolo
2004-01-28  Paolo Bonzini  <bonzini@gnu.org>

	* combine.c (combine_instructions): Set gen_lowpart to
	gen_lowpart_for_combine while in this function.
	(gen_lowpart_for_combine): Change all uses to go through
	the gen_lowpart function pointer.
	* cse.c (cse_main): Set gen_lowpart to gen_lowpart_if_possible
	while in this function.
	(gen_lowpart_if_possible): Change all uses to go through
	the gen_lowpart function pointer.
	* emit-rtl.c (gen_lowpart_general): New name of gen_lowpart.
	(gen_lowpart): Define as a pointer to function, initialized
	to gen_lowpart_general.
	* rtl.h (gen_lowpart_general): Declare.
	(gen_lowpart): Declare as a pointer to function.
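
For reference, the mechanism behind the mechanical changes is a single overridable hook: emit-rtl.c now defines gen_lowpart as a function pointer initialized to gen_lowpart_general, and combine_instructions and cse_main point it at gen_lowpart_for_combine and gen_lowpart_if_possible respectively on entry, restoring the general version before they return. A minimal standalone sketch of that pattern (illustrative names only, not the actual GCC declarations) is:

  /* Minimal sketch of a function-pointer hook.  The names lowpart_fn,
     lowpart_general, lowpart_for_my_pass and my_pass are made up; they
     stand in for gen_lowpart and its variants.  */

  #include <stdio.h>

  typedef int (*lowpart_fn) (int mode, int x);

  /* Plays the role of gen_lowpart_general.  */
  static int
  lowpart_general (int mode, int x)
  {
    printf ("general lowpart, mode %d\n", mode);
    return x;
  }

  /* Plays the role of a pass-specific variant such as
     gen_lowpart_for_combine.  */
  static int
  lowpart_for_my_pass (int mode, int x)
  {
    printf ("pass-specific lowpart, mode %d\n", mode);
    return x;
  }

  /* The hook itself, analogous to the gen_lowpart pointer
     added to emit-rtl.c.  */
  static lowpart_fn lowpart = lowpart_general;

  static void
  my_pass (void)
  {
    lowpart = lowpart_for_my_pass;  /* redirect on entry */
    lowpart (0, 42);                /* every call in the pass now
                                       uses the override */
    lowpart = lowpart_general;      /* restore on exit */
  }

  int
  main (void)
  {
    lowpart (0, 1);   /* general version */
    my_pass ();       /* temporarily the pass-specific version */
    lowpart (0, 2);   /* general version again */
    return 0;
  }

The combine.c and cse.c hunks below apply exactly this entry/exit discipline, so the pass-local callers no longer need to name a specific implementation.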

Index: combine.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/combine.c,v
retrieving revision 1.404
diff -u -r1.404 combine.c
--- combine.c	25 Jan 2004 11:11:47 -0000	1.404
+++ combine.c	28 Jan 2004 13:49:18 -0000
@@ -95,10 +95,6 @@
 #define SHIFT_COUNT_TRUNCATED 0
 #endif
 
-/* It is not safe to use ordinary gen_lowpart in combine.
-   Use gen_lowpart_for_combine instead.  See comments there.  */
-#define gen_lowpart dont_use_gen_lowpart_you_dummy
-
 /* Number of attempts to combine instructions in this function.  */
 
 static int combine_attempts;
@@ -525,6 +521,10 @@
 
   combine_max_regno = nregs;
 
+  /* It is not safe to use ordinary gen_lowpart in combine.
+     See comments in gen_lowpart_for_combine.  */
+  gen_lowpart = gen_lowpart_for_combine;
+
   reg_nonzero_bits = xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT));
   reg_sign_bit_copies = xcalloc (nregs, sizeof (unsigned char));
 
@@ -774,6 +774,7 @@
   total_successes += combine_successes;
 
   nonzero_sign_valid = 0;
+  gen_lowpart = gen_lowpart_general;
 
   /* Make recognizer allow volatile MEMs again.  */
   init_recog ();
@@ -2321,7 +2322,7 @@
       ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
       newpat = XVECEXP (newpat, 0, 1);
       SUBST (SET_SRC (newpat),
-	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
+	     gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
       i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
 
       if (i2_code_number >= 0)
@@ -3131,7 +3132,7 @@
 	      SUBST (SET_SRC (x),
 		     gen_rtx_AND (mode,
 				  gen_rtx_LSHIFTRT
-				  (mode, gen_lowpart_for_combine (mode, inner),
+				  (mode, gen_lowpart (mode, inner),
 				   GEN_INT (pos)),
 				  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
 
@@ -3145,7 +3146,7 @@
 		     gen_rtx_fmt_ee
 		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
 		      gen_rtx_ASHIFT (mode,
-				      gen_lowpart_for_combine (mode, inner),
+				      gen_lowpart (mode, inner),
 				      GEN_INT (GET_MODE_BITSIZE (mode)
 					       - len - pos)),
 		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));
@@ -3814,15 +3815,15 @@
       if (op0_mode == VOIDmode)
 	op0_mode = GET_MODE (SUBREG_REG (x));
 
-      /* simplify_subreg can't use gen_lowpart_for_combine.  */
+      /* See if this can be moved to simplify_subreg.  */
       if (CONSTANT_P (SUBREG_REG (x))
 	  && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
-	     /* Don't call gen_lowpart_for_combine if the inner mode
+	     /* Don't call gen_lowpart if the inner mode
 		is VOIDmode and we cannot simplify it, as SUBREG without
 		inner mode is invalid.  */
 	  && (GET_MODE (SUBREG_REG (x)) != VOIDmode
 	      || gen_lowpart_common (mode, SUBREG_REG (x))))
-	return gen_lowpart_for_combine (mode, SUBREG_REG (x));
+	return gen_lowpart (mode, SUBREG_REG (x));
 
       if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
         break;
@@ -3861,7 +3862,7 @@
 			      simplify_gen_unary (NOT, inner_mode, const1_rtx,
 						  inner_mode),
 			      XEXP (SUBREG_REG (XEXP (x, 0)), 1));
-	  return gen_lowpart_for_combine (mode, x);
+	  return gen_lowpart (mode, x);
 	}
 
       /* Apply De Morgan's laws to reduce number of patterns for machines
@@ -3988,7 +3989,7 @@
 	     >= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
 	  && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
 		&& GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
-	return gen_lowpart_for_combine (mode, XEXP (x, 0));
+	return gen_lowpart (mode, XEXP (x, 0));
 
       /* A truncate of a comparison can be replaced with a subreg if
          STORE_FLAG_VALUE permits.  This is like the previous test,
@@ -3997,7 +3998,7 @@
       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
 	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
 	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
-	return gen_lowpart_for_combine (mode, XEXP (x, 0));
+	return gen_lowpart (mode, XEXP (x, 0));
 
       /* Similarly, a truncate of a register whose value is a
          comparison can be replaced with a subreg if STORE_FLAG_VALUE
@@ -4006,7 +4007,7 @@
 	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
 	  && (temp = get_last_value (XEXP (x, 0)))
 	  && GET_RTX_CLASS (GET_CODE (temp)) == '<')
-	return gen_lowpart_for_combine (mode, XEXP (x, 0));
+	return gen_lowpart (mode, XEXP (x, 0));
 
       break;
 
@@ -4354,8 +4355,8 @@
 	      && op1 == const0_rtx
 	      && mode == GET_MODE (op0)
 	      && nonzero_bits (op0, mode) == 1)
-	    return gen_lowpart_for_combine (mode,
-					    expand_compound_operation (op0));
+	    return gen_lowpart (mode,
+				expand_compound_operation (op0));
 
 	  else if (STORE_FLAG_VALUE == 1
 		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
@@ -4366,7 +4367,7 @@
 	    {
 	      op0 = expand_compound_operation (op0);
 	      return simplify_gen_unary (NEG, mode,
-					 gen_lowpart_for_combine (mode, op0),
+					 gen_lowpart (mode, op0),
 					 mode);
 	    }
 
@@ -4378,7 +4379,7 @@
 	    {
 	      op0 = expand_compound_operation (op0);
 	      return gen_binary (XOR, mode,
-				 gen_lowpart_for_combine (mode, op0),
+				 gen_lowpart (mode, op0),
 				 const1_rtx);
 	    }
 
@@ -4390,7 +4391,7 @@
 		       == GET_MODE_BITSIZE (mode)))
 	    {
 	      op0 = expand_compound_operation (op0);
-	      return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
+	      return plus_constant (gen_lowpart (mode, op0), 1);
 	    }
 
 	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
@@ -4400,8 +4401,8 @@
 	      && op1 == const0_rtx
 	      && (num_sign_bit_copies (op0, mode)
 		  == GET_MODE_BITSIZE (mode)))
-	    return gen_lowpart_for_combine (mode,
-					    expand_compound_operation (op0));
+	    return gen_lowpart (mode,
+				expand_compound_operation (op0));
 
 	  else if (STORE_FLAG_VALUE == -1
 		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
@@ -4411,7 +4412,7 @@
 	    {
 	      op0 = expand_compound_operation (op0);
 	      return simplify_gen_unary (NEG, mode,
-					 gen_lowpart_for_combine (mode, op0),
+					 gen_lowpart (mode, op0),
 					 mode);
 	    }
 
@@ -4424,7 +4425,7 @@
 	    {
 	      op0 = expand_compound_operation (op0);
 	      return simplify_gen_unary (NOT, mode,
-					 gen_lowpart_for_combine (mode, op0),
+					 gen_lowpart (mode, op0),
 					 mode);
 	    }
 
@@ -4436,7 +4437,7 @@
 		   && nonzero_bits (op0, mode) == 1)
 	    {
 	      op0 = expand_compound_operation (op0);
-	      return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
+	      return plus_constant (gen_lowpart (mode, op0), -1);
 	    }
 
 	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
@@ -4901,7 +4902,7 @@
 	  temp = gen_binary (MULT, m, temp,
 			     gen_binary (MULT, m, c1, const_true_rtx));
 	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
-	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
+	  temp = gen_binary (op, m, gen_lowpart (m, z), temp);
 
 	  if (extend_op != NIL)
 	    temp = simplify_gen_unary (extend_op, mode, temp, m);
@@ -4924,7 +4925,7 @@
 	      && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
     return
       simplify_shift_const (NULL_RTX, ASHIFT, mode,
-			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
+			    gen_lowpart (mode, XEXP (cond, 0)), i);
 
   /* (IF_THEN_ELSE (NE REG 0) (0) (8)) is REG for nonzero_bits (REG) == 8.  */
   if (true_code == NE && XEXP (cond, 1) == const0_rtx
@@ -5155,7 +5156,7 @@
 	      && GET_CODE (SUBREG_REG (dest)) == REG)))
     {
       SUBST (SET_DEST (x),
-	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
+	     gen_lowpart (GET_MODE (SUBREG_REG (src)),
 				      dest));
       SUBST (SET_SRC (x), SUBREG_REG (src));
 
@@ -5855,7 +5856,7 @@
 			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
 	{
 	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
-			   gen_lowpart_for_combine
+			   gen_lowpart
 			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
 			    SET_SRC (x)));
 	  continue;
@@ -5883,7 +5884,7 @@
 	    break;
 
 	  compute_mode = imode;
-	  inner = gen_lowpart_for_combine (imode, inner);
+	  inner = gen_lowpart (imode, inner);
 	}
 
       /* Compute a mask of LEN bits, if we can do this on the host machine.  */
@@ -5907,7 +5908,7 @@
 				 inner),
 		     gen_binary (ASHIFT, compute_mode,
 				 gen_binary (AND, compute_mode,
-					     gen_lowpart_for_combine
+					     gen_lowpart
 					     (compute_mode, SET_SRC (x)),
 					     mask),
 				 pos)));
@@ -6055,7 +6056,7 @@
 	{
 	  if (tmode != inner_mode)
 	    {
-	      /* We can't call gen_lowpart_for_combine in a DEST since we
+	      /* We can't call gen_lowpart in a DEST since we
 		 always want a SUBREG (see below) and it would sometimes
 		 return a new hard register.  */
 	      if (pos || in_dest)
@@ -6082,7 +6083,7 @@
 		  new = gen_rtx_SUBREG (tmode, inner, final_word);
 		}
 	      else
-		new = gen_lowpart_for_combine (tmode, inner);
+		new = gen_lowpart (tmode, inner);
 	    }
 	  else
 	    new = inner;
@@ -6312,7 +6313,7 @@
     }
   else if (pos_rtx != 0
 	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
-    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
+    pos_rtx = gen_lowpart (pos_mode, pos_rtx);
 
   /* Make POS_RTX unless we already have it and it is correct.  If we don't
      have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
@@ -6327,7 +6328,7 @@
   new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
 			 extraction_mode, inner, GEN_INT (len), pos_rtx);
   if (! in_dest)
-    new = gen_lowpart_for_combine (mode, new);
+    new = gen_lowpart (mode, new);
 
   return new;
 }
@@ -6630,7 +6631,7 @@
 	      tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
 	    }
 	  else
-	    tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
+	    tem = gen_lowpart (mode, XEXP (tem, 0));
 	  return tem;
 	}
       break;
@@ -6641,7 +6642,7 @@
 
   if (new)
     {
-      x = gen_lowpart_for_combine (mode, new);
+      x = gen_lowpart (mode, new);
       code = GET_CODE (x);
     }
 
@@ -6716,7 +6717,7 @@
      expression is VOIDmode.
 
      Also do nothing if X is a CLOBBER; this can happen if X was
-     the return value from a call to gen_lowpart_for_combine.  */
+     the return value from a call to gen_lowpart.  */
   if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
     return x;
 
@@ -6770,7 +6771,7 @@
      get X in the proper mode.  */
   if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
       && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
-    return gen_lowpart_for_combine (mode, x);
+    return gen_lowpart (mode, x);
 
   /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
      MASK are already known to be zero in X, we need not do anything.  */
@@ -6963,12 +6964,12 @@
       /* For most binary operations, just propagate into the operation and
 	 change the mode if we have an operation of that mode.  */
 
-      op0 = gen_lowpart_for_combine (op_mode,
-				     force_to_mode (XEXP (x, 0), mode, mask,
-						    reg, next_select));
-      op1 = gen_lowpart_for_combine (op_mode,
-				     force_to_mode (XEXP (x, 1), mode, mask,
-						    reg, next_select));
+      op0 = gen_lowpart (op_mode,
+			 force_to_mode (XEXP (x, 0), mode, mask,
+					reg, next_select));
+      op1 = gen_lowpart (op_mode,
+			 force_to_mode (XEXP (x, 1), mode, mask,
+					reg, next_select));
 
       if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
 	x = gen_binary (code, op_mode, op0, op1);
@@ -7000,9 +7001,9 @@
       else
 	mask = fuller_mask;
 
-      op0 = gen_lowpart_for_combine (op_mode,
-				     force_to_mode (XEXP (x, 0), op_mode,
-						    mask, reg, next_select));
+      op0 = gen_lowpart (op_mode,
+			 force_to_mode (XEXP (x, 0), op_mode,
+					mask, reg, next_select));
 
       if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
 	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
@@ -7198,9 +7199,9 @@
       mask = fuller_mask;
 
     unop:
-      op0 = gen_lowpart_for_combine (op_mode,
-				     force_to_mode (XEXP (x, 0), mode, mask,
-						    reg, next_select));
+      op0 = gen_lowpart (op_mode,
+			 force_to_mode (XEXP (x, 0), mode, mask,
+					reg, next_select));
       if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
 	x = simplify_gen_unary (code, op_mode, op0, op_mode);
       break;
@@ -7222,11 +7223,11 @@
 	 written in a narrower mode.  We play it safe and do not do so.  */
 
       SUBST (XEXP (x, 1),
-	     gen_lowpart_for_combine (GET_MODE (x),
+	     gen_lowpart (GET_MODE (x),
 				      force_to_mode (XEXP (x, 1), mode,
 						     mask, reg, next_select)));
       SUBST (XEXP (x, 2),
-	     gen_lowpart_for_combine (GET_MODE (x),
+	     gen_lowpart (GET_MODE (x),
 				      force_to_mode (XEXP (x, 2), mode,
 						     mask, reg, next_select)));
       break;
@@ -7236,7 +7237,7 @@
     }
 
   /* Ensure we return a value of the proper mode.  */
-  return gen_lowpart_for_combine (mode, x);
+  return gen_lowpart (mode, x);
 }
 
 /* Return nonzero if X is an expression that has one of two values depending on
@@ -7610,13 +7611,13 @@
   if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
       && GET_CODE (SUBREG_REG (y)) == MEM
       && rtx_equal_p (SUBREG_REG (y),
-		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
+		      gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
     return 1;
 
   if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
       && GET_CODE (SUBREG_REG (x)) == MEM
       && rtx_equal_p (SUBREG_REG (x),
-		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
+		      gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
     return 1;
 
   /* We used to see if get_last_value of X and Y were the same but that's
@@ -7839,7 +7840,7 @@
 
       tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
 			SUBREG_REG (lhs), SUBREG_REG (rhs));
-      return gen_lowpart_for_combine (GET_MODE (x), tem);
+      return gen_lowpart (GET_MODE (x), tem);
 
     default:
       return x;
@@ -7942,7 +7943,7 @@
 
   if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
     return
-      gen_lowpart_for_combine
+      gen_lowpart
 	(mode,
 	 apply_distributive_law
 	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
@@ -7975,7 +7976,7 @@
       && SUBREG_REG (XEXP (x, 0)) == varop)
     varop = XEXP (x, 0);
   else
-    varop = gen_lowpart_for_combine (mode, varop);
+    varop = gen_lowpart (mode, varop);
 
   /* If we can't make the SUBREG, try to return what we were given.  */
   if (GET_CODE (varop) == CLOBBER)
@@ -9792,7 +9793,7 @@
       && SUBREG_REG (XEXP (x, 0)) == varop)
     varop = XEXP (x, 0);
   else if (GET_MODE (varop) != shift_mode)
-    varop = gen_lowpart_for_combine (shift_mode, varop);
+    varop = gen_lowpart (shift_mode, varop);
 
   /* If we can't make the SUBREG, try to return what we were given.  */
   if (GET_CODE (varop) == CLOBBER)
@@ -9821,7 +9822,7 @@
 				GET_MODE_MASK (result_mode) >> orig_count);
 
   /* Do the remainder of the processing in RESULT_MODE.  */
-  x = gen_lowpart_for_combine (result_mode, x);
+  x = gen_lowpart (result_mode, x);
 
   /* If COMPLEMENT_P is set, we have to complement X before doing the outer
      operation.  */
@@ -9956,19 +9957,18 @@
   return insn_code_number;
 }
 
-/* Like gen_lowpart but for use by combine.  In combine it is not possible
-   to create any new pseudoregs.  However, it is safe to create
-   invalid memory addresses, because combine will try to recognize
-   them and all they will do is make the combine attempt fail.
+/* Like gen_lowpart_general but for use by combine.  In combine it
+   is not possible to create any new pseudoregs.  However, it is
+   safe to create invalid memory addresses, because combine will
+   try to recognize them and all they will do is make the combine
+   attempt fail.
 
    If for some reason this cannot do its job, an rtx
    (clobber (const_int 0)) is returned.
    An insn containing that will not be recognized.  */
 
-#undef gen_lowpart
-
 static rtx
 gen_lowpart_for_combine (enum machine_mode mode, rtx x)
 {
   rtx result;
 
@@ -10264,8 +10264,8 @@
 		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
 	      if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
 		{
-		  op0 = gen_lowpart_for_combine (tmode, inner_op0);
-		  op1 = gen_lowpart_for_combine (tmode, inner_op1);
+		  op0 = gen_lowpart (tmode, inner_op0);
+		  op1 = gen_lowpart (tmode, inner_op1);
 		  code = unsigned_condition (code);
 		  changed = 1;
 		  break;
@@ -10930,7 +10930,7 @@
 	      && const_op >> i == 0
 	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
 	    {
-	      op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
+	      op0 = gen_lowpart (tmode, XEXP (op0, 0));
 	      continue;
 	    }
 
@@ -10970,7 +10970,7 @@
 		  op0 = gen_binary (AND, tmode,
 				    SUBREG_REG (XEXP (op0, 0)),
 				    gen_int_mode (c1, tmode));
-		  op0 = gen_lowpart_for_combine (mode, op0);
+		  op0 = gen_lowpart (mode, op0);
 		  continue;
 		}
 	    }
@@ -11092,7 +11092,7 @@
 		   + (GET_MODE_MASK (tmode) >> 1) + 1)
 		  <= GET_MODE_MASK (tmode)))
 	    {
-	      op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
+	      op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
 	      continue;
 	    }
 
@@ -11117,7 +11117,7 @@
 					  XEXP (op0, 1));
 
 	      op0 = gen_binary (PLUS, tmode,
-				gen_lowpart_for_combine (tmode, inner),
+				gen_lowpart (tmode, inner),
 				new_const);
 	      continue;
 	    }
@@ -11211,7 +11211,7 @@
           if (GET_CODE (SUBREG_REG (op0)) == REG)
 	    {
 	      op0 = SUBREG_REG (op0);
-	      op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
+	      op1 = gen_lowpart (GET_MODE (op0), op1);
 	    }
 	}
       else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
@@ -11220,7 +11220,7 @@
 				 GET_MODE (SUBREG_REG (op0)))
 		   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
 	{
-	  tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)), op1);
+	  tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);
 
 	  if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
 	       & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
@@ -11272,15 +11272,15 @@
 	      if (GET_CODE (op0) == AND
 		  && !have_insn_for (AND, mode))
 		op0 = gen_binary (AND, tmode,
-				  gen_lowpart_for_combine (tmode,
-							   XEXP (op0, 0)),
-				  gen_lowpart_for_combine (tmode,
-							   XEXP (op0, 1)));
+				  gen_lowpart (tmode,
+					       XEXP (op0, 0)),
+				  gen_lowpart (tmode,
+					       XEXP (op0, 1)));
 
-	      op0 = gen_lowpart_for_combine (tmode, op0);
+	      op0 = gen_lowpart (tmode, op0);
 	      if (zero_extended && GET_CODE (op1) == CONST_INT)
 		op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
-	      op1 = gen_lowpart_for_combine (tmode, op1);
+	      op1 = gen_lowpart (tmode, op1);
 	      break;
 	    }
 
@@ -11291,7 +11291,7 @@
 	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
 	    {
 	      op0 = gen_binary (AND, tmode,
-				gen_lowpart_for_combine (tmode, op0),
+				gen_lowpart (tmode, op0),
 				GEN_INT ((HOST_WIDE_INT) 1
 					 << (GET_MODE_BITSIZE (mode) - 1)));
 	      code = (code == LT) ? NE : EQ;
@@ -11546,7 +11546,7 @@
 	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
 	       && subreg_lowpart_p (SET_DEST (setter)))
 	record_value_for_reg (dest, record_dead_insn,
-			      gen_lowpart_for_combine (GET_MODE (dest),
+			      gen_lowpart (GET_MODE (dest),
 						       SET_SRC (setter)));
       else
 	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
@@ -11818,7 +11818,7 @@
       && (GET_MODE_SIZE (GET_MODE (x))
 	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
       && (value = get_last_value (SUBREG_REG (x))) != 0)
-    return gen_lowpart_for_combine (GET_MODE (x), value);
+    return gen_lowpart (GET_MODE (x), value);
 
   if (GET_CODE (x) != REG)
     return 0;
Index: cse.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/cse.c,v
retrieving revision 1.283
diff -u -r1.283 cse.c
--- cse.c	24 Jan 2004 20:54:53 -0000	1.283
+++ cse.c	28 Jan 2004 13:49:20 -0000
@@ -1628,7 +1628,7 @@
       int exp_q = REG_QTY (REGNO (classp->exp));
       struct qty_table_elem *exp_ent = &qty_table[exp_q];
 
-      exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
+      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
       exp_ent->const_insn = this_insn;
     }
 
@@ -1647,7 +1647,7 @@
 	      struct qty_table_elem *x_ent = &qty_table[x_q];
 
 	      x_ent->const_rtx
-		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
+		= gen_lowpart (GET_MODE (x), p->exp);
 	      x_ent->const_insn = this_insn;
 	      break;
 	    }
@@ -3577,7 +3577,7 @@
 	    if (((BYTES_BIG_ENDIAN
 		  && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
 		 || (! BYTES_BIG_ENDIAN && offset == 0))
-		&& (new = gen_lowpart_if_possible (mode, constant)) != 0)
+		&& (new = gen_lowpart (mode, constant)) != 0)
 	      return new;
 	  }
 
@@ -3683,7 +3683,7 @@
 		    && GET_CODE (arg_ent->const_rtx) != REG
 		    && GET_CODE (arg_ent->const_rtx) != PLUS)
 		  const_arg
-		    = gen_lowpart_if_possible (GET_MODE (arg),
+		    = gen_lowpart (GET_MODE (arg),
 					       arg_ent->const_rtx);
 	      }
 	    break;
@@ -4289,7 +4289,7 @@
       struct qty_table_elem *x_ent = &qty_table[x_q];
 
       if (x_ent->const_rtx)
-	x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
+	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
     }
 
   if (x == 0 || CONSTANT_P (x))
@@ -4327,7 +4327,7 @@
 
    If the requested operation cannot be done, 0 is returned.
 
-   This is similar to gen_lowpart in emit-rtl.c.  */
+   This is similar to gen_lowpart_general in emit-rtl.c.  */
 
 rtx
 gen_lowpart_if_possible (enum machine_mode mode, rtx x)
@@ -4442,7 +4442,7 @@
 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
     {
       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
-      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
+      rtx tem = gen_lowpart (inner_mode, op1);
 
       record_jump_cond (code, mode, SUBREG_REG (op0),
 			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
@@ -4454,7 +4454,7 @@
 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
     {
       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
-      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
+      rtx tem = gen_lowpart (inner_mode, op0);
 
       record_jump_cond (code, mode, SUBREG_REG (op1),
 			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
@@ -4474,7 +4474,7 @@
 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
     {
       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
-      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
+      rtx tem = gen_lowpart (inner_mode, op1);
 
       record_jump_cond (code, mode, SUBREG_REG (op0),
 			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
@@ -4487,7 +4487,7 @@
 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
     {
       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
-      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
+      rtx tem = gen_lowpart (inner_mode, op0);
 
       record_jump_cond (code, mode, SUBREG_REG (op1),
 			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
@@ -5176,7 +5176,7 @@
 		   const_elt; const_elt = const_elt->next_same_value)
 		if (GET_CODE (const_elt->exp) == REG)
 		  {
-		    src_related = gen_lowpart_if_possible (mode,
+		    src_related = gen_lowpart (mode,
 							   const_elt->exp);
 		    break;
 		  }
@@ -5200,7 +5200,7 @@
 	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
 	       tmode = GET_MODE_WIDER_MODE (tmode))
 	    {
-	      rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
+	      rtx inner = gen_lowpart (tmode, XEXP (src, 0));
 	      struct table_elt *larger_elt;
 
 	      if (inner)
@@ -5216,7 +5216,7 @@
 		    if (GET_CODE (larger_elt->exp) == REG)
 		      {
 			src_related
-			  = gen_lowpart_if_possible (mode, larger_elt->exp);
+			  = gen_lowpart (mode, larger_elt->exp);
 			break;
 		      }
 
@@ -5261,7 +5261,7 @@
 		   larger_elt; larger_elt = larger_elt->next_same_value)
 		if (GET_CODE (larger_elt->exp) == REG)
 		  {
-		    src_related = gen_lowpart_if_possible (mode,
+		    src_related = gen_lowpart (mode,
 							   larger_elt->exp);
 		    break;
 		  }
@@ -6087,8 +6087,8 @@
 	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
 	   make that equivalence as well.
 
-	   However, BAR may have equivalences for which gen_lowpart_if_possible
-	   will produce a simpler value than gen_lowpart_if_possible applied to
+	   However, BAR may have equivalences for which gen_lowpart
+	   will produce a simpler value than gen_lowpart applied to
 	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
 	   BAR's equivalences.  If we don't get a simplified form, make
 	   the SUBREG.  It will not be used in an equivalence, but will
@@ -6433,7 +6433,7 @@
 	      && (CONSTANT_P (ent->const_rtx)
 		  || GET_CODE (ent->const_rtx) == REG))
 	    {
-	      rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
+	      rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
 	      if (new)
 		return new;
 	    }
@@ -6983,6 +6983,7 @@
   constant_pool_entries_cost = 0;
   constant_pool_entries_regcost = 0;
   val.path_size = 0;
+  gen_lowpart = gen_lowpart_if_possible;
 
   init_recog ();
   init_alias_analysis ();
@@ -7102,6 +7103,7 @@
   free (uid_cuid);
   free (reg_eqv_table);
   free (val.path);
+  gen_lowpart = gen_lowpart_general;
 
   return cse_jumps_altered || recorded_label_ref;
 }
Index: emit-rtl.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/emit-rtl.c,v
retrieving revision 1.369
diff -u -r1.369 emit-rtl.c
--- emit-rtl.c	24 Jan 2004 15:31:03 -0000	1.369
+++ emit-rtl.c	28 Jan 2004 13:49:22 -0000
@@ -99,6 +99,8 @@
    at the beginning of each function.  */
 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
 
+rtx (*gen_lowpart) (enum machine_mode mode, rtx x) = gen_lowpart_general;
+
 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
    the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
    record a copy of const[012]_rtx.  */
@@ -1207,7 +1209,7 @@
    If X is a MEM whose address is a QUEUED, the value may be so also.  */
 
 rtx
-gen_lowpart (enum machine_mode mode, rtx x)
+gen_lowpart_general (enum machine_mode mode, rtx x)
 {
   rtx result = gen_lowpart_common (mode, x);
 
Index: rtl.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/rtl.h,v
retrieving revision 1.452
diff -u -r1.452 rtl.h
--- rtl.h	23 Jan 2004 21:05:17 -0000	1.452
+++ rtl.h	28 Jan 2004 13:49:24 -0000
@@ -1476,7 +1476,9 @@
 extern rtx gen_label_rtx (void);
 extern int subreg_hard_regno (rtx, int);
 extern rtx gen_lowpart_common (enum machine_mode, rtx);
-extern rtx gen_lowpart (enum machine_mode, rtx);
+extern rtx gen_lowpart_general (enum machine_mode, rtx);
+extern rtx (*gen_lowpart) (enum machine_mode mode, rtx x);
+
 
 /* In cse.c */
 extern rtx gen_lowpart_if_possible (enum machine_mode, rtx);
