This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.



[26/77] Use is_a <scalar_int_mode> in subreg/extract simplifications


This patch adds is_a <scalar_int_mode> checks to various places that
were optimising subregs or extractions in ways that only made sense
for scalar integers.  Often the subreg transformations were looking
for extends, truncates or shifts and trying to remove the subreg, which
wouldn't be correct if the SUBREG_REG was a vector rather than a scalar.
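
As a standalone illustration (not part of the patch), most of the new
guards boil down to something like the sketch below.  The helper name is
invented for the example, and the snippet assumes GCC's internal rtl.h
and machmode.h API, so it is only meaningful in-tree:

  /* Sketch only: return true if X is a lowpart SUBREG of a scalar
     integer, storing the inner mode in *INNER_MODE.  The
     is_a <scalar_int_mode> test is what rejects vector inner modes.  */
  static bool
  lowpart_scalar_int_subreg_p (rtx x, scalar_int_mode *inner_mode)
  {
    return (GET_CODE (x) == SUBREG
            && subreg_lowpart_p (x)
            && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (x)),
                                       inner_mode));
  }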

The simplify_binary_operation_1 part also removes a redundant check:

  GET_MODE (opleft) == GET_MODE (XEXP (opright, 0))

since this must be true for:

  (ior A (lshiftrt B ...)), where A == opleft and B == XEXP (opright, 0)
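
For reference, the matched RTL has roughly this shape (an illustrative
template with made-up operand names, not the exact expression from the
code):

  (ior:M (subreg:M (ashift:N X C1) B)
         (lshiftrt:M (subreg:M Y B) C2))

Both operands of the ior:M have mode M, and the shifted operand of an
lshiftrt:M likewise has mode M, so GET_MODE (opleft) == GET_MODE
(XEXP (opright, 0)) already follows from the surrounding GET_CODE checks.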

2017-07-13  Richard Sandiford  <richard.sandiford@linaro.org>
	    Alan Hayward  <alan.hayward@arm.com>
	    David Sherwood  <david.sherwood@arm.com>

gcc/
	* combine.c (find_split_point): Add is_a <scalar_int_mode> checks.
	(make_compound_operation_int): Likewise.
	(change_zero_ext): Likewise.
	* expr.c (convert_move): Likewise.
	(convert_modes): Likewise.
	* fwprop.c (forward_propagate_subreg): Likewise.
	* loop-iv.c (get_biv_step_1): Likewise.
	* optabs.c (widen_operand): Likewise.
	* postreload.c (move2add_valid_value_p): Likewise.
	* recog.c (simplify_while_replacing): Likewise.
	* simplify-rtx.c (simplify_unary_operation_1): Likewise.
	(simplify_binary_operation_1): Likewise.  Remove redundant
	mode equality check.

Index: gcc/combine.c
===================================================================
--- gcc/combine.c	2017-07-13 09:18:34.533172436 +0100
+++ gcc/combine.c	2017-07-13 09:18:35.047126725 +0100
@@ -4793,7 +4793,7 @@ find_split_point (rtx *loc, rtx_insn *in
   HOST_WIDE_INT pos = 0;
   int unsignedp = 0;
   rtx inner = NULL_RTX;
-  scalar_int_mode inner_mode;
+  scalar_int_mode mode, inner_mode;
 
   /* First special-case some codes.  */
   switch (code)
@@ -5047,7 +5047,9 @@ find_split_point (rtx *loc, rtx_insn *in
 
 	case SIGN_EXTRACT:
 	case ZERO_EXTRACT:
-	  if (CONST_INT_P (XEXP (SET_SRC (x), 1))
+	  if (is_a <scalar_int_mode> (GET_MODE (XEXP (SET_SRC (x), 0)),
+				      &inner_mode)
+	      && CONST_INT_P (XEXP (SET_SRC (x), 1))
 	      && CONST_INT_P (XEXP (SET_SRC (x), 2)))
 	    {
 	      inner = XEXP (SET_SRC (x), 0);
@@ -5055,7 +5057,7 @@ find_split_point (rtx *loc, rtx_insn *in
 	      pos = INTVAL (XEXP (SET_SRC (x), 2));
 
 	      if (BITS_BIG_ENDIAN)
-		pos = GET_MODE_PRECISION (GET_MODE (inner)) - len - pos;
+		pos = GET_MODE_PRECISION (inner_mode) - len - pos;
 	      unsignedp = (code == ZERO_EXTRACT);
 	    }
 	  break;
@@ -5065,10 +5067,9 @@ find_split_point (rtx *loc, rtx_insn *in
 	}
 
       if (len && pos >= 0
-	  && pos + len <= GET_MODE_PRECISION (GET_MODE (inner)))
+	  && pos + len <= GET_MODE_PRECISION (GET_MODE (inner))
+	  && is_a <scalar_int_mode> (GET_MODE (SET_SRC (x)), &mode))
 	{
-	  machine_mode mode = GET_MODE (SET_SRC (x));
-
 	  /* For unsigned, we have a choice of a shift followed by an
 	     AND or two shifts.  Use two shifts for field sizes where the
 	     constant might be too large.  We assume here that we can
@@ -7859,6 +7860,7 @@ make_compound_operation_int (machine_mod
   rtx new_rtx = 0;
   int i;
   rtx tem;
+  scalar_int_mode inner_mode;
   bool equality_comparison = false;
 
   if (in_code == EQ)
@@ -7967,11 +7969,12 @@ make_compound_operation_int (machine_mod
       /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
       else if (GET_CODE (XEXP (x, 0)) == SUBREG
 	       && subreg_lowpart_p (XEXP (x, 0))
+	       && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (XEXP (x, 0))),
+					  &inner_mode)
 	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
 	       && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
 	{
 	  rtx inner_x0 = SUBREG_REG (XEXP (x, 0));
-	  machine_mode inner_mode = GET_MODE (inner_x0);
 	  new_rtx = make_compound_operation (XEXP (inner_x0, 0), next_code);
 	  new_rtx = make_extraction (inner_mode, new_rtx, 0,
 				     XEXP (inner_x0, 1),
@@ -8170,14 +8173,14 @@ make_compound_operation_int (machine_mod
 	/* If the SUBREG is masking of a logical right shift,
 	   make an extraction.  */
 	if (GET_CODE (inner) == LSHIFTRT
+	    && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
+	    && GET_MODE_SIZE (mode) < GET_MODE_SIZE (inner_mode)
 	    && CONST_INT_P (XEXP (inner, 1))
-	    && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (inner))
-	    && (UINTVAL (XEXP (inner, 1))
-		< GET_MODE_PRECISION (GET_MODE (inner)))
+	    && UINTVAL (XEXP (inner, 1)) < GET_MODE_PRECISION (inner_mode)
 	    && subreg_lowpart_p (x))
 	  {
 	    new_rtx = make_compound_operation (XEXP (inner, 0), next_code);
-	    int width = GET_MODE_PRECISION (GET_MODE (inner))
+	    int width = GET_MODE_PRECISION (inner_mode)
 			- INTVAL (XEXP (inner, 1));
 	    if (width > mode_width)
 	      width = mode_width;
@@ -11380,15 +11383,16 @@ change_zero_ext (rtx pat)
       maybe_swap_commutative_operands (**iter);
 
   rtx *dst = &SET_DEST (pat);
+  scalar_int_mode mode;
   if (GET_CODE (*dst) == ZERO_EXTRACT
       && REG_P (XEXP (*dst, 0))
+      && is_a <scalar_int_mode> (GET_MODE (XEXP (*dst, 0)), &mode)
       && CONST_INT_P (XEXP (*dst, 1))
       && CONST_INT_P (XEXP (*dst, 2)))
     {
       rtx reg = XEXP (*dst, 0);
       int width = INTVAL (XEXP (*dst, 1));
       int offset = INTVAL (XEXP (*dst, 2));
-      machine_mode mode = GET_MODE (reg);
       int reg_width = GET_MODE_PRECISION (mode);
       if (BITS_BIG_ENDIAN)
 	offset = reg_width - width - offset;
Index: gcc/expr.c
===================================================================
--- gcc/expr.c	2017-07-13 09:18:31.697428505 +0100
+++ gcc/expr.c	2017-07-13 09:18:35.048126637 +0100
@@ -239,11 +239,14 @@ convert_move (rtx to, rtx from, int unsi
      the required extension, strip it.  We don't handle such SUBREGs as
      TO here.  */
 
-  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
+  scalar_int_mode to_int_mode;
+  if (GET_CODE (from) == SUBREG
+      && SUBREG_PROMOTED_VAR_P (from)
+      && is_a <scalar_int_mode> (to_mode, &to_int_mode)
       && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
-	  >= GET_MODE_PRECISION (to_mode))
+	  >= GET_MODE_PRECISION (to_int_mode))
       && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
-    from = gen_lowpart (to_mode, from), from_mode = to_mode;
+    from = gen_lowpart (to_int_mode, from), from_mode = to_int_mode;
 
   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
 
@@ -635,10 +638,12 @@ convert_modes (machine_mode mode, machin
   /* If FROM is a SUBREG that indicates that we have already done at least
      the required extension, strip it.  */
 
-  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
-      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
+  if (GET_CODE (x) == SUBREG
+      && SUBREG_PROMOTED_VAR_P (x)
+      && is_a <scalar_int_mode> (mode, &int_mode)
+      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (int_mode)
       && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
-    x = gen_lowpart (mode, SUBREG_REG (x));
+    x = gen_lowpart (int_mode, SUBREG_REG (x));
 
   if (GET_MODE (x) != VOIDmode)
     oldmode = GET_MODE (x);
Index: gcc/fwprop.c
===================================================================
--- gcc/fwprop.c	2017-04-18 19:52:33.713921172 +0100
+++ gcc/fwprop.c	2017-07-13 09:18:35.048126637 +0100
@@ -1096,6 +1096,7 @@ forward_propagate_subreg (df_ref use, rt
   rtx use_reg = DF_REF_REG (use);
   rtx_insn *use_insn;
   rtx src;
+  scalar_int_mode int_use_mode, src_mode;
 
   /* Only consider subregs... */
   machine_mode use_mode = GET_MODE (use_reg);
@@ -1139,17 +1140,19 @@ forward_propagate_subreg (df_ref use, rt
      definition of Y or, failing that, allow A to be deleted after
      reload through register tying.  Introducing more uses of Y
      prevents both optimisations.  */
-  else if (subreg_lowpart_p (use_reg))
+  else if (is_a <scalar_int_mode> (use_mode, &int_use_mode)
+	   && subreg_lowpart_p (use_reg))
     {
       use_insn = DF_REF_INSN (use);
       src = SET_SRC (def_set);
       if ((GET_CODE (src) == ZERO_EXTEND
 	   || GET_CODE (src) == SIGN_EXTEND)
+	  && is_a <scalar_int_mode> (GET_MODE (src), &src_mode)
 	  && REG_P (XEXP (src, 0))
 	  && REGNO (XEXP (src, 0)) >= FIRST_PSEUDO_REGISTER
 	  && GET_MODE (XEXP (src, 0)) == use_mode
 	  && !free_load_extend (src, def_insn)
-	  && (targetm.mode_rep_extended (use_mode, GET_MODE (src))
+	  && (targetm.mode_rep_extended (int_use_mode, src_mode)
 	      != (int) GET_CODE (src))
 	  && all_uses_available_at (def_insn, use_insn))
 	return try_fwprop_subst (use, DF_REF_LOC (use), XEXP (src, 0),
Index: gcc/loop-iv.c
===================================================================
--- gcc/loop-iv.c	2017-02-23 19:54:20.000000000 +0000
+++ gcc/loop-iv.c	2017-07-13 09:18:35.049126549 +0100
@@ -739,9 +739,9 @@ get_biv_step_1 (df_ref def, rtx reg,
 
   if (GET_CODE (next) == SUBREG)
     {
-      machine_mode amode = GET_MODE (next);
-
-      if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
+      scalar_int_mode amode;
+      if (!is_a <scalar_int_mode> (GET_MODE (next), &amode)
+	  || GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
 	return false;
 
       *inner_mode = amode;
Index: gcc/optabs.c
===================================================================
--- gcc/optabs.c	2017-07-13 09:18:32.526352886 +0100
+++ gcc/optabs.c	2017-07-13 09:18:35.049126549 +0100
@@ -195,6 +195,7 @@ widen_operand (rtx op, machine_mode mode
 	       int unsignedp, int no_extend)
 {
   rtx result;
+  scalar_int_mode int_mode;
 
   /* If we don't have to extend and this is a constant, return it.  */
   if (no_extend && GET_MODE (op) == VOIDmode)
@@ -204,19 +205,20 @@ widen_operand (rtx op, machine_mode mode
      extend since it will be more efficient to do so unless the signedness of
      a promoted object differs from our extension.  */
   if (! no_extend
+      || !is_a <scalar_int_mode> (mode, &int_mode)
       || (GET_CODE (op) == SUBREG && SUBREG_PROMOTED_VAR_P (op)
 	  && SUBREG_CHECK_PROMOTED_SIGN (op, unsignedp)))
     return convert_modes (mode, oldmode, op, unsignedp);
 
   /* If MODE is no wider than a single word, we return a lowpart or paradoxical
      SUBREG.  */
-  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
-    return gen_lowpart (mode, force_reg (GET_MODE (op), op));
+  if (GET_MODE_SIZE (int_mode) <= UNITS_PER_WORD)
+    return gen_lowpart (int_mode, force_reg (GET_MODE (op), op));
 
   /* Otherwise, get an object of MODE, clobber it, and set the low-order
      part to OP.  */
 
-  result = gen_reg_rtx (mode);
+  result = gen_reg_rtx (int_mode);
   emit_clobber (result);
   emit_move_insn (gen_lowpart (GET_MODE (op), result), op);
   return result;
Index: gcc/postreload.c
===================================================================
--- gcc/postreload.c	2017-07-13 09:18:32.527352795 +0100
+++ gcc/postreload.c	2017-07-13 09:18:35.050126461 +0100
@@ -1699,14 +1699,16 @@ move2add_valid_value_p (int regno, machi
 
   if (mode != reg_mode[regno])
     {
-      if (!MODES_OK_FOR_MOVE2ADD (mode, reg_mode[regno]))
+      scalar_int_mode old_mode;
+      if (!is_a <scalar_int_mode> (reg_mode[regno], &old_mode)
+	  || !MODES_OK_FOR_MOVE2ADD (mode, old_mode))
 	return false;
       /* The value loaded into regno in reg_mode[regno] is also valid in
 	 mode after truncation only if (REG:mode regno) is the lowpart of
 	 (REG:reg_mode[regno] regno).  Now, for big endian, the starting
 	 regno of the lowpart might be different.  */
-      int s_off = subreg_lowpart_offset (mode, reg_mode[regno]);
-      s_off = subreg_regno_offset (regno, reg_mode[regno], s_off, mode);
+      int s_off = subreg_lowpart_offset (mode, old_mode);
+      s_off = subreg_regno_offset (regno, old_mode, s_off, mode);
       if (s_off != 0)
 	/* We could in principle adjust regno, check reg_mode[regno] to be
 	   BLKmode, and return s_off to the caller (vs. -1 for failure),
Index: gcc/recog.c
===================================================================
--- gcc/recog.c	2017-06-30 12:50:38.299660094 +0100
+++ gcc/recog.c	2017-07-13 09:18:35.050126461 +0100
@@ -560,6 +560,7 @@ simplify_while_replacing (rtx *loc, rtx
   rtx x = *loc;
   enum rtx_code code = GET_CODE (x);
   rtx new_rtx = NULL_RTX;
+  scalar_int_mode is_mode;
 
   if (SWAPPABLE_OPERANDS_P (x)
       && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
@@ -655,6 +656,7 @@ simplify_while_replacing (rtx *loc, rtx
          happen, we might just fail in some cases).  */
 
       if (MEM_P (XEXP (x, 0))
+	  && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
 	  && CONST_INT_P (XEXP (x, 1))
 	  && CONST_INT_P (XEXP (x, 2))
 	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
@@ -662,7 +664,6 @@ simplify_while_replacing (rtx *loc, rtx
 	  && !MEM_VOLATILE_P (XEXP (x, 0)))
 	{
 	  machine_mode wanted_mode = VOIDmode;
-	  machine_mode is_mode = GET_MODE (XEXP (x, 0));
 	  int pos = INTVAL (XEXP (x, 2));
 
 	  if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
Index: gcc/simplify-rtx.c
===================================================================
--- gcc/simplify-rtx.c	2017-07-13 09:18:34.534172347 +0100
+++ gcc/simplify-rtx.c	2017-07-13 09:18:35.051126373 +0100
@@ -916,7 +916,7 @@ simplify_unary_operation_1 (enum rtx_cod
 {
   enum rtx_code reversed;
   rtx temp;
-  scalar_int_mode inner, int_mode;
+  scalar_int_mode inner, int_mode, op0_mode;
 
   switch (code)
     {
@@ -1628,21 +1628,19 @@ simplify_unary_operation_1 (enum rtx_cod
 	 (zero_extend:SI (subreg:QI (and:SI (reg:SI) (const_int 63)) 0)) is
 	 (and:SI (reg:SI) (const_int 63)).  */
       if (GET_CODE (op) == SUBREG
-	  && GET_MODE_PRECISION (GET_MODE (op))
-	     < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op)))
-	  && GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op)))
-	     <= HOST_BITS_PER_WIDE_INT
-	  && GET_MODE_PRECISION (mode)
-	     >= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op)))
+	  && is_a <scalar_int_mode> (mode, &int_mode)
+	  && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op)), &op0_mode)
+	  && GET_MODE_PRECISION (GET_MODE (op)) < GET_MODE_PRECISION (op0_mode)
+	  && GET_MODE_PRECISION (op0_mode) <= HOST_BITS_PER_WIDE_INT
+	  && GET_MODE_PRECISION (int_mode) >= GET_MODE_PRECISION (op0_mode)
 	  && subreg_lowpart_p (op)
-	  && (nonzero_bits (SUBREG_REG (op), GET_MODE (SUBREG_REG (op)))
+	  && (nonzero_bits (SUBREG_REG (op), op0_mode)
 	      & ~GET_MODE_MASK (GET_MODE (op))) == 0)
 	{
-	  if (GET_MODE_PRECISION (mode)
-	      == GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op))))
+	  if (GET_MODE_PRECISION (int_mode) == GET_MODE_PRECISION (op0_mode))
 	    return SUBREG_REG (op);
-	  return simplify_gen_unary (ZERO_EXTEND, mode, SUBREG_REG (op),
-				     GET_MODE (SUBREG_REG (op)));
+	  return simplify_gen_unary (ZERO_EXTEND, int_mode, SUBREG_REG (op),
+				     op0_mode);
 	}
 
 #if defined(POINTERS_EXTEND_UNSIGNED)
@@ -2707,21 +2705,23 @@ simplify_binary_operation_1 (enum rtx_co
         by simplify_shift_const.  */
 
       if (GET_CODE (opleft) == SUBREG
+	  && is_a <scalar_int_mode> (mode, &int_mode)
+	  && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (opleft)),
+				     &inner_mode)
           && GET_CODE (SUBREG_REG (opleft)) == ASHIFT
           && GET_CODE (opright) == LSHIFTRT
           && GET_CODE (XEXP (opright, 0)) == SUBREG
-          && GET_MODE (opleft) == GET_MODE (XEXP (opright, 0))
           && SUBREG_BYTE (opleft) == SUBREG_BYTE (XEXP (opright, 0))
-          && (GET_MODE_SIZE (GET_MODE (opleft))
-              < GET_MODE_SIZE (GET_MODE (SUBREG_REG (opleft))))
+	  && GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (inner_mode)
           && rtx_equal_p (XEXP (SUBREG_REG (opleft), 0),
                           SUBREG_REG (XEXP (opright, 0)))
           && CONST_INT_P (XEXP (SUBREG_REG (opleft), 1))
           && CONST_INT_P (XEXP (opright, 1))
-          && (INTVAL (XEXP (SUBREG_REG (opleft), 1)) + INTVAL (XEXP (opright, 1))
-              == GET_MODE_PRECISION (mode)))
-        return gen_rtx_ROTATE (mode, XEXP (opright, 0),
-                               XEXP (SUBREG_REG (opleft), 1));
+	  && (INTVAL (XEXP (SUBREG_REG (opleft), 1))
+	      + INTVAL (XEXP (opright, 1))
+	      == GET_MODE_PRECISION (int_mode)))
+	return gen_rtx_ROTATE (int_mode, XEXP (opright, 0),
+			       XEXP (SUBREG_REG (opleft), 1));
 
       /* If we have (ior (and (X C1) C2)), simplify this by making
 	 C1 as small as possible if C1 actually changes.  */

