This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


[PATCH][8/n] Remove GENERIC stmt combining from SCCVN


The following moves some bitwise patterns from the match-and-simplify
branch to match.pd, extending them with proper conditional converts
and removing the corresponding patterns from fold-const.c.

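To illustrate (not part of the patch; function and variable names are
made up), these are the C-level forms the moved patterns cover, with
the expected simplification noted in the comments:

  int
  example (int x, int y)
  {
    int a = ~x & ~y;                  /* -> ~(x | y)  */
    int b = ~x | ~y;                  /* -> ~(x & y)  */
    int c = (x & 0x0f) ^ (x & 0xf0);  /* masks share no bits, so this is
                                         treated as (x & 0x0f) | (x & 0xf0)  */
    int d = (x | y) ^ x;              /* -> y & ~x    */
    int e = ~x ^ ~y;                  /* -> x ^ y     */
    int f = ~x ^ 5;                   /* -> x ^ ~5    */
    return a + b + c + d + e + f;
  }
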
Bootstrap & regtest in progress on x86_64-unknown-linux-gnu.

Richard.

2015-06-30  Richard Biener  <rguenther@suse.de>

	* fold-const.c (fold_binary_loc): Move ~X & ~Y -> ~(X | Y) and
	~X | ~Y -> ~(X & Y), (X & CST) ^ (X & CST2) -> (X & CST) | (X & CST2),
	(X | Y) ^ X -> Y & ~X, ~X ^ ~Y -> X ^ Y and ~X ^ C -> X ^ ~C ...
	* match.pd: ... to patterns here.

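As a sketch of what the conditional converts buy (again illustration
only, not part of the patch): the patterns wrap the operands in
convert1?/convert2? guarded by tree_nop_conversion_p, so they should
also fire when, say, the negations happen in a type that only differs
in signedness from the result type:

  unsigned int
  example2 (unsigned int x, unsigned int y)
  {
    /* The bit_nots are done in the signed type, the AND in the
       unsigned type; with the conditional converts this should still
       simplify to ~(x | y).  */
    return (unsigned int) ~(int) x & (unsigned int) ~(int) y;
  }
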
Index: gcc/fold-const.c
===================================================================
--- gcc/fold-const.c	(revision 225164)
+++ gcc/fold-const.c	(working copy)
@@ -10996,24 +10996,6 @@ fold_binary_loc (location_t loc,
       if (t1 != NULL_TREE)
 	return t1;
 
-      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
-
-	 This results in more efficient code for machines without a NAND
-	 instruction.  Combine will canonicalize to the first form
-	 which will allow use of NAND instructions provided by the
-	 backend if they exist.  */
-      if (TREE_CODE (arg0) == BIT_NOT_EXPR
-	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
-	{
-	  return
-	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
-			 build2 (BIT_AND_EXPR, type,
-				 fold_convert_loc (loc, type,
-						   TREE_OPERAND (arg0, 0)),
-				 fold_convert_loc (loc, type,
-						   TREE_OPERAND (arg1, 0))));
-	}
-
       /* See if this can be simplified into a rotate first.  If that
 	 is unsuccessful continue in the association code.  */
       goto bit_rotate;
@@ -11037,90 +11019,6 @@ fold_binary_loc (location_t loc,
 	  return omit_one_operand_loc (loc, type, t1, arg0);
 	}
 
-      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
-         with a constant, and the two constants have no bits in common,
-	 we should treat this as a BIT_IOR_EXPR since this may produce more
-	 simplifications.  */
-      if (TREE_CODE (arg0) == BIT_AND_EXPR
-	  && TREE_CODE (arg1) == BIT_AND_EXPR
-	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
-	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
-	  && wi::bit_and (TREE_OPERAND (arg0, 1),
-			  TREE_OPERAND (arg1, 1)) == 0)
-	{
-	  code = BIT_IOR_EXPR;
-	  goto bit_ior;
-	}
-
-      /* (X | Y) ^ X -> Y & ~ X*/
-      if (TREE_CODE (arg0) == BIT_IOR_EXPR
-          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
-        {
-	  tree t2 = TREE_OPERAND (arg0, 1);
-	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
-			    arg1);
-	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
-			    fold_convert_loc (loc, type, t2),
-			    fold_convert_loc (loc, type, t1));
-	  return t1;
-	}
-
-      /* (Y | X) ^ X -> Y & ~ X*/
-      if (TREE_CODE (arg0) == BIT_IOR_EXPR
-          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
-        {
-	  tree t2 = TREE_OPERAND (arg0, 0);
-	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
-			    arg1);
-	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
-			    fold_convert_loc (loc, type, t2),
-			    fold_convert_loc (loc, type, t1));
-	  return t1;
-	}
-
-      /* X ^ (X | Y) -> Y & ~ X*/
-      if (TREE_CODE (arg1) == BIT_IOR_EXPR
-          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
-        {
-	  tree t2 = TREE_OPERAND (arg1, 1);
-	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
-			    arg0);
-	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
-			    fold_convert_loc (loc, type, t2),
-			    fold_convert_loc (loc, type, t1));
-	  return t1;
-	}
-
-      /* X ^ (Y | X) -> Y & ~ X*/
-      if (TREE_CODE (arg1) == BIT_IOR_EXPR
-          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
-        {
-	  tree t2 = TREE_OPERAND (arg1, 0);
-	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
-			    arg0);
-	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
-			    fold_convert_loc (loc, type, t2),
-			    fold_convert_loc (loc, type, t1));
-	  return t1;
-	}
-
-      /* Convert ~X ^ ~Y to X ^ Y.  */
-      if (TREE_CODE (arg0) == BIT_NOT_EXPR
-	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
-	return fold_build2_loc (loc, code, type,
-			    fold_convert_loc (loc, type,
-					      TREE_OPERAND (arg0, 0)),
-			    fold_convert_loc (loc, type,
-					      TREE_OPERAND (arg1, 0)));
-
-      /* Convert ~X ^ C to X ^ ~C.  */
-      if (TREE_CODE (arg0) == BIT_NOT_EXPR
-	  && TREE_CODE (arg1) == INTEGER_CST)
-	return fold_build2_loc (loc, code, type,
-			    fold_convert_loc (loc, type,
-					      TREE_OPERAND (arg0, 0)),
-			    fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
-
       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
       if (TREE_CODE (arg0) == BIT_AND_EXPR
 	  && INTEGRAL_TYPE_P (type)
@@ -11432,23 +11330,6 @@ fold_binary_loc (location_t loc,
 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
 	}
 
-      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
-
-	 This results in more efficient code for machines without a NOR
-	 instruction.  Combine will canonicalize to the first form
-	 which will allow use of NOR instructions provided by the
-	 backend if they exist.  */
-      if (TREE_CODE (arg0) == BIT_NOT_EXPR
-	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
-	{
-	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
-			      build2 (BIT_IOR_EXPR, type,
-				      fold_convert_loc (loc, type,
-							TREE_OPERAND (arg0, 0)),
-				      fold_convert_loc (loc, type,
-							TREE_OPERAND (arg1, 0))));
-	}
-
       /* If arg0 is derived from the address of an object or function, we may
 	 be able to fold this expression using the object or function's
 	 alignment.  */
Index: gcc/match.pd
===================================================================
--- gcc/match.pd	(revision 225164)
+++ gcc/match.pd	(working copy)
@@ -389,6 +389,47 @@ (define_operator_list swapped_tcc_compar
  (bit_and:c (bit_ior:c @0 @1) (bit_xor:c @1 (bit_not @0)))
  (bit_and @0 @1))
 
+/* ~x & ~y -> ~(x | y)
+   ~x | ~y -> ~(x & y) */
+(for op (bit_and bit_ior)
+     rop (bit_ior bit_and)
+ (simplify
+  (op (convert1? (bit_not @0)) (convert2? (bit_not @1)))
+  (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
+       && tree_nop_conversion_p (type, TREE_TYPE (@1)))
+   (bit_not (rop (convert @0) (convert @1))))))
+
+/* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
+   with a constant, and the two constants have no bits in common,
+   we should treat this as a BIT_IOR_EXPR since this may produce more
+   simplifications.  */
+(simplify
+ (bit_xor (convert1? (bit_and@4 @0 INTEGER_CST@1))
+          (convert2? (bit_and@5 @2 INTEGER_CST@3)))
+ (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
+      && tree_nop_conversion_p (type, TREE_TYPE (@2))
+      && wi::bit_and (@1, @3) == 0)
+  (bit_ior (convert @4) (convert @5))))
+
+/* (X | Y) ^ X -> Y & ~ X*/
+(simplify
+ (bit_xor:c (convert? (bit_ior:c @0 @1)) (convert? @0))
+ (if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
+  (convert (bit_and @1 (bit_not @0)))))
+
+/* Convert ~X ^ ~Y to X ^ Y.  */
+(simplify
+ (bit_xor (convert1? (bit_not @0)) (convert2? (bit_not @1)))
+ (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
+      && tree_nop_conversion_p (type, TREE_TYPE (@1)))
+  (bit_xor (convert @0) (convert @1))))
+
+/* Convert ~X ^ C to X ^ ~C.  */
+(simplify
+ (bit_xor (convert? (bit_not @0)) INTEGER_CST@1)
+ (bit_xor (convert @0) (bit_not @1)))
+
+
 (simplify
  (abs (negate @0))
  (abs @0))

