
[PATCH][5/n] Remove GENERIC stmt combining from SCCVN


This moves some equality comparison patterns from fold-const.c to 
match.pd.
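
As a minimal illustration (not part of the patch; the function names
are made up), these are the kinds of source expressions the moved
patterns fold, now via match.pd rather than fold_binary:

  /* Illustration only.  */
  int
  xor_eq_zero (int x, int y)
  {
    return (x ^ y) == 0;	/* folds to x == y */
  }

  int
  xor_eq_operand (int x, int y)
  {
    return (x ^ y) != y;	/* folds to x != 0 */
  }

  int
  xor_with_consts (int x)
  {
    return (x ^ 4) == 6;	/* folds to x == 2, since 4 ^ 6 == 2 */
  }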

Bootstrapped and tested on x86_64-unknown-linux-gnu, applied.

Richard.

2015-06-26  Richard Biener  <rguenther@suse.de>

	* fold-const.c (fold_binary_loc): Remove -A CMP -B -> A CMP B
	and -A CMP CST -> A CMP -CST which is redundant with a pattern
	in match.pd.
	Move (A | C) == D where C & ~D != 0 -> 0, (X ^ Y) ==/!= 0 -> X ==/!= Y,
	(X ^ Y) ==/!= {Y,X} -> {X,Y} ==/!= 0 and
	(X ^ C1) op C2 -> X op (C1 ^ C2) to ...
	* match.pd: ... patterns here.

	* gcc.dg/tree-ssa/forwprop-25.c: Adjust.
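
To illustrate the (A | C) == D entry above (again not part of the
patch, the name is made up): when C has a bit set that D lacks,
i.e. C & ~D != 0, the comparison can never hold.

  int
  ior_never_equal (int a)
  {
    /* a | 4 always has bit 2 set while 3 does not
       (4 & ~3 == 4 != 0), so this folds to 0.  */
    return (a | 4) == 3;
  }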

Index: gcc/fold-const.c
===================================================================
*** gcc/fold-const.c	(revision 224943)
--- gcc/fold-const.c	(working copy)
*************** fold_binary_loc (location_t loc,
*** 12165,12179 ****
  				          type);
  	}
  
-       /* Similarly for a NEGATE_EXPR.  */
-       if (TREE_CODE (arg0) == NEGATE_EXPR
- 	  && TREE_CODE (arg1) == INTEGER_CST
- 	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
- 							arg1)))
- 	  && TREE_CODE (tem) == INTEGER_CST
- 	  && !TREE_OVERFLOW (tem))
- 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
- 
        /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
        if (TREE_CODE (arg0) == BIT_XOR_EXPR
  	  && TREE_CODE (arg1) == INTEGER_CST
--- 12165,12170 ----
*************** fold_binary_loc (location_t loc,
*** 12358,12379 ****
  	    return omit_one_operand_loc (loc, type, rslt, arg0);
  	}
  
-       /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
- 	 Similarly for NE_EXPR.  */
-       if (TREE_CODE (arg0) == BIT_IOR_EXPR
- 	  && TREE_CODE (arg1) == INTEGER_CST
- 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- 	{
- 	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
- 	  tree candnotd
- 	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
- 			       TREE_OPERAND (arg0, 1),
- 			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
- 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
- 	  if (integer_nonzerop (candnotd))
- 	    return omit_one_operand_loc (loc, type, rslt, arg0);
- 	}
- 
        /* If this is a comparison of a field, we may be able to simplify it.  */
        if ((TREE_CODE (arg0) == COMPONENT_REF
  	   || TREE_CODE (arg0) == BIT_FIELD_REF)
--- 12349,12354 ----
*************** fold_binary_loc (location_t loc,
*** 12431,12462 ****
  	    }
  	}
  
-       /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
-       if (integer_zerop (arg1)
- 	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
- 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- 			    TREE_OPERAND (arg0, 1));
- 
-       /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
-       if (TREE_CODE (arg0) == BIT_XOR_EXPR
- 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- 				build_zero_cst (TREE_TYPE (arg0)));
-       /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
-       if (TREE_CODE (arg0) == BIT_XOR_EXPR
- 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
- 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
- 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
- 				build_zero_cst (TREE_TYPE (arg0)));
- 
-       /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
-       if (TREE_CODE (arg0) == BIT_XOR_EXPR
- 	  && TREE_CODE (arg1) == INTEGER_CST
- 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- 			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
- 					 TREE_OPERAND (arg0, 1), arg1));
- 
        /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
  	 (X & C) == 0 when C is a single bit.  */
        if (TREE_CODE (arg0) == BIT_AND_EXPR
--- 12406,12411 ----
*************** fold_binary_loc (location_t loc,
*** 12510,12523 ****
  	  return omit_one_operand_loc (loc, type, res, arg0);
  	}
  
-       /* Fold -X op -Y as X op Y, where op is eq/ne.  */
-       if (TREE_CODE (arg0) == NEGATE_EXPR
-           && TREE_CODE (arg1) == NEGATE_EXPR)
- 	return fold_build2_loc (loc, code, type,
- 				TREE_OPERAND (arg0, 0),
- 				fold_convert_loc (loc, TREE_TYPE (arg0),
- 						  TREE_OPERAND (arg1, 0)));
- 
        /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries.  */
        if (TREE_CODE (arg0) == BIT_AND_EXPR
  	  && TREE_CODE (arg1) == BIT_AND_EXPR)
--- 12459,12464 ----
Index: gcc/match.pd
===================================================================
*** gcc/match.pd	(revision 224943)
--- gcc/match.pd	(working copy)
*************** (define_operator_list swapped_tcc_compar
*** 1197,1202 ****
--- 1197,1231 ----
      (if (tem && !TREE_OVERFLOW (tem))
       (scmp @0 { tem; }))))))
  
+ 
+ /* Equality compare simplifications from fold_binary  */
+ (for cmp (eq ne)
+ 
+  /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
+     Similarly for NE_EXPR.  */
+  (simplify
+   (cmp (convert?@3 (bit_ior @0 INTEGER_CST@1)) INTEGER_CST@2)
+   (if (tree_nop_conversion_p (TREE_TYPE (@3), TREE_TYPE (@0))
+        && wi::bit_and_not (@1, @2) != 0)
+    { constant_boolean_node (cmp == NE_EXPR, type); }))
+ 
+  /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
+  (simplify
+   (cmp (bit_xor @0 @1) integer_zerop)
+   (cmp @0 @1))
+ 
+  /* (X ^ Y) == Y becomes X == 0.
+     Likewise (X ^ Y) == X becomes Y == 0.  */
+  (simplify
+   (cmp (bit_xor:c @0 @1) @0)
+   (cmp @1 { build_zero_cst (TREE_TYPE (@1)); }))
+ 
+  /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
+  (simplify
+   (cmp (convert?@3 (bit_xor @0 INTEGER_CST@1)) INTEGER_CST@2)
+   (if (tree_nop_conversion_p (TREE_TYPE (@3), TREE_TYPE (@0)))
+    (cmp @0 (bit_xor @1 (convert @2))))))
+ 
  /* Simplification of math builtins.  */
  
  (define_operator_list LOG BUILT_IN_LOGF BUILT_IN_LOG BUILT_IN_LOGL)
Index: gcc/testsuite/gcc.dg/tree-ssa/forwprop-25.c
===================================================================
*** gcc/testsuite/gcc.dg/tree-ssa/forwprop-25.c	(revision 224942)
--- gcc/testsuite/gcc.dg/tree-ssa/forwprop-25.c	(working copy)
***************
*** 1,5 ****
  /* { dg-do compile } */
! /* { dg-options "-O1 -fdump-tree-forwprop1" } */
  
  struct rtx_def;
  typedef struct rtx_def *rtx;
--- 1,5 ----
  /* { dg-do compile } */
! /* { dg-options "-O1 -fdump-tree-cddce1" } */
  
  struct rtx_def;
  typedef struct rtx_def *rtx;
*************** convert_move (rtx to, rtx from, int unsi
*** 37,43 ****
      0 : 0));
  }
  
! /* { dg-final { scan-tree-dump "Replaced.*!=.*with.*!=.* " "forwprop1"} } */
! 
! 
! 
--- 37,40 ----
      0 : 0));
  }
  
! /* { dg-final { scan-tree-dump-not " ^ " "cddce1"} } */

