[PATCH][7/n] Remove GENERIC stmt combining from SCCVN

Richard Biener <rguenther@suse.de>
Mon Jun 29 12:44:00 GMT 2015


This moves a few more patterns from fold-const.c to match.pd, also
covering two cases where fold basically recurses (calling
fold_unary or using negate_expr_p/negate_expr).  For these cases
we have to implement a subset of all possible cases (all fold
cases basically searching for some stuff arbitrarily deep in
an expression tree would need to be converted to a propagator-like
pass rather than too many patterns).

It also moves (x & y) | x -> x before (x | CST1) & CST2 -> (x & CST2) | 
(CST1 & CST2) so it has the chance to match (x | 1) & 1 which with
the first pattern simplifies without needing intermediate stmts
(which I don't allow for SCCVN for efficiency reasons).

As for other generator inputs what comes earlier is matched earlier.

Bootstrap and regtest running on x86_64-unknown-linux-gnu.

I think next I'll merge trunk into match-and-simplify and see what
patterns I have implemented there and will either kill them off
if they are broken or move them.

Richard.

2015-06-29  Richard Biener  <rguenther@suse.de>

	* fold-const.c (fold_unary_loc): Move abs(abs(x)) -> abs(x),
	~ (-A) to A - 1, ~ (A - 1) or ~ (A + -1) to -A and some cases of
	~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify to ...
	* match.pd: ... here.
	Add a few cases of A - B -> A + (-B) when B "easily" negates.
	Move (x & y) | x -> x and friends before
	(x | CST1) & CST2 -> (x & CST2) | (CST1 & CST2).

Index: gcc/fold-const.c
===================================================================
*** gcc/fold-const.c	(revision 225115)
--- gcc/fold-const.c	(working copy)
*************** fold_unary_loc (location_t loc, enum tre
*** 8131,8139 ****
  						  TREE_TYPE (targ0),
  						  targ0));
  	}
-       /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
-       else if (TREE_CODE (arg0) == ABS_EXPR)
- 	return arg0;
  
        /* Strip sign ops from argument.  */
        if (TREE_CODE (type) == REAL_TYPE)
--- 8131,8136 ----
*************** fold_unary_loc (location_t loc, enum tre
*** 8161,8193 ****
        return NULL_TREE;
  
      case BIT_NOT_EXPR:
-       /* Convert ~ (-A) to A - 1.  */
-       if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
- 	return fold_build2_loc (loc, MINUS_EXPR, type,
- 			    fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
- 			    build_int_cst (type, 1));
-       /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
-       else if (INTEGRAL_TYPE_P (type)
- 	       && ((TREE_CODE (arg0) == MINUS_EXPR
- 		    && integer_onep (TREE_OPERAND (arg0, 1)))
- 		   || (TREE_CODE (arg0) == PLUS_EXPR
- 		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
- 	{
- 	  /* Perform the negation in ARG0's type and only then convert
- 	     to TYPE as to avoid introducing undefined behavior.  */
- 	  tree t = fold_build1_loc (loc, NEGATE_EXPR,
- 				    TREE_TYPE (TREE_OPERAND (arg0, 0)),
- 				    TREE_OPERAND (arg0, 0));
- 	  return fold_convert_loc (loc, type, t);
- 	}
        /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
!       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
! 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
! 			       	     fold_convert_loc (loc, type,
! 						       TREE_OPERAND (arg0, 0)))))
  	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
! 			    fold_convert_loc (loc, type,
! 					      TREE_OPERAND (arg0, 1)));
        else if (TREE_CODE (arg0) == BIT_XOR_EXPR
  	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
  			       	     fold_convert_loc (loc, type,
--- 8158,8171 ----
        return NULL_TREE;
  
      case BIT_NOT_EXPR:
        /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
!       if (TREE_CODE (arg0) == BIT_XOR_EXPR
! 	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
! 				    fold_convert_loc (loc, type,
! 						      TREE_OPERAND (arg0, 0)))))
  	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
! 				fold_convert_loc (loc, type,
! 						  TREE_OPERAND (arg0, 1)));
        else if (TREE_CODE (arg0) == BIT_XOR_EXPR
  	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
  			       	     fold_convert_loc (loc, type,
Index: gcc/match.pd
===================================================================
*** gcc/match.pd	(revision 225115)
--- gcc/match.pd	(working copy)
*************** (define_operator_list swapped_tcc_compar
*** 378,389 ****
--- 378,409 ----
   (bit_and @0 @1))
  
  (simplify
+  (abs (abs@1 @0))
+  @1)
+ (simplify
   (abs (negate @0))
   (abs @0))
  (simplify
   (abs tree_expr_nonnegative_p@0)
   @0)
  
+ /* A - B -> A + (-B) if B is easily negatable.  This just covers
+    very few cases of "easily negatable", effectively inlining negate_expr_p.  */
+ (simplify
+  (minus @0 INTEGER_CST@1)
+  (if (!TYPE_OVERFLOW_SANITIZED (type)
+       && (TYPE_OVERFLOW_WRAPS (type)
+           || may_negate_without_overflow_p (@1)))
+   (plus @0 (negate @1))))
+ (simplify
+  (minus @0 REAL_CST@1)
+  (if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (@1)))
+   (plus @0 (negate @1))))
+ (simplify
+  (minus @0 VECTOR_CST@1)
+  (if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_WRAPS (type))
+   (plus @0 (negate @1))))
+ 
  
  /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
     when profitable.
*************** (define_operator_list swapped_tcc_compar
*** 415,420 ****
--- 435,453 ----
  	   || TYPE_PRECISION (type) != GET_MODE_PRECISION (TYPE_MODE (type))))
     (convert (bitop @0 (convert @1))))))
  
+ (for bitop (bit_and bit_ior)
+      rbitop (bit_ior bit_and)
+   /* (x | y) & x -> x */
+   /* (x & y) | x -> x */
+  (simplify
+   (bitop:c (rbitop:c @0 @1) @0)
+   @0)
+  /* (~x | y) & x -> x & y */
+  /* (~x & y) | x -> x | y */
+  (simplify
+   (bitop:c (rbitop:c (bit_not @0) @1) @0)
+   (bitop @0 @1)))
+ 
  /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
  (for bitop (bit_and bit_ior bit_xor)
   (simplify
*************** (define_operator_list swapped_tcc_compar
*** 462,480 ****
    (op:c truth_valued_p@0 (logical_inverted_value @0))
    { constant_boolean_node (true, type); }))
  
- (for bitop (bit_and bit_ior)
-      rbitop (bit_ior bit_and)
-   /* (x | y) & x -> x */
-   /* (x & y) | x -> x */
-  (simplify
-   (bitop:c (rbitop:c @0 @1) @0)
-   @0)
-  /* (~x | y) & x -> x & y */
-  /* (~x & y) | x -> x | y */
-  (simplify
-   (bitop:c (rbitop:c (bit_not @0) @1) @0)
-   (bitop @0 @1)))
- 
  /* If arg1 and arg2 are booleans (or any single bit type)
     then try to simplify:
  
--- 495,500 ----
*************** (define_operator_list swapped_tcc_compar
*** 503,508 ****
--- 523,554 ----
    (bit_not (bit_not @0))
    @0)
  
+ /* Convert ~ (-A) to A - 1.  */
+ (simplify
+  (bit_not (convert? (negate @0)))
+  (if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
+   (convert (minus @0 { build_one_cst (TREE_TYPE (@0)); }))))
+ 
+ /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
+ (simplify
+  (bit_not (convert? (minus @0 integer_onep)))
+  (if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
+   (convert (negate @0))))
+ (simplify
+  (bit_not (convert? (plus @0 integer_all_onesp)))
+  (if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
+   (convert (negate @0))))
+ 
+ /* Part of convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
+ (simplify
+  (bit_not (convert? (bit_xor @0 INTEGER_CST@1)))
+  (if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
+   (convert (bit_xor @0 (bit_not @1)))))
+ (simplify
+  (bit_not (convert? (bit_xor:c (bit_not @0) @1)))
+  (if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
+   (convert (bit_xor @0 @1))))
+ 
  /* (x & ~m) | (y & m) -> ((x ^ y) & m) ^ x */
  (simplify
    (bit_ior:c (bit_and:c@3 @0 (bit_not @2)) (bit_and:c@4 @1 @2))



More information about the Gcc-patches mailing list