This is the mail archive of the
gcc-patches@gcc.gnu.org
mailing list for the GCC project.
[PATCH][22/n] Remove GENERIC stmt combining from SCCVN
- From: Richard Biener <rguenther at suse dot de>
- To: gcc-patches at gcc dot gnu dot org
- Date: Tue, 28 Jul 2015 13:52:17 +0200 (CEST)
- Subject: [PATCH][22/n] Remove GENERIC stmt combining from SCCVN
- Authentication-results: sourceware.org; auth=none
This implements some remaining parts of fold_comparison's address
comparison handling, though still not all of it. Even so, it is good
enough to keep fold_stmt from regressing when not dispatching to fold_binary.
Bootstrapped and tested on x86_64-unknown-linux-gnu, applied to trunk.
Richard.
2015-07-28 Richard Biener <rguenther@suse.de>
* match.pd: Add more simplification of address comparisons.
Index: gcc/match.pd
===================================================================
--- gcc/match.pd (revision 226299)
+++ gcc/match.pd (working copy)
@@ -1828,6 +1828,46 @@ (define_operator_list CBRT BUILT_IN_CBRT
(if (tree_single_nonzero_warnv_p (@0, NULL))
{ constant_boolean_node (cmp == NE_EXPR, type); })))
+/* When the addresses are not directly of decls compare base and offset.
+ This implements some remaining parts of fold_comparison address
+ comparisons but still no complete part of it. Still it is good
+ enough to make fold_stmt not regress when not dispatching to fold_binary. */
+(for cmp (simple_comparison)
+ (simplify
+ (cmp (convert? addr@0) (convert? addr@1))
+ (with
+ {
+ HOST_WIDE_INT off0, off1;
+ tree base0 = get_addr_base_and_unit_offset (TREE_OPERAND (@0, 0), &off0);
+ tree base1 = get_addr_base_and_unit_offset (TREE_OPERAND (@1, 0), &off1);
+ if (base0 && TREE_CODE (base0) == MEM_REF)
+ {
+ off0 += mem_ref_offset (base0).to_short_addr ();
+ base0 = TREE_OPERAND (base0, 0);
+ }
+ if (base1 && TREE_CODE (base1) == MEM_REF)
+ {
+ off1 += mem_ref_offset (base1).to_short_addr ();
+ base1 = TREE_OPERAND (base1, 0);
+ }
+ }
+ (if (base0 && base1
+ && operand_equal_p (base0, base1, 0)
+ && (cmp == EQ_EXPR || cmp == NE_EXPR
+ || POINTER_TYPE_OVERFLOW_UNDEFINED))
+ (switch
+ (if (cmp == EQ_EXPR)
+ { constant_boolean_node (off0 == off1, type); })
+ (if (cmp == NE_EXPR)
+ { constant_boolean_node (off0 != off1, type); })
+ (if (cmp == LT_EXPR)
+ { constant_boolean_node (off0 < off1, type); })
+ (if (cmp == LE_EXPR)
+ { constant_boolean_node (off0 <= off1, type); })
+ (if (cmp == GE_EXPR)
+ { constant_boolean_node (off0 >= off1, type); })
+ (if (cmp == GT_EXPR)
+ { constant_boolean_node (off0 > off1, type); }))))))
/* Non-equality compare simplifications from fold_binary */
(for cmp (lt gt le ge)