[PATCH]: intermediate cleanup for convert expr merging

Tomas Bily <tomby@atrey.karlin.mff.cuni.cz>
Wed Aug 13 17:03:00 GMT 2008


Hi, 

 this patch is only an intermediate cleanup for the convert expr merging,
now that the tuples branch has been merged to mainline.  It mostly uses the
IS_CONVERT_EXPR_CODE_P macro introduced by the tuples branch.
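
For reference, these helpers expand roughly as follows (a sketch of the
tuples-branch tree.h definitions; the exact formatting in the tree may
differ):

  /* True iff CODE is one of the two tree codes used for conversions.  */
  #define IS_CONVERT_EXPR_CODE_P(CODE) \
    ((CODE) == NOP_EXPR || (CODE) == CONVERT_EXPR)

  /* True iff the tree EXP is a conversion expression.  */
  #define CONVERT_EXPR_P(EXP) \
    IS_CONVERT_EXPR_CODE_P (TREE_CODE (EXP))

  /* Switch case labels covering both conversion codes.  */
  #define CASE_CONVERT \
    case NOP_EXPR:     \
    case CONVERT_EXPR

So the patch just replaces explicit NOP_EXPR/CONVERT_EXPR checks and case
labels with these helpers; no behaviour change is intended.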

Bootstrapped and tested on x86_64-linux and i686-linux.

OK for mainline?

Greetings
Tomas

2008-08-12  Tomas Bily  <tbily@suse.cz>

	* tree-ssa-threadedge.c (simplify_control_stmt_condition): Use
	CONVERT_EXPR_P.
	* tree-data-ref.c (split_constant_offset_1): Likewise.
	* tree-inline.c (estimate_operator_cost): Use CASE_CONVERT.
	* tree-sra.c (sra_walk_expr): Likewise.
	* matrix-reorg.c (ssa_accessed_in_assign_rhs): Likewise.
	* tree-ssa-loop-niter.c (expand_simple_operations): Likewise.
	* gimple.h (gimple_assign_cast_p): Use IS_CONVERT_EXPR_CODE_P.
	* gimple.c (gimple_assign_unary_nop_p): Likewise.
	* tree-vect-transform.c (vectorizable_type_demotion)
	(vectorizable_type_promotion): Likewise.
	* tree-inline.c (expand_call_inline): Likewise.
	* expr.c (expand_expr_real_1): Likewise.
	* tree-ssa-dom.c (hashable_expr_equal_p, iterative_hash_hashable_expr)
	(gimple_assign_unary_useless_conversion_p): Likewise.
	* tree-ssa-ccp.c (ccp_fold, fold_gimple_assign): Likewise.
	* fold-const.c (fold_unary): Likewise.
	* tree.c (simple_cst_equal, iterative_hash_expr): Likewise.
	* tree-ssa-loop-im.c (rewrite_bittest): Likewise.
	* tree-vrp.c (extract_range_from_unary_expr)
	(register_edge_assert_for_2, register_edge_assert_for_1): Likewise.
	* cp/tree.c (cp_tree_equal): Likewise.

Index: gcc/tree-vrp.c
===================================================================
--- gcc/tree-vrp.c	(revision 138613)
+++ gcc/tree-vrp.c	(working copy)
@@ -2483,8 +2483,7 @@ extract_range_from_unary_expr (value_ran
     }
 
   /* Handle unary expressions on integer ranges.  */
-  if ((code == NOP_EXPR
-       || code == CONVERT_EXPR)
+  if (IS_CONVERT_EXPR_CODE_P (code)
       && INTEGRAL_TYPE_P (type)
       && INTEGRAL_TYPE_P (TREE_TYPE (op0)))
     {
@@ -3944,8 +3943,7 @@ register_edge_assert_for_2 (tree name, e
       /* Extract NAME2 from the (optional) sign-changing cast.  */
       if (gimple_assign_cast_p (def_stmt))
 	{
-	  if ((gimple_assign_rhs_code (def_stmt) == NOP_EXPR
-	       || gimple_assign_rhs_code (def_stmt) == CONVERT_EXPR)
+	  if (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
 	      && ! TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))
 	      && (TYPE_PRECISION (gimple_expr_type (def_stmt))
 		  == TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))))
@@ -4098,8 +4096,7 @@ register_edge_assert_for_1 (tree op, enu
       retval |= register_edge_assert_for_1 (gimple_assign_rhs1 (op_def),
 					    code, e, bsi);
     }
-  else if (gimple_assign_rhs_code (op_def) == NOP_EXPR
-	   || gimple_assign_rhs_code (op_def) == CONVERT_EXPR)
+  else if (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (op_def)))
     { 
       /* Recurse through the type conversion.  */
       retval |= register_edge_assert_for_1 (gimple_assign_rhs1 (op_def),
Index: gcc/tree-ssa-loop-im.c
===================================================================
--- gcc/tree-ssa-loop-im.c	(revision 138613)
+++ gcc/tree-ssa-loop-im.c	(working copy)
@@ -819,8 +819,7 @@ rewrite_bittest (gimple_stmt_iterator *b
     return stmt;
 
   /* There is a conversion in between possibly inserted by fold.  */
-  if (gimple_assign_rhs_code (stmt1) == NOP_EXPR
-      || gimple_assign_rhs_code (stmt1) == CONVERT_EXPR)
+  if (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt1)))
     {
       t = gimple_assign_rhs1 (stmt1);
       if (TREE_CODE (t) != SSA_NAME
Index: gcc/tree-ssa-loop-niter.c
===================================================================
--- gcc/tree-ssa-loop-niter.c	(revision 138613)
+++ gcc/tree-ssa-loop-niter.c	(working copy)
@@ -1451,8 +1451,7 @@ expand_simple_operations (tree expr)
 
   switch (code)
     {
-    case NOP_EXPR:
-    case CONVERT_EXPR:
+    CASE_CONVERT:
       /* Casts are simple.  */
       ee = expand_simple_operations (e);
       return fold_build1 (code, TREE_TYPE (expr), ee);
Index: gcc/tree.c
===================================================================
--- gcc/tree.c	(revision 138613)
+++ gcc/tree.c	(working copy)
@@ -5022,16 +5022,16 @@ simple_cst_equal (const_tree t1, const_t
   code1 = TREE_CODE (t1);
   code2 = TREE_CODE (t2);
 
-  if (code1 == NOP_EXPR || code1 == CONVERT_EXPR || code1 == NON_LVALUE_EXPR)
+  if (IS_CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
     {
-      if (code2 == NOP_EXPR || code2 == CONVERT_EXPR
+      if (IS_CONVERT_EXPR_CODE_P (code2)
 	  || code2 == NON_LVALUE_EXPR)
 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
       else
 	return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
     }
 
-  else if (code2 == NOP_EXPR || code2 == CONVERT_EXPR
+  else if (IS_CONVERT_EXPR_CODE_P (code2)
 	   || code2 == NON_LVALUE_EXPR)
     return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
 
@@ -5338,8 +5338,7 @@ iterative_hash_expr (const_tree t, hashv
 	  /* Don't hash the type, that can lead to having nodes which
 	     compare equal according to operand_equal_p, but which
 	     have different hash codes.  */
-	  if (code == NOP_EXPR
-	      || code == CONVERT_EXPR
+	  if (IS_CONVERT_EXPR_CODE_P (code)
 	      || code == NON_LVALUE_EXPR)
 	    {
 	      /* Make sure to include signness in the hash computation.  */
Index: gcc/fold-const.c
===================================================================
--- gcc/fold-const.c	(revision 138613)
+++ gcc/fold-const.c	(working copy)
@@ -7628,7 +7624,7 @@ fold_unary (enum tree_code code, tree ty
   arg0 = op0;
   if (arg0)
     {
-      if (code == NOP_EXPR || code == CONVERT_EXPR
+      if (IS_CONVERT_EXPR_CODE_P (code)
 	  || code == FLOAT_EXPR || code == ABS_EXPR)
 	{
 	  /* Don't use STRIP_NOPS, because signedness of argument type
@@ -7677,7 +7673,7 @@ fold_unary (enum tree_code code, tree ty
 	     so we don't get into an infinite recursion loop taking the
 	     conversion out and then back in.  */
 
-	  if ((code == NOP_EXPR || code == CONVERT_EXPR
+	  if ((IS_CONVERT_EXPR_CODE_P (code)
 	       || code == NON_LVALUE_EXPR)
 	      && TREE_CODE (tem) == COND_EXPR
 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
Index: gcc/cp/tree.c
===================================================================
--- gcc/cp/tree.c	(revision 138613)
+++ gcc/cp/tree.c	(working copy)
@@ -1738,12 +1738,12 @@ cp_tree_equal (tree t1, tree t2)
     return false;
 
   for (code1 = TREE_CODE (t1);
-       code1 == NOP_EXPR || code1 == CONVERT_EXPR
+       IS_CONVERT_EXPR_CODE_P (code1)
 	 || code1 == NON_LVALUE_EXPR;
        code1 = TREE_CODE (t1))
     t1 = TREE_OPERAND (t1, 0);
   for (code2 = TREE_CODE (t2);
-       code2 == NOP_EXPR || code2 == CONVERT_EXPR
+       IS_CONVERT_EXPR_CODE_P (code2)
 	 || code1 == NON_LVALUE_EXPR;
        code2 = TREE_CODE (t2))
     t2 = TREE_OPERAND (t2, 0);
Index: gcc/tree-ssa-ccp.c
===================================================================
--- gcc/tree-ssa-ccp.c	(revision 138613)
+++ gcc/tree-ssa-ccp.c	(working copy)
@@ -987,7 +987,7 @@ ccp_fold (gimple stmt)
 		 useless_type_conversion_p places for pointer type conversions
 		 do not apply here.  Substitution later will only substitute to
 		 allowed places.  */
-              if ((subcode == NOP_EXPR || subcode == CONVERT_EXPR)
+              if (IS_CONVERT_EXPR_CODE_P (subcode)
 		  && ((POINTER_TYPE_P (TREE_TYPE (lhs))
 		       && POINTER_TYPE_P (TREE_TYPE (op0))
 		       /* Do not allow differences in volatile qualification
@@ -2677,8 +2677,7 @@ fold_gimple_assign (gimple_stmt_iterator
           if (valid_gimple_rhs_p (result))
 	    return result;
         }
-      else if ((gimple_assign_rhs_code (stmt) == NOP_EXPR
-		|| gimple_assign_rhs_code (stmt) == CONVERT_EXPR)
+      else if (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
 	       && POINTER_TYPE_P (gimple_expr_type (stmt))
 	       && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
 	{
Index: gcc/tree-ssa-dom.c
===================================================================
--- gcc/tree-ssa-dom.c	(revision 138613)
+++ gcc/tree-ssa-dom.c	(working copy)
@@ -381,8 +381,7 @@ hashable_expr_equal_p (const struct hash
       if (expr0->ops.unary.op != expr1->ops.unary.op)
         return false;
 
-      if ((expr0->ops.unary.op == NOP_EXPR
-           || expr0->ops.unary.op == CONVERT_EXPR
+      if ((IS_CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
            || expr0->ops.unary.op == NON_LVALUE_EXPR)
           && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
         return false;
@@ -460,8 +459,7 @@ iterative_hash_hashable_expr (const stru
          Don't hash the type, that can lead to having nodes which
          compare equal according to operand_equal_p, but which
          have different hash codes.  */
-      if (expr->ops.unary.op == NOP_EXPR
-          || expr->ops.unary.op == CONVERT_EXPR
+      if (IS_CONVERT_EXPR_CODE_P (expr->ops.unary.op)
           || expr->ops.unary.op == NON_LVALUE_EXPR)
         val += TYPE_UNSIGNED (expr->type);
 
@@ -1929,8 +1927,7 @@ static bool
 gimple_assign_unary_useless_conversion_p (gimple gs)
 {
   if (is_gimple_assign (gs)
-      && (gimple_assign_rhs_code (gs) == NOP_EXPR
-          || gimple_assign_rhs_code (gs) == CONVERT_EXPR
+      && (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
           || gimple_assign_rhs_code (gs) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR))
     {
Index: gcc/expr.c
===================================================================
--- gcc/expr.c	(revision 138613)
+++ gcc/expr.c	(working copy)
@@ -7134,7 +7134,7 @@ expand_expr_real_1 (tree exp, rtx target
   unsignedp = TYPE_UNSIGNED (type);
 
   ignore = (target == const0_rtx
-	    || ((code == NOP_EXPR || code == CONVERT_EXPR 
+	    || ((IS_CONVERT_EXPR_CODE_P (code)
 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
 		&& TREE_CODE (type) == VOID_TYPE));
 
Index: gcc/matrix-reorg.c
===================================================================
--- gcc/matrix-reorg.c	(revision 138613)
+++ gcc/matrix-reorg.c	(working copy)
@@ -658,8 +658,7 @@ ssa_accessed_in_assign_rhs (gimple stmt,
 
     case SSA_NAME:
     case INDIRECT_REF:
-    case CONVERT_EXPR:
-    case NOP_EXPR:
+    CASE_CONVERT:
     case VIEW_CONVERT_EXPR:
       ssa_accessed_in_tree (gimple_assign_rhs1 (stmt), a);
       break;
Index: gcc/tree-data-ref.c
===================================================================
--- gcc/tree-data-ref.c	(revision 138613)
+++ gcc/tree-data-ref.c	(working copy)
@@ -581,8 +581,7 @@ split_constant_offset_1 (tree type, tree
 	   To compute that ARRAY_REF's element size TYPE_SIZE_UNIT, which
 	   possibly no longer appears in current GIMPLE, might resurface.
 	   This perhaps could run
-	   if (TREE_CODE (var0) == NOP_EXPR
-	       || TREE_CODE (var0) == CONVERT_EXPR)
+	   if (CONVERT_EXPR_P (var0))
 	     {
 	       gimplify_conversion (&var0);
 	       // Attempt to fill in any within var0 found ARRAY_REF's
Index: gcc/tree-sra.c
===================================================================
--- gcc/tree-sra.c	(revision 138613)
+++ gcc/tree-sra.c	(working copy)
@@ -881,8 +881,7 @@ sra_walk_expr (tree *expr_p, gimple_stmt
 	   outer element, to which walk_tree will bring us next.  */
 	goto use_all;
 
-      case NOP_EXPR:
-      case CONVERT_EXPR:
+      CASE_CONVERT:
 	/* Similarly, a nop explicitly wants to look at an object in a
 	   type other than the one we've scalarized.  */
 	goto use_all;
Index: gcc/tree-inline.c
===================================================================
--- gcc/tree-inline.c	(revision 138613)
+++ gcc/tree-inline.c	(working copy)
@@ -2633,10 +2633,9 @@ estimate_operator_cost (enum tree_code c
     /* These are "free" conversions, or their presumed cost
        is folded into other operations.  */
     case RANGE_EXPR:
-    case CONVERT_EXPR:
+    CASE_CONVERT:
     case COMPLEX_EXPR:
     case PAREN_EXPR:
-    case NOP_EXPR:
       return 0;
 
     /* Assign cost of 1 to usual operations.
@@ -3347,8 +3346,7 @@ expand_call_inline (basic_block bb, gimp
   if (is_gimple_assign (stmt))
     {
       gcc_assert (gimple_assign_single_p (stmt)
-		  || gimple_assign_rhs_code (stmt) == NOP_EXPR
-		  || gimple_assign_rhs_code (stmt) == CONVERT_EXPR);
+		  || IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
     }
 
Index: gcc/tree-vect-transform.c
===================================================================
--- gcc/tree-vect-transform.c	(revision 138613)
+++ gcc/tree-vect-transform.c	(working copy)
@@ -4272,7 +4272,7 @@ vectorizable_type_demotion (gimple stmt,
     return false;
 
   code = gimple_assign_rhs_code (stmt);
-  if (code != NOP_EXPR && code != CONVERT_EXPR)
+  if (!IS_CONVERT_EXPR_CODE_P (code))
     return false;
 
   op0 = gimple_assign_rhs1 (stmt);
@@ -4303,7 +4303,7 @@ vectorizable_type_demotion (gimple stmt,
 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0)))
 	 || (SCALAR_FLOAT_TYPE_P (TREE_TYPE (scalar_dest))
 	     && SCALAR_FLOAT_TYPE_P (TREE_TYPE (op0))
-	     && (code == NOP_EXPR || code == CONVERT_EXPR))))
+	     && IS_CONVERT_EXPR_CODE_P (code))))
     return false;
 
   /* Check the operands of the operation.  */
@@ -4424,7 +4424,7 @@ vectorizable_type_promotion (gimple stmt
     return false;
 
   code = gimple_assign_rhs_code (stmt);
-  if (code != NOP_EXPR && code != CONVERT_EXPR
+  if (!IS_CONVERT_EXPR_CODE_P (code)
       && code != WIDEN_MULT_EXPR)
     return false;
 
@@ -4456,7 +4456,7 @@ vectorizable_type_promotion (gimple stmt
 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0)))
 	 || (SCALAR_FLOAT_TYPE_P (TREE_TYPE (scalar_dest))
 	     && SCALAR_FLOAT_TYPE_P (TREE_TYPE (op0))
-	     && (code == CONVERT_EXPR || code == NOP_EXPR))))
+	     && IS_CONVERT_EXPR_CODE_P (code))))
     return false;
 
   /* Check the operands of the operation.  */
Index: gcc/gimple.c
===================================================================
--- gcc/gimple.c	(revision 138613)
+++ gcc/gimple.c	(working copy)
@@ -1897,8 +1897,7 @@ bool
 gimple_assign_unary_nop_p (gimple gs)
 {
   return (gimple_code (gs) == GIMPLE_ASSIGN
-          && (gimple_assign_rhs_code (gs) == NOP_EXPR
-              || gimple_assign_rhs_code (gs) == CONVERT_EXPR
+          && (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
               || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
           && gimple_assign_rhs1 (gs) != error_mark_node
           && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
Index: gcc/gimple.h
===================================================================
--- gcc/gimple.h	(revision 138613)
+++ gcc/gimple.h	(working copy)
@@ -1842,8 +1842,7 @@ gimple_assign_cast_p (gimple s)
   if (is_gimple_assign (s))
     {
       enum tree_code sc = gimple_assign_rhs_code (s);
-      return sc == NOP_EXPR
-	     || sc == CONVERT_EXPR
+      return IS_CONVERT_EXPR_CODE_P (sc)
 	     || sc == VIEW_CONVERT_EXPR
 	     || sc == FIX_TRUNC_EXPR;
     }
Index: gcc/tree-ssa-threadedge.c
===================================================================
--- gcc/tree-ssa-threadedge.c	(revision 138613)
+++ gcc/tree-ssa-threadedge.c	(working copy)
@@ -483,8 +483,7 @@ simplify_control_stmt_condition (edge e,
 
       cached_lhs = fold_binary (cond_code, boolean_type_node, op0, op1);
       if (cached_lhs)
-        while (TREE_CODE (cached_lhs) == NOP_EXPR
-               || TREE_CODE (cached_lhs) == CONVERT_EXPR)
+	while (CONVERT_EXPR_P (cached_lhs))
           cached_lhs = TREE_OPERAND (cached_lhs, 0);
 
       fold_undefer_overflow_warnings ((cached_lhs


