This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

Re: [tree-ssa][RFC] An interface to fold(tree)


On Sat, Oct 18, 2003 at 10:18:02PM -0600, law@redhat.com wrote:
> In message <Pine.LNX.4.44.0310172016120.28314-100000@www.eyesopen.com>, Roger S
> ayle writes:
>  >
>  >Just to agree with Jeff, I much prefer this approach.
>  >
>  >Instead of having a single fold_tree with an arbitrary number of
>  >arguments, some of which may be NULL_TREE,  I'd suggest pushing
>  >the role model of "simplify_rtx" even further,  and have several
>  >"baby-folds": fold_unary, fold_binary, fold_ternary, etc...
> Yea, that's probably better.
> 

Ok, I've given it a try in this direction.
The problem is that the 3000 lines of the fold function are wired with gotos.
I think that in all cases it would be better to remove the gotos by moving the 
code to separate functions.  

My first idea was to move the code of each switch-case into a separate function.
This would simplify the code of the main fold function
since special initializations go into the smaller functions.  OTOH, the small
functions would provide a good interface for accessing a small part of the 
folder.  In pseudo-code, the folder would look like:

fold_tree (code, type, arg0, arg1, arg2, arg3)
{
  switch (code)
    {
      case PLUS_EXPR:
        return tree_fold_plus_expr (arg0, arg1);

      case MIN_EXPR:
        return tree_fold_min_expr (arg0, arg1);
      ...
    }
}

So my plan is:
1. remove the gotos from the fold function,
2. move the code of switch-cases in separate functions,
3. rename the old fold function to fold_tree,
4. write the body of the new fold function with a call to the fold_tree.


I'm bootstrapping step 1:

2003-10-06  Sebastian Pop  <pop@cri.ensmp.fr>

	* fold-const.c (tree_fold_goto_truth_andor, 
	tree_fold_goto_bit_ior, tree_fold_goto_bit_rotate, 
	tree_fold_goto_associate, tree_fold_goto_binary,
	tree_fold_goto_shift): New functions.
	(fold): Replace gotos with calls to these functions.


Index: fold-const.c
===================================================================
RCS file: /home/seb/cvsroot/gcc-cvs/gcc/gcc/fold-const.c,v
retrieving revision 1.213.2.52
diff -c -r1.213.2.52 fold-const.c
*** fold-const.c	16 Oct 2003 20:22:23 -0000	1.213.2.52
--- fold-const.c	20 Oct 2003 14:17:08 -0000
***************
*** 58,63 ****
--- 58,71 ----
  #include "langhooks.h"
  #include "md5.h"
  
+ tree tree_fold_goto_binary (enum tree_code, tree, tree, int, tree, tree);
+ tree tree_fold_goto_shift (enum tree_code, tree, tree, tree, int, tree, tree);
+ tree tree_fold_goto_associate (enum tree_code, tree, tree, tree, int, tree, tree);
+ tree tree_fold_goto_bit_rotate (enum tree_code, tree, tree, tree, int, tree, tree);
+ tree tree_fold_goto_bit_ior (tree, tree, tree, int, tree, tree);
+ tree tree_fold_goto_truth_andor (enum tree_code, tree, tree, tree, tree);
+ 
+ 
  static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
  static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
  static bool negate_mathfn_p (enum built_in_function);
***************
*** 5146,5151 ****
--- 5154,5518 ----
    return 0;
  }
  
+ tree 
+ tree_fold_goto_truth_andor (enum tree_code code, 
+ 			    tree type, 
+ 			    tree arg0, 
+ 			    tree arg1, 
+ 			    tree t)
+ {
+   tree tem;
+   
+   /* We only do these simplifications if we are optimizing.  */
+   if (!optimize)
+     return t;
+ 
+   /* Check for things like (A || B) && (A || C).  We can convert this
+      to A || (B && C).  Note that either operator can be any of the four
+      truth and/or operations and the transformation will still be
+      valid.   Also note that we only care about order for the
+      ANDIF and ORIF operators.  If B contains side effects, this
+      might change the truth-value of A.  */
+   if (TREE_CODE (arg0) == TREE_CODE (arg1)
+       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
+ 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
+ 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
+ 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
+       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
+     {
+       tree a00 = TREE_OPERAND (arg0, 0);
+       tree a01 = TREE_OPERAND (arg0, 1);
+       tree a10 = TREE_OPERAND (arg1, 0);
+       tree a11 = TREE_OPERAND (arg1, 1);
+       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
+ 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
+ 			 && (code == TRUTH_AND_EXPR
+ 			     || code == TRUTH_OR_EXPR));
+ 
+       if (operand_equal_p (a00, a10, 0))
+ 	return fold (build (TREE_CODE (arg0), type, a00,
+ 			    fold (build (code, type, a01, a11))));
+       else if (commutative && operand_equal_p (a00, a11, 0))
+ 	return fold (build (TREE_CODE (arg0), type, a00,
+ 			    fold (build (code, type, a01, a10))));
+       else if (commutative && operand_equal_p (a01, a10, 0))
+ 	return fold (build (TREE_CODE (arg0), type, a01,
+ 			    fold (build (code, type, a00, a11))));
+ 
+       /* This case is tricky because we must either have commutative
+ 	 operators or else A10 must not have side-effects.  */
+ 
+       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
+ 	       && operand_equal_p (a01, a11, 0))
+ 	return fold (build (TREE_CODE (arg0), type,
+ 			    fold (build (code, type, a00, a10)),
+ 			    a01));
+     }
+ 
+   /* See if we can build a range comparison.  */
+   if (0 != (tem = fold_range_test (t)))
+     return tem;
+ 
+   /* Check for the possibility of merging component references.  If our
+      lhs is another similar operation, try to merge its rhs with our
+      rhs.  Then try to merge our lhs and rhs.  */
+   if (TREE_CODE (arg0) == code
+       && 0 != (tem = fold_truthop (code, type,
+ 				   TREE_OPERAND (arg0, 1), arg1)))
+     return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
+ 
+   if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
+     return tem;
+ 
+   return t;
+ }
+ 
+ tree
+ tree_fold_goto_bit_ior (tree type,
+ 			tree arg0, 
+ 			tree arg1, 
+ 			int wins,
+ 			tree t, 
+ 			tree t1)
+ {
+   if (integer_all_onesp (arg1))
+     return omit_one_operand (type, arg1, arg0);
+   if (integer_zerop (arg1))
+     return non_lvalue (convert (type, arg0));
+   t1 = distribute_bit_expr (BIT_IOR_EXPR, type, arg0, arg1);
+   if (t1 != NULL_TREE)
+     return t1;
+   
+   /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
+      
+   This results in more efficient code for machines without a NAND
+   instruction.  Combine will canonicalize to the first form
+   which will allow use of NAND instructions provided by the
+   backend if they exist.  */
+   if (TREE_CODE (arg0) == BIT_NOT_EXPR
+       && TREE_CODE (arg1) == BIT_NOT_EXPR)
+     {
+       return fold (build1 (BIT_NOT_EXPR, type,
+ 			   build (BIT_AND_EXPR, type,
+ 				  TREE_OPERAND (arg0, 0),
+ 				  TREE_OPERAND (arg1, 0))));
+     }
+   
+   /* See if this can be simplified into a rotate first.  If that
+      is unsuccessful continue in the association code.  */
+   return tree_fold_goto_bit_rotate (BIT_IOR_EXPR, type, arg0, arg1, wins, t, t1);
+ }
+ 
+ tree 
+ tree_fold_goto_bit_rotate (enum tree_code code,
+ 		      tree type, 
+ 		      tree arg0, 
+ 		      tree arg1, 
+ 		      int wins, 
+ 		      tree t, 
+ 		      tree t1)
+ {
+   /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
+      is a rotate of A by C1 bits.  */
+   /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
+      is a rotate of A by B bits.  */
+   {
+     enum tree_code code0, code1;
+     code0 = TREE_CODE (arg0);
+     code1 = TREE_CODE (arg1);
+     if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
+ 	 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
+ 	&& operand_equal_p (TREE_OPERAND (arg0, 0),
+ 			    TREE_OPERAND (arg1, 0), 0)
+ 	&& TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
+       {
+ 	tree tree01, tree11;
+ 	enum tree_code code01, code11;
+ 	
+ 	tree01 = TREE_OPERAND (arg0, 1);
+ 	tree11 = TREE_OPERAND (arg1, 1);
+ 	STRIP_NOPS (tree01);
+ 	STRIP_NOPS (tree11);
+ 	code01 = TREE_CODE (tree01);
+ 	code11 = TREE_CODE (tree11);
+ 	if (code01 == INTEGER_CST
+ 	    && code11 == INTEGER_CST
+ 	    && TREE_INT_CST_HIGH (tree01) == 0
+ 	    && TREE_INT_CST_HIGH (tree11) == 0
+ 	    && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
+ 		== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
+ 	  return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
+ 			code0 == LSHIFT_EXPR ? tree01 : tree11);
+ 	else if (code11 == MINUS_EXPR)
+ 	  {
+ 	    tree tree110, tree111;
+ 	    tree110 = TREE_OPERAND (tree11, 0);
+ 	    tree111 = TREE_OPERAND (tree11, 1);
+ 	    STRIP_NOPS (tree110);
+ 	    STRIP_NOPS (tree111);
+ 	    if (TREE_CODE (tree110) == INTEGER_CST
+ 		&& 0 == compare_tree_int (tree110,
+ 					  TYPE_PRECISION
+ 					  (TREE_TYPE (TREE_OPERAND
+ 						      (arg0, 0))))
+ 		&& operand_equal_p (tree01, tree111, 0))
+ 	      return build ((code0 == LSHIFT_EXPR
+ 			     ? LROTATE_EXPR
+ 			     : RROTATE_EXPR),
+ 			    type, TREE_OPERAND (arg0, 0), tree01);
+ 	  }
+ 	else if (code01 == MINUS_EXPR)
+ 	  {
+ 	    tree tree010, tree011;
+ 	    tree010 = TREE_OPERAND (tree01, 0);
+ 	    tree011 = TREE_OPERAND (tree01, 1);
+ 	    STRIP_NOPS (tree010);
+ 	    STRIP_NOPS (tree011);
+ 	    if (TREE_CODE (tree010) == INTEGER_CST
+ 		&& 0 == compare_tree_int (tree010,
+ 					  TYPE_PRECISION
+ 					  (TREE_TYPE (TREE_OPERAND
+ 						      (arg0, 0))))
+ 		&& operand_equal_p (tree11, tree011, 0))
+ 	      return build ((code0 != LSHIFT_EXPR
+ 			     ? LROTATE_EXPR
+ 			     : RROTATE_EXPR),
+ 			    type, TREE_OPERAND (arg0, 0), tree11);
+ 	  }
+       }
+   }
+   
+   return tree_fold_goto_associate (code, type, arg0, arg1, wins, t, t1);
+ }
+ 
+ tree 
+ tree_fold_goto_associate (enum tree_code code,
+ 		     tree type, 
+ 		     tree arg0, 
+ 		     tree arg1, 
+ 		     int wins,
+ 		     tree t, 
+ 		     tree t1)
+ {
+   /* In most languages, can't associate operations on floats through
+      parentheses.  Rather than remember where the parentheses were, we
+      don't associate floats at all, unless the user has specified
+      -funsafe-math-optimizations.  */
+ 
+   if (! wins
+       && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
+     {
+       tree var0, con0, lit0, minus_lit0;
+       tree var1, con1, lit1, minus_lit1;
+ 
+       /* Split both trees into variables, constants, and literals.  Then
+ 	 associate each group together, the constants with literals,
+ 	 then the result with variables.  This increases the chances of
+ 	 literals being recombined later and of generating relocatable
+ 	 expressions for the sum of a constant and literal.  */
+       var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
+       var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
+ 			 code == MINUS_EXPR);
+ 
+       /* Only do something if we found more than two objects.  Otherwise,
+ 	 nothing has changed and we risk infinite recursion.  */
+       if (2 < ((var0 != 0) + (var1 != 0)
+ 	       + (con0 != 0) + (con1 != 0)
+ 	       + (lit0 != 0) + (lit1 != 0)
+ 	       + (minus_lit0 != 0) + (minus_lit1 != 0)))
+ 	{
+ 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
+ 	  if (code == MINUS_EXPR)
+ 	    code = PLUS_EXPR;
+ 
+ 	  var0 = associate_trees (var0, var1, code, type);
+ 	  con0 = associate_trees (con0, con1, code, type);
+ 	  lit0 = associate_trees (lit0, lit1, code, type);
+ 	  minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
+ 
+ 	  /* Preserve the MINUS_EXPR if the negative part of the literal is
+ 	     greater than the positive part.  Otherwise, the multiplicative
+ 	     folding code (i.e extract_muldiv) may be fooled in case
+ 	     unsigned constants are subtracted, like in the following
+ 	     example: ((X*2 + 4) - 8U)/2.  */
+ 	  if (minus_lit0 && lit0)
+ 	    {
+ 	      if (TREE_CODE (lit0) == INTEGER_CST
+ 		  && TREE_CODE (minus_lit0) == INTEGER_CST
+ 		  && tree_int_cst_lt (lit0, minus_lit0))
+ 		{
+ 		  minus_lit0 = associate_trees (minus_lit0, lit0,
+ 						MINUS_EXPR, type);
+ 		  lit0 = 0;
+ 		}
+ 	      else
+ 		{
+ 		  lit0 = associate_trees (lit0, minus_lit0,
+ 					  MINUS_EXPR, type);
+ 		  minus_lit0 = 0;
+ 		}
+ 	    }
+ 	  if (minus_lit0)
+ 	    {
+ 	      if (con0 == 0)
+ 		return convert (type, associate_trees (var0, minus_lit0,
+ 						       MINUS_EXPR, type));
+ 	      else
+ 		{
+ 		  con0 = associate_trees (con0, minus_lit0,
+ 					  MINUS_EXPR, type);
+ 		  return convert (type, associate_trees (var0, con0,
+ 							 PLUS_EXPR, type));
+ 		}
+ 	    }
+ 
+ 	  con0 = associate_trees (con0, lit0, code, type);
+ 	  return convert (type, associate_trees (var0, con0, code, type));
+ 	}
+     }
+   
+   return tree_fold_goto_binary (code, arg0, arg1, wins, t, t1);
+ }
+ 
+ tree 
+ tree_fold_goto_shift (enum tree_code code, 
+ 		      tree type, 
+ 		      tree arg0, 
+ 		      tree arg1, 
+ 		      int wins,
+ 		      tree t, 
+ 		      tree t1)
+ {
+   if (integer_zerop (arg1))
+     return non_lvalue (convert (type, arg0));
+   if (integer_zerop (arg0))
+     return omit_one_operand (type, arg0, arg1);
+ 
+   /* Since negative shift count is not well-defined,
+      don't try to compute it in the compiler.  */
+   if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
+     return t;
+   /* Rewrite an LROTATE_EXPR by a constant into an
+      RROTATE_EXPR by a new constant.  */
+   if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
+     {
+       tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
+       tem = convert (TREE_TYPE (arg1), tem);
+       tem = const_binop (MINUS_EXPR, tem, arg1, 0);
+       return fold (build (RROTATE_EXPR, type, arg0, tem));
+     }
+ 
+   /* If we have a rotate of a bit operation with the rotate count and
+      the second operand of the bit operation both constant,
+      permute the two operations.  */
+   if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
+       && (TREE_CODE (arg0) == BIT_AND_EXPR
+ 	  || TREE_CODE (arg0) == BIT_IOR_EXPR
+ 	  || TREE_CODE (arg0) == BIT_XOR_EXPR)
+       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+     return fold (build (TREE_CODE (arg0), type,
+ 			fold (build (code, type,
+ 				     TREE_OPERAND (arg0, 0), arg1)),
+ 			fold (build (code, type,
+ 				     TREE_OPERAND (arg0, 1), arg1))));
+ 
+   /* Two consecutive rotates adding up to the width of the mode can
+      be ignored.  */
+   if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
+       && TREE_CODE (arg0) == RROTATE_EXPR
+       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+       && TREE_INT_CST_HIGH (arg1) == 0
+       && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
+       && ((TREE_INT_CST_LOW (arg1)
+ 	   + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
+ 	  == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
+     return TREE_OPERAND (arg0, 0);
+ 
+   return tree_fold_goto_binary (code, arg0, arg1, wins, t, t1);
+ }
+ 
+ tree 
+ tree_fold_goto_binary (enum tree_code code,
+ 		       tree arg0, 
+ 		       tree arg1, 
+ 		       int wins, 
+ 		       tree t, 
+ 		       tree t1)
+ {
+   if (wins)
+     t1 = const_binop (code, arg0, arg1, 0);
+   if (t1 != NULL_TREE)
+     {
+       /* The return value should always have
+ 	 the same type as the original expression.  */
+       if (TREE_TYPE (t1) != TREE_TYPE (t))
+ 	t1 = convert (TREE_TYPE (t), t1);
+       
+       return t1;
+     }
+   return t;
+ }
+ 
  /* Perform constant folding and related simplification of EXPR.
     The related simplifications include x*1 => x, x*0 => 0, etc.,
     and application of the associative law.
***************
*** 5696,5705 ****
  	      && integer_zerop (const_binop (BIT_AND_EXPR,
  					     TREE_OPERAND (arg0, 1),
  					     TREE_OPERAND (arg1, 1), 0)))
! 	    {
! 	      code = BIT_IOR_EXPR;
! 	      goto bit_ior;
! 	    }
  
  	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
  	     (plus (plus (mult) (mult)) (foo)) so that we can
--- 6063,6069 ----
  	      && integer_zerop (const_binop (BIT_AND_EXPR,
  					     TREE_OPERAND (arg0, 1),
  					     TREE_OPERAND (arg1, 1), 0)))
! 	    return tree_fold_goto_bit_ior (type, arg0, arg1, wins, t, t1);
  
  	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
  	     (plus (plus (mult) (mult)) (foo)) so that we can
***************
*** 5865,6032 ****
  	    }
  	}
  
!      bit_rotate:
!       /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
! 	 is a rotate of A by C1 bits.  */
!       /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
! 	 is a rotate of A by B bits.  */
!       {
! 	enum tree_code code0, code1;
! 	code0 = TREE_CODE (arg0);
! 	code1 = TREE_CODE (arg1);
! 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
! 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
! 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
! 			        TREE_OPERAND (arg1, 0), 0)
! 	    && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
! 	  {
! 	    tree tree01, tree11;
! 	    enum tree_code code01, code11;
! 
! 	    tree01 = TREE_OPERAND (arg0, 1);
! 	    tree11 = TREE_OPERAND (arg1, 1);
! 	    STRIP_NOPS (tree01);
! 	    STRIP_NOPS (tree11);
! 	    code01 = TREE_CODE (tree01);
! 	    code11 = TREE_CODE (tree11);
! 	    if (code01 == INTEGER_CST
! 		&& code11 == INTEGER_CST
! 		&& TREE_INT_CST_HIGH (tree01) == 0
! 		&& TREE_INT_CST_HIGH (tree11) == 0
! 		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
! 		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
! 	      return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
! 			    code0 == LSHIFT_EXPR ? tree01 : tree11);
! 	    else if (code11 == MINUS_EXPR)
! 	      {
! 		tree tree110, tree111;
! 		tree110 = TREE_OPERAND (tree11, 0);
! 		tree111 = TREE_OPERAND (tree11, 1);
! 		STRIP_NOPS (tree110);
! 		STRIP_NOPS (tree111);
! 		if (TREE_CODE (tree110) == INTEGER_CST
! 		    && 0 == compare_tree_int (tree110,
! 					      TYPE_PRECISION
! 					      (TREE_TYPE (TREE_OPERAND
! 							  (arg0, 0))))
! 		    && operand_equal_p (tree01, tree111, 0))
! 		  return build ((code0 == LSHIFT_EXPR
! 				 ? LROTATE_EXPR
! 				 : RROTATE_EXPR),
! 				type, TREE_OPERAND (arg0, 0), tree01);
! 	      }
! 	    else if (code01 == MINUS_EXPR)
! 	      {
! 		tree tree010, tree011;
! 		tree010 = TREE_OPERAND (tree01, 0);
! 		tree011 = TREE_OPERAND (tree01, 1);
! 		STRIP_NOPS (tree010);
! 		STRIP_NOPS (tree011);
! 		if (TREE_CODE (tree010) == INTEGER_CST
! 		    && 0 == compare_tree_int (tree010,
! 					      TYPE_PRECISION
! 					      (TREE_TYPE (TREE_OPERAND
! 							  (arg0, 0))))
! 		    && operand_equal_p (tree11, tree011, 0))
! 		  return build ((code0 != LSHIFT_EXPR
! 				 ? LROTATE_EXPR
! 				 : RROTATE_EXPR),
! 				type, TREE_OPERAND (arg0, 0), tree11);
! 	      }
! 	  }
!       }
! 
!     associate:
!       /* In most languages, can't associate operations on floats through
! 	 parentheses.  Rather than remember where the parentheses were, we
! 	 don't associate floats at all, unless the user has specified
! 	 -funsafe-math-optimizations.  */
! 
!       if (! wins
! 	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
! 	{
! 	  tree var0, con0, lit0, minus_lit0;
! 	  tree var1, con1, lit1, minus_lit1;
! 
! 	  /* Split both trees into variables, constants, and literals.  Then
! 	     associate each group together, the constants with literals,
! 	     then the result with variables.  This increases the chances of
! 	     literals being recombined later and of generating relocatable
! 	     expressions for the sum of a constant and literal.  */
! 	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
! 	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
! 			     code == MINUS_EXPR);
! 
! 	  /* Only do something if we found more than two objects.  Otherwise,
! 	     nothing has changed and we risk infinite recursion.  */
! 	  if (2 < ((var0 != 0) + (var1 != 0)
! 		   + (con0 != 0) + (con1 != 0)
! 		   + (lit0 != 0) + (lit1 != 0)
! 		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
! 	    {
! 	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
! 	      if (code == MINUS_EXPR)
! 		code = PLUS_EXPR;
! 
! 	      var0 = associate_trees (var0, var1, code, type);
! 	      con0 = associate_trees (con0, con1, code, type);
! 	      lit0 = associate_trees (lit0, lit1, code, type);
! 	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
! 
! 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
! 		 greater than the positive part.  Otherwise, the multiplicative
! 		 folding code (i.e extract_muldiv) may be fooled in case
! 		 unsigned constants are subtracted, like in the following
! 		 example: ((X*2 + 4) - 8U)/2.  */
! 	      if (minus_lit0 && lit0)
! 		{
! 		  if (TREE_CODE (lit0) == INTEGER_CST
! 		      && TREE_CODE (minus_lit0) == INTEGER_CST
! 		      && tree_int_cst_lt (lit0, minus_lit0))
! 		    {
! 		      minus_lit0 = associate_trees (minus_lit0, lit0,
! 						    MINUS_EXPR, type);
! 		      lit0 = 0;
! 		    }
! 		  else
! 		    {
! 		      lit0 = associate_trees (lit0, minus_lit0,
! 					      MINUS_EXPR, type);
! 		      minus_lit0 = 0;
! 		    }
! 		}
! 	      if (minus_lit0)
! 		{
! 		  if (con0 == 0)
! 		    return convert (type, associate_trees (var0, minus_lit0,
! 							   MINUS_EXPR, type));
! 		  else
! 		    {
! 		      con0 = associate_trees (con0, minus_lit0,
! 					      MINUS_EXPR, type);
! 		      return convert (type, associate_trees (var0, con0,
! 							     PLUS_EXPR, type));
! 		    }
! 		}
! 
! 	      con0 = associate_trees (con0, lit0, code, type);
! 	      return convert (type, associate_trees (var0, con0, code, type));
! 	    }
! 	}
! 
!     binary:
!       if (wins)
! 	t1 = const_binop (code, arg0, arg1, 0);
!       if (t1 != NULL_TREE)
! 	{
! 	  /* The return value should always have
! 	     the same type as the original expression.  */
! 	  if (TREE_TYPE (t1) != TREE_TYPE (t))
! 	    t1 = convert (TREE_TYPE (t), t1);
! 
! 	  return t1;
! 	}
!       return t;
  
      case MINUS_EXPR:
        /* A - (-B) -> A + B */
--- 6229,6235 ----
  	    }
  	}
  
!       return tree_fold_goto_bit_rotate (code, type, arg0, arg1, wins, t, t1);
  
      case MINUS_EXPR:
        /* A - (-B) -> A + B */
***************
*** 6117,6123 ****
  	  && operand_equal_p (arg0, arg1, 0))
  	return convert (type, integer_zero_node);
  
!       goto associate;
  
      case MULT_EXPR:
        /* (-A) * (-B) -> A * B  */
--- 6320,6326 ----
  	  && operand_equal_p (arg0, arg1, 0))
  	return convert (type, integer_zero_node);
  
!       return tree_fold_goto_associate (code, type, arg0, arg1, wins, t, t1);
  
      case MULT_EXPR:
        /* (-A) * (-B) -> A * B  */
***************
*** 6381,6416 ****
  		}
  	    }
  	}
!       goto associate;
  
      case BIT_IOR_EXPR:
!     bit_ior:
!       if (integer_all_onesp (arg1))
! 	return omit_one_operand (type, arg1, arg0);
!       if (integer_zerop (arg1))
! 	return non_lvalue (convert (type, arg0));
!       t1 = distribute_bit_expr (code, type, arg0, arg1);
!       if (t1 != NULL_TREE)
! 	return t1;
! 
!       /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
! 
! 	 This results in more efficient code for machines without a NAND
! 	 instruction.  Combine will canonicalize to the first form
! 	 which will allow use of NAND instructions provided by the
! 	 backend if they exist.  */
!       if (TREE_CODE (arg0) == BIT_NOT_EXPR
! 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
! 	{
! 	  return fold (build1 (BIT_NOT_EXPR, type,
! 			       build (BIT_AND_EXPR, type,
! 				      TREE_OPERAND (arg0, 0),
! 				      TREE_OPERAND (arg1, 0))));
! 	}
! 
!       /* See if this can be simplified into a rotate first.  If that
! 	 is unsuccessful continue in the association code.  */
!       goto bit_rotate;
  
      case BIT_XOR_EXPR:
        if (integer_zerop (arg1))
--- 6584,6593 ----
  		}
  	    }
  	}
!       return tree_fold_goto_associate (code, type, arg0, arg1, wins, t, t1);
  
      case BIT_IOR_EXPR:
!       return tree_fold_goto_bit_ior (type, arg0, arg1, wins, t, t1);
  
      case BIT_XOR_EXPR:
        if (integer_zerop (arg1))
***************
*** 6429,6442 ****
  	  && integer_zerop (const_binop (BIT_AND_EXPR,
  					 TREE_OPERAND (arg0, 1),
  					 TREE_OPERAND (arg1, 1), 0)))
! 	{
! 	  code = BIT_IOR_EXPR;
! 	  goto bit_ior;
! 	}
  
        /* See if this can be simplified into a rotate first.  If that
  	 is unsuccessful continue in the association code.  */
!       goto bit_rotate;
  
      case BIT_AND_EXPR:
        if (integer_all_onesp (arg1))
--- 6606,6616 ----
  	  && integer_zerop (const_binop (BIT_AND_EXPR,
  					 TREE_OPERAND (arg0, 1),
  					 TREE_OPERAND (arg1, 1), 0)))
! 	return tree_fold_goto_bit_ior (type, arg0, arg1, wins, t, t1);
  
        /* See if this can be simplified into a rotate first.  If that
  	 is unsuccessful continue in the association code.  */
!       return tree_fold_goto_bit_rotate (code, type, arg0, arg1, wins, t, t1);
  
      case BIT_AND_EXPR:
        if (integer_all_onesp (arg1))
***************
*** 6474,6480 ****
  				      TREE_OPERAND (arg1, 0))));
  	}
  
!       goto associate;
  
      case RDIV_EXPR:
        /* Don't touch a floating-point divide by zero unless the mode
--- 6648,6654 ----
  				      TREE_OPERAND (arg1, 0))));
  	}
  
!       return tree_fold_goto_associate (code, type, arg0, arg1, wins, t, t1);
  
      case RDIV_EXPR:
        /* Don't touch a floating-point divide by zero unless the mode
***************
*** 6676,6682 ****
  		}
  	    }
  	}
!       goto binary;
  
      case TRUNC_DIV_EXPR:
      case ROUND_DIV_EXPR:
--- 6850,6856 ----
  		}
  	    }
  	}
!       return tree_fold_goto_binary (code, arg0, arg1, wins, t, t1);
  
      case TRUNC_DIV_EXPR:
      case ROUND_DIV_EXPR:
***************
*** 6703,6709 ****
  					 code, NULL_TREE)))
  	return convert (type, tem);
  
!       goto binary;
  
      case CEIL_MOD_EXPR:
      case FLOOR_MOD_EXPR:
--- 6877,6883 ----
  					 code, NULL_TREE)))
  	return convert (type, tem);
  
!       return tree_fold_goto_binary (code, arg0, arg1, wins, t, t1);
  
      case CEIL_MOD_EXPR:
      case FLOOR_MOD_EXPR:
***************
*** 6719,6731 ****
  					 code, NULL_TREE)))
  	return convert (type, tem);
  
!       goto binary;
  
      case LROTATE_EXPR:
      case RROTATE_EXPR:
        if (integer_all_onesp (arg0))
  	return omit_one_operand (type, arg0, arg1);
!       goto shift;
  
      case RSHIFT_EXPR:
        /* Optimize -1 >> x for arithmetic right shifts.  */
--- 6893,6905 ----
  					 code, NULL_TREE)))
  	return convert (type, tem);
  
!       return tree_fold_goto_binary (code, arg0, arg1, wins, t, t1);
  
      case LROTATE_EXPR:
      case RROTATE_EXPR:
        if (integer_all_onesp (arg0))
  	return omit_one_operand (type, arg0, arg1);
!       return tree_fold_goto_shift (code, type, arg0, arg1, wins, t, t1);
  
      case RSHIFT_EXPR:
        /* Optimize -1 >> x for arithmetic right shifts.  */
***************
*** 6734,6786 ****
        /* ... fall through ...  */
  
      case LSHIFT_EXPR:
!     shift:
!       if (integer_zerop (arg1))
! 	return non_lvalue (convert (type, arg0));
!       if (integer_zerop (arg0))
! 	return omit_one_operand (type, arg0, arg1);
! 
!       /* Since negative shift count is not well-defined,
! 	 don't try to compute it in the compiler.  */
!       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
! 	return t;
!       /* Rewrite an LROTATE_EXPR by a constant into an
! 	 RROTATE_EXPR by a new constant.  */
!       if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
! 	{
! 	  tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
! 	  tem = convert (TREE_TYPE (arg1), tem);
! 	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
! 	  return fold (build (RROTATE_EXPR, type, arg0, tem));
! 	}
! 
!       /* If we have a rotate of a bit operation with the rotate count and
! 	 the second operand of the bit operation both constant,
! 	 permute the two operations.  */
!       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
! 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
! 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
! 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
! 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
! 	return fold (build (TREE_CODE (arg0), type,
! 			    fold (build (code, type,
! 					 TREE_OPERAND (arg0, 0), arg1)),
! 			    fold (build (code, type,
! 					 TREE_OPERAND (arg0, 1), arg1))));
! 
!       /* Two consecutive rotates adding up to the width of the mode can
! 	 be ignored.  */
!       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
! 	  && TREE_CODE (arg0) == RROTATE_EXPR
! 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
! 	  && TREE_INT_CST_HIGH (arg1) == 0
! 	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
! 	  && ((TREE_INT_CST_LOW (arg1)
! 	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
! 	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
! 	return TREE_OPERAND (arg0, 0);
! 
!       goto binary;
  
      case MIN_EXPR:
        if (operand_equal_p (arg0, arg1, 0))
--- 6908,6914 ----
        /* ... fall through ...  */
  
      case LSHIFT_EXPR:
!       return tree_fold_goto_shift (code, type, arg0, arg1, wins, t, t1);
  
      case MIN_EXPR:
        if (operand_equal_p (arg0, arg1, 0))
***************
*** 6788,6794 ****
        if (INTEGRAL_TYPE_P (type)
  	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
  	return omit_one_operand (type, arg1, arg0);
!       goto associate;
  
      case MAX_EXPR:
        if (operand_equal_p (arg0, arg1, 0))
--- 6916,6922 ----
        if (INTEGRAL_TYPE_P (type)
  	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
  	return omit_one_operand (type, arg1, arg0);
!       return tree_fold_goto_associate (code, type, arg0, arg1, wins, t, t1);
  
      case MAX_EXPR:
        if (operand_equal_p (arg0, arg1, 0))
***************
*** 6797,6803 ****
  	  && TYPE_MAX_VALUE (type)
  	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
  	return omit_one_operand (type, arg1, arg0);
!       goto associate;
  
      case TRUTH_NOT_EXPR:
        /* Note that the operand of this must be an int
--- 6925,6931 ----
  	  && TYPE_MAX_VALUE (type)
  	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
  	return omit_one_operand (type, arg1, arg0);
!       return tree_fold_goto_associate (code, type, arg0, arg1, wins, t, t1);
  
      case TRUTH_NOT_EXPR:
        /* Note that the operand of this must be an int
***************
*** 6838,6907 ****
  	 case will be handled here.  */
        if (integer_zerop (arg0))
  	return omit_one_operand (type, arg0, arg1);
! 
!     truth_andor:
!       /* We only do these simplifications if we are optimizing.  */
!       if (!optimize)
! 	return t;
! 
!       /* Check for things like (A || B) && (A || C).  We can convert this
! 	 to A || (B && C).  Note that either operator can be any of the four
! 	 truth and/or operations and the transformation will still be
! 	 valid.   Also note that we only care about order for the
! 	 ANDIF and ORIF operators.  If B contains side effects, this
! 	 might change the truth-value of A.  */
!       if (TREE_CODE (arg0) == TREE_CODE (arg1)
! 	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
! 	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
! 	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
! 	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
! 	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
! 	{
! 	  tree a00 = TREE_OPERAND (arg0, 0);
! 	  tree a01 = TREE_OPERAND (arg0, 1);
! 	  tree a10 = TREE_OPERAND (arg1, 0);
! 	  tree a11 = TREE_OPERAND (arg1, 1);
! 	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
! 			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
! 			     && (code == TRUTH_AND_EXPR
! 				 || code == TRUTH_OR_EXPR));
! 
! 	  if (operand_equal_p (a00, a10, 0))
! 	    return fold (build (TREE_CODE (arg0), type, a00,
! 				fold (build (code, type, a01, a11))));
! 	  else if (commutative && operand_equal_p (a00, a11, 0))
! 	    return fold (build (TREE_CODE (arg0), type, a00,
! 				fold (build (code, type, a01, a10))));
! 	  else if (commutative && operand_equal_p (a01, a10, 0))
! 	    return fold (build (TREE_CODE (arg0), type, a01,
! 				fold (build (code, type, a00, a11))));
! 
! 	  /* This case if tricky because we must either have commutative
! 	     operators or else A10 must not have side-effects.  */
! 
! 	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
! 		   && operand_equal_p (a01, a11, 0))
! 	    return fold (build (TREE_CODE (arg0), type,
! 				fold (build (code, type, a00, a10)),
! 				a01));
! 	}
! 
!       /* See if we can build a range comparison.  */
!       if (0 != (tem = fold_range_test (t)))
! 	return tem;
! 
!       /* Check for the possibility of merging component references.  If our
! 	 lhs is another similar operation, try to merge its rhs with our
! 	 rhs.  Then try to merge our lhs and rhs.  */
!       if (TREE_CODE (arg0) == code
! 	  && 0 != (tem = fold_truthop (code, type,
! 				       TREE_OPERAND (arg0, 1), arg1)))
! 	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
! 
!       if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
! 	return tem;
! 
!       return t;
  
      case TRUTH_ORIF_EXPR:
        /* Note that the operands of this must be ints
--- 6966,6972 ----
  	 case will be handled here.  */
        if (integer_zerop (arg0))
  	return omit_one_operand (type, arg0, arg1);
!       return tree_fold_goto_truth_andor (code, type, arg0, arg1, t);
  
      case TRUTH_ORIF_EXPR:
        /* Note that the operands of this must be ints
***************
*** 6926,6932 ****
  	 TRUTH_OR_EXPR.  */
        if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
  	return omit_one_operand (type, arg0, arg1);
!       goto truth_andor;
  
      case TRUTH_XOR_EXPR:
        /* If either arg is constant zero, drop it.  */
--- 6991,6997 ----
  	 TRUTH_OR_EXPR.  */
        if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
  	return omit_one_operand (type, arg0, arg1);
!       return tree_fold_goto_truth_andor (code, type, arg0, arg1, t);
  
      case TRUTH_XOR_EXPR:
        /* If either arg is constant zero, drop it.  */


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]